id (int32, 0-252k) | repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class) | code (string, 51-19.8k chars) | code_tokens (sequence) | docstring (string, 3-17.3k chars) | docstring_tokens (sequence) | sha (string, 40 chars) | url (string, 87-242 chars) |
---|---|---|---|---|---|---|---|---|---|---|---|
247,600 | SFDO-Tooling/CumulusCI | cumulusci/core/keychain/BaseProjectKeychain.py | BaseProjectKeychain.set_service | def set_service(self, name, service_config, project=False):
""" Store a ServiceConfig in the keychain """
if not self.project_config.services or name not in self.project_config.services:
self._raise_service_not_valid(name)
self._validate_service(name, service_config)
self._set_service(name, service_config, project)
self._load_services() | python | def set_service(self, name, service_config, project=False):
if not self.project_config.services or name not in self.project_config.services:
self._raise_service_not_valid(name)
self._validate_service(name, service_config)
self._set_service(name, service_config, project)
self._load_services() | [
"def",
"set_service",
"(",
"self",
",",
"name",
",",
"service_config",
",",
"project",
"=",
"False",
")",
":",
"if",
"not",
"self",
".",
"project_config",
".",
"services",
"or",
"name",
"not",
"in",
"self",
".",
"project_config",
".",
"services",
":",
"self",
".",
"_raise_service_not_valid",
"(",
"name",
")",
"self",
".",
"_validate_service",
"(",
"name",
",",
"service_config",
")",
"self",
".",
"_set_service",
"(",
"name",
",",
"service_config",
",",
"project",
")",
"self",
".",
"_load_services",
"(",
")"
] | Store a ServiceConfig in the keychain | [
"Store",
"a",
"ServiceConfig",
"in",
"the",
"keychain"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/keychain/BaseProjectKeychain.py#L184-L190 |
247,601 | SFDO-Tooling/CumulusCI | cumulusci/core/keychain/BaseProjectKeychain.py | BaseProjectKeychain.get_service | def get_service(self, name):
""" Retrieve a stored ServiceConfig from the keychain or exception
:param name: the service name to retrieve
:type name: str
:rtype ServiceConfig
:return the configured Service
"""
self._convert_connected_app()
if not self.project_config.services or name not in self.project_config.services:
self._raise_service_not_valid(name)
if name not in self.services:
self._raise_service_not_configured(name)
return self._get_service(name) | python | def get_service(self, name):
self._convert_connected_app()
if not self.project_config.services or name not in self.project_config.services:
self._raise_service_not_valid(name)
if name not in self.services:
self._raise_service_not_configured(name)
return self._get_service(name) | [
"def",
"get_service",
"(",
"self",
",",
"name",
")",
":",
"self",
".",
"_convert_connected_app",
"(",
")",
"if",
"not",
"self",
".",
"project_config",
".",
"services",
"or",
"name",
"not",
"in",
"self",
".",
"project_config",
".",
"services",
":",
"self",
".",
"_raise_service_not_valid",
"(",
"name",
")",
"if",
"name",
"not",
"in",
"self",
".",
"services",
":",
"self",
".",
"_raise_service_not_configured",
"(",
"name",
")",
"return",
"self",
".",
"_get_service",
"(",
"name",
")"
] | Retrieve a stored ServiceConfig from the keychain or exception
:param name: the service name to retrieve
:type name: str
:rtype ServiceConfig
:return the configured Service | [
"Retrieve",
"a",
"stored",
"ServiceConfig",
"from",
"the",
"keychain",
"or",
"exception"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/keychain/BaseProjectKeychain.py#L195-L210 |
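The two keychain methods above distinguish a service that is not declared in the project configuration (not valid) from one that is declared but has no stored credentials (not configured). A minimal standalone sketch of that pattern — the class and exception names here are illustrative, not CumulusCI's:

```python
class ServiceNotValid(Exception):
    pass

class ServiceNotConfigured(Exception):
    pass

class Keychain:
    def __init__(self, declared_services):
        self.declared = set(declared_services)  # services named in project config
        self.services = {}                      # services with stored credentials

    def set_service(self, name, config):
        if name not in self.declared:
            raise ServiceNotValid(name)         # unknown to the project entirely
        self.services[name] = config

    def get_service(self, name):
        if name not in self.declared:
            raise ServiceNotValid(name)
        if name not in self.services:
            raise ServiceNotConfigured(name)    # declared, but never set up
        return self.services[name]

kc = Keychain(["github"])
kc.set_service("github", {"token": "abc123"})
print(kc.get_service("github"))  # {'token': 'abc123'}
```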
247,602 | SFDO-Tooling/CumulusCI | cumulusci/core/keychain/BaseProjectKeychain.py | BaseProjectKeychain.list_services | def list_services(self):
""" list the services configured in the keychain """
services = list(self.services.keys())
services.sort()
return services | python | def list_services(self):
services = list(self.services.keys())
services.sort()
return services | [
"def",
"list_services",
"(",
"self",
")",
":",
"services",
"=",
"list",
"(",
"self",
".",
"services",
".",
"keys",
"(",
")",
")",
"services",
".",
"sort",
"(",
")",
"return",
"services"
] | list the services configured in the keychain | [
"list",
"the",
"services",
"configured",
"in",
"the",
"keychain"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/keychain/BaseProjectKeychain.py#L240-L244 |
247,603 | SFDO-Tooling/CumulusCI | cumulusci/robotframework/utils.py | set_pdb_trace | def set_pdb_trace(pm=False):
"""Start the Python debugger when robotframework is running.
This makes sure that pdb can use stdin/stdout even though
robotframework has redirected I/O.
"""
import sys
import pdb
for attr in ("stdin", "stdout", "stderr"):
setattr(sys, attr, getattr(sys, "__%s__" % attr))
if pm:
# Post-mortem debugging of an exception
pdb.post_mortem()
else:
pdb.set_trace() | python | def set_pdb_trace(pm=False):
import sys
import pdb
for attr in ("stdin", "stdout", "stderr"):
setattr(sys, attr, getattr(sys, "__%s__" % attr))
if pm:
# Post-mortem debugging of an exception
pdb.post_mortem()
else:
pdb.set_trace() | [
"def",
"set_pdb_trace",
"(",
"pm",
"=",
"False",
")",
":",
"import",
"sys",
"import",
"pdb",
"for",
"attr",
"in",
"(",
"\"stdin\"",
",",
"\"stdout\"",
",",
"\"stderr\"",
")",
":",
"setattr",
"(",
"sys",
",",
"attr",
",",
"getattr",
"(",
"sys",
",",
"\"__%s__\"",
"%",
"attr",
")",
")",
"if",
"pm",
":",
"# Post-mortem debugging of an exception",
"pdb",
".",
"post_mortem",
"(",
")",
"else",
":",
"pdb",
".",
"set_trace",
"(",
")"
] | Start the Python debugger when robotframework is running.
This makes sure that pdb can use stdin/stdout even though
robotframework has redirected I/O. | [
"Start",
"the",
"Python",
"debugger",
"when",
"robotframework",
"is",
"running",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/robotframework/utils.py#L10-L25 |
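`set_pdb_trace` works by pointing `sys.stdin`/`stdout`/`stderr` back at the interpreter's untouched `sys.__stdin__`/`__stdout__`/`__stderr__` objects, which survive Robot Framework's I/O redirection. The same trick works under any framework that captures output; a sketch (the function name is illustrative):

```python
import pdb
import sys

def debug_here(post_mortem=False):
    # Frameworks that capture output rebind sys.stdout etc.; the original
    # streams remain available as sys.__stdout__ and friends.
    for name in ("stdin", "stdout", "stderr"):
        setattr(sys, name, getattr(sys, "__%s__" % name))
    if post_mortem:
        pdb.post_mortem()  # inspect the traceback of the active exception
    else:
        pdb.set_trace()    # break at the caller's frame
```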
247,604 | SFDO-Tooling/CumulusCI | cumulusci/robotframework/utils.py | selenium_retry | def selenium_retry(target=None, retry=True):
"""Decorator to turn on automatic retries of flaky selenium failures.
Decorate a robotframework library class to turn on retries for all
selenium calls from that library:
@selenium_retry
class MyLibrary(object):
# Decorate a method to turn it back off for that method
@selenium_retry(False)
def some_keyword(self):
self.selenium.click_button('foo')
Or turn it off by default but turn it on for some methods
(the class-level decorator is still required):
@selenium_retry(False)
class MyLibrary(object):
@selenium_retry(True)
def some_keyword(self):
self.selenium.click_button('foo')
"""
if isinstance(target, bool):
# Decorator was called with a single boolean argument
retry = target
target = None
def decorate(target):
if isinstance(target, type):
cls = target
# Metaclass time.
# We're going to generate a new subclass that:
# a) mixes in RetryingSeleniumLibraryMixin
# b) sets the initial value of `retry_selenium`
return type(
cls.__name__,
(cls, RetryingSeleniumLibraryMixin),
{"retry_selenium": retry, "__doc__": cls.__doc__},
)
func = target
@functools.wraps(func)
def run_with_retry(self, *args, **kwargs):
# Set the retry setting and run the original function.
old_retry = self.retry_selenium
self.retry = retry
try:
return func(self, *args, **kwargs)
finally:
# Restore the previous value
self.retry_selenium = old_retry
set_pdb_trace()
run_with_retry.is_selenium_retry_decorator = True
return run_with_retry
if target is None:
# Decorator is being used with arguments
return decorate
else:
# Decorator was used without arguments
return decorate(target) | python | def selenium_retry(target=None, retry=True):
if isinstance(target, bool):
# Decorator was called with a single boolean argument
retry = target
target = None
def decorate(target):
if isinstance(target, type):
cls = target
# Metaclass time.
# We're going to generate a new subclass that:
# a) mixes in RetryingSeleniumLibraryMixin
# b) sets the initial value of `retry_selenium`
return type(
cls.__name__,
(cls, RetryingSeleniumLibraryMixin),
{"retry_selenium": retry, "__doc__": cls.__doc__},
)
func = target
@functools.wraps(func)
def run_with_retry(self, *args, **kwargs):
# Set the retry setting and run the original function.
old_retry = self.retry_selenium
self.retry = retry
try:
return func(self, *args, **kwargs)
finally:
# Restore the previous value
self.retry_selenium = old_retry
set_pdb_trace()
run_with_retry.is_selenium_retry_decorator = True
return run_with_retry
if target is None:
# Decorator is being used with arguments
return decorate
else:
# Decorator was used without arguments
return decorate(target) | [
"def",
"selenium_retry",
"(",
"target",
"=",
"None",
",",
"retry",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"target",
",",
"bool",
")",
":",
"# Decorator was called with a single boolean argument",
"retry",
"=",
"target",
"target",
"=",
"None",
"def",
"decorate",
"(",
"target",
")",
":",
"if",
"isinstance",
"(",
"target",
",",
"type",
")",
":",
"cls",
"=",
"target",
"# Metaclass time.",
"# We're going to generate a new subclass that:",
"# a) mixes in RetryingSeleniumLibraryMixin",
"# b) sets the initial value of `retry_selenium`",
"return",
"type",
"(",
"cls",
".",
"__name__",
",",
"(",
"cls",
",",
"RetryingSeleniumLibraryMixin",
")",
",",
"{",
"\"retry_selenium\"",
":",
"retry",
",",
"\"__doc__\"",
":",
"cls",
".",
"__doc__",
"}",
",",
")",
"func",
"=",
"target",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"run_with_retry",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Set the retry setting and run the original function.",
"old_retry",
"=",
"self",
".",
"retry_selenium",
"self",
".",
"retry",
"=",
"retry",
"try",
":",
"return",
"func",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"finally",
":",
"# Restore the previous value",
"self",
".",
"retry_selenium",
"=",
"old_retry",
"set_pdb_trace",
"(",
")",
"run_with_retry",
".",
"is_selenium_retry_decorator",
"=",
"True",
"return",
"run_with_retry",
"if",
"target",
"is",
"None",
":",
"# Decorator is being used with arguments",
"return",
"decorate",
"else",
":",
"# Decorator was used without arguments",
"return",
"decorate",
"(",
"target",
")"
] | Decorator to turn on automatic retries of flaky selenium failures.
Decorate a robotframework library class to turn on retries for all
selenium calls from that library:
@selenium_retry
class MyLibrary(object):
# Decorate a method to turn it back off for that method
@selenium_retry(False)
def some_keyword(self):
self.selenium.click_button('foo')
Or turn it off by default but turn it on for some methods
(the class-level decorator is still required):
@selenium_retry(False)
class MyLibrary(object):
@selenium_retry(True)
def some_keyword(self):
self.selenium.click_button('foo') | [
"Decorator",
"to",
"turn",
"on",
"automatic",
"retries",
"of",
"flaky",
"selenium",
"failures",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/robotframework/utils.py#L137-L202 |
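`selenium_retry` supports three call shapes: bare (`@selenium_retry`), with a boolean (`@selenium_retry(False)`), and on a class, where it mixes retry behaviour in via a generated subclass. The bare-or-with-arguments dispatch is the reusable part; a self-contained sketch of just that dispatch, with illustrative names:

```python
import functools

def retryable(target=None, retry=True):
    if isinstance(target, bool):      # called as @retryable(False)
        retry = target
        target = None

    def decorate(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        wrapper.retry = retry         # record the setting for whoever runs it
        return wrapper

    # Bare use passes the function directly; parameterised use returns the decorator.
    return decorate if target is None else decorate(target)

@retryable
def flaky():
    return "ok"

@retryable(False)
def stable():
    return "ok"

print(flaky(), flaky.retry)    # ok True
print(stable(), stable.retry)  # ok False
```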
247,605 | SFDO-Tooling/CumulusCI | cumulusci/robotframework/utils.py | RetryingSeleniumLibraryMixin.selenium_execute_with_retry | def selenium_execute_with_retry(self, execute, command, params):
"""Run a single selenium command and retry once.
The retry happens for certain errors that are likely to be resolved
by retrying.
"""
try:
return execute(command, params)
except Exception as e:
if isinstance(e, ALWAYS_RETRY_EXCEPTIONS) or (
isinstance(e, WebDriverException)
and "Other element would receive the click" in str(e)
):
# Retry
self.builtin.log("Retrying {} command".format(command), level="WARN")
time.sleep(2)
return execute(command, params)
else:
raise | python | def selenium_execute_with_retry(self, execute, command, params):
try:
return execute(command, params)
except Exception as e:
if isinstance(e, ALWAYS_RETRY_EXCEPTIONS) or (
isinstance(e, WebDriverException)
and "Other element would receive the click" in str(e)
):
# Retry
self.builtin.log("Retrying {} command".format(command), level="WARN")
time.sleep(2)
return execute(command, params)
else:
raise | [
"def",
"selenium_execute_with_retry",
"(",
"self",
",",
"execute",
",",
"command",
",",
"params",
")",
":",
"try",
":",
"return",
"execute",
"(",
"command",
",",
"params",
")",
"except",
"Exception",
"as",
"e",
":",
"if",
"isinstance",
"(",
"e",
",",
"ALWAYS_RETRY_EXCEPTIONS",
")",
"or",
"(",
"isinstance",
"(",
"e",
",",
"WebDriverException",
")",
"and",
"\"Other element would receive the click\"",
"in",
"str",
"(",
"e",
")",
")",
":",
"# Retry",
"self",
".",
"builtin",
".",
"log",
"(",
"\"Retrying {} command\"",
".",
"format",
"(",
"command",
")",
",",
"level",
"=",
"\"WARN\"",
")",
"time",
".",
"sleep",
"(",
"2",
")",
"return",
"execute",
"(",
"command",
",",
"params",
")",
"else",
":",
"raise"
] | Run a single selenium command and retry once.
The retry happens for certain errors that are likely to be resolved
by retrying. | [
"Run",
"a",
"single",
"selenium",
"command",
"and",
"retry",
"once",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/robotframework/utils.py#L105-L123 |
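The retry wrapper above retries exactly once, and only for error types known to be transient. Reduced to its skeleton, with stand-in exception types in place of the selenium ones:

```python
import time

RETRYABLE = (TimeoutError, ConnectionError)  # stand-ins for ALWAYS_RETRY_EXCEPTIONS

def execute_with_retry(execute, command, params, delay=2):
    try:
        return execute(command, params)
    except RETRYABLE:
        time.sleep(delay)  # let the page settle before the second attempt
        return execute(command, params)

attempts = []

def shaky(command, params):
    attempts.append(command)
    if len(attempts) == 1:
        raise TimeoutError("transient")
    return "done"

print(execute_with_retry(shaky, "click", {}, delay=0))  # done, after one retry
```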
247,606 | SFDO-Tooling/CumulusCI | cumulusci/tasks/release_notes/provider.py | GithubChangeNotesProvider._get_last_tag | def _get_last_tag(self):
""" Gets the last release tag before self.current_tag """
current_version = LooseVersion(
self._get_version_from_tag(self.release_notes_generator.current_tag)
)
versions = []
for tag in self.repo.tags():
if not tag.name.startswith(self.github_info["prefix_prod"]):
continue
version = LooseVersion(self._get_version_from_tag(tag.name))
if version >= current_version:
continue
versions.append(version)
if versions:
versions.sort()
return "{}{}".format(self.github_info["prefix_prod"], versions[-1]) | python | def _get_last_tag(self):
current_version = LooseVersion(
self._get_version_from_tag(self.release_notes_generator.current_tag)
)
versions = []
for tag in self.repo.tags():
if not tag.name.startswith(self.github_info["prefix_prod"]):
continue
version = LooseVersion(self._get_version_from_tag(tag.name))
if version >= current_version:
continue
versions.append(version)
if versions:
versions.sort()
return "{}{}".format(self.github_info["prefix_prod"], versions[-1]) | [
"def",
"_get_last_tag",
"(",
"self",
")",
":",
"current_version",
"=",
"LooseVersion",
"(",
"self",
".",
"_get_version_from_tag",
"(",
"self",
".",
"release_notes_generator",
".",
"current_tag",
")",
")",
"versions",
"=",
"[",
"]",
"for",
"tag",
"in",
"self",
".",
"repo",
".",
"tags",
"(",
")",
":",
"if",
"not",
"tag",
".",
"name",
".",
"startswith",
"(",
"self",
".",
"github_info",
"[",
"\"prefix_prod\"",
"]",
")",
":",
"continue",
"version",
"=",
"LooseVersion",
"(",
"self",
".",
"_get_version_from_tag",
"(",
"tag",
".",
"name",
")",
")",
"if",
"version",
">=",
"current_version",
":",
"continue",
"versions",
".",
"append",
"(",
"version",
")",
"if",
"versions",
":",
"versions",
".",
"sort",
"(",
")",
"return",
"\"{}{}\"",
".",
"format",
"(",
"self",
".",
"github_info",
"[",
"\"prefix_prod\"",
"]",
",",
"versions",
"[",
"-",
"1",
"]",
")"
] | Gets the last release tag before self.current_tag | [
"Gets",
"the",
"last",
"release",
"tag",
"before",
"self",
".",
"current_tag"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/tasks/release_notes/provider.py#L134-L151 |
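`_get_last_tag` compares tags as `LooseVersion` objects so that `1.10` sorts after `1.9`. `distutils.version` is deprecated (and removed in Python 3.12), so the sketch below substitutes a plain integer-tuple key; it handles simple dotted versions but not the mixed alphanumerics `LooseVersion` accepts:

```python
def last_tag_before(current_tag, tags, prefix="release/"):
    def key(tag):
        # "release/1.10" -> (1, 10); tuple comparison sorts numerically
        return tuple(int(part) for part in tag[len(prefix):].split("."))

    older = [t for t in tags if t.startswith(prefix) and key(t) < key(current_tag)]
    return max(older, key=key) if older else None

tags = ["release/1.2", "release/1.9", "release/1.10", "beta/2.0"]
print(last_tag_before("release/1.10", tags))  # release/1.9
```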
247,607 | SFDO-Tooling/CumulusCI | cumulusci/tasks/release_notes/provider.py | GithubChangeNotesProvider._get_pull_requests | def _get_pull_requests(self):
""" Gets all pull requests from the repo since we can't do a filtered
date merged search """
for pull in self.repo.pull_requests(
state="closed", base=self.github_info["master_branch"], direction="asc"
):
if self._include_pull_request(pull):
yield pull | python | def _get_pull_requests(self):
for pull in self.repo.pull_requests(
state="closed", base=self.github_info["master_branch"], direction="asc"
):
if self._include_pull_request(pull):
yield pull | [
"def",
"_get_pull_requests",
"(",
"self",
")",
":",
"for",
"pull",
"in",
"self",
".",
"repo",
".",
"pull_requests",
"(",
"state",
"=",
"\"closed\"",
",",
"base",
"=",
"self",
".",
"github_info",
"[",
"\"master_branch\"",
"]",
",",
"direction",
"=",
"\"asc\"",
")",
":",
"if",
"self",
".",
"_include_pull_request",
"(",
"pull",
")",
":",
"yield",
"pull"
] | Gets all pull requests from the repo since we can't do a filtered
date merged search | [
"Gets",
"all",
"pull",
"requests",
"from",
"the",
"repo",
"since",
"we",
"can",
"t",
"do",
"a",
"filtered",
"date",
"merged",
"search"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/tasks/release_notes/provider.py#L153-L160 |
247,608 | SFDO-Tooling/CumulusCI | cumulusci/tasks/release_notes/provider.py | GithubChangeNotesProvider._include_pull_request | def _include_pull_request(self, pull_request):
""" Checks if the given pull_request was merged to the default branch
between self.start_date and self.end_date """
merged_date = pull_request.merged_at
if not merged_date:
return False
if self.last_tag:
last_tag_sha = self.last_tag_info["commit"].sha
if pull_request.merge_commit_sha == last_tag_sha:
# Github commit dates can be different from the merged_at date
return False
current_tag_sha = self.current_tag_info["commit"].sha
if pull_request.merge_commit_sha == current_tag_sha:
return True
# include PRs before current tag
if merged_date <= self.start_date:
if self.end_date:
# include PRs after last tag
if (
merged_date > self.end_date
and pull_request.merge_commit_sha != last_tag_sha
):
return True
else:
# no last tag, include all PRs before current tag
return True
return False | python | def _include_pull_request(self, pull_request):
merged_date = pull_request.merged_at
if not merged_date:
return False
if self.last_tag:
last_tag_sha = self.last_tag_info["commit"].sha
if pull_request.merge_commit_sha == last_tag_sha:
# Github commit dates can be different from the merged_at date
return False
current_tag_sha = self.current_tag_info["commit"].sha
if pull_request.merge_commit_sha == current_tag_sha:
return True
# include PRs before current tag
if merged_date <= self.start_date:
if self.end_date:
# include PRs after last tag
if (
merged_date > self.end_date
and pull_request.merge_commit_sha != last_tag_sha
):
return True
else:
# no last tag, include all PRs before current tag
return True
return False | [
"def",
"_include_pull_request",
"(",
"self",
",",
"pull_request",
")",
":",
"merged_date",
"=",
"pull_request",
".",
"merged_at",
"if",
"not",
"merged_date",
":",
"return",
"False",
"if",
"self",
".",
"last_tag",
":",
"last_tag_sha",
"=",
"self",
".",
"last_tag_info",
"[",
"\"commit\"",
"]",
".",
"sha",
"if",
"pull_request",
".",
"merge_commit_sha",
"==",
"last_tag_sha",
":",
"# Github commit dates can be different from the merged_at date",
"return",
"False",
"current_tag_sha",
"=",
"self",
".",
"current_tag_info",
"[",
"\"commit\"",
"]",
".",
"sha",
"if",
"pull_request",
".",
"merge_commit_sha",
"==",
"current_tag_sha",
":",
"return",
"True",
"# include PRs before current tag",
"if",
"merged_date",
"<=",
"self",
".",
"start_date",
":",
"if",
"self",
".",
"end_date",
":",
"# include PRs after last tag",
"if",
"(",
"merged_date",
">",
"self",
".",
"end_date",
"and",
"pull_request",
".",
"merge_commit_sha",
"!=",
"last_tag_sha",
")",
":",
"return",
"True",
"else",
":",
"# no last tag, include all PRs before current tag",
"return",
"True",
"return",
"False"
] | Checks if the given pull_request was merged to the default branch
between self.start_date and self.end_date | [
"Checks",
"if",
"the",
"given",
"pull_request",
"was",
"merged",
"to",
"the",
"default",
"branch",
"between",
"self",
".",
"start_date",
"and",
"self",
".",
"end_date"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/tasks/release_notes/provider.py#L162-L192 |
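The date logic in `_include_pull_request` reads backwards at first: `start_date` is the current tag's commit time and `end_date` the previous tag's, so the inclusion window is `end_date < merged_at <= start_date`. The core test, stripped of the merge-commit special cases — names are illustrative and integers stand in for datetimes:

```python
def in_release_window(merged_at, current_tag_date, last_tag_date):
    if merged_at is None:
        return False                  # never merged
    if merged_at > current_tag_date:
        return False                  # merged after this release was cut
    if last_tag_date is None:
        return True                   # first release: everything before it counts
    return merged_at > last_tag_date  # strictly after the previous release

print(in_release_window(5, current_tag_date=10, last_tag_date=3))  # True
print(in_release_window(2, current_tag_date=10, last_tag_date=3))  # False
```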
247,609 | SFDO-Tooling/CumulusCI | cumulusci/tasks/robotframework/robotframework.py | patch_statusreporter | def patch_statusreporter():
"""Monkey patch robotframework to do postmortem debugging
"""
from robot.running.statusreporter import StatusReporter
orig_exit = StatusReporter.__exit__
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_val and isinstance(exc_val, Exception):
set_pdb_trace(pm=True)
return orig_exit(self, exc_type, exc_val, exc_tb)
StatusReporter.__exit__ = __exit__ | python | def patch_statusreporter():
from robot.running.statusreporter import StatusReporter
orig_exit = StatusReporter.__exit__
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_val and isinstance(exc_val, Exception):
set_pdb_trace(pm=True)
return orig_exit(self, exc_type, exc_val, exc_tb)
StatusReporter.__exit__ = __exit__ | [
"def",
"patch_statusreporter",
"(",
")",
":",
"from",
"robot",
".",
"running",
".",
"statusreporter",
"import",
"StatusReporter",
"orig_exit",
"=",
"StatusReporter",
".",
"__exit__",
"def",
"__exit__",
"(",
"self",
",",
"exc_type",
",",
"exc_val",
",",
"exc_tb",
")",
":",
"if",
"exc_val",
"and",
"isinstance",
"(",
"exc_val",
",",
"Exception",
")",
":",
"set_pdb_trace",
"(",
"pm",
"=",
"True",
")",
"return",
"orig_exit",
"(",
"self",
",",
"exc_type",
",",
"exc_val",
",",
"exc_tb",
")",
"StatusReporter",
".",
"__exit__",
"=",
"__exit__"
] | Monkey patch robotframework to do postmortem debugging | [
"Monkey",
"patch",
"robotframework",
"to",
"do",
"postmortem",
"debugging"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/tasks/robotframework/robotframework.py#L92-L104 |
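`patch_statusreporter` swaps a bound `__exit__` on a third-party class, keeping a reference to the original so normal behaviour is preserved. The same monkey-patch shape on a toy context manager:

```python
class Reporter:
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return False  # let exceptions propagate

orig_exit = Reporter.__exit__

def patched_exit(self, exc_type, exc_val, exc_tb):
    if exc_val is not None and isinstance(exc_val, Exception):
        print("hook runs here")  # stand-in for set_pdb_trace(pm=True)
    return orig_exit(self, exc_type, exc_val, exc_tb)

Reporter.__exit__ = patched_exit

try:
    with Reporter():
        raise ValueError("boom")
except ValueError:
    pass  # hook fired, then the original __exit__ let the error propagate
```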
247,610 | SFDO-Tooling/CumulusCI | cumulusci/tasks/metadata/package.py | MetadataXmlElementParser.get_item_name | def get_item_name(self, item, parent):
""" Returns the value of the first name element found inside of element """
names = self.get_name_elements(item)
if not names:
raise MissingNameElementError
name = names[0].text
prefix = self.item_name_prefix(parent)
if prefix:
name = prefix + name
return name | python | def get_item_name(self, item, parent):
names = self.get_name_elements(item)
if not names:
raise MissingNameElementError
name = names[0].text
prefix = self.item_name_prefix(parent)
if prefix:
name = prefix + name
return name | [
"def",
"get_item_name",
"(",
"self",
",",
"item",
",",
"parent",
")",
":",
"names",
"=",
"self",
".",
"get_name_elements",
"(",
"item",
")",
"if",
"not",
"names",
":",
"raise",
"MissingNameElementError",
"name",
"=",
"names",
"[",
"0",
"]",
".",
"text",
"prefix",
"=",
"self",
".",
"item_name_prefix",
"(",
"parent",
")",
"if",
"prefix",
":",
"name",
"=",
"prefix",
"+",
"name",
"return",
"name"
] | Returns the value of the first name element found inside of element | [
"Returns",
"the",
"value",
"of",
"the",
"first",
"name",
"element",
"found",
"inside",
"of",
"element"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/tasks/metadata/package.py#L298-L309 |
247,611 | SFDO-Tooling/CumulusCI | cumulusci/core/flowrunner.py | StepSpec.for_display | def for_display(self):
""" Step details formatted for logging output. """
skip = ""
if self.skip:
skip = " [SKIP]"
result = "{step_num}: {path}{skip}".format(
step_num=self.step_num, path=self.path, skip=skip
)
description = self.task_config.get("description")
if description:
result += ": {}".format(description)
return result | python | def for_display(self):
skip = ""
if self.skip:
skip = " [SKIP]"
result = "{step_num}: {path}{skip}".format(
step_num=self.step_num, path=self.path, skip=skip
)
description = self.task_config.get("description")
if description:
result += ": {}".format(description)
return result | [
"def",
"for_display",
"(",
"self",
")",
":",
"skip",
"=",
"\"\"",
"if",
"self",
".",
"skip",
":",
"skip",
"=",
"\" [SKIP]\"",
"result",
"=",
"\"{step_num}: {path}{skip}\"",
".",
"format",
"(",
"step_num",
"=",
"self",
".",
"step_num",
",",
"path",
"=",
"self",
".",
"path",
",",
"skip",
"=",
"skip",
")",
"description",
"=",
"self",
".",
"task_config",
".",
"get",
"(",
"\"description\"",
")",
"if",
"description",
":",
"result",
"+=",
"\": {}\"",
".",
"format",
"(",
"description",
")",
"return",
"result"
] | Step details formatted for logging output. | [
"Step",
"details",
"formatted",
"for",
"logging",
"output",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/flowrunner.py#L123-L134 |
247,612 | SFDO-Tooling/CumulusCI | cumulusci/core/flowrunner.py | TaskRunner.run_step | def run_step(self):
"""
Run a step.
:return: StepResult
"""
# Resolve ^^task_name.return_value style option syntax
task_config = self.step.task_config.copy()
task_config["options"] = task_config["options"].copy()
self.flow.resolve_return_value_options(task_config["options"])
exc = None
try:
task = self.step.task_class(
self.project_config,
TaskConfig(task_config),
org_config=self.org_config,
name=self.step.task_name,
stepnum=self.step.step_num,
flow=self.flow,
)
self._log_options(task)
task()
except Exception as e:
self.flow.logger.exception(
"Exception in task {}".format(self.step.task_name)
)
exc = e
return StepResult(
self.step.step_num,
self.step.task_name,
self.step.path,
task.result,
task.return_values,
exc,
) | python | def run_step(self):
# Resolve ^^task_name.return_value style option syntax
task_config = self.step.task_config.copy()
task_config["options"] = task_config["options"].copy()
self.flow.resolve_return_value_options(task_config["options"])
exc = None
try:
task = self.step.task_class(
self.project_config,
TaskConfig(task_config),
org_config=self.org_config,
name=self.step.task_name,
stepnum=self.step.step_num,
flow=self.flow,
)
self._log_options(task)
task()
except Exception as e:
self.flow.logger.exception(
"Exception in task {}".format(self.step.task_name)
)
exc = e
return StepResult(
self.step.step_num,
self.step.task_name,
self.step.path,
task.result,
task.return_values,
exc,
) | [
"def",
"run_step",
"(",
"self",
")",
":",
"# Resolve ^^task_name.return_value style option syntax",
"task_config",
"=",
"self",
".",
"step",
".",
"task_config",
".",
"copy",
"(",
")",
"task_config",
"[",
"\"options\"",
"]",
"=",
"task_config",
"[",
"\"options\"",
"]",
".",
"copy",
"(",
")",
"self",
".",
"flow",
".",
"resolve_return_value_options",
"(",
"task_config",
"[",
"\"options\"",
"]",
")",
"exc",
"=",
"None",
"try",
":",
"task",
"=",
"self",
".",
"step",
".",
"task_class",
"(",
"self",
".",
"project_config",
",",
"TaskConfig",
"(",
"task_config",
")",
",",
"org_config",
"=",
"self",
".",
"org_config",
",",
"name",
"=",
"self",
".",
"step",
".",
"task_name",
",",
"stepnum",
"=",
"self",
".",
"step",
".",
"step_num",
",",
"flow",
"=",
"self",
".",
"flow",
",",
")",
"self",
".",
"_log_options",
"(",
"task",
")",
"task",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"flow",
".",
"logger",
".",
"exception",
"(",
"\"Exception in task {}\"",
".",
"format",
"(",
"self",
".",
"step",
".",
"task_name",
")",
")",
"exc",
"=",
"e",
"return",
"StepResult",
"(",
"self",
".",
"step",
".",
"step_num",
",",
"self",
".",
"step",
".",
"task_name",
",",
"self",
".",
"step",
".",
"path",
",",
"task",
".",
"result",
",",
"task",
".",
"return_values",
",",
"exc",
",",
")"
] | Run a step.
:return: StepResult | [
"Run",
"a",
"step",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/flowrunner.py#L187-L223 |
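`run_step` never lets a task exception escape: it logs it, captures it, and hands back a result object so the flow coordinator decides what happens next. The capture-and-return shape in isolation, with illustrative names:

```python
from collections import namedtuple

StepResult = namedtuple("StepResult", "step_num name result exception")

def run_step(step_num, name, func):
    result = exc = None
    try:
        result = func()
    except Exception as e:
        exc = e  # captured, not raised: the coordinator inspects it later
    return StepResult(step_num, name, result, exc)

print(run_step(1, "ok", lambda: 42))
print(run_step(2, "boom", lambda: 1 / 0))
```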
247,613 | SFDO-Tooling/CumulusCI | cumulusci/core/flowrunner.py | FlowCoordinator._init_steps | def _init_steps(self,):
"""
Given the flow config and everything else, create a list of steps to run, sorted by step number.
:return: List[StepSpec]
"""
self._check_old_yaml_format()
config_steps = self.flow_config.steps
self._check_infinite_flows(config_steps)
steps = []
for number, step_config in config_steps.items():
specs = self._visit_step(number, step_config)
steps.extend(specs)
return sorted(steps, key=attrgetter("step_num")) | python | def _init_steps(self,):
self._check_old_yaml_format()
config_steps = self.flow_config.steps
self._check_infinite_flows(config_steps)
steps = []
for number, step_config in config_steps.items():
specs = self._visit_step(number, step_config)
steps.extend(specs)
return sorted(steps, key=attrgetter("step_num")) | [
"def",
"_init_steps",
"(",
"self",
",",
")",
":",
"self",
".",
"_check_old_yaml_format",
"(",
")",
"config_steps",
"=",
"self",
".",
"flow_config",
".",
"steps",
"self",
".",
"_check_infinite_flows",
"(",
"config_steps",
")",
"steps",
"=",
"[",
"]",
"for",
"number",
",",
"step_config",
"in",
"config_steps",
".",
"items",
"(",
")",
":",
"specs",
"=",
"self",
".",
"_visit_step",
"(",
"number",
",",
"step_config",
")",
"steps",
".",
"extend",
"(",
"specs",
")",
"return",
"sorted",
"(",
"steps",
",",
"key",
"=",
"attrgetter",
"(",
"\"step_num\"",
")",
")"
] | Given the flow config and everything else, create a list of steps to run, sorted by step number.
:return: List[StepSpec] | [
"Given",
"the",
"flow",
"config",
"and",
"everything",
"else",
"create",
"a",
"list",
"of",
"steps",
"to",
"run",
"sorted",
"by",
"step",
"number",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/flowrunner.py#L346-L362 |
247,614 | SFDO-Tooling/CumulusCI | cumulusci/core/flowrunner.py | FlowCoordinator._check_infinite_flows | def _check_infinite_flows(self, steps, flows=None):
"""
Recursively loop through the flow_config and check if there are any cycles.
:param steps: Set of step definitions to loop through
:param flows: Flows already visited.
:return: None
"""
if flows is None:
flows = []
for step in steps.values():
if "flow" in step:
flow = step["flow"]
if flow == "None":
continue
if flow in flows:
raise FlowInfiniteLoopError(
"Infinite flows detected with flow {}".format(flow)
)
flows.append(flow)
flow_config = self.project_config.get_flow(flow)
self._check_infinite_flows(flow_config.steps, flows) | python | def _check_infinite_flows(self, steps, flows=None):
if flows is None:
flows = []
for step in steps.values():
if "flow" in step:
flow = step["flow"]
if flow == "None":
continue
if flow in flows:
raise FlowInfiniteLoopError(
"Infinite flows detected with flow {}".format(flow)
)
flows.append(flow)
flow_config = self.project_config.get_flow(flow)
self._check_infinite_flows(flow_config.steps, flows) | [
"def",
"_check_infinite_flows",
"(",
"self",
",",
"steps",
",",
"flows",
"=",
"None",
")",
":",
"if",
"flows",
"is",
"None",
":",
"flows",
"=",
"[",
"]",
"for",
"step",
"in",
"steps",
".",
"values",
"(",
")",
":",
"if",
"\"flow\"",
"in",
"step",
":",
"flow",
"=",
"step",
"[",
"\"flow\"",
"]",
"if",
"flow",
"==",
"\"None\"",
":",
"continue",
"if",
"flow",
"in",
"flows",
":",
"raise",
"FlowInfiniteLoopError",
"(",
"\"Infinite flows detected with flow {}\"",
".",
"format",
"(",
"flow",
")",
")",
"flows",
".",
"append",
"(",
"flow",
")",
"flow_config",
"=",
"self",
".",
"project_config",
".",
"get_flow",
"(",
"flow",
")",
"self",
".",
"_check_infinite_flows",
"(",
"flow_config",
".",
"steps",
",",
"flows",
")"
] | Recursively loop through the flow_config and check if there are any cycles.
:param steps: Set of step definitions to loop through
:param flows: Flows already visited.
:return: None | [
"Recursively",
"loop",
"through",
"the",
"flow_config",
"and",
"check",
"if",
"there",
"are",
"any",
"cycles",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/flowrunner.py#L505-L526 |
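`_check_infinite_flows` detects cycles by threading one shared `flows` list through the recursion; because the list is mutated in place, a flow seen anywhere in the traversal trips the check. Self-contained, with flow definitions as plain dicts:

```python
class FlowInfiniteLoopError(Exception):
    pass

def check_infinite_flows(steps, flow_defs, visited=None):
    if visited is None:
        visited = []  # shared across the whole recursive traversal
    for step in steps.values():
        name = step.get("flow")
        if not name or name == "None":
            continue
        if name in visited:
            raise FlowInfiniteLoopError(
                "Infinite flows detected with flow {}".format(name)
            )
        visited.append(name)
        check_infinite_flows(flow_defs[name], flow_defs, visited)

flow_defs = {
    "ci": {1: {"flow": "deploy"}},
    "deploy": {1: {"flow": "ci"}},  # cycle: ci -> deploy -> ci
}
try:
    check_infinite_flows(flow_defs["ci"], flow_defs)
except FlowInfiniteLoopError as e:
    print(e)
```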
247,615 | SFDO-Tooling/CumulusCI | cumulusci/core/flowrunner.py | FlowCoordinator._init_org | def _init_org(self):
""" Test and refresh credentials to the org specified. """
self.logger.info(
"Verifying and refreshing credentials for the specified org: {}.".format(
self.org_config.name
)
)
orig_config = self.org_config.config.copy()
# attempt to refresh the token, this can throw...
self.org_config.refresh_oauth_token(self.project_config.keychain)
if self.org_config.config != orig_config:
self.logger.info("Org info has changed, updating org in keychain")
self.project_config.keychain.set_org(self.org_config) | python | def _init_org(self):
self.logger.info(
"Verifying and refreshing credentials for the specified org: {}.".format(
self.org_config.name
)
)
orig_config = self.org_config.config.copy()
# attempt to refresh the token, this can throw...
self.org_config.refresh_oauth_token(self.project_config.keychain)
if self.org_config.config != orig_config:
self.logger.info("Org info has changed, updating org in keychain")
self.project_config.keychain.set_org(self.org_config) | [
"def",
"_init_org",
"(",
"self",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Verifying and refreshing credentials for the specified org: {}.\"",
".",
"format",
"(",
"self",
".",
"org_config",
".",
"name",
")",
")",
"orig_config",
"=",
"self",
".",
"org_config",
".",
"config",
".",
"copy",
"(",
")",
"# attempt to refresh the token, this can throw...",
"self",
".",
"org_config",
".",
"refresh_oauth_token",
"(",
"self",
".",
"project_config",
".",
"keychain",
")",
"if",
"self",
".",
"org_config",
".",
"config",
"!=",
"orig_config",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Org info has changed, updating org in keychain\"",
")",
"self",
".",
"project_config",
".",
"keychain",
".",
"set_org",
"(",
"self",
".",
"org_config",
")"
] | Test and refresh credentials to the org specified. | [
"Test",
"and",
"refresh",
"credentials",
"to",
"the",
"org",
"specified",
"."
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/flowrunner.py#L528-L542 |
247,616 | SFDO-Tooling/CumulusCI | cumulusci/core/flowrunner.py | FlowCoordinator.resolve_return_value_options | def resolve_return_value_options(self, options):
"""Handle dynamic option value lookups in the format ^^task_name.attr"""
for key, value in options.items():
if isinstance(value, str) and value.startswith(RETURN_VALUE_OPTION_PREFIX):
path, name = value[len(RETURN_VALUE_OPTION_PREFIX) :].rsplit(".", 1)
result = self._find_result_by_path(path)
options[key] = result.return_values.get(name) | python | def resolve_return_value_options(self, options):
for key, value in options.items():
if isinstance(value, str) and value.startswith(RETURN_VALUE_OPTION_PREFIX):
path, name = value[len(RETURN_VALUE_OPTION_PREFIX) :].rsplit(".", 1)
result = self._find_result_by_path(path)
options[key] = result.return_values.get(name) | [
"def",
"resolve_return_value_options",
"(",
"self",
",",
"options",
")",
":",
"for",
"key",
",",
"value",
"in",
"options",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"str",
")",
"and",
"value",
".",
"startswith",
"(",
"RETURN_VALUE_OPTION_PREFIX",
")",
":",
"path",
",",
"name",
"=",
"value",
"[",
"len",
"(",
"RETURN_VALUE_OPTION_PREFIX",
")",
":",
"]",
".",
"rsplit",
"(",
"\".\"",
",",
"1",
")",
"result",
"=",
"self",
".",
"_find_result_by_path",
"(",
"path",
")",
"options",
"[",
"key",
"]",
"=",
"result",
".",
"return_values",
".",
"get",
"(",
"name",
")"
] | Handle dynamic option value lookups in the format ^^task_name.attr | [
"Handle",
"dynamic",
"option",
"value",
"lookups",
"in",
"the",
"format",
"^^task_name",
".",
"attr"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/flowrunner.py#L544-L550 |
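The `^^task_name.attr` syntax lets one step's options reference an earlier step's return values at run time. A sketch with results stored as plain dicts (the real lookup walks `StepResult` objects by path):

```python
PREFIX = "^^"

def resolve_options(options, return_values_by_task):
    for key, value in options.items():
        if isinstance(value, str) and value.startswith(PREFIX):
            task_path, name = value[len(PREFIX):].rsplit(".", 1)
            options[key] = return_values_by_task[task_path].get(name)

options = {"version": "^^build_package.version", "verbose": True}
resolve_options(options, {"build_package": {"version": "1.2.3"}})
print(options)  # {'version': '1.2.3', 'verbose': True}
```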
247,617 | SFDO-Tooling/CumulusCI | cumulusci/cli/logger.py | init_logger | def init_logger(log_requests=False):
""" Initialize the logger """
logger = logging.getLogger(__name__.split(".")[0])
for handler in logger.handlers: # pragma: nocover
logger.removeHandler(handler)
formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
if log_requests:
requests.packages.urllib3.add_stderr_logger() | python | def init_logger(log_requests=False):
logger = logging.getLogger(__name__.split(".")[0])
for handler in logger.handlers: # pragma: nocover
logger.removeHandler(handler)
formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
if log_requests:
requests.packages.urllib3.add_stderr_logger() | [
"def",
"init_logger",
"(",
"log_requests",
"=",
"False",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
".",
"split",
"(",
"\".\"",
")",
"[",
"0",
"]",
")",
"for",
"handler",
"in",
"logger",
".",
"handlers",
":",
"# pragma: nocover",
"logger",
".",
"removeHandler",
"(",
"handler",
")",
"formatter",
"=",
"coloredlogs",
".",
"ColoredFormatter",
"(",
"fmt",
"=",
"\"%(asctime)s: %(message)s\"",
")",
"handler",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"handler",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"handler",
".",
"setFormatter",
"(",
"formatter",
")",
"logger",
".",
"addHandler",
"(",
"handler",
")",
"logger",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"logger",
".",
"propagate",
"=",
"False",
"if",
"log_requests",
":",
"requests",
".",
"packages",
".",
"urllib3",
".",
"add_stderr_logger",
"(",
")"
] | Initialize the logger | [
"Initialize",
"the",
"logger"
] | e19047921ca771a297e045f22f0bb201651bb6f7 | https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/cli/logger.py#L10-L26 |
247,618 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py | register_new_node | def register_new_node(suffix_node_id=None):
"""Factory method, registers new node.
"""
node_id = uuid4()
event = Node.Created(originator_id=node_id, suffix_node_id=suffix_node_id)
entity = Node.mutate(event=event)
publish(event)
return entity | python | def register_new_node(suffix_node_id=None):
node_id = uuid4()
event = Node.Created(originator_id=node_id, suffix_node_id=suffix_node_id)
entity = Node.mutate(event=event)
publish(event)
return entity | [
"def",
"register_new_node",
"(",
"suffix_node_id",
"=",
"None",
")",
":",
"node_id",
"=",
"uuid4",
"(",
")",
"event",
"=",
"Node",
".",
"Created",
"(",
"originator_id",
"=",
"node_id",
",",
"suffix_node_id",
"=",
"suffix_node_id",
")",
"entity",
"=",
"Node",
".",
"mutate",
"(",
"event",
"=",
"event",
")",
"publish",
"(",
"event",
")",
"return",
"entity"
] | Factory method, registers new node. | [
"Factory",
"method",
"registers",
"new",
"node",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py#L318-L325 |
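`register_new_node` follows the standard event-sourcing factory shape: build a `Created` event first, derive the entity's state entirely from that event, then publish the event. A stripped-down version with a trivial publish mechanism — the event and entity classes here are illustrative stand-ins:

```python
import uuid

subscribers = []

def publish(event):
    for handler in subscribers:
        handler(event)

class Node:
    class Created:
        def __init__(self, originator_id, suffix_node_id=None):
            self.originator_id = originator_id
            self.suffix_node_id = suffix_node_id

    def __init__(self, node_id, suffix_node_id=None):
        self.id = node_id
        self.suffix_node_id = suffix_node_id

    @classmethod
    def mutate(cls, event):
        # The entity's state comes entirely from the event.
        return cls(event.originator_id, event.suffix_node_id)

def register_new_node(suffix_node_id=None):
    event = Node.Created(originator_id=uuid.uuid4(), suffix_node_id=suffix_node_id)
    entity = Node.mutate(event)
    publish(event)  # observers (e.g. an event store) see it after construction
    return entity

subscribers.append(lambda e: print("stored", e.originator_id))
node = register_new_node()
```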
247,619 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py | register_new_edge | def register_new_edge(edge_id, first_char_index, last_char_index, source_node_id, dest_node_id):
"""Factory method, registers new edge.
"""
event = Edge.Created(
originator_id=edge_id,
first_char_index=first_char_index,
last_char_index=last_char_index,
source_node_id=source_node_id,
dest_node_id=dest_node_id,
)
entity = Edge.mutate(event=event)
publish(event)
return entity | python | def register_new_edge(edge_id, first_char_index, last_char_index, source_node_id, dest_node_id):
event = Edge.Created(
originator_id=edge_id,
first_char_index=first_char_index,
last_char_index=last_char_index,
source_node_id=source_node_id,
dest_node_id=dest_node_id,
)
entity = Edge.mutate(event=event)
publish(event)
return entity | [
"def",
"register_new_edge",
"(",
"edge_id",
",",
"first_char_index",
",",
"last_char_index",
",",
"source_node_id",
",",
"dest_node_id",
")",
":",
"event",
"=",
"Edge",
".",
"Created",
"(",
"originator_id",
"=",
"edge_id",
",",
"first_char_index",
"=",
"first_char_index",
",",
"last_char_index",
"=",
"last_char_index",
",",
"source_node_id",
"=",
"source_node_id",
",",
"dest_node_id",
"=",
"dest_node_id",
",",
")",
"entity",
"=",
"Edge",
".",
"mutate",
"(",
"event",
"=",
"event",
")",
"publish",
"(",
"event",
")",
"return",
"entity"
] | Factory method, registers new edge. | [
"Factory",
"method",
"registers",
"new",
"edge",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py#L334-L346 |
247,620 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py | register_new_suffix_tree | def register_new_suffix_tree(case_insensitive=False):
"""Factory method, returns new suffix tree object.
"""
assert isinstance(case_insensitive, bool)
root_node = register_new_node()
suffix_tree_id = uuid4()
event = SuffixTree.Created(
originator_id=suffix_tree_id,
root_node_id=root_node.id,
case_insensitive=case_insensitive,
)
entity = SuffixTree.mutate(event=event)
assert isinstance(entity, SuffixTree)
entity.nodes[root_node.id] = root_node
publish(event)
return entity | python | def register_new_suffix_tree(case_insensitive=False):
assert isinstance(case_insensitive, bool)
root_node = register_new_node()
suffix_tree_id = uuid4()
event = SuffixTree.Created(
originator_id=suffix_tree_id,
root_node_id=root_node.id,
case_insensitive=case_insensitive,
)
entity = SuffixTree.mutate(event=event)
assert isinstance(entity, SuffixTree)
entity.nodes[root_node.id] = root_node
publish(event)
return entity | [
"def",
"register_new_suffix_tree",
"(",
"case_insensitive",
"=",
"False",
")",
":",
"assert",
"isinstance",
"(",
"case_insensitive",
",",
"bool",
")",
"root_node",
"=",
"register_new_node",
"(",
")",
"suffix_tree_id",
"=",
"uuid4",
"(",
")",
"event",
"=",
"SuffixTree",
".",
"Created",
"(",
"originator_id",
"=",
"suffix_tree_id",
",",
"root_node_id",
"=",
"root_node",
".",
"id",
",",
"case_insensitive",
"=",
"case_insensitive",
",",
")",
"entity",
"=",
"SuffixTree",
".",
"mutate",
"(",
"event",
"=",
"event",
")",
"assert",
"isinstance",
"(",
"entity",
",",
"SuffixTree",
")",
"entity",
".",
"nodes",
"[",
"root_node",
".",
"id",
"]",
"=",
"root_node",
"publish",
"(",
"event",
")",
"return",
"entity"
] | Factory method, returns new suffix tree object. | [
"Factory",
"method",
"returns",
"new",
"suffix",
"tree",
"object",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py#L349-L369 |
247,621 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py | find_substring | def find_substring(substring, suffix_tree, edge_repo):
"""Returns the index if substring in tree, otherwise -1.
"""
assert isinstance(substring, str)
assert isinstance(suffix_tree, SuffixTree)
assert isinstance(edge_repo, EventSourcedRepository)
if not substring:
return -1
if suffix_tree.case_insensitive:
substring = substring.lower()
curr_node_id = suffix_tree.root_node_id
i = 0
while i < len(substring):
edge_id = make_edge_id(curr_node_id, substring[i])
try:
edge = edge_repo[edge_id]
except RepositoryKeyError:
return -1
ln = min(edge.length + 1, len(substring) - i)
if substring[i:i + ln] != suffix_tree.string[edge.first_char_index:edge.first_char_index + ln]:
return -1
i += edge.length + 1
curr_node_id = edge.dest_node_id
return edge.first_char_index - len(substring) + ln | python | def find_substring(substring, suffix_tree, edge_repo):
assert isinstance(substring, str)
assert isinstance(suffix_tree, SuffixTree)
assert isinstance(edge_repo, EventSourcedRepository)
if not substring:
return -1
if suffix_tree.case_insensitive:
substring = substring.lower()
curr_node_id = suffix_tree.root_node_id
i = 0
while i < len(substring):
edge_id = make_edge_id(curr_node_id, substring[i])
try:
edge = edge_repo[edge_id]
except RepositoryKeyError:
return -1
ln = min(edge.length + 1, len(substring) - i)
if substring[i:i + ln] != suffix_tree.string[edge.first_char_index:edge.first_char_index + ln]:
return -1
i += edge.length + 1
curr_node_id = edge.dest_node_id
return edge.first_char_index - len(substring) + ln | [
"def",
"find_substring",
"(",
"substring",
",",
"suffix_tree",
",",
"edge_repo",
")",
":",
"assert",
"isinstance",
"(",
"substring",
",",
"str",
")",
"assert",
"isinstance",
"(",
"suffix_tree",
",",
"SuffixTree",
")",
"assert",
"isinstance",
"(",
"edge_repo",
",",
"EventSourcedRepository",
")",
"if",
"not",
"substring",
":",
"return",
"-",
"1",
"if",
"suffix_tree",
".",
"case_insensitive",
":",
"substring",
"=",
"substring",
".",
"lower",
"(",
")",
"curr_node_id",
"=",
"suffix_tree",
".",
"root_node_id",
"i",
"=",
"0",
"while",
"i",
"<",
"len",
"(",
"substring",
")",
":",
"edge_id",
"=",
"make_edge_id",
"(",
"curr_node_id",
",",
"substring",
"[",
"i",
"]",
")",
"try",
":",
"edge",
"=",
"edge_repo",
"[",
"edge_id",
"]",
"except",
"RepositoryKeyError",
":",
"return",
"-",
"1",
"ln",
"=",
"min",
"(",
"edge",
".",
"length",
"+",
"1",
",",
"len",
"(",
"substring",
")",
"-",
"i",
")",
"if",
"substring",
"[",
"i",
":",
"i",
"+",
"ln",
"]",
"!=",
"suffix_tree",
".",
"string",
"[",
"edge",
".",
"first_char_index",
":",
"edge",
".",
"first_char_index",
"+",
"ln",
"]",
":",
"return",
"-",
"1",
"i",
"+=",
"edge",
".",
"length",
"+",
"1",
"curr_node_id",
"=",
"edge",
".",
"dest_node_id",
"return",
"edge",
".",
"first_char_index",
"-",
"len",
"(",
"substring",
")",
"+",
"ln"
] | Returns the index if substring in tree, otherwise -1. | [
"Returns",
"the",
"index",
"if",
"substring",
"in",
"tree",
"otherwise",
"-",
"1",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py#L374-L397 |
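`find_substring` walks the tree one edge at a time; because edges are keyed by `(source_node_id, first_character)` via `make_edge_id`, each hop is a single dictionary lookup rather than a scan of a node's children. The walk, reduced to plain dicts:

```python
# Edge key: (source node, first character of the edge label).
edges = {
    (0, "b"): {"label": "ban", "dest": 1},
    (1, "a"): {"label": "ana", "dest": 2},
}

def contains(pattern):
    node, i = 0, 0
    while i < len(pattern):
        edge = edges.get((node, pattern[i]))
        if edge is None:
            return False
        label = edge["label"]
        # Match as much of the pattern as this edge covers (possibly a prefix).
        if not label.startswith(pattern[i:i + len(label)]):
            return False
        i += len(label)
        node = edge["dest"]
    return True

print(contains("ban"), contains("banana"), contains("band"))  # True True False
```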
247,622 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py | SuffixTree._add_prefix | def _add_prefix(self, last_char_index):
"""The core construction method.
"""
last_parent_node_id = None
while True:
parent_node_id = self.active.source_node_id
if self.active.explicit():
edge_id = make_edge_id(self.active.source_node_id, self.string[last_char_index])
if edge_id in self.edges:
# prefix is already in tree
break
else:
edge_id = make_edge_id(self.active.source_node_id, self.string[self.active.first_char_index])
e = self.edges[edge_id]
if self.string[e.first_char_index + self.active.length + 1] == self.string[last_char_index]:
# prefix is already in tree
break
parent_node_id = self._split_edge(e, self.active)
node = register_new_node()
self.nodes[node.id] = node
edge_id = make_edge_id(parent_node_id, self.string[last_char_index])
e = register_new_edge(
edge_id=edge_id,
first_char_index=last_char_index,
last_char_index=self.N,
source_node_id=parent_node_id,
dest_node_id=node.id,
)
self._insert_edge(e)
if last_parent_node_id is not None:
self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
last_parent_node_id = parent_node_id
if self.active.source_node_id == self.root_node_id:
self.active.first_char_index += 1
else:
self.active.source_node_id = self.nodes[self.active.source_node_id].suffix_node_id
self._canonize_suffix(self.active)
if last_parent_node_id is not None:
self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
self.active.last_char_index += 1
self._canonize_suffix(self.active) | python | def _add_prefix(self, last_char_index):
last_parent_node_id = None
while True:
parent_node_id = self.active.source_node_id
if self.active.explicit():
edge_id = make_edge_id(self.active.source_node_id, self.string[last_char_index])
if edge_id in self.edges:
# prefix is already in tree
break
else:
edge_id = make_edge_id(self.active.source_node_id, self.string[self.active.first_char_index])
e = self.edges[edge_id]
if self.string[e.first_char_index + self.active.length + 1] == self.string[last_char_index]:
# prefix is already in tree
break
parent_node_id = self._split_edge(e, self.active)
node = register_new_node()
self.nodes[node.id] = node
edge_id = make_edge_id(parent_node_id, self.string[last_char_index])
e = register_new_edge(
edge_id=edge_id,
first_char_index=last_char_index,
last_char_index=self.N,
source_node_id=parent_node_id,
dest_node_id=node.id,
)
self._insert_edge(e)
if last_parent_node_id is not None:
self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
last_parent_node_id = parent_node_id
if self.active.source_node_id == self.root_node_id:
self.active.first_char_index += 1
else:
self.active.source_node_id = self.nodes[self.active.source_node_id].suffix_node_id
self._canonize_suffix(self.active)
if last_parent_node_id is not None:
self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
self.active.last_char_index += 1
self._canonize_suffix(self.active) | [
"def",
"_add_prefix",
"(",
"self",
",",
"last_char_index",
")",
":",
"last_parent_node_id",
"=",
"None",
"while",
"True",
":",
"parent_node_id",
"=",
"self",
".",
"active",
".",
"source_node_id",
"if",
"self",
".",
"active",
".",
"explicit",
"(",
")",
":",
"edge_id",
"=",
"make_edge_id",
"(",
"self",
".",
"active",
".",
"source_node_id",
",",
"self",
".",
"string",
"[",
"last_char_index",
"]",
")",
"if",
"edge_id",
"in",
"self",
".",
"edges",
":",
"# prefix is already in tree",
"break",
"else",
":",
"edge_id",
"=",
"make_edge_id",
"(",
"self",
".",
"active",
".",
"source_node_id",
",",
"self",
".",
"string",
"[",
"self",
".",
"active",
".",
"first_char_index",
"]",
")",
"e",
"=",
"self",
".",
"edges",
"[",
"edge_id",
"]",
"if",
"self",
".",
"string",
"[",
"e",
".",
"first_char_index",
"+",
"self",
".",
"active",
".",
"length",
"+",
"1",
"]",
"==",
"self",
".",
"string",
"[",
"last_char_index",
"]",
":",
"# prefix is already in tree",
"break",
"parent_node_id",
"=",
"self",
".",
"_split_edge",
"(",
"e",
",",
"self",
".",
"active",
")",
"node",
"=",
"register_new_node",
"(",
")",
"self",
".",
"nodes",
"[",
"node",
".",
"id",
"]",
"=",
"node",
"edge_id",
"=",
"make_edge_id",
"(",
"parent_node_id",
",",
"self",
".",
"string",
"[",
"last_char_index",
"]",
")",
"e",
"=",
"register_new_edge",
"(",
"edge_id",
"=",
"edge_id",
",",
"first_char_index",
"=",
"last_char_index",
",",
"last_char_index",
"=",
"self",
".",
"N",
",",
"source_node_id",
"=",
"parent_node_id",
",",
"dest_node_id",
"=",
"node",
".",
"id",
",",
")",
"self",
".",
"_insert_edge",
"(",
"e",
")",
"if",
"last_parent_node_id",
"is",
"not",
"None",
":",
"self",
".",
"nodes",
"[",
"last_parent_node_id",
"]",
".",
"suffix_node_id",
"=",
"parent_node_id",
"last_parent_node_id",
"=",
"parent_node_id",
"if",
"self",
".",
"active",
".",
"source_node_id",
"==",
"self",
".",
"root_node_id",
":",
"self",
".",
"active",
".",
"first_char_index",
"+=",
"1",
"else",
":",
"self",
".",
"active",
".",
"source_node_id",
"=",
"self",
".",
"nodes",
"[",
"self",
".",
"active",
".",
"source_node_id",
"]",
".",
"suffix_node_id",
"self",
".",
"_canonize_suffix",
"(",
"self",
".",
"active",
")",
"if",
"last_parent_node_id",
"is",
"not",
"None",
":",
"self",
".",
"nodes",
"[",
"last_parent_node_id",
"]",
".",
"suffix_node_id",
"=",
"parent_node_id",
"self",
".",
"active",
".",
"last_char_index",
"+=",
"1",
"self",
".",
"_canonize_suffix",
"(",
"self",
".",
"active",
")"
] | The core construction method. | [
"The",
"core",
"construction",
"method",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py#L86-L129 |
247,623 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py | SuffixTree._canonize_suffix | def _canonize_suffix(self, suffix):
"""This canonizes the suffix, walking along its suffix string until it
is explicit or there are no more matched nodes.
"""
if not suffix.explicit():
edge_id = make_edge_id(suffix.source_node_id, self.string[suffix.first_char_index])
e = self.edges[edge_id]
if e.length <= suffix.length:
suffix.first_char_index += e.length + 1
suffix.source_node_id = e.dest_node_id
self._canonize_suffix(suffix) | python | def _canonize_suffix(self, suffix):
if not suffix.explicit():
edge_id = make_edge_id(suffix.source_node_id, self.string[suffix.first_char_index])
e = self.edges[edge_id]
if e.length <= suffix.length:
suffix.first_char_index += e.length + 1
suffix.source_node_id = e.dest_node_id
self._canonize_suffix(suffix) | [
"def",
"_canonize_suffix",
"(",
"self",
",",
"suffix",
")",
":",
"if",
"not",
"suffix",
".",
"explicit",
"(",
")",
":",
"edge_id",
"=",
"make_edge_id",
"(",
"suffix",
".",
"source_node_id",
",",
"self",
".",
"string",
"[",
"suffix",
".",
"first_char_index",
"]",
")",
"e",
"=",
"self",
".",
"edges",
"[",
"edge_id",
"]",
"if",
"e",
".",
"length",
"<=",
"suffix",
".",
"length",
":",
"suffix",
".",
"first_char_index",
"+=",
"e",
".",
"length",
"+",
"1",
"suffix",
".",
"source_node_id",
"=",
"e",
".",
"dest_node_id",
"self",
".",
"_canonize_suffix",
"(",
"suffix",
")"
] | This canonizes the suffix, walking along its suffix string until it
is explicit or there are no more matched nodes. | [
"This",
"canonizes",
"the",
"suffix",
"walking",
"along",
"its",
"suffix",
"string",
"until",
"it",
"is",
"explicit",
"or",
"there",
"are",
"no",
"more",
"matched",
"nodes",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py#L170-L180 |
247,624 | johnbywater/eventsourcing | eventsourcing/infrastructure/snapshotting.py | entity_from_snapshot | def entity_from_snapshot(snapshot):
"""
Reconstructs domain entity from given snapshot.
"""
assert isinstance(snapshot, AbstractSnapshop), type(snapshot)
if snapshot.state is not None:
entity_class = resolve_topic(snapshot.topic)
return reconstruct_object(entity_class, snapshot.state) | python | def entity_from_snapshot(snapshot):
assert isinstance(snapshot, AbstractSnapshop), type(snapshot)
if snapshot.state is not None:
entity_class = resolve_topic(snapshot.topic)
return reconstruct_object(entity_class, snapshot.state) | [
"def",
"entity_from_snapshot",
"(",
"snapshot",
")",
":",
"assert",
"isinstance",
"(",
"snapshot",
",",
"AbstractSnapshop",
")",
",",
"type",
"(",
"snapshot",
")",
"if",
"snapshot",
".",
"state",
"is",
"not",
"None",
":",
"entity_class",
"=",
"resolve_topic",
"(",
"snapshot",
".",
"topic",
")",
"return",
"reconstruct_object",
"(",
"entity_class",
",",
"snapshot",
".",
"state",
")"
] | Reconstructs domain entity from given snapshot. | [
"Reconstructs",
"domain",
"entity",
"from",
"given",
"snapshot",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/snapshotting.py#L69-L76 |
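`entity_from_snapshot` resolves the class from the stored topic string and rebuilds the instance from its saved `__dict__`, bypassing `__init__`. The reconstruction half, self-contained with a toy entity:

```python
from copy import deepcopy

class Account:
    def __init__(self, balance):
        self.balance = balance

def reconstruct_object(cls, state):
    # Allocate without running __init__, then restore the saved attributes.
    obj = object.__new__(cls)
    obj.__dict__.update(deepcopy(state))
    return obj

snapshot_state = {"balance": 100}
account = reconstruct_object(Account, snapshot_state)
print(account.balance)  # 100
```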
247,625 | johnbywater/eventsourcing | eventsourcing/infrastructure/snapshotting.py | EventSourcedSnapshotStrategy.get_snapshot | def get_snapshot(self, entity_id, lt=None, lte=None):
"""
Gets the last snapshot for entity, optionally until a particular version number.
:rtype: Snapshot
"""
snapshots = self.snapshot_store.get_domain_events(entity_id, lt=lt, lte=lte, limit=1, is_ascending=False)
if len(snapshots) == 1:
return snapshots[0] | python | def get_snapshot(self, entity_id, lt=None, lte=None):
snapshots = self.snapshot_store.get_domain_events(entity_id, lt=lt, lte=lte, limit=1, is_ascending=False)
if len(snapshots) == 1:
return snapshots[0] | [
"def",
"get_snapshot",
"(",
"self",
",",
"entity_id",
",",
"lt",
"=",
"None",
",",
"lte",
"=",
"None",
")",
":",
"snapshots",
"=",
"self",
".",
"snapshot_store",
".",
"get_domain_events",
"(",
"entity_id",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
",",
"limit",
"=",
"1",
",",
"is_ascending",
"=",
"False",
")",
"if",
"len",
"(",
"snapshots",
")",
"==",
"1",
":",
"return",
"snapshots",
"[",
"0",
"]"
] | Gets the last snapshot for entity, optionally until a particular version number.
:rtype: Snapshot | [
"Gets",
"the",
"last",
"snapshot",
"for",
"entity",
"optionally",
"until",
"a",
"particular",
"version",
"number",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/snapshotting.py#L36-L44 |
247,626 | johnbywater/eventsourcing | eventsourcing/infrastructure/snapshotting.py | EventSourcedSnapshotStrategy.take_snapshot | def take_snapshot(self, entity_id, entity, last_event_version):
"""
Creates a Snapshot from the given state, and appends it
to the snapshot store.
:rtype: Snapshot
"""
# Create the snapshot.
snapshot = Snapshot(
originator_id=entity_id,
originator_version=last_event_version,
topic=get_topic(entity.__class__),
state=None if entity is None else deepcopy(entity.__dict__)
)
self.snapshot_store.store(snapshot)
# Return the snapshot.
return snapshot | python | def take_snapshot(self, entity_id, entity, last_event_version):
# Create the snapshot.
snapshot = Snapshot(
originator_id=entity_id,
originator_version=last_event_version,
topic=get_topic(entity.__class__),
state=None if entity is None else deepcopy(entity.__dict__)
)
self.snapshot_store.store(snapshot)
# Return the snapshot.
return snapshot | [
"def",
"take_snapshot",
"(",
"self",
",",
"entity_id",
",",
"entity",
",",
"last_event_version",
")",
":",
"# Create the snapshot.",
"snapshot",
"=",
"Snapshot",
"(",
"originator_id",
"=",
"entity_id",
",",
"originator_version",
"=",
"last_event_version",
",",
"topic",
"=",
"get_topic",
"(",
"entity",
".",
"__class__",
")",
",",
"state",
"=",
"None",
"if",
"entity",
"is",
"None",
"else",
"deepcopy",
"(",
"entity",
".",
"__dict__",
")",
")",
"self",
".",
"snapshot_store",
".",
"store",
"(",
"snapshot",
")",
"# Return the snapshot.",
"return",
"snapshot"
] | Creates a Snapshot from the given state, and appends it
to the snapshot store.
:rtype: Snapshot | [
"Creates",
"a",
"Snapshot",
"from",
"the",
"given",
"state",
"and",
"appends",
"it",
"to",
"the",
"snapshot",
"store",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/snapshotting.py#L47-L66 |
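A minimal in-memory rendering of the strategy contract shown in the two records above: `take_snapshot` appends a snapshot, and `get_snapshot` returns the newest one at or before a position. The `Snapshot` class and dict-backed store here are illustrative stand-ins, not the library's implementations:

```python
class Snapshot(object):
    def __init__(self, originator_id, originator_version, state):
        self.originator_id = originator_id
        self.originator_version = originator_version
        self.state = state

class InMemorySnapshotStrategy(object):
    def __init__(self):
        self._snapshots = {}  # originator_id -> snapshots, ascending by version

    def take_snapshot(self, entity_id, state, version):
        snapshot = Snapshot(entity_id, version, state)
        self._snapshots.setdefault(entity_id, []).append(snapshot)
        return snapshot

    def get_snapshot(self, entity_id, lt=None, lte=None):
        for s in reversed(self._snapshots.get(entity_id, [])):  # newest first
            if lt is not None and not (s.originator_version < lt):
                continue
            if lte is not None and not (s.originator_version <= lte):
                continue
            return s

strategy = InMemorySnapshotStrategy()
strategy.take_snapshot("e1", {"x": 1}, version=1)
strategy.take_snapshot("e1", {"x": 5}, version=5)
print(strategy.get_snapshot("e1").state)         # {'x': 5}
print(strategy.get_snapshot("e1", lte=4).state)  # {'x': 1}
```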
247,627 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventsourcedrepository.py | EventSourcedRepository.get_entity | def get_entity(self, entity_id, at=None):
"""
Returns entity with given ID, optionally until position.
"""
# Get a snapshot (None if none exist).
if self._snapshot_strategy is not None:
snapshot = self._snapshot_strategy.get_snapshot(entity_id, lte=at)
else:
snapshot = None
# Decide the initial state of the entity, and the
# version of the last item applied to the entity.
if snapshot is None:
initial_state = None
gt = None
else:
initial_state = entity_from_snapshot(snapshot)
gt = snapshot.originator_version
# Obtain and return current state.
return self.get_and_project_events(entity_id, gt=gt, lte=at, initial_state=initial_state) | python | def get_entity(self, entity_id, at=None):
# Get a snapshot (None if none exist).
if self._snapshot_strategy is not None:
snapshot = self._snapshot_strategy.get_snapshot(entity_id, lte=at)
else:
snapshot = None
# Decide the initial state of the entity, and the
# version of the last item applied to the entity.
if snapshot is None:
initial_state = None
gt = None
else:
initial_state = entity_from_snapshot(snapshot)
gt = snapshot.originator_version
# Obtain and return current state.
return self.get_and_project_events(entity_id, gt=gt, lte=at, initial_state=initial_state) | [
"def",
"get_entity",
"(",
"self",
",",
"entity_id",
",",
"at",
"=",
"None",
")",
":",
"# Get a snapshot (None if none exist).",
"if",
"self",
".",
"_snapshot_strategy",
"is",
"not",
"None",
":",
"snapshot",
"=",
"self",
".",
"_snapshot_strategy",
".",
"get_snapshot",
"(",
"entity_id",
",",
"lte",
"=",
"at",
")",
"else",
":",
"snapshot",
"=",
"None",
"# Decide the initial state of the entity, and the",
"# version of the last item applied to the entity.",
"if",
"snapshot",
"is",
"None",
":",
"initial_state",
"=",
"None",
"gt",
"=",
"None",
"else",
":",
"initial_state",
"=",
"entity_from_snapshot",
"(",
"snapshot",
")",
"gt",
"=",
"snapshot",
".",
"originator_version",
"# Obtain and return current state.",
"return",
"self",
".",
"get_and_project_events",
"(",
"entity_id",
",",
"gt",
"=",
"gt",
",",
"lte",
"=",
"at",
",",
"initial_state",
"=",
"initial_state",
")"
] | Returns entity with given ID, optionally until position. | [
"Returns",
"entity",
"with",
"given",
"ID",
"optionally",
"until",
"position",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventsourcedrepository.py#L37-L58 |
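The snapshot-then-replay reconstruction that `get_entity` describes can be sketched without the library: start from the snapshot's state, if any, and apply only events whose versions exceed the snapshot's version. The dict-based state and the `apply` projection below are hypothetical:

```python
def apply(state, event):
    # Hypothetical projection: each event sets one key on a dict state.
    state = dict(state or {})
    key, value = event["set"]
    state[key] = value
    return state

def reconstruct(snapshot, stored_events):
    if snapshot is None:
        state, gt = None, -1
    else:
        state, gt = snapshot["state"], snapshot["version"]
    for version, event in stored_events:
        if version > gt:  # fast-forward past the snapshot only
            state = apply(state, event)
    return state

events = [(0, {"set": ("x", 1)}), (1, {"set": ("x", 2)}), (2, {"set": ("y", 3)})]
snapshot = {"version": 1, "state": {"x": 2}}
print(reconstruct(snapshot, events))  # {'x': 2, 'y': 3}
print(reconstruct(None, events))      # {'x': 2, 'y': 3}
```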
247,628 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventsourcedrepository.py | EventSourcedRepository.get_and_project_events | def get_and_project_events(self, entity_id, gt=None, gte=None, lt=None, lte=None, limit=None, initial_state=None,
query_descending=False):
"""
Reconstitutes requested domain entity from domain events found in event store.
"""
# Decide if query is in ascending order.
# - A "speed up" for when events are stored in descending order (e.g.
# in Cassandra) and it is faster to get them in that order.
# - This isn't useful when 'until' or 'after' or 'limit' are set,
# because the inclusiveness or exclusiveness of until and after
# and the end of the stream that is truncated by limit both depend on
# the direction of the query. Also paging backwards isn't useful, because
# all the events are needed eventually, so it would probably slow things
# down. Paging is intended to support replaying longer event streams, and
# only makes sense to work in ascending order.
if gt is None and gte is None and lt is None and lte is None and self.__page_size__ is None:
is_ascending = False
else:
is_ascending = not query_descending
# Get entity's domain events from the event store.
domain_events = self.event_store.get_domain_events(
originator_id=entity_id,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
is_ascending=is_ascending,
page_size=self.__page_size__
)
# The events will be replayed in ascending order.
if not is_ascending:
domain_events = list(reversed(list(domain_events)))
# Project the domain events onto the initial state.
return self.project_events(initial_state, domain_events) | python | def get_and_project_events(self, entity_id, gt=None, gte=None, lt=None, lte=None, limit=None, initial_state=None,
query_descending=False):
# Decide if query is in ascending order.
# - A "speed up" for when events are stored in descending order (e.g.
# in Cassandra) and it is faster to get them in that order.
# - This isn't useful when 'until' or 'after' or 'limit' are set,
# because the inclusiveness or exclusiveness of until and after
# and the end of the stream that is truncated by limit both depend on
# the direction of the query. Also paging backwards isn't useful, because
# all the events are needed eventually, so it would probably slow things
# down. Paging is intended to support replaying longer event streams, and
# only makes sense to work in ascending order.
if gt is None and gte is None and lt is None and lte is None and self.__page_size__ is None:
is_ascending = False
else:
is_ascending = not query_descending
# Get entity's domain events from the event store.
domain_events = self.event_store.get_domain_events(
originator_id=entity_id,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
is_ascending=is_ascending,
page_size=self.__page_size__
)
# The events will be replayed in ascending order.
if not is_ascending:
domain_events = list(reversed(list(domain_events)))
# Project the domain events onto the initial state.
return self.project_events(initial_state, domain_events) | [
"def",
"get_and_project_events",
"(",
"self",
",",
"entity_id",
",",
"gt",
"=",
"None",
",",
"gte",
"=",
"None",
",",
"lt",
"=",
"None",
",",
"lte",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"initial_state",
"=",
"None",
",",
"query_descending",
"=",
"False",
")",
":",
"# Decide if query is in ascending order.",
"# - A \"speed up\" for when events are stored in descending order (e.g.",
"# in Cassandra) and it is faster to get them in that order.",
"# - This isn't useful when 'until' or 'after' or 'limit' are set,",
"# because the inclusiveness or exclusiveness of until and after",
"# and the end of the stream that is truncated by limit both depend on",
"# the direction of the query. Also paging backwards isn't useful, because",
"# all the events are needed eventually, so it would probably slow things",
"# down. Paging is intended to support replaying longer event streams, and",
"# only makes sense to work in ascending order.",
"if",
"gt",
"is",
"None",
"and",
"gte",
"is",
"None",
"and",
"lt",
"is",
"None",
"and",
"lte",
"is",
"None",
"and",
"self",
".",
"__page_size__",
"is",
"None",
":",
"is_ascending",
"=",
"False",
"else",
":",
"is_ascending",
"=",
"not",
"query_descending",
"# Get entity's domain events from the event store.",
"domain_events",
"=",
"self",
".",
"event_store",
".",
"get_domain_events",
"(",
"originator_id",
"=",
"entity_id",
",",
"gt",
"=",
"gt",
",",
"gte",
"=",
"gte",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
",",
"limit",
"=",
"limit",
",",
"is_ascending",
"=",
"is_ascending",
",",
"page_size",
"=",
"self",
".",
"__page_size__",
")",
"# The events will be replayed in ascending order.",
"if",
"not",
"is_ascending",
":",
"domain_events",
"=",
"list",
"(",
"reversed",
"(",
"list",
"(",
"domain_events",
")",
")",
")",
"# Project the domain events onto the initial state.",
"return",
"self",
".",
"project_events",
"(",
"initial_state",
",",
"domain_events",
")"
] | Reconstitutes requested domain entity from domain events found in event store. | [
"Reconstitutes",
"requested",
"domain",
"entity",
"from",
"domain",
"events",
"found",
"in",
"event",
"store",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventsourcedrepository.py#L60-L97 |
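A compressed restatement of the ordering decision above, assuming nothing beyond plain Python: with no bounds and no paging, fetch in whichever order is fastest and reverse afterwards, because replay always proceeds in ascending order.

```python
def decide_ascending(gt, gte, lt, lte, page_size, query_descending):
    # Mirror of the branch above: no bounds and no paging -> descending read.
    if gt is None and gte is None and lt is None and lte is None and page_size is None:
        return False
    return not query_descending

events_descending = [3, 2, 1]  # as a store might return them
is_ascending = decide_ascending(None, None, None, None, None, False)
events = events_descending if is_ascending else list(reversed(events_descending))
print(events)  # [1, 2, 3] -- replay happens in ascending order either way
```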
247,629 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventsourcedrepository.py | EventSourcedRepository.take_snapshot | def take_snapshot(self, entity_id, lt=None, lte=None):
"""
Takes a snapshot of the entity as it existed after the most recent
event, optionally less than, or less than or equal to, a particular position.
"""
snapshot = None
if self._snapshot_strategy:
# Get the latest event (optionally until a particular position).
latest_event = self.event_store.get_most_recent_event(entity_id, lt=lt, lte=lte)
# If there is something to snapshot, then look for a snapshot
# taken before or at the entity version of the latest event. Please
# note, the snapshot might have a smaller version number than
# the latest event if events occurred since the latest snapshot was taken.
if latest_event is not None:
latest_snapshot = self._snapshot_strategy.get_snapshot(
entity_id, lt=lt, lte=lte
)
latest_version = latest_event.originator_version
if latest_snapshot and latest_snapshot.originator_version == latest_version:
# If an up-to-date snapshot exists, there's nothing to do.
snapshot = latest_snapshot
else:
# Otherwise recover entity state from latest snapshot.
if latest_snapshot:
initial_state = entity_from_snapshot(latest_snapshot)
gt = latest_snapshot.originator_version
else:
initial_state = None
gt = None
# Fast-forward entity state to latest version.
entity = self.get_and_project_events(
entity_id=entity_id,
gt=gt,
lte=latest_version,
initial_state=initial_state,
)
# Take snapshot from entity.
snapshot = self._snapshot_strategy.take_snapshot(entity_id, entity, latest_version)
return snapshot | python | def take_snapshot(self, entity_id, lt=None, lte=None):
snapshot = None
if self._snapshot_strategy:
# Get the latest event (optionally until a particular position).
latest_event = self.event_store.get_most_recent_event(entity_id, lt=lt, lte=lte)
# If there is something to snapshot, then look for a snapshot
# taken before or at the entity version of the latest event. Please
# note, the snapshot might have a smaller version number than
# the latest event if events occurred since the latest snapshot was taken.
if latest_event is not None:
latest_snapshot = self._snapshot_strategy.get_snapshot(
entity_id, lt=lt, lte=lte
)
latest_version = latest_event.originator_version
if latest_snapshot and latest_snapshot.originator_version == latest_version:
# If an up-to-date snapshot exists, there's nothing to do.
snapshot = latest_snapshot
else:
# Otherwise recover entity state from latest snapshot.
if latest_snapshot:
initial_state = entity_from_snapshot(latest_snapshot)
gt = latest_snapshot.originator_version
else:
initial_state = None
gt = None
# Fast-forward entity state to latest version.
entity = self.get_and_project_events(
entity_id=entity_id,
gt=gt,
lte=latest_version,
initial_state=initial_state,
)
# Take snapshot from entity.
snapshot = self._snapshot_strategy.take_snapshot(entity_id, entity, latest_version)
return snapshot | [
"def",
"take_snapshot",
"(",
"self",
",",
"entity_id",
",",
"lt",
"=",
"None",
",",
"lte",
"=",
"None",
")",
":",
"snapshot",
"=",
"None",
"if",
"self",
".",
"_snapshot_strategy",
":",
"# Get the latest event (optionally until a particular position).",
"latest_event",
"=",
"self",
".",
"event_store",
".",
"get_most_recent_event",
"(",
"entity_id",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
")",
"# If there is something to snapshot, then look for a snapshot",
"# taken before or at the entity version of the latest event. Please",
"# note, the snapshot might have a smaller version number than",
"# the latest event if events occurred since the latest snapshot was taken.",
"if",
"latest_event",
"is",
"not",
"None",
":",
"latest_snapshot",
"=",
"self",
".",
"_snapshot_strategy",
".",
"get_snapshot",
"(",
"entity_id",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
")",
"latest_version",
"=",
"latest_event",
".",
"originator_version",
"if",
"latest_snapshot",
"and",
"latest_snapshot",
".",
"originator_version",
"==",
"latest_version",
":",
"# If up-to-date snapshot exists, there's nothing to do.",
"snapshot",
"=",
"latest_snapshot",
"else",
":",
"# Otherwise recover entity state from latest snapshot.",
"if",
"latest_snapshot",
":",
"initial_state",
"=",
"entity_from_snapshot",
"(",
"latest_snapshot",
")",
"gt",
"=",
"latest_snapshot",
".",
"originator_version",
"else",
":",
"initial_state",
"=",
"None",
"gt",
"=",
"None",
"# Fast-forward entity state to latest version.",
"entity",
"=",
"self",
".",
"get_and_project_events",
"(",
"entity_id",
"=",
"entity_id",
",",
"gt",
"=",
"gt",
",",
"lte",
"=",
"latest_version",
",",
"initial_state",
"=",
"initial_state",
",",
")",
"# Take snapshot from entity.",
"snapshot",
"=",
"self",
".",
"_snapshot_strategy",
".",
"take_snapshot",
"(",
"entity_id",
",",
"entity",
",",
"latest_version",
")",
"return",
"snapshot"
] | Takes a snapshot of the entity as it existed after the most recent
event, optionally less than, or less than or equal to, a particular position. | [
"Takes",
"a",
"snapshot",
"of",
"the",
"entity",
"as",
"it",
"existed",
"after",
"the",
"most",
"recent",
"event",
"optionally",
"less",
"than",
"or",
"less",
"than",
"or",
"equal",
"to",
"a",
"particular",
"position",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventsourcedrepository.py#L100-L143 |
247,630 | johnbywater/eventsourcing | eventsourcing/example/application.py | ExampleApplication.create_new_example | def create_new_example(self, foo='', a='', b=''):
"""Entity object factory."""
return create_new_example(foo=foo, a=a, b=b) | python | def create_new_example(self, foo='', a='', b=''):
return create_new_example(foo=foo, a=a, b=b) | [
"def",
"create_new_example",
"(",
"self",
",",
"foo",
"=",
"''",
",",
"a",
"=",
"''",
",",
"b",
"=",
"''",
")",
":",
"return",
"create_new_example",
"(",
"foo",
"=",
"foo",
",",
"a",
"=",
"a",
",",
"b",
"=",
"b",
")"
] | Entity object factory. | [
"Entity",
"object",
"factory",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/example/application.py#L170-L172 |
247,631 | johnbywater/eventsourcing | eventsourcing/utils/times.py | timestamp_long_from_uuid | def timestamp_long_from_uuid(uuid_arg):
"""
Returns an integer value representing a unix timestamp in tenths of microseconds.
:param uuid_arg:
:return: Unix timestamp integer in tenths of microseconds.
:rtype: int
"""
if isinstance(uuid_arg, str):
uuid_arg = UUID(uuid_arg)
assert isinstance(uuid_arg, UUID), uuid_arg
uuid_time = uuid_arg.time
return uuid_time - 0x01B21DD213814000 | python | def timestamp_long_from_uuid(uuid_arg):
if isinstance(uuid_arg, str):
uuid_arg = UUID(uuid_arg)
assert isinstance(uuid_arg, UUID), uuid_arg
uuid_time = uuid_arg.time
return uuid_time - 0x01B21DD213814000 | [
"def",
"timestamp_long_from_uuid",
"(",
"uuid_arg",
")",
":",
"if",
"isinstance",
"(",
"uuid_arg",
",",
"str",
")",
":",
"uuid_arg",
"=",
"UUID",
"(",
"uuid_arg",
")",
"assert",
"isinstance",
"(",
"uuid_arg",
",",
"UUID",
")",
",",
"uuid_arg",
"uuid_time",
"=",
"uuid_arg",
".",
"time",
"return",
"uuid_time",
"-",
"0x01B21DD213814000"
] | Returns an integer value representing a unix timestamp in tenths of microseconds.
:param uuid_arg:
:return: Unix timestamp integer in tenths of microseconds.
:rtype: int | [
"Returns",
"an",
"integer",
"value",
"representing",
"a",
"unix",
"timestamp",
"in",
"tenths",
"of",
"microseconds",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/utils/times.py#L20-L32 |
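For reference, 0x01B21DD213814000 is the count of 100-nanosecond intervals between the UUID (Gregorian) epoch, 1582-10-15, and the Unix epoch, 1970-01-01, which is why the subtraction yields tenths of microseconds since the Unix epoch. A quick stdlib-only check:

```python
import time
from uuid import uuid1

GREGORIAN_TO_UNIX = 0x01B21DD213814000  # 100ns intervals: 1582-10-15 -> 1970-01-01

u = uuid1()
tenths_of_us = u.time - GREGORIAN_TO_UNIX  # 100ns intervals since the Unix epoch
print(tenths_of_us / 1e7)  # seconds since the Unix epoch
print(time.time())         # should agree to within clock resolution
```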
247,632 | johnbywater/eventsourcing | eventsourcing/domain/model/decorators.py | subscribe_to | def subscribe_to(*event_classes):
"""
Decorator for making a custom event handler function subscribe to a certain class of event.
The decorated function will be called once for each matching event that is published, and will
be given one argument, the event, when it is called. If events are published in lists, for
example the AggregateRoot publishes a list of pending events when its __save__() method is called,
then the decorated function will be called once for each event that is an instance of the given event_class.
Please note, this decorator isn't suitable for use with object class methods. In Python 3 the decorator
receives an unbound function, and defines a handler, which it subscribes, that calls the decorated
function for each matching event. However, the method isn't called on the object, so the object instance
is never available in the decorator, and the decorator can't call a normal object method because it
doesn't have a value for 'self'.
event_class: type used to match published events; an event matches if it is an instance of this type
The following example shows a custom handler that reacts to Todo.Created
event and saves a projection of a Todo model object.
.. code::
@subscribe_to(Todo.Created)
def new_todo_projection(event):
todo = TodoProjection(id=event.originator_id, title=event.title)
todo.save()
"""
event_classes = list(event_classes)
def wrap(func):
def handler(event):
if isinstance(event, (list, tuple)):
for e in event:
handler(e)
elif not event_classes or isinstance(event, tuple(event_classes)):
func(event)
subscribe(handler=handler, predicate=lambda _: True)
return func
if len(event_classes) == 1 and isfunction(event_classes[0]):
func = event_classes.pop()
return wrap(func)
else:
return wrap | python | def subscribe_to(*event_classes):
event_classes = list(event_classes)
def wrap(func):
def handler(event):
if isinstance(event, (list, tuple)):
for e in event:
handler(e)
elif not event_classes or isinstance(event, tuple(event_classes)):
func(event)
subscribe(handler=handler, predicate=lambda _: True)
return func
if len(event_classes) == 1 and isfunction(event_classes[0]):
func = event_classes.pop()
return wrap(func)
else:
return wrap | [
"def",
"subscribe_to",
"(",
"*",
"event_classes",
")",
":",
"event_classes",
"=",
"list",
"(",
"event_classes",
")",
"def",
"wrap",
"(",
"func",
")",
":",
"def",
"handler",
"(",
"event",
")",
":",
"if",
"isinstance",
"(",
"event",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"e",
"in",
"event",
":",
"handler",
"(",
"e",
")",
"elif",
"not",
"event_classes",
"or",
"isinstance",
"(",
"event",
",",
"tuple",
"(",
"event_classes",
")",
")",
":",
"func",
"(",
"event",
")",
"subscribe",
"(",
"handler",
"=",
"handler",
",",
"predicate",
"=",
"lambda",
"_",
":",
"True",
")",
"return",
"func",
"if",
"len",
"(",
"event_classes",
")",
"==",
"1",
"and",
"isfunction",
"(",
"event_classes",
"[",
"0",
"]",
")",
":",
"func",
"=",
"event_classes",
".",
"pop",
"(",
")",
"return",
"wrap",
"(",
"func",
")",
"else",
":",
"return",
"wrap"
] | Decorator for making a custom event handler function subscribe to a certain class of event.
The decorated function will be called once for each matching event that is published, and will
be given one argument, the event, when it is called. If events are published in lists, for
example the AggregateRoot publishes a list of pending events when its __save__() method is called,
then the decorated function will be called once for each event that is an instance of the given event_class.
Please note, this decorator isn't suitable for use with object class methods. In Python 3 the decorator
receives an unbound function, and defines a handler, which it subscribes, that calls the decorated
function for each matching event. However, the method isn't called on the object, so the object instance
is never available in the decorator, and the decorator can't call a normal object method because it
doesn't have a value for 'self'.
event_class: type used to match published events; an event matches if it is an instance of this type
The following example shows a custom handler that reacts to Todo.Created
event and saves a projection of a Todo model object.
.. code::
@subscribe_to(Todo.Created)
def new_todo_projection(event):
todo = TodoProjection(id=event.originator_id, title=event.title)
todo.save() | [
"Decorator",
"for",
"making",
"a",
"custom",
"event",
"handler",
"function",
"subscribe",
"to",
"a",
"certain",
"class",
"of",
"event",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/domain/model/decorators.py#L10-L54 |
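The list-unpacking and type-matching behaviour described above can be exercised standalone by standing in a trivial publish/subscribe pair; the `_handlers` registry below is hypothetical, not the library's subscription machinery:

```python
_handlers = []  # stand-in registry, for illustration only

def subscribe(handler, predicate):
    _handlers.append((handler, predicate))

def publish(event):
    for handler, predicate in _handlers:
        if predicate(event):
            handler(event)

def subscribe_to(*event_classes):
    def wrap(func):
        def handler(event):
            if isinstance(event, (list, tuple)):
                for e in event:  # unpack lists of pending events
                    handler(e)
            elif isinstance(event, tuple(event_classes)):
                func(event)
        subscribe(handler=handler, predicate=lambda _: True)
        return func
    return wrap

class Created(object):
    pass

@subscribe_to(Created)
def on_created(event):
    print("created:", event)

publish([Created(), object()])  # handler fires once, for the Created instance
```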
247,633 | johnbywater/eventsourcing | eventsourcing/domain/model/decorators.py | mutator | def mutator(arg=None):
"""Structures mutator functions by allowing handlers
to be registered for different types of event. When
the decorated function is called with an initial
value and an event, it will call the handler that
has been registered for that type of event.
It works like singledispatch, which it uses. The
difference is that when the decorated function is
called, this decorator dispatches according to the
type of last call arg, which fits better with reduce().
The builtin Python function reduce() is used by the
library to replay a sequence of events against an
initial state. If a mutator function is given to reduce(),
along with a list of events and an initializer, reduce()
will call the mutator function once for each event in the
list, but the initializer will be the first value, and the
event will be the last argument, and we want to dispatch
according to the type of the event. It happens that
singledispatch is coded to switch on the type of the first
argument, which makes it unsuitable for structuring a mutator
function without the modifications introduced here.
The other aspect introduced by this decorator function is the
option to set the type of the handled entity in the decorator.
When an entity is replayed from scratch, in other words when
all its events are replayed, the initial state is None. The
handler which handles the first event in the sequence will
probably construct an object instance. It is possible to write
the type into the handler, but that makes the entity more difficult
to subclass because you will also need to write a handler for it.
If the decorator is invoked with the type, when the initial
value passed as a call arg to the mutator function is None,
the handler will instead receive the type of the entity, which
it can use to construct the entity object.
.. code::
class Entity(object):
class Created(object):
pass
@mutator(Entity)
def mutate(initial, event):
raise NotImplementedError(type(event))
@mutate.register(Entity.Created)
def _(initial, event):
return initial(**event.__dict__)
entity = mutate(None, Entity.Created())
"""
domain_class = None
def _mutator(func):
wrapped = singledispatch(func)
@wraps(wrapped)
def wrapper(initial, event):
initial = initial or domain_class
return wrapped.dispatch(type(event))(initial, event)
wrapper.register = wrapped.register
return wrapper
if isfunction(arg):
return _mutator(arg)
else:
domain_class = arg
return _mutator | python | def mutator(arg=None):
domain_class = None
def _mutator(func):
wrapped = singledispatch(func)
@wraps(wrapped)
def wrapper(initial, event):
initial = initial or domain_class
return wrapped.dispatch(type(event))(initial, event)
wrapper.register = wrapped.register
return wrapper
if isfunction(arg):
return _mutator(arg)
else:
domain_class = arg
return _mutator | [
"def",
"mutator",
"(",
"arg",
"=",
"None",
")",
":",
"domain_class",
"=",
"None",
"def",
"_mutator",
"(",
"func",
")",
":",
"wrapped",
"=",
"singledispatch",
"(",
"func",
")",
"@",
"wraps",
"(",
"wrapped",
")",
"def",
"wrapper",
"(",
"initial",
",",
"event",
")",
":",
"initial",
"=",
"initial",
"or",
"domain_class",
"return",
"wrapped",
".",
"dispatch",
"(",
"type",
"(",
"event",
")",
")",
"(",
"initial",
",",
"event",
")",
"wrapper",
".",
"register",
"=",
"wrapped",
".",
"register",
"return",
"wrapper",
"if",
"isfunction",
"(",
"arg",
")",
":",
"return",
"_mutator",
"(",
"arg",
")",
"else",
":",
"domain_class",
"=",
"arg",
"return",
"_mutator"
] | Structures mutator functions by allowing handlers
to be registered for different types of event. When
the decorated function is called with an initial
value and an event, it will call the handler that
has been registered for that type of event.
It works like singledispatch, which it uses. The
difference is that when the decorated function is
called, this decorator dispatches according to the
type of last call arg, which fits better with reduce().
The builtin Python function reduce() is used by the
library to replay a sequence of events against an
initial state. If a mutator function is given to reduce(),
along with a list of events and an initializer, reduce()
will call the mutator function once for each event in the
list, but the initializer will be the first value, and the
event will be the last argument, and we want to dispatch
according to the type of the event. It happens that
singledispatch is coded to switch on the type of the first
argument, which makes it unsuitable for structuring a mutator
function without the modifications introduced here.
The other aspect introduced by this decorator function is the
option to set the type of the handled entity in the decorator.
When an entity is replayed from scratch, in other words when
all its events are replayed, the initial state is None. The
handler which handles the first event in the sequence will
probably construct an object instance. It is possible to write
the type into the handler, but that makes the entity more difficult
to subclass because you will also need to write a handler for it.
If the decorator is invoked with the type, when the initial
value passed as a call arg to the mutator function is None,
the handler will instead receive the type of the entity, which
it can use to construct the entity object.
.. code::
class Entity(object):
class Created(object):
pass
@mutator(Entity)
def mutate(initial, event):
raise NotImplementedError(type(event))
@mutate.register(Entity.Created)
def _(initial, event):
return initial(**event.__dict__)
entity = mutate(None, Entity.Created()) | [
"Structures",
"mutator",
"functions",
"by",
"allowing",
"handlers",
"to",
"be",
"registered",
"for",
"different",
"types",
"of",
"event",
".",
"When",
"the",
"decorated",
"function",
"is",
"called",
"with",
"an",
"initial",
"value",
"and",
"an",
"event",
"it",
"will",
"call",
"the",
"handler",
"that",
"has",
"been",
"registered",
"for",
"that",
"type",
"of",
"event",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/domain/model/decorators.py#L57-L128 |
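The reason for dispatching on the last argument is that `functools.reduce` passes the accumulated state first and the event last. A self-contained rerun of the docstring's example, using a local copy of the decorator so it runs standalone:

```python
from functools import reduce, singledispatch, wraps
from inspect import isfunction

def mutator(arg=None):
    # Local copy of the decorator above, included so the example is runnable.
    domain_class = None
    def _mutator(func):
        wrapped = singledispatch(func)
        @wraps(wrapped)
        def wrapper(initial, event):
            initial = initial or domain_class
            return wrapped.dispatch(type(event))(initial, event)
        wrapper.register = wrapped.register
        return wrapper
    if isfunction(arg):
        return _mutator(arg)
    else:
        domain_class = arg
        return _mutator

class Entity(object):
    class Created(object):
        def __init__(self, name):
            self.name = name
    def __init__(self, name):
        self.name = name

@mutator(Entity)
def mutate(initial, event):
    raise NotImplementedError(type(event))

@mutate.register(Entity.Created)
def _(initial, event):
    return initial(**event.__dict__)

# reduce() supplies (state, event); the first event sees the Entity class.
entity = reduce(mutate, [Entity.Created(name="a")], None)
print(entity.name)  # 'a'
```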
247,634 | johnbywater/eventsourcing | eventsourcing/utils/cipher/aes.py | AESCipher.encrypt | def encrypt(self, plaintext):
"""Return ciphertext for given plaintext."""
# String to bytes.
plainbytes = plaintext.encode('utf8')
# Compress plaintext bytes.
compressed = zlib.compress(plainbytes)
# Construct AES-GCM cipher, with 96-bit nonce.
cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce=random_bytes(12))
# Encrypt and digest.
encrypted, tag = cipher.encrypt_and_digest(compressed)
# Combine with nonce.
combined = cipher.nonce + tag + encrypted
# Encode as Base64.
cipherbytes = base64.b64encode(combined)
# Bytes to string.
ciphertext = cipherbytes.decode('utf8')
# Return ciphertext.
return ciphertext | python | def encrypt(self, plaintext):
# String to bytes.
plainbytes = plaintext.encode('utf8')
# Compress plaintext bytes.
compressed = zlib.compress(plainbytes)
# Construct AES-GCM cipher, with 96-bit nonce.
cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce=random_bytes(12))
# Encrypt and digest.
encrypted, tag = cipher.encrypt_and_digest(compressed)
# Combine with nonce.
combined = cipher.nonce + tag + encrypted
# Encode as Base64.
cipherbytes = base64.b64encode(combined)
# Bytes to string.
ciphertext = cipherbytes.decode('utf8')
# Return ciphertext.
return ciphertext | [
"def",
"encrypt",
"(",
"self",
",",
"plaintext",
")",
":",
"# String to bytes.",
"plainbytes",
"=",
"plaintext",
".",
"encode",
"(",
"'utf8'",
")",
"# Compress plaintext bytes.",
"compressed",
"=",
"zlib",
".",
"compress",
"(",
"plainbytes",
")",
"# Construct AES-GCM cipher, with 96-bit nonce.",
"cipher",
"=",
"AES",
".",
"new",
"(",
"self",
".",
"cipher_key",
",",
"AES",
".",
"MODE_GCM",
",",
"nonce",
"=",
"random_bytes",
"(",
"12",
")",
")",
"# Encrypt and digest.",
"encrypted",
",",
"tag",
"=",
"cipher",
".",
"encrypt_and_digest",
"(",
"compressed",
")",
"# Combine with nonce.",
"combined",
"=",
"cipher",
".",
"nonce",
"+",
"tag",
"+",
"encrypted",
"# Encode as Base64.",
"cipherbytes",
"=",
"base64",
".",
"b64encode",
"(",
"combined",
")",
"# Bytes to string.",
"ciphertext",
"=",
"cipherbytes",
".",
"decode",
"(",
"'utf8'",
")",
"# Return ciphertext.",
"return",
"ciphertext"
] | Return ciphertext for given plaintext. | [
"Return",
"ciphertext",
"for",
"given",
"plaintext",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/utils/cipher/aes.py#L24-L49 |
247,635 | johnbywater/eventsourcing | eventsourcing/utils/cipher/aes.py | AESCipher.decrypt | def decrypt(self, ciphertext):
"""Return plaintext for given ciphertext."""
# String to bytes.
cipherbytes = ciphertext.encode('utf8')
# Decode from Base64.
try:
combined = base64.b64decode(cipherbytes)
except (base64.binascii.Error, TypeError) as e:
# base64.binascii.Error for Python 3.
# TypeError for Python 2.
raise DataIntegrityError("Cipher text is damaged: {}".format(e))
# Split out the nonce, tag, and encrypted data.
nonce = combined[:12]
if len(nonce) != 12:
raise DataIntegrityError("Cipher text is damaged: invalid nonce length")
tag = combined[12:28]
if len(tag) != 16:
raise DataIntegrityError("Cipher text is damaged: invalid tag length")
encrypted = combined[28:]
# Construct AES cipher, with old nonce.
cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce)
# Decrypt and verify.
try:
compressed = cipher.decrypt_and_verify(encrypted, tag)
except ValueError as e:
raise DataIntegrityError("Cipher text is damaged: {}".format(e))
# Decompress plaintext bytes.
plainbytes = zlib.decompress(compressed)
# Bytes to string.
plaintext = plainbytes.decode('utf8')
# Return plaintext.
return plaintext | python | def decrypt(self, ciphertext):
# String to bytes.
cipherbytes = ciphertext.encode('utf8')
# Decode from Base64.
try:
combined = base64.b64decode(cipherbytes)
except (base64.binascii.Error, TypeError) as e:
# base64.binascii.Error for Python 3.
# TypeError for Python 2.
raise DataIntegrityError("Cipher text is damaged: {}".format(e))
# Split out the nonce, tag, and encrypted data.
nonce = combined[:12]
if len(nonce) != 12:
raise DataIntegrityError("Cipher text is damaged: invalid nonce length")
tag = combined[12:28]
if len(tag) != 16:
raise DataIntegrityError("Cipher text is damaged: invalid tag length")
encrypted = combined[28:]
# Construct AES cipher, with old nonce.
cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce)
# Decrypt and verify.
try:
compressed = cipher.decrypt_and_verify(encrypted, tag)
except ValueError as e:
raise DataIntegrityError("Cipher text is damaged: {}".format(e))
# Decompress plaintext bytes.
plainbytes = zlib.decompress(compressed)
# Bytes to string.
plaintext = plainbytes.decode('utf8')
# Return plaintext.
return plaintext | [
"def",
"decrypt",
"(",
"self",
",",
"ciphertext",
")",
":",
"# String to bytes.",
"cipherbytes",
"=",
"ciphertext",
".",
"encode",
"(",
"'utf8'",
")",
"# Decode from Base64.",
"try",
":",
"combined",
"=",
"base64",
".",
"b64decode",
"(",
"cipherbytes",
")",
"except",
"(",
"base64",
".",
"binascii",
".",
"Error",
",",
"TypeError",
")",
"as",
"e",
":",
"# base64.binascii.Error for Python 3.",
"# TypeError for Python 2.",
"raise",
"DataIntegrityError",
"(",
"\"Cipher text is damaged: {}\"",
".",
"format",
"(",
"e",
")",
")",
"# Split out the nonce, tag, and encrypted data.",
"nonce",
"=",
"combined",
"[",
":",
"12",
"]",
"if",
"len",
"(",
"nonce",
")",
"!=",
"12",
":",
"raise",
"DataIntegrityError",
"(",
"\"Cipher text is damaged: invalid nonce length\"",
")",
"tag",
"=",
"combined",
"[",
"12",
":",
"28",
"]",
"if",
"len",
"(",
"tag",
")",
"!=",
"16",
":",
"raise",
"DataIntegrityError",
"(",
"\"Cipher text is damaged: invalid tag length\"",
")",
"encrypted",
"=",
"combined",
"[",
"28",
":",
"]",
"# Construct AES cipher, with old nonce.",
"cipher",
"=",
"AES",
".",
"new",
"(",
"self",
".",
"cipher_key",
",",
"AES",
".",
"MODE_GCM",
",",
"nonce",
")",
"# Decrypt and verify.",
"try",
":",
"compressed",
"=",
"cipher",
".",
"decrypt_and_verify",
"(",
"encrypted",
",",
"tag",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"DataIntegrityError",
"(",
"\"Cipher text is damaged: {}\"",
".",
"format",
"(",
"e",
")",
")",
"# Decompress plaintext bytes.",
"plainbytes",
"=",
"zlib",
".",
"decompress",
"(",
"compressed",
")",
"# Bytes to string.",
"plaintext",
"=",
"plainbytes",
".",
"decode",
"(",
"'utf8'",
")",
"# Return plaintext.",
"return",
"plaintext"
] | Return plaintext for given ciphertext. | [
"Return",
"plaintext",
"for",
"given",
"ciphertext",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/utils/cipher/aes.py#L51-L92 |
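Taken together, `encrypt` and `decrypt` above define the layout compress → AES-GCM with a 12-byte nonce → base64(nonce + 16-byte tag + ciphertext). A round-trip of that layout written directly against pycryptodome, without the wrapper class (the 16-byte key is arbitrary):

```python
import base64
import zlib
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

key = get_random_bytes(16)  # AES-128 key, purely for illustration

# Encrypt: compress, then AES-GCM with a fresh 96-bit nonce.
cipher = AES.new(key, AES.MODE_GCM, nonce=get_random_bytes(12))
encrypted, tag = cipher.encrypt_and_digest(zlib.compress(b"some plaintext"))
ciphertext = base64.b64encode(cipher.nonce + tag + encrypted).decode("utf8")

# Decrypt: split nonce/tag/data at the fixed 12- and 16-byte offsets.
combined = base64.b64decode(ciphertext.encode("utf8"))
nonce, tag, encrypted = combined[:12], combined[12:28], combined[28:]
cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
print(zlib.decompress(cipher.decrypt_and_verify(encrypted, tag)))  # b'some plaintext'
```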
247,636 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventstore.py | EventStore.store | def store(self, domain_event_or_events):
"""
Appends given domain event, or list of domain events, to their sequence.
:param domain_event_or_events: domain event, or list of domain events
"""
# Convert to sequenced item.
sequenced_item_or_items = self.item_from_event(domain_event_or_events)
# Append the sequenced item(s) to the sequence.
try:
self.record_manager.record_sequenced_items(sequenced_item_or_items)
except RecordConflictError as e:
raise ConcurrencyError(e) | python | def store(self, domain_event_or_events):
# Convert to sequenced item.
sequenced_item_or_items = self.item_from_event(domain_event_or_events)
# Append the sequenced item(s) to the sequence.
try:
self.record_manager.record_sequenced_items(sequenced_item_or_items)
except RecordConflictError as e:
raise ConcurrencyError(e) | [
"def",
"store",
"(",
"self",
",",
"domain_event_or_events",
")",
":",
"# Convert to sequenced item.",
"sequenced_item_or_items",
"=",
"self",
".",
"item_from_event",
"(",
"domain_event_or_events",
")",
"# Append to the sequenced item(s) to the sequence.",
"try",
":",
"self",
".",
"record_manager",
".",
"record_sequenced_items",
"(",
"sequenced_item_or_items",
")",
"except",
"RecordConflictError",
"as",
"e",
":",
"raise",
"ConcurrencyError",
"(",
"e",
")"
] | Appends given domain event, or list of domain events, to their sequence.
:param domain_event_or_events: domain event, or list of domain events | [
"Appends",
"given",
"domain",
"event",
"or",
"list",
"of",
"domain",
"events",
"to",
"their",
"sequence",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventstore.py#L72-L86 |
247,637 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventstore.py | EventStore.item_from_event | def item_from_event(self, domain_event_or_events):
"""
Maps domain event to sequenced item namedtuple.
:param domain_event_or_events: application-level object (or list)
:return: namedtuple: sequence item namedtuple (or list)
"""
# Convert the domain event(s) to sequenced item(s).
if isinstance(domain_event_or_events, (list, tuple)):
return [self.item_from_event(e) for e in domain_event_or_events]
else:
return self.mapper.item_from_event(domain_event_or_events) | python | def item_from_event(self, domain_event_or_events):
# Convert the domain event(s) to sequenced item(s).
if isinstance(domain_event_or_events, (list, tuple)):
return [self.item_from_event(e) for e in domain_event_or_events]
else:
return self.mapper.item_from_event(domain_event_or_events) | [
"def",
"item_from_event",
"(",
"self",
",",
"domain_event_or_events",
")",
":",
"# Convert the domain event(s) to sequenced item(s).",
"if",
"isinstance",
"(",
"domain_event_or_events",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"return",
"[",
"self",
".",
"item_from_event",
"(",
"e",
")",
"for",
"e",
"in",
"domain_event_or_events",
"]",
"else",
":",
"return",
"self",
".",
"mapper",
".",
"item_from_event",
"(",
"domain_event_or_events",
")"
] | Maps domain event to sequenced item namedtuple.
:param domain_event_or_events: application-level object (or list)
:return: namedtuple: sequence item namedtuple (or list) | [
"Maps",
"domain",
"event",
"to",
"sequenced",
"item",
"namedtuple",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventstore.py#L88-L99 |
247,638 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventstore.py | EventStore.get_domain_events | def get_domain_events(self, originator_id, gt=None, gte=None, lt=None, lte=None, limit=None, is_ascending=True,
page_size=None):
"""
Gets domain events from the sequence identified by `originator_id`.
:param originator_id: ID of a sequence of events
:param gt: get items after this position
:param gte: get items at or after this position
:param lt: get items before this position
:param lte: get items before or at this position
:param limit: get limited number of items
:param is_ascending: get items from lowest position
:param page_size: restrict and repeat database query
:return: list of domain events
"""
if page_size:
sequenced_items = self.iterator_class(
record_manager=self.record_manager,
sequence_id=originator_id,
page_size=page_size,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
is_ascending=is_ascending,
)
else:
sequenced_items = self.record_manager.get_items(
sequence_id=originator_id,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
query_ascending=is_ascending,
results_ascending=is_ascending,
)
# Deserialize to domain events.
domain_events = map(self.mapper.event_from_item, sequenced_items)
return list(domain_events) | python | def get_domain_events(self, originator_id, gt=None, gte=None, lt=None, lte=None, limit=None, is_ascending=True,
page_size=None):
if page_size:
sequenced_items = self.iterator_class(
record_manager=self.record_manager,
sequence_id=originator_id,
page_size=page_size,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
is_ascending=is_ascending,
)
else:
sequenced_items = self.record_manager.get_items(
sequence_id=originator_id,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
query_ascending=is_ascending,
results_ascending=is_ascending,
)
# Deserialize to domain events.
domain_events = map(self.mapper.event_from_item, sequenced_items)
return list(domain_events) | [
"def",
"get_domain_events",
"(",
"self",
",",
"originator_id",
",",
"gt",
"=",
"None",
",",
"gte",
"=",
"None",
",",
"lt",
"=",
"None",
",",
"lte",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"is_ascending",
"=",
"True",
",",
"page_size",
"=",
"None",
")",
":",
"if",
"page_size",
":",
"sequenced_items",
"=",
"self",
".",
"iterator_class",
"(",
"record_manager",
"=",
"self",
".",
"record_manager",
",",
"sequence_id",
"=",
"originator_id",
",",
"page_size",
"=",
"page_size",
",",
"gt",
"=",
"gt",
",",
"gte",
"=",
"gte",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
",",
"limit",
"=",
"limit",
",",
"is_ascending",
"=",
"is_ascending",
",",
")",
"else",
":",
"sequenced_items",
"=",
"self",
".",
"record_manager",
".",
"get_items",
"(",
"sequence_id",
"=",
"originator_id",
",",
"gt",
"=",
"gt",
",",
"gte",
"=",
"gte",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
",",
"limit",
"=",
"limit",
",",
"query_ascending",
"=",
"is_ascending",
",",
"results_ascending",
"=",
"is_ascending",
",",
")",
"# Deserialize to domain events.",
"domain_events",
"=",
"map",
"(",
"self",
".",
"mapper",
".",
"event_from_item",
",",
"sequenced_items",
")",
"return",
"list",
"(",
"domain_events",
")"
] | Gets domain events from the sequence identified by `originator_id`.
:param originator_id: ID of a sequence of events
:param gt: get items after this position
:param gte: get items at or after this position
:param lt: get items before this position
:param lte: get items before or at this position
:param limit: get limited number of items
:param is_ascending: get items from lowest position
:param page_size: restrict and repeat database query
:return: list of domain events | [
"Gets",
"domain",
"events",
"from",
"the",
"sequence",
"identified",
"by",
"originator_id",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventstore.py#L101-L142 |
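What the `page_size` branch buys is bounded queries: an iterator repeatedly asks for one page after the last position seen, instead of loading the whole sequence at once. A list-backed sketch of that pattern (the iterator class itself is not shown in this record):

```python
def iter_paged(get_items, sequence_id, page_size):
    gt = None
    while True:
        page = get_items(sequence_id, gt=gt, limit=page_size)
        for position, item in page:
            yield item
        if len(page) < page_size:
            return  # a short page means the sequence is exhausted
        gt = page[-1][0]  # resume after the last position seen

store = {"s1": [(i, "event-%d" % i) for i in range(7)]}

def get_items(sequence_id, gt=None, limit=None):
    # Stand-in record manager: filter by position, then truncate.
    items = [it for it in store[sequence_id] if gt is None or it[0] > gt]
    return items[:limit]

print(list(iter_paged(get_items, "s1", page_size=3)))  # all seven events
```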
247,639 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventstore.py | EventStore.get_domain_event | def get_domain_event(self, originator_id, position):
"""
Gets a domain event from the sequence identified by `originator_id`
at the given position.
:param originator_id: ID of a sequence of events
:param position: get item at this position
:return: domain event
"""
sequenced_item = self.record_manager.get_item(
sequence_id=originator_id,
position=position,
)
return self.mapper.event_from_item(sequenced_item) | python | def get_domain_event(self, originator_id, position):
sequenced_item = self.record_manager.get_item(
sequence_id=originator_id,
position=position,
)
return self.mapper.event_from_item(sequenced_item) | [
"def",
"get_domain_event",
"(",
"self",
",",
"originator_id",
",",
"position",
")",
":",
"sequenced_item",
"=",
"self",
".",
"record_manager",
".",
"get_item",
"(",
"sequence_id",
"=",
"originator_id",
",",
"position",
"=",
"position",
",",
")",
"return",
"self",
".",
"mapper",
".",
"event_from_item",
"(",
"sequenced_item",
")"
] | Gets a domain event from the sequence identified by `originator_id`
at the given position.
:param originator_id: ID of a sequence of events
:param position: get item at this position
:return: domain event | [
"Gets",
"a",
"domain",
"event",
"from",
"the",
"sequence",
"identified",
"by",
"originator_id",
"at",
"position",
"eq",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventstore.py#L144-L158 |
247,640 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventstore.py | EventStore.get_most_recent_event | def get_most_recent_event(self, originator_id, lt=None, lte=None):
"""
Gets a domain event from the sequence identified by `originator_id`
at the highest position.
:param originator_id: ID of a sequence of events
:param lt: get highest before this position
:param lte: get highest at or before this position
:return: domain event
"""
events = self.get_domain_events(originator_id=originator_id, lt=lt, lte=lte, limit=1, is_ascending=False)
events = list(events)
try:
return events[0]
except IndexError:
pass | python | def get_most_recent_event(self, originator_id, lt=None, lte=None):
events = self.get_domain_events(originator_id=originator_id, lt=lt, lte=lte, limit=1, is_ascending=False)
events = list(events)
try:
return events[0]
except IndexError:
pass | [
"def",
"get_most_recent_event",
"(",
"self",
",",
"originator_id",
",",
"lt",
"=",
"None",
",",
"lte",
"=",
"None",
")",
":",
"events",
"=",
"self",
".",
"get_domain_events",
"(",
"originator_id",
"=",
"originator_id",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
",",
"limit",
"=",
"1",
",",
"is_ascending",
"=",
"False",
")",
"events",
"=",
"list",
"(",
"events",
")",
"try",
":",
"return",
"events",
"[",
"0",
"]",
"except",
"IndexError",
":",
"pass"
] | Gets a domain event from the sequence identified by `originator_id`
at the highest position.
:param originator_id: ID of a sequence of events
:param lt: get highest before this position
:param lte: get highest at or before this position
:return: domain event | [
"Gets",
"a",
"domain",
"event",
"from",
"the",
"sequence",
"identified",
"by",
"originator_id",
"at",
"the",
"highest",
"position",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventstore.py#L160-L175 |
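The "most recent" query above is just a descending, limit-1 read with optional upper bounds; a list-based equivalent, purely for illustration:

```python
def most_recent(events, lt=None, lte=None):
    for position, event in reversed(events):  # scan newest first
        if lt is not None and not (position < lt):
            continue
        if lte is not None and not (position <= lte):
            continue
        return event  # implicitly None when nothing matches

events = [(0, "a"), (1, "b"), (2, "c")]
print(most_recent(events))        # 'c'
print(most_recent(events, lt=2))  # 'b'
```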
247,641 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventstore.py | EventStore.all_domain_events | def all_domain_events(self):
"""
Yields all domain events in the event store.
"""
for originator_id in self.record_manager.all_sequence_ids():
for domain_event in self.get_domain_events(originator_id=originator_id, page_size=100):
yield domain_event | python | def all_domain_events(self):
for originator_id in self.record_manager.all_sequence_ids():
for domain_event in self.get_domain_events(originator_id=originator_id, page_size=100):
yield domain_event | [
"def",
"all_domain_events",
"(",
"self",
")",
":",
"for",
"originator_id",
"in",
"self",
".",
"record_manager",
".",
"all_sequence_ids",
"(",
")",
":",
"for",
"domain_event",
"in",
"self",
".",
"get_domain_events",
"(",
"originator_id",
"=",
"originator_id",
",",
"page_size",
"=",
"100",
")",
":",
"yield",
"domain_event"
] | Yields all domain events in the event store. | [
"Yields",
"all",
"domain",
"events",
"in",
"the",
"event",
"store",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventstore.py#L177-L183 |
247,642 | johnbywater/eventsourcing | eventsourcing/application/process.py | ProcessApplication.publish_prompt | def publish_prompt(self, event=None):
"""
Publishes prompt for a given event.
Used to prompt downstream process application when an event
is published by this application's model, which can happen
when application command methods, rather than the process policy,
are called.
Wraps exceptions with PromptFailed, to avoid application policy exceptions being
seen directly in other applications when running synchronously in single thread.
"""
prompt = Prompt(self.name, self.pipeline_id)
try:
publish(prompt)
except PromptFailed:
raise
except Exception as e:
raise PromptFailed("{}: {}".format(type(e), str(e))) | python | def publish_prompt(self, event=None):
prompt = Prompt(self.name, self.pipeline_id)
try:
publish(prompt)
except PromptFailed:
raise
except Exception as e:
raise PromptFailed("{}: {}".format(type(e), str(e))) | [
"def",
"publish_prompt",
"(",
"self",
",",
"event",
"=",
"None",
")",
":",
"prompt",
"=",
"Prompt",
"(",
"self",
".",
"name",
",",
"self",
".",
"pipeline_id",
")",
"try",
":",
"publish",
"(",
"prompt",
")",
"except",
"PromptFailed",
":",
"raise",
"except",
"Exception",
"as",
"e",
":",
"raise",
"PromptFailed",
"(",
"\"{}: {}\"",
".",
"format",
"(",
"type",
"(",
"e",
")",
",",
"str",
"(",
"e",
")",
")",
")"
] | Publishes prompt for a given event.
Used to prompt downstream process application when an event
is published by this application's model, which can happen
when application command methods, rather than the process policy,
are called.
Wraps exceptions with PromptFailed, to avoid application policy exceptions being
seen directly in other applications when running synchronously in single thread. | [
"Publishes",
"prompt",
"for",
"a",
"given",
"event",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/application/process.py#L52-L71 |
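The re-raise pattern in `publish_prompt` leaves already-wrapped `PromptFailed` errors intact while converting everything else. A minimal demonstration with stand-in `publish` and `PromptFailed` definitions:

```python
class PromptFailed(Exception):
    pass

def publish(prompt):
    raise ValueError("a downstream policy blew up")  # simulate a handler error

def publish_prompt():
    try:
        publish("prompt")
    except PromptFailed:
        raise  # already wrapped: pass through unchanged
    except Exception as e:
        raise PromptFailed("{}: {}".format(type(e), str(e)))

try:
    publish_prompt()
except PromptFailed as e:
    print(e)  # <class 'ValueError'>: a downstream policy blew up
```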
247,643 | johnbywater/eventsourcing | eventsourcing/infrastructure/django/manager.py | DjangoRecordManager._prepare_insert | def _prepare_insert(self, tmpl, record_class, field_names, placeholder_for_id=False):
"""
With transaction isolation level of "read committed" this should
generate records with a contiguous sequence of integer IDs, using
an indexed ID column, the database-side SQL max function, the
insert-select-from form, and optimistic concurrency control.
"""
field_names = list(field_names)
if hasattr(record_class, 'application_name') and 'application_name' not in field_names:
field_names.append('application_name')
if hasattr(record_class, 'pipeline_id') and 'pipeline_id' not in field_names:
field_names.append('pipeline_id')
if hasattr(record_class, 'causal_dependencies') and 'causal_dependencies' not in field_names:
field_names.append('causal_dependencies')
if placeholder_for_id:
if self.notification_id_name:
if self.notification_id_name not in field_names:
field_names.append('id')
statement = tmpl.format(
tablename=self.get_record_table_name(record_class),
columns=", ".join(field_names),
placeholders=", ".join(['%s' for _ in field_names]),
notification_id=self.notification_id_name
)
return statement | python | def _prepare_insert(self, tmpl, record_class, field_names, placeholder_for_id=False):
field_names = list(field_names)
if hasattr(record_class, 'application_name') and 'application_name' not in field_names:
field_names.append('application_name')
if hasattr(record_class, 'pipeline_id') and 'pipeline_id' not in field_names:
field_names.append('pipeline_id')
if hasattr(record_class, 'causal_dependencies') and 'causal_dependencies' not in field_names:
field_names.append('causal_dependencies')
if placeholder_for_id:
if self.notification_id_name:
if self.notification_id_name not in field_names:
field_names.append('id')
statement = tmpl.format(
tablename=self.get_record_table_name(record_class),
columns=", ".join(field_names),
placeholders=", ".join(['%s' for _ in field_names]),
notification_id=self.notification_id_name
)
return statement | [
"def",
"_prepare_insert",
"(",
"self",
",",
"tmpl",
",",
"record_class",
",",
"field_names",
",",
"placeholder_for_id",
"=",
"False",
")",
":",
"field_names",
"=",
"list",
"(",
"field_names",
")",
"if",
"hasattr",
"(",
"record_class",
",",
"'application_name'",
")",
"and",
"'application_name'",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"'application_name'",
")",
"if",
"hasattr",
"(",
"record_class",
",",
"'pipeline_id'",
")",
"and",
"'pipeline_id'",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"'pipeline_id'",
")",
"if",
"hasattr",
"(",
"record_class",
",",
"'causal_dependencies'",
")",
"and",
"'causal_dependencies'",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"'causal_dependencies'",
")",
"if",
"placeholder_for_id",
":",
"if",
"self",
".",
"notification_id_name",
":",
"if",
"self",
".",
"notification_id_name",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"'id'",
")",
"statement",
"=",
"tmpl",
".",
"format",
"(",
"tablename",
"=",
"self",
".",
"get_record_table_name",
"(",
"record_class",
")",
",",
"columns",
"=",
"\", \"",
".",
"join",
"(",
"field_names",
")",
",",
"placeholders",
"=",
"\", \"",
".",
"join",
"(",
"[",
"'%s'",
"for",
"_",
"in",
"field_names",
"]",
")",
",",
"notification_id",
"=",
"self",
".",
"notification_id_name",
")",
"return",
"statement"
] | With transaction isolation level of "read committed" this should
generate records with a contiguous sequence of integer IDs, using
an indexed ID column, the database-side SQL max function, the
insert-select-from form, and optimistic concurrency control. | [
"With",
"transaction",
"isolation",
"level",
"of",
"read",
"committed",
"this",
"should",
"generate",
"records",
"with",
"a",
"contiguous",
"sequence",
"of",
"integer",
"IDs",
"using",
"an",
"indexed",
"ID",
"column",
"the",
"database",
"-",
"side",
"SQL",
"max",
"function",
"the",
"insert",
"-",
"select",
"-",
"from",
"form",
"and",
"optimistic",
"concurrency",
"control",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/django/manager.py#L68-L93 |
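Rendered out, a template of this shape typically becomes an insert-select-from statement that computes the next notification ID on the database side. The template string, table, and column names below are illustrative guesses, not the library's actual schema:

```python
# Hypothetical template in the style _prepare_insert expects.
tmpl = (
    "INSERT INTO {tablename} ({columns}, {notification_id}) "
    "SELECT {placeholders}, COALESCE(MAX({notification_id}), 0) + 1 "
    "FROM {tablename}"
)
field_names = ["sequence_id", "position", "topic", "state"]
statement = tmpl.format(
    tablename="stored_events",
    columns=", ".join(field_names),
    placeholders=", ".join(["%s" for _ in field_names]),
    notification_id="id",
)
print(statement)  # contiguous IDs via MAX(id)+1, guarded by optimistic concurrency
```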
247,644 | johnbywater/eventsourcing | eventsourcing/infrastructure/django/manager.py | DjangoRecordManager.get_notifications | def get_notifications(self, start=None, stop=None, *args, **kwargs):
"""
Returns all records in the table.
"""
filter_kwargs = {}
# Todo: Also support sequencing by 'position' if items are sequenced by timestamp?
if start is not None:
filter_kwargs['%s__gte' % self.notification_id_name] = start + 1
if stop is not None:
filter_kwargs['%s__lt' % self.notification_id_name] = stop + 1
objects = self.record_class.objects.filter(**filter_kwargs)
if hasattr(self.record_class, 'application_name'):
objects = objects.filter(application_name=self.application_name)
if hasattr(self.record_class, 'pipeline_id'):
objects = objects.filter(pipeline_id=self.pipeline_id)
objects = objects.order_by('%s' % self.notification_id_name)
return objects.all() | python | def get_notifications(self, start=None, stop=None, *args, **kwargs):
filter_kwargs = {}
# Todo: Also support sequencing by 'position' if items are sequenced by timestamp?
if start is not None:
filter_kwargs['%s__gte' % self.notification_id_name] = start + 1
if stop is not None:
filter_kwargs['%s__lt' % self.notification_id_name] = stop + 1
objects = self.record_class.objects.filter(**filter_kwargs)
if hasattr(self.record_class, 'application_name'):
objects = objects.filter(application_name=self.application_name)
if hasattr(self.record_class, 'pipeline_id'):
objects = objects.filter(pipeline_id=self.pipeline_id)
objects = objects.order_by('%s' % self.notification_id_name)
return objects.all() | [
"def",
"get_notifications",
"(",
"self",
",",
"start",
"=",
"None",
",",
"stop",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"filter_kwargs",
"=",
"{",
"}",
"# Todo: Also support sequencing by 'position' if items are sequenced by timestamp?",
"if",
"start",
"is",
"not",
"None",
":",
"filter_kwargs",
"[",
"'%s__gte'",
"%",
"self",
".",
"notification_id_name",
"]",
"=",
"start",
"+",
"1",
"if",
"stop",
"is",
"not",
"None",
":",
"filter_kwargs",
"[",
"'%s__lt'",
"%",
"self",
".",
"notification_id_name",
"]",
"=",
"stop",
"+",
"1",
"objects",
"=",
"self",
".",
"record_class",
".",
"objects",
".",
"filter",
"(",
"*",
"*",
"filter_kwargs",
")",
"if",
"hasattr",
"(",
"self",
".",
"record_class",
",",
"'application_name'",
")",
":",
"objects",
"=",
"objects",
".",
"filter",
"(",
"application_name",
"=",
"self",
".",
"application_name",
")",
"if",
"hasattr",
"(",
"self",
".",
"record_class",
",",
"'pipeline_id'",
")",
":",
"objects",
"=",
"objects",
".",
"filter",
"(",
"pipeline_id",
"=",
"self",
".",
"pipeline_id",
")",
"objects",
"=",
"objects",
".",
"order_by",
"(",
"'%s'",
"%",
"self",
".",
"notification_id_name",
")",
"return",
"objects",
".",
"all",
"(",
")"
] | Returns notification records from the table, optionally limited by start and stop positions. | [
"Returns",
"notification",
"records",
"from",
"the",
"table",
"optionally",
"limited",
"by",
"start",
"and",
"stop",
"positions",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/django/manager.py#L151-L169 |
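
The start/stop arguments in get_notifications above are zero-based slice positions mapped onto one-based notification IDs, which is why both bounds get "+ 1". A minimal sketch of that mapping, with a plain list standing in for the Django queryset (the record shape here is illustrative, not the library's record class):

```python
# Stand-in records with 1-based IDs, mirroring the notification ID column.
records = [{"id": n, "event": "event-%d" % n} for n in range(1, 11)]

def get_notifications(start=None, stop=None):
    # Mirror the ORM filter: id >= start + 1 and id < stop + 1.
    lo = (start + 1) if start is not None else None
    hi = (stop + 1) if stop is not None else None
    return [r for r in records
            if (lo is None or r["id"] >= lo)
            and (hi is None or r["id"] < hi)]

# Zero-based slice [0, 3) selects notification IDs 1, 2, 3.
assert [r["id"] for r in get_notifications(start=0, stop=3)] == [1, 2, 3]
assert [r["id"] for r in get_notifications(start=5)] == [6, 7, 8, 9, 10]
```
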
247,645 | johnbywater/eventsourcing | eventsourcing/application/actors.py | ActorModelRunner.start | def start(self):
"""
Starts all the actors to run a system of process applications.
"""
# Subscribe to broadcast prompts published by a process
# application in the parent operating system process.
subscribe(handler=self.forward_prompt, predicate=self.is_prompt)
# Initialise the system actor.
msg = SystemInitRequest(
self.system.process_classes,
self.infrastructure_class,
self.system.followings,
self.pipeline_ids
)
response = self.actor_system.ask(self.system_actor, msg)
# Keep the pipeline actor addresses, to send prompts directly.
assert isinstance(response, SystemInitResponse), type(response)
assert list(response.pipeline_actors.keys()) == self.pipeline_ids, (
"Configured pipeline IDs mismatch initialised system {} {}").format(
list(response.pipeline_actors.keys()), self.pipeline_ids
)
self.pipeline_actors = response.pipeline_actors | python | def start(self):
# Subscribe to broadcast prompts published by a process
# application in the parent operating system process.
subscribe(handler=self.forward_prompt, predicate=self.is_prompt)
# Initialise the system actor.
msg = SystemInitRequest(
self.system.process_classes,
self.infrastructure_class,
self.system.followings,
self.pipeline_ids
)
response = self.actor_system.ask(self.system_actor, msg)
# Keep the pipeline actor addresses, to send prompts directly.
assert isinstance(response, SystemInitResponse), type(response)
assert list(response.pipeline_actors.keys()) == self.pipeline_ids, (
"Configured pipeline IDs mismatch initialised system {} {}").format(
list(response.pipeline_actors.keys()), self.pipeline_ids
)
self.pipeline_actors = response.pipeline_actors | [
"def",
"start",
"(",
"self",
")",
":",
"# Subscribe to broadcast prompts published by a process",
"# application in the parent operating system process.",
"subscribe",
"(",
"handler",
"=",
"self",
".",
"forward_prompt",
",",
"predicate",
"=",
"self",
".",
"is_prompt",
")",
"# Initialise the system actor.",
"msg",
"=",
"SystemInitRequest",
"(",
"self",
".",
"system",
".",
"process_classes",
",",
"self",
".",
"infrastructure_class",
",",
"self",
".",
"system",
".",
"followings",
",",
"self",
".",
"pipeline_ids",
")",
"response",
"=",
"self",
".",
"actor_system",
".",
"ask",
"(",
"self",
".",
"system_actor",
",",
"msg",
")",
"# Keep the pipeline actor addresses, to send prompts directly.",
"assert",
"isinstance",
"(",
"response",
",",
"SystemInitResponse",
")",
",",
"type",
"(",
"response",
")",
"assert",
"list",
"(",
"response",
".",
"pipeline_actors",
".",
"keys",
"(",
")",
")",
"==",
"self",
".",
"pipeline_ids",
",",
"(",
"\"Configured pipeline IDs mismatch initialised system {} {}\"",
")",
".",
"format",
"(",
"list",
"(",
"self",
".",
"pipeline_actors",
".",
"keys",
"(",
")",
")",
",",
"self",
".",
"pipeline_ids",
")",
"self",
".",
"pipeline_actors",
"=",
"response",
".",
"pipeline_actors"
] | Starts all the actors to run a system of process applications. | [
"Starts",
"all",
"the",
"actors",
"to",
"run",
"a",
"system",
"of",
"process",
"applications",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/application/actors.py#L80-L105 |
247,646 | johnbywater/eventsourcing | eventsourcing/application/actors.py | ActorModelRunner.close | def close(self):
"""Stops all the actors running a system of process applications."""
super(ActorModelRunner, self).close()
unsubscribe(handler=self.forward_prompt, predicate=self.is_prompt)
if self.shutdown_on_close:
self.shutdown() | python | def close(self):
super(ActorModelRunner, self).close()
unsubscribe(handler=self.forward_prompt, predicate=self.is_prompt)
if self.shutdown_on_close:
self.shutdown() | [
"def",
"close",
"(",
"self",
")",
":",
"super",
"(",
"ActorModelRunner",
",",
"self",
")",
".",
"close",
"(",
")",
"unsubscribe",
"(",
"handler",
"=",
"self",
".",
"forward_prompt",
",",
"predicate",
"=",
"self",
".",
"is_prompt",
")",
"if",
"self",
".",
"shutdown_on_close",
":",
"self",
".",
"shutdown",
"(",
")"
] | Stops all the actors running a system of process applications. | [
"Stops",
"all",
"the",
"actors",
"running",
"a",
"system",
"of",
"process",
"applications",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/application/actors.py#L122-L127 |
247,647 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/application.py | SuffixTreeApplication.register_new_suffix_tree | def register_new_suffix_tree(self, case_insensitive=False):
"""Returns a new suffix tree entity.
"""
suffix_tree = register_new_suffix_tree(case_insensitive=case_insensitive)
suffix_tree._node_repo = self.node_repo
suffix_tree._node_child_collection_repo = self.node_child_collection_repo
suffix_tree._edge_repo = self.edge_repo
suffix_tree._stringid_collection_repo = self.stringid_collection_repo
return suffix_tree | python | def register_new_suffix_tree(self, case_insensitive=False):
suffix_tree = register_new_suffix_tree(case_insensitive=case_insensitive)
suffix_tree._node_repo = self.node_repo
suffix_tree._node_child_collection_repo = self.node_child_collection_repo
suffix_tree._edge_repo = self.edge_repo
suffix_tree._stringid_collection_repo = self.stringid_collection_repo
return suffix_tree | [
"def",
"register_new_suffix_tree",
"(",
"self",
",",
"case_insensitive",
"=",
"False",
")",
":",
"suffix_tree",
"=",
"register_new_suffix_tree",
"(",
"case_insensitive",
"=",
"case_insensitive",
")",
"suffix_tree",
".",
"_node_repo",
"=",
"self",
".",
"node_repo",
"suffix_tree",
".",
"_node_child_collection_repo",
"=",
"self",
".",
"node_child_collection_repo",
"suffix_tree",
".",
"_edge_repo",
"=",
"self",
".",
"edge_repo",
"suffix_tree",
".",
"_stringid_collection_repo",
"=",
"self",
".",
"stringid_collection_repo",
"return",
"suffix_tree"
] | Returns a new suffix tree entity. | [
"Returns",
"a",
"new",
"suffix",
"tree",
"entity",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/application.py#L29-L37 |
247,648 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/application.py | SuffixTreeApplication.find_string_ids | def find_string_ids(self, substring, suffix_tree_id, limit=None):
"""Returns a set of IDs for strings that contain the given substring.
"""
# Find an edge for the substring.
edge, ln = self.find_substring_edge(substring=substring, suffix_tree_id=suffix_tree_id)
# If there isn't an edge, return an empty set.
if edge is None:
return set()
# Get all the string IDs beneath the edge's destination node.
string_ids = get_string_ids(
node_id=edge.dest_node_id,
node_repo=self.node_repo,
node_child_collection_repo=self.node_child_collection_repo,
stringid_collection_repo=self.stringid_collection_repo,
length_until_end=edge.length + 1 - ln,
limit=limit
)
# Return a set of string IDs.
return set(string_ids) | python | def find_string_ids(self, substring, suffix_tree_id, limit=None):
# Find an edge for the substring.
edge, ln = self.find_substring_edge(substring=substring, suffix_tree_id=suffix_tree_id)
# If there isn't an edge, return an empty set.
if edge is None:
return set()
# Get all the string IDs beneath the edge's destination node.
string_ids = get_string_ids(
node_id=edge.dest_node_id,
node_repo=self.node_repo,
node_child_collection_repo=self.node_child_collection_repo,
stringid_collection_repo=self.stringid_collection_repo,
length_until_end=edge.length + 1 - ln,
limit=limit
)
# Return a set of string IDs.
return set(string_ids) | [
"def",
"find_string_ids",
"(",
"self",
",",
"substring",
",",
"suffix_tree_id",
",",
"limit",
"=",
"None",
")",
":",
"# Find an edge for the substring.",
"edge",
",",
"ln",
"=",
"self",
".",
"find_substring_edge",
"(",
"substring",
"=",
"substring",
",",
"suffix_tree_id",
"=",
"suffix_tree_id",
")",
"# If there isn't an edge, return an empty set.",
"if",
"edge",
"is",
"None",
":",
"return",
"set",
"(",
")",
"# Get all the string IDs beneath the edge's destination node.",
"string_ids",
"=",
"get_string_ids",
"(",
"node_id",
"=",
"edge",
".",
"dest_node_id",
",",
"node_repo",
"=",
"self",
".",
"node_repo",
",",
"node_child_collection_repo",
"=",
"self",
".",
"node_child_collection_repo",
",",
"stringid_collection_repo",
"=",
"self",
".",
"stringid_collection_repo",
",",
"length_until_end",
"=",
"edge",
".",
"length",
"+",
"1",
"-",
"ln",
",",
"limit",
"=",
"limit",
")",
"# Return a set of string IDs.",
"return",
"set",
"(",
"string_ids",
")"
] | Returns a set of IDs for strings that contain the given substring. | [
"Returns",
"a",
"set",
"of",
"IDs",
"for",
"strings",
"that",
"contain",
"the",
"given",
"substring",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/application.py#L50-L72 |
247,649 | johnbywater/eventsourcing | eventsourcing/contrib/suffixtrees/application.py | SuffixTreeApplication.find_substring_edge | def find_substring_edge(self, substring, suffix_tree_id):
"""Returns an edge that matches the given substring.
"""
suffix_tree = self.suffix_tree_repo[suffix_tree_id]
started = datetime.datetime.now()
edge, ln = find_substring_edge(substring=substring, suffix_tree=suffix_tree, edge_repo=self.edge_repo)
# if edge is not None:
# print("Got edge for substring '{}': {}".format(substring, edge))
# else:
# print("No edge for substring '{}'".format(substring))
print(" - searched for edge in {} for substring: '{}'".format(datetime.datetime.now() - started, substring))
return edge, ln | python | def find_substring_edge(self, substring, suffix_tree_id):
suffix_tree = self.suffix_tree_repo[suffix_tree_id]
started = datetime.datetime.now()
edge, ln = find_substring_edge(substring=substring, suffix_tree=suffix_tree, edge_repo=self.edge_repo)
# if edge is not None:
# print("Got edge for substring '{}': {}".format(substring, edge))
# else:
# print("No edge for substring '{}'".format(substring))
print(" - searched for edge in {} for substring: '{}'".format(datetime.datetime.now() - started, substring))
return edge, ln | [
"def",
"find_substring_edge",
"(",
"self",
",",
"substring",
",",
"suffix_tree_id",
")",
":",
"suffix_tree",
"=",
"self",
".",
"suffix_tree_repo",
"[",
"suffix_tree_id",
"]",
"started",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"edge",
",",
"ln",
"=",
"find_substring_edge",
"(",
"substring",
"=",
"substring",
",",
"suffix_tree",
"=",
"suffix_tree",
",",
"edge_repo",
"=",
"self",
".",
"edge_repo",
")",
"# if edge is not None:",
"# print(\"Got edge for substring '{}': {}\".format(substring, edge))",
"# else:",
"# print(\"No edge for substring '{}'\".format(substring))",
"print",
"(",
"\" - searched for edge in {} for substring: '{}'\"",
".",
"format",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"started",
",",
"substring",
")",
")",
"return",
"edge",
",",
"ln"
] | Returns an edge that matches the given substring. | [
"Returns",
"an",
"edge",
"that",
"matches",
"the",
"given",
"substring",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/suffixtrees/application.py#L74-L85 |
247,650 | johnbywater/eventsourcing | eventsourcing/application/system.py | SingleThreadedRunner.run_followers | def run_followers(self, prompt):
"""
First caller adds a prompt to queue and
runs followers until there are no more
pending prompts.
Subsequent callers just add a prompt
to the queue, avoiding recursion.
"""
assert isinstance(prompt, Prompt)
# Put the prompt on the queue.
self.pending_prompts.put(prompt)
if self.iteration_lock.acquire(False):
start_time = time.time()
i = 0
try:
while True:
try:
prompt = self.pending_prompts.get(False)
except Empty:
break
else:
followers = self.system.followers[prompt.process_name]
for follower_name in followers:
follower = self.system.processes[follower_name]
follower.run(prompt)
i += 1
self.pending_prompts.task_done()
finally:
run_frequency = i / (time.time() - start_time)
# print(f"Run frequency: {run_frequency}")
self.iteration_lock.release() | python | def run_followers(self, prompt):
assert isinstance(prompt, Prompt)
# Put the prompt on the queue.
self.pending_prompts.put(prompt)
if self.iteration_lock.acquire(False):
start_time = time.time()
i = 0
try:
while True:
try:
prompt = self.pending_prompts.get(False)
except Empty:
break
else:
followers = self.system.followers[prompt.process_name]
for follower_name in followers:
follower = self.system.processes[follower_name]
follower.run(prompt)
i += 1
self.pending_prompts.task_done()
finally:
run_frequency = i / (time.time() - start_time)
# print(f"Run frequency: {run_frequency}")
self.iteration_lock.release() | [
"def",
"run_followers",
"(",
"self",
",",
"prompt",
")",
":",
"assert",
"isinstance",
"(",
"prompt",
",",
"Prompt",
")",
"# Put the prompt on the queue.",
"self",
".",
"pending_prompts",
".",
"put",
"(",
"prompt",
")",
"if",
"self",
".",
"iteration_lock",
".",
"acquire",
"(",
"False",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"i",
"=",
"0",
"try",
":",
"while",
"True",
":",
"try",
":",
"prompt",
"=",
"self",
".",
"pending_prompts",
".",
"get",
"(",
"False",
")",
"except",
"Empty",
":",
"break",
"else",
":",
"followers",
"=",
"self",
".",
"system",
".",
"followers",
"[",
"prompt",
".",
"process_name",
"]",
"for",
"follower_name",
"in",
"followers",
":",
"follower",
"=",
"self",
".",
"system",
".",
"processes",
"[",
"follower_name",
"]",
"follower",
".",
"run",
"(",
"prompt",
")",
"i",
"+=",
"1",
"self",
".",
"pending_prompts",
".",
"task_done",
"(",
")",
"finally",
":",
"run_frequency",
"=",
"i",
"/",
"(",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
")",
"# print(f\"Run frequency: {run_frequency}\")",
"self",
".",
"iteration_lock",
".",
"release",
"(",
")"
] | First caller adds a prompt to queue and
runs followers until there are no more
pending prompts.
Subsequent callers just add a prompt
to the queue, avoiding recursion. | [
"First",
"caller",
"adds",
"a",
"prompt",
"to",
"queue",
"and",
"runs",
"followers",
"until",
"there",
"are",
"no",
"more",
"pending",
"prompts",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/application/system.py#L223-L255 |
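
run_followers above combines a shared queue with a non-blocking lock acquire: the first caller becomes the drainer and processes prompts until the queue is empty, while re-entrant calls from follower policies only enqueue, so prompt handling never recurses. A self-contained sketch of that pattern, with a list-appending handler standing in for the followers:

```python
from queue import Empty, Queue
from threading import Lock

pending_prompts = Queue()
iteration_lock = Lock()
handled = []

def run_followers(prompt):
    # Every caller enqueues; only one caller at a time drains.
    pending_prompts.put(prompt)
    if iteration_lock.acquire(False):  # non-blocking: re-entrant calls skip
        try:
            while True:
                try:
                    p = pending_prompts.get(False)
                except Empty:
                    break
                handled.append(p)  # stands in for follower.run(prompt)
                pending_prompts.task_done()
        finally:
            iteration_lock.release()

run_followers("prompt-1")
assert handled == ["prompt-1"]
```
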
247,651 | johnbywater/eventsourcing | eventsourcing/example/domainmodel.py | create_new_example | def create_new_example(foo='', a='', b=''):
"""
Factory method for example entities.
:rtype: Example
"""
return Example.__create__(foo=foo, a=a, b=b) | python | def create_new_example(foo='', a='', b=''):
return Example.__create__(foo=foo, a=a, b=b) | [
"def",
"create_new_example",
"(",
"foo",
"=",
"''",
",",
"a",
"=",
"''",
",",
"b",
"=",
"''",
")",
":",
"return",
"Example",
".",
"__create__",
"(",
"foo",
"=",
"foo",
",",
"a",
"=",
"a",
",",
"b",
"=",
"b",
")"
] | Factory method for example entities.
:rtype: Example | [
"Factory",
"method",
"for",
"example",
"entities",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/example/domainmodel.py#L62-L68 |
247,652 | johnbywater/eventsourcing | eventsourcing/application/decorators.py | applicationpolicy | def applicationpolicy(arg=None):
"""
Decorator for application policy method.
Allows policy to be built up from methods
registered for different event classes.
"""
def _mutator(func):
wrapped = singledispatch(func)
@wraps(wrapped)
def wrapper(*args, **kwargs):
event = kwargs.get('event') or args[-1]
return wrapped.dispatch(type(event))(*args, **kwargs)
wrapper.register = wrapped.register
return wrapper
assert isfunction(arg), arg
return _mutator(arg) | python | def applicationpolicy(arg=None):
def _mutator(func):
wrapped = singledispatch(func)
@wraps(wrapped)
def wrapper(*args, **kwargs):
event = kwargs.get('event') or args[-1]
return wrapped.dispatch(type(event))(*args, **kwargs)
wrapper.register = wrapped.register
return wrapper
assert isfunction(arg), arg
return _mutator(arg) | [
"def",
"applicationpolicy",
"(",
"arg",
"=",
"None",
")",
":",
"def",
"_mutator",
"(",
"func",
")",
":",
"wrapped",
"=",
"singledispatch",
"(",
"func",
")",
"@",
"wraps",
"(",
"wrapped",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"event",
"=",
"kwargs",
".",
"get",
"(",
"'event'",
")",
"or",
"args",
"[",
"-",
"1",
"]",
"return",
"wrapped",
".",
"dispatch",
"(",
"type",
"(",
"event",
")",
")",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"wrapper",
".",
"register",
"=",
"wrapped",
".",
"register",
"return",
"wrapper",
"assert",
"isfunction",
"(",
"arg",
")",
",",
"arg",
"return",
"_mutator",
"(",
"arg",
")"
] | Decorator for application policy method.
Allows policy to be built up from methods
registered for different event classes. | [
"Decorator",
"for",
"application",
"policy",
"method",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/application/decorators.py#L5-L26 |
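
The applicationpolicy decorator above builds a policy method out of functools.singledispatch, dispatching on the type of the event argument and exposing the dispatcher's register so methods can be added per event class. A usage sketch, repeating the decorator for self-containment (the event classes and return values are illustrative):

```python
from functools import singledispatch, wraps
from inspect import isfunction

def applicationpolicy(arg=None):
    def _mutator(func):
        wrapped = singledispatch(func)
        @wraps(wrapped)
        def wrapper(*args, **kwargs):
            event = kwargs.get('event') or args[-1]
            return wrapped.dispatch(type(event))(*args, **kwargs)
        wrapper.register = wrapped.register
        return wrapper
    assert isfunction(arg), arg
    return _mutator(arg)

class ExampleCreated: pass
class ExampleDiscarded: pass

class Policy:
    @applicationpolicy
    def policy(self, event):
        # Default case: called for event types with no registered handler.
        return 'ignored: %s' % type(event).__name__

    @policy.register(ExampleCreated)
    def _(self, event):
        return 'created'

    @policy.register(ExampleDiscarded)
    def _(self, event):
        return 'discarded'

p = Policy()
assert p.policy(ExampleCreated()) == 'created'
assert p.policy(ExampleDiscarded()) == 'discarded'
assert p.policy(object()) == 'ignored: object'
```
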
247,653 | johnbywater/eventsourcing | eventsourcing/infrastructure/sqlalchemy/manager.py | SQLAlchemyRecordManager._prepare_insert | def _prepare_insert(self, tmpl, record_class, field_names, placeholder_for_id=False):
"""
With transaction isolation level of "read committed" this should
generate records with a contiguous sequence of integer IDs, assuming
an indexed ID column, the database-side SQL max function, the
insert-select-from form, and optimistic concurrency control.
"""
field_names = list(field_names)
if hasattr(record_class, 'application_name') and 'application_name' not in field_names:
field_names.append('application_name')
if hasattr(record_class, 'pipeline_id') and 'pipeline_id' not in field_names:
field_names.append('pipeline_id')
if hasattr(record_class, 'causal_dependencies') and 'causal_dependencies' not in field_names:
field_names.append('causal_dependencies')
if self.notification_id_name:
if placeholder_for_id:
if self.notification_id_name not in field_names:
field_names.append(self.notification_id_name)
statement = text(tmpl.format(
tablename=self.get_record_table_name(record_class),
columns=", ".join(field_names),
placeholders=", ".join([":{}".format(f) for f in field_names]),
notification_id=self.notification_id_name
))
# Define bind parameters with explicit types taken from record column types.
bindparams = []
for col_name in field_names:
column_type = getattr(record_class, col_name).type
bindparams.append(bindparam(col_name, type_=column_type))
# Redefine statement with explicitly typed bind parameters.
statement = statement.bindparams(*bindparams)
# Compile the statement with the session dialect.
compiled = statement.compile(dialect=self.session.bind.dialect)
return compiled | python | def _prepare_insert(self, tmpl, record_class, field_names, placeholder_for_id=False):
field_names = list(field_names)
if hasattr(record_class, 'application_name') and 'application_name' not in field_names:
field_names.append('application_name')
if hasattr(record_class, 'pipeline_id') and 'pipeline_id' not in field_names:
field_names.append('pipeline_id')
if hasattr(record_class, 'causal_dependencies') and 'causal_dependencies' not in field_names:
field_names.append('causal_dependencies')
if self.notification_id_name:
if placeholder_for_id:
if self.notification_id_name not in field_names:
field_names.append(self.notification_id_name)
statement = text(tmpl.format(
tablename=self.get_record_table_name(record_class),
columns=", ".join(field_names),
placeholders=", ".join([":{}".format(f) for f in field_names]),
notification_id=self.notification_id_name
))
# Define bind parameters with explicit types taken from record column types.
bindparams = []
for col_name in field_names:
column_type = getattr(record_class, col_name).type
bindparams.append(bindparam(col_name, type_=column_type))
# Redefine statement with explicitly typed bind parameters.
statement = statement.bindparams(*bindparams)
# Compile the statement with the session dialect.
compiled = statement.compile(dialect=self.session.bind.dialect)
return compiled | [
"def",
"_prepare_insert",
"(",
"self",
",",
"tmpl",
",",
"record_class",
",",
"field_names",
",",
"placeholder_for_id",
"=",
"False",
")",
":",
"field_names",
"=",
"list",
"(",
"field_names",
")",
"if",
"hasattr",
"(",
"record_class",
",",
"'application_name'",
")",
"and",
"'application_name'",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"'application_name'",
")",
"if",
"hasattr",
"(",
"record_class",
",",
"'pipeline_id'",
")",
"and",
"'pipeline_id'",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"'pipeline_id'",
")",
"if",
"hasattr",
"(",
"record_class",
",",
"'causal_dependencies'",
")",
"and",
"'causal_dependencies'",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"'causal_dependencies'",
")",
"if",
"self",
".",
"notification_id_name",
":",
"if",
"placeholder_for_id",
":",
"if",
"self",
".",
"notification_id_name",
"not",
"in",
"field_names",
":",
"field_names",
".",
"append",
"(",
"self",
".",
"notification_id_name",
")",
"statement",
"=",
"text",
"(",
"tmpl",
".",
"format",
"(",
"tablename",
"=",
"self",
".",
"get_record_table_name",
"(",
"record_class",
")",
",",
"columns",
"=",
"\", \"",
".",
"join",
"(",
"field_names",
")",
",",
"placeholders",
"=",
"\", \"",
".",
"join",
"(",
"[",
"\":{}\"",
".",
"format",
"(",
"f",
")",
"for",
"f",
"in",
"field_names",
"]",
")",
",",
"notification_id",
"=",
"self",
".",
"notification_id_name",
")",
")",
"# Define bind parameters with explicit types taken from record column types.",
"bindparams",
"=",
"[",
"]",
"for",
"col_name",
"in",
"field_names",
":",
"column_type",
"=",
"getattr",
"(",
"record_class",
",",
"col_name",
")",
".",
"type",
"bindparams",
".",
"append",
"(",
"bindparam",
"(",
"col_name",
",",
"type_",
"=",
"column_type",
")",
")",
"# Redefine statement with explicitly typed bind parameters.",
"statement",
"=",
"statement",
".",
"bindparams",
"(",
"*",
"bindparams",
")",
"# Compile the statement with the session dialect.",
"compiled",
"=",
"statement",
".",
"compile",
"(",
"dialect",
"=",
"self",
".",
"session",
".",
"bind",
".",
"dialect",
")",
"return",
"compiled"
] | With transaction isolation level of "read committed" this should
generate records with a contiguous sequence of integer IDs, assuming
an indexed ID column, the database-side SQL max function, the
insert-select-from form, and optimistic concurrency control. | [
"With",
"transaction",
"isolation",
"level",
"of",
"read",
"committed",
"this",
"should",
"generate",
"records",
"with",
"a",
"contiguous",
"sequence",
"of",
"integer",
"IDs",
"assuming",
"an",
"indexed",
"ID",
"column",
"the",
"database",
"-",
"side",
"SQL",
"max",
"function",
"the",
"insert",
"-",
"select",
"-",
"from",
"form",
"and",
"optimistic",
"concurrency",
"control",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/sqlalchemy/manager.py#L22-L60 |
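
The exact SQL template compiled by _prepare_insert is not shown in this record, but the docstring describes the insert-select-from pattern: insert a row whose ID is selected as max(ID) + 1 from the same indexed table, so that under "read committed" a concurrent duplicate violates the primary key and can be retried as a concurrency conflict. A sketch of that assumed shape against SQLite:

```python
import sqlite3

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE events (id INTEGER PRIMARY KEY, state TEXT)')

# Assumed template shape: select the next contiguous ID from the same table.
insert_select_max = (
    'INSERT INTO events (id, state) '
    'SELECT COALESCE(MAX(id), 0) + 1, :state FROM events'
)

db.execute(insert_select_max, {'state': 'event-a'})
db.execute(insert_select_max, {'state': 'event-b'})
assert [r[0] for r in db.execute('SELECT id FROM events ORDER BY id')] == [1, 2]
```
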
247,654 | johnbywater/eventsourcing | eventsourcing/infrastructure/sqlalchemy/manager.py | SQLAlchemyRecordManager.delete_record | def delete_record(self, record):
"""
Permanently removes record from table.
"""
try:
self.session.delete(record)
self.session.commit()
except Exception as e:
self.session.rollback()
raise ProgrammingError(e)
finally:
self.session.close() | python | def delete_record(self, record):
try:
self.session.delete(record)
self.session.commit()
except Exception as e:
self.session.rollback()
raise ProgrammingError(e)
finally:
self.session.close() | [
"def",
"delete_record",
"(",
"self",
",",
"record",
")",
":",
"try",
":",
"self",
".",
"session",
".",
"delete",
"(",
"record",
")",
"self",
".",
"session",
".",
"commit",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"session",
".",
"rollback",
"(",
")",
"raise",
"ProgrammingError",
"(",
"e",
")",
"finally",
":",
"self",
".",
"session",
".",
"close",
"(",
")"
] | Permanently removes record from table. | [
"Permanently",
"removes",
"record",
"from",
"table",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/sqlalchemy/manager.py#L274-L285 |
247,655 | johnbywater/eventsourcing | eventsourcing/domain/model/timebucketedlog.py | TimebucketedlogRepository.get_or_create | def get_or_create(self, log_name, bucket_size):
"""
Gets or creates a log.
:rtype: Timebucketedlog
"""
try:
return self[log_name]
except RepositoryKeyError:
return start_new_timebucketedlog(log_name, bucket_size=bucket_size) | python | def get_or_create(self, log_name, bucket_size):
try:
return self[log_name]
except RepositoryKeyError:
return start_new_timebucketedlog(log_name, bucket_size=bucket_size) | [
"def",
"get_or_create",
"(",
"self",
",",
"log_name",
",",
"bucket_size",
")",
":",
"try",
":",
"return",
"self",
"[",
"log_name",
"]",
"except",
"RepositoryKeyError",
":",
"return",
"start_new_timebucketedlog",
"(",
"log_name",
",",
"bucket_size",
"=",
"bucket_size",
")"
] | Gets or creates a log.
:rtype: Timebucketedlog | [
"Gets",
"or",
"creates",
"a",
"log",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/domain/model/timebucketedlog.py#L70-L79 |
247,656 | johnbywater/eventsourcing | eventsourcing/infrastructure/eventplayer.py | EventPlayer.project_events | def project_events(self, initial_state, domain_events):
"""
Evolves initial state using the sequence of domain events and a mutator function.
"""
return reduce(self._mutator_func or self.mutate, domain_events, initial_state) | python | def project_events(self, initial_state, domain_events):
return reduce(self._mutator_func or self.mutate, domain_events, initial_state) | [
"def",
"project_events",
"(",
"self",
",",
"initial_state",
",",
"domain_events",
")",
":",
"return",
"reduce",
"(",
"self",
".",
"_mutator_func",
"or",
"self",
".",
"mutate",
",",
"domain_events",
",",
"initial_state",
")"
] | Evolves initial state using the sequence of domain events and a mutator function. | [
"Evolves",
"initial",
"state",
"using",
"the",
"sequence",
"of",
"domain",
"events",
"and",
"a",
"mutator",
"function",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/eventplayer.py#L32-L36 |
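
project_events above is a plain left fold: reduce(mutator, events, initial_state). A minimal sketch with an illustrative mutator and event tuples:

```python
from functools import reduce

def mutate(state, event):
    # Each event evolves the state; unknown events leave it unchanged.
    kind, value = event
    if kind == 'created':
        return {'total': value}
    if kind == 'added':
        return {'total': state['total'] + value}
    return state

events = [('created', 0), ('added', 5), ('added', 7)]
assert reduce(mutate, events, None) == {'total': 12}
```
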
247,657 | johnbywater/eventsourcing | eventsourcing/domain/model/array.py | BigArray.get_last_array | def get_last_array(self):
"""
Returns last array in compound.
:rtype: (Array, int)
"""
# Get the root array (might not have been registered).
root = self.repo[self.id]
# Get length and last item in the root array.
apex_id, apex_height = root.get_last_item_and_next_position()
# Bail if there isn't anything yet.
if apex_id is None:
return None, None
# Get the current apex array.
apex = self.repo[apex_id]
assert isinstance(apex, Array)
# Descend until hitting the bottom.
array = apex
array_i = 0
height = apex_height
while height > 1:
height -= 1
array_id, width = array.get_last_item_and_next_position()
assert width > 0
offset = width - 1
array_i += offset * self.repo.array_size ** height
array = self.repo[array_id]
return array, array_i | python | def get_last_array(self):
# Get the root array (might not have been registered).
root = self.repo[self.id]
# Get length and last item in the root array.
apex_id, apex_height = root.get_last_item_and_next_position()
# Bail if there isn't anything yet.
if apex_id is None:
return None, None
# Get the current apex array.
apex = self.repo[apex_id]
assert isinstance(apex, Array)
# Descend until hitting the bottom.
array = apex
array_i = 0
height = apex_height
while height > 1:
height -= 1
array_id, width = array.get_last_item_and_next_position()
assert width > 0
offset = width - 1
array_i += offset * self.repo.array_size ** height
array = self.repo[array_id]
return array, array_i | [
"def",
"get_last_array",
"(",
"self",
")",
":",
"# Get the root array (might not have been registered).",
"root",
"=",
"self",
".",
"repo",
"[",
"self",
".",
"id",
"]",
"# Get length and last item in the root array.",
"apex_id",
",",
"apex_height",
"=",
"root",
".",
"get_last_item_and_next_position",
"(",
")",
"# Bail if there isn't anything yet.",
"if",
"apex_id",
"is",
"None",
":",
"return",
"None",
",",
"None",
"# Get the current apex array.",
"apex",
"=",
"self",
".",
"repo",
"[",
"apex_id",
"]",
"assert",
"isinstance",
"(",
"apex",
",",
"Array",
")",
"# Descend until hitting the bottom.",
"array",
"=",
"apex",
"array_i",
"=",
"0",
"height",
"=",
"apex_height",
"while",
"height",
">",
"1",
":",
"height",
"-=",
"1",
"array_id",
",",
"width",
"=",
"array",
".",
"get_last_item_and_next_position",
"(",
")",
"assert",
"width",
">",
"0",
"offset",
"=",
"width",
"-",
"1",
"array_i",
"+=",
"offset",
"*",
"self",
".",
"repo",
".",
"array_size",
"**",
"height",
"array",
"=",
"self",
".",
"repo",
"[",
"array_id",
"]",
"return",
"array",
",",
"array_i"
] | Returns last array in compound.
:rtype: (Array, int) | [
"Returns",
"last",
"array",
"in",
"compound",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/domain/model/array.py#L213-L245 |
247,658 | johnbywater/eventsourcing | eventsourcing/domain/model/array.py | BigArray.calc_parent | def calc_parent(self, i, j, h):
"""
Returns start and end of span of parent sequence that contains given child.
"""
N = self.repo.array_size
c_i = i
c_j = j
c_h = h
# Calculate the number of the sequence in its row (sequences
# with same height), from left to right, starting from 0.
c_n = c_i // (N ** c_h)
p_n = c_n // N
# Position of the child ID in the parent array.
p_p = c_n % N
# Parent height is child height plus one.
p_h = c_h + 1
# Span of sequences in parent row is max size N, to the power of the height.
span = N ** p_h
# Calculate parent i and j.
p_i = p_n * span
p_j = p_i + span
# Check the parent i,j bounds the child i,j, ie child span is contained by parent span.
assert p_i <= c_i, 'i greater on parent than child: {} {}'.format(p_i, p_j)
assert p_j >= c_j, 'j less on parent than child: {} {}'.format(p_i, p_j)
# Return parent i, j, h, p.
return p_i, p_j, p_h, p_p | python | def calc_parent(self, i, j, h):
N = self.repo.array_size
c_i = i
c_j = j
c_h = h
# Calculate the number of the sequence in its row (sequences
# with same height), from left to right, starting from 0.
c_n = c_i // (N ** c_h)
p_n = c_n // N
# Position of the child ID in the parent array.
p_p = c_n % N
# Parent height is child height plus one.
p_h = c_h + 1
# Span of sequences in parent row is max size N, to the power of the height.
span = N ** p_h
# Calculate parent i and j.
p_i = p_n * span
p_j = p_i + span
# Check the parent i,j bounds the child i,j, ie child span is contained by parent span.
assert p_i <= c_i, 'i greater on parent than child: {} {}'.format(p_i, p_j)
assert p_j >= c_j, 'j less on parent than child: {} {}'.format(p_i, p_j)
# Return parent i, j, h, p.
return p_i, p_j, p_h, p_p | [
"def",
"calc_parent",
"(",
"self",
",",
"i",
",",
"j",
",",
"h",
")",
":",
"N",
"=",
"self",
".",
"repo",
".",
"array_size",
"c_i",
"=",
"i",
"c_j",
"=",
"j",
"c_h",
"=",
"h",
"# Calculate the number of the sequence in its row (sequences",
"# with same height), from left to right, starting from 0.",
"c_n",
"=",
"c_i",
"//",
"(",
"N",
"**",
"c_h",
")",
"p_n",
"=",
"c_n",
"//",
"N",
"# Position of the child ID in the parent array.",
"p_p",
"=",
"c_n",
"%",
"N",
"# Parent height is child height plus one.",
"p_h",
"=",
"c_h",
"+",
"1",
"# Span of sequences in parent row is max size N, to the power of the height.",
"span",
"=",
"N",
"**",
"p_h",
"# Calculate parent i and j.",
"p_i",
"=",
"p_n",
"*",
"span",
"p_j",
"=",
"p_i",
"+",
"span",
"# Check the parent i,j bounds the child i,j, ie child span is contained by parent span.",
"assert",
"p_i",
"<=",
"c_i",
",",
"'i greater on parent than child: {}'",
".",
"format",
"(",
"p_i",
",",
"p_j",
")",
"assert",
"p_j",
">=",
"c_j",
",",
"'j less on parent than child: {}'",
".",
"format",
"(",
"p_i",
",",
"p_j",
")",
"# Return parent i, j, h, p.",
"return",
"p_i",
",",
"p_j",
",",
"p_h",
",",
"p_p"
] | Returns start and end of span of parent sequence that contains given child. | [
"Returns",
"start",
"and",
"end",
"of",
"span",
"of",
"parent",
"sequence",
"that",
"contains",
"given",
"child",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/domain/model/array.py#L372-L397 |
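
The arithmetic in calc_parent can be checked with a standalone function and concrete numbers; N = 4 below is an illustrative array size:

```python
def calc_parent(i, j, h, N=4):
    c_n = i // (N ** h)   # number of the child sequence in its row
    p_n = c_n // N        # number of the parent sequence in its row
    p_p = c_n % N         # child's position within the parent array
    p_h = h + 1           # parent is one level higher
    span = N ** p_h       # width of a parent-level span
    p_i = p_n * span
    p_j = p_i + span
    assert p_i <= i and p_j >= j  # parent span contains child span
    return p_i, p_j, p_h, p_p

# Child span [16, 20) at height 1 is child 0 of parent span [16, 32).
assert calc_parent(16, 20, 1) == (16, 32, 2, 0)
# Child span [20, 24) at height 1 is child 1 of the same parent.
assert calc_parent(20, 24, 1) == (16, 32, 2, 1)
```
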
247,659 | johnbywater/eventsourcing | eventsourcing/infrastructure/sequenceditemmapper.py | SequencedItemMapper.item_from_event | def item_from_event(self, domain_event):
"""
Constructs a sequenced item from a domain event.
"""
item_args = self.construct_item_args(domain_event)
return self.construct_sequenced_item(item_args) | python | def item_from_event(self, domain_event):
item_args = self.construct_item_args(domain_event)
return self.construct_sequenced_item(item_args) | [
"def",
"item_from_event",
"(",
"self",
",",
"domain_event",
")",
":",
"item_args",
"=",
"self",
".",
"construct_item_args",
"(",
"domain_event",
")",
"return",
"self",
".",
"construct_sequenced_item",
"(",
"item_args",
")"
] | Constructs a sequenced item from a domain event. | [
"Constructs",
"a",
"sequenced",
"item",
"from",
"a",
"domain",
"event",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/sequenceditemmapper.py#L41-L46 |
247,660 | johnbywater/eventsourcing | eventsourcing/infrastructure/sequenceditemmapper.py | SequencedItemMapper.construct_item_args | def construct_item_args(self, domain_event):
"""
Constructs attributes of a sequenced item from the given domain event.
"""
# Get the sequence ID.
sequence_id = domain_event.__dict__[self.sequence_id_attr_name]
# Get the position in the sequence.
position = getattr(domain_event, self.position_attr_name, None)
# Get topic and data.
topic, state = self.get_item_topic_and_state(
domain_event.__class__,
domain_event.__dict__
)
# Get the 'other' args.
# - these are meant to be derivative of the other attributes,
# to populate database fields, and shouldn't affect the hash.
other_args = tuple((getattr(domain_event, name) for name in self.other_attr_names))
return (sequence_id, position, topic, state) + other_args | python | def construct_item_args(self, domain_event):
# Get the sequence ID.
sequence_id = domain_event.__dict__[self.sequence_id_attr_name]
# Get the position in the sequence.
position = getattr(domain_event, self.position_attr_name, None)
# Get topic and data.
topic, state = self.get_item_topic_and_state(
domain_event.__class__,
domain_event.__dict__
)
# Get the 'other' args.
# - these are meant to be derivative of the other attributes,
# to populate database fields, and shouldn't affect the hash.
other_args = tuple((getattr(domain_event, name) for name in self.other_attr_names))
return (sequence_id, position, topic, state) + other_args | [
"def",
"construct_item_args",
"(",
"self",
",",
"domain_event",
")",
":",
"# Get the sequence ID.",
"sequence_id",
"=",
"domain_event",
".",
"__dict__",
"[",
"self",
".",
"sequence_id_attr_name",
"]",
"# Get the position in the sequence.",
"position",
"=",
"getattr",
"(",
"domain_event",
",",
"self",
".",
"position_attr_name",
",",
"None",
")",
"# Get topic and data.",
"topic",
",",
"state",
"=",
"self",
".",
"get_item_topic_and_state",
"(",
"domain_event",
".",
"__class__",
",",
"domain_event",
".",
"__dict__",
")",
"# Get the 'other' args.",
"# - these are meant to be derivative of the other attributes,",
"# to populate database fields, and shouldn't affect the hash.",
"other_args",
"=",
"tuple",
"(",
"(",
"getattr",
"(",
"domain_event",
",",
"name",
")",
"for",
"name",
"in",
"self",
".",
"other_attr_names",
")",
")",
"return",
"(",
"sequence_id",
",",
"position",
",",
"topic",
",",
"state",
")",
"+",
"other_args"
] | Constructs attributes of a sequenced item from the given domain event. | [
"Constructs",
"attributes",
"of",
"a",
"sequenced",
"item",
"from",
"the",
"given",
"domain",
"event",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/sequenceditemmapper.py#L48-L69 |
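
construct_item_args above yields the tuple (sequence_id, position, topic, state) plus any derivative fields. A sketch of that mapping with an illustrative event class and JSON serialisation (the topic format and attribute names here are assumptions, not the library's mapper):

```python
from collections import namedtuple
import json
import uuid

SequencedItem = namedtuple('SequencedItem',
                           ['sequence_id', 'position', 'topic', 'state'])

class ThingCreated:
    def __init__(self, originator_id, originator_version, name):
        self.originator_id = originator_id
        self.originator_version = originator_version
        self.name = name

event = ThingCreated(uuid.uuid4(), 0, 'thing1')

item = SequencedItem(
    sequence_id=event.originator_id,    # the sequence ID attribute
    position=event.originator_version,  # the position attribute
    topic='%s#%s' % (type(event).__module__, type(event).__qualname__),
    state=json.dumps({'name': event.name}),  # serialised event attributes
)
assert item.position == 0
```
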
247,661 | johnbywater/eventsourcing | eventsourcing/infrastructure/sequenceditemmapper.py | SequencedItemMapper.event_from_item | def event_from_item(self, sequenced_item):
"""
Reconstructs domain event from stored event topic and
event attrs. Used in the event store when getting domain events.
"""
assert isinstance(sequenced_item, self.sequenced_item_class), (
self.sequenced_item_class, type(sequenced_item)
)
# Get the topic and state.
topic = getattr(sequenced_item, self.field_names.topic)
state = getattr(sequenced_item, self.field_names.state)
return self.event_from_topic_and_state(topic, state) | python | def event_from_item(self, sequenced_item):
assert isinstance(sequenced_item, self.sequenced_item_class), (
self.sequenced_item_class, type(sequenced_item)
)
# Get the topic and state.
topic = getattr(sequenced_item, self.field_names.topic)
state = getattr(sequenced_item, self.field_names.state)
return self.event_from_topic_and_state(topic, state) | [
"def",
"event_from_item",
"(",
"self",
",",
"sequenced_item",
")",
":",
"assert",
"isinstance",
"(",
"sequenced_item",
",",
"self",
".",
"sequenced_item_class",
")",
",",
"(",
"self",
".",
"sequenced_item_class",
",",
"type",
"(",
"sequenced_item",
")",
")",
"# Get the topic and state.",
"topic",
"=",
"getattr",
"(",
"sequenced_item",
",",
"self",
".",
"field_names",
".",
"topic",
")",
"state",
"=",
"getattr",
"(",
"sequenced_item",
",",
"self",
".",
"field_names",
".",
"state",
")",
"return",
"self",
".",
"event_from_topic_and_state",
"(",
"topic",
",",
"state",
")"
] | Reconstructs domain event from stored event topic and
event attrs. Used in the event store when getting domain events. | [
"Reconstructs",
"domain",
"event",
"from",
"stored",
"event",
"topic",
"and",
"event",
"attrs",
".",
"Used",
"in",
"the",
"event",
"store",
"when",
"getting",
"domain",
"events",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/sequenceditemmapper.py#L87-L100 |
247,662 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | AbstractSequencedItemRecordManager.get_item | def get_item(self, sequence_id, position):
"""
Gets sequenced item from the datastore.
"""
return self.from_record(self.get_record(sequence_id, position)) | python | def get_item(self, sequence_id, position):
return self.from_record(self.get_record(sequence_id, position)) | [
"def",
"get_item",
"(",
"self",
",",
"sequence_id",
",",
"position",
")",
":",
"return",
"self",
".",
"from_record",
"(",
"self",
".",
"get_record",
"(",
"sequence_id",
",",
"position",
")",
")"
] | Gets sequenced item from the datastore. | [
"Gets",
"sequenced",
"item",
"from",
"the",
"datastore",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L55-L59 |
247,663 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | AbstractSequencedItemRecordManager.get_items | def get_items(self, sequence_id, gt=None, gte=None, lt=None, lte=None, limit=None,
query_ascending=True, results_ascending=True):
"""
Returns sequenced item generator.
"""
records = self.get_records(
sequence_id=sequence_id,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
query_ascending=query_ascending,
results_ascending=results_ascending,
)
for item in map(self.from_record, records):
yield item | python | def get_items(self, sequence_id, gt=None, gte=None, lt=None, lte=None, limit=None,
query_ascending=True, results_ascending=True):
records = self.get_records(
sequence_id=sequence_id,
gt=gt,
gte=gte,
lt=lt,
lte=lte,
limit=limit,
query_ascending=query_ascending,
results_ascending=results_ascending,
)
for item in map(self.from_record, records):
yield item | [
"def",
"get_items",
"(",
"self",
",",
"sequence_id",
",",
"gt",
"=",
"None",
",",
"gte",
"=",
"None",
",",
"lt",
"=",
"None",
",",
"lte",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"query_ascending",
"=",
"True",
",",
"results_ascending",
"=",
"True",
")",
":",
"records",
"=",
"self",
".",
"get_records",
"(",
"sequence_id",
"=",
"sequence_id",
",",
"gt",
"=",
"gt",
",",
"gte",
"=",
"gte",
",",
"lt",
"=",
"lt",
",",
"lte",
"=",
"lte",
",",
"limit",
"=",
"limit",
",",
"query_ascending",
"=",
"query_ascending",
",",
"results_ascending",
"=",
"results_ascending",
",",
")",
"for",
"item",
"in",
"map",
"(",
"self",
".",
"from_record",
",",
"records",
")",
":",
"yield",
"item"
] | Returns sequenced item generator. | [
"Returns",
"sequenced",
"item",
"generator",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L67-L84 |
247,664 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | AbstractSequencedItemRecordManager.to_record | def to_record(self, sequenced_item):
"""
Constructs a record object from given sequenced item object.
"""
kwargs = self.get_field_kwargs(sequenced_item)
# Supply application_name, if needed.
if hasattr(self.record_class, 'application_name'):
kwargs['application_name'] = self.application_name
# Supply pipeline_id, if needed.
if hasattr(self.record_class, 'pipeline_id'):
kwargs['pipeline_id'] = self.pipeline_id
return self.record_class(**kwargs) | python | def to_record(self, sequenced_item):
kwargs = self.get_field_kwargs(sequenced_item)
# Supply application_name, if needed.
if hasattr(self.record_class, 'application_name'):
kwargs['application_name'] = self.application_name
# Supply pipeline_id, if needed.
if hasattr(self.record_class, 'pipeline_id'):
kwargs['pipeline_id'] = self.pipeline_id
return self.record_class(**kwargs) | [
"def",
"to_record",
"(",
"self",
",",
"sequenced_item",
")",
":",
"kwargs",
"=",
"self",
".",
"get_field_kwargs",
"(",
"sequenced_item",
")",
"# Supply application_name, if needed.",
"if",
"hasattr",
"(",
"self",
".",
"record_class",
",",
"'application_name'",
")",
":",
"kwargs",
"[",
"'application_name'",
"]",
"=",
"self",
".",
"application_name",
"# Supply pipeline_id, if needed.",
"if",
"hasattr",
"(",
"self",
".",
"record_class",
",",
"'pipeline_id'",
")",
":",
"kwargs",
"[",
"'pipeline_id'",
"]",
"=",
"self",
".",
"pipeline_id",
"return",
"self",
".",
"record_class",
"(",
"*",
"*",
"kwargs",
")"
] | Constructs a record object from given sequenced item object. | [
"Constructs",
"a",
"record",
"object",
"from",
"given",
"sequenced",
"item",
"object",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L99-L110 |
247,665 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | AbstractSequencedItemRecordManager.from_record | def from_record(self, record):
"""
Constructs and returns a sequenced item object, from given ORM object.
"""
kwargs = self.get_field_kwargs(record)
return self.sequenced_item_class(**kwargs) | python | def from_record(self, record):
kwargs = self.get_field_kwargs(record)
return self.sequenced_item_class(**kwargs) | [
"def",
"from_record",
"(",
"self",
",",
"record",
")",
":",
"kwargs",
"=",
"self",
".",
"get_field_kwargs",
"(",
"record",
")",
"return",
"self",
".",
"sequenced_item_class",
"(",
"*",
"*",
"kwargs",
")"
] | Constructs and returns a sequenced item object, from given ORM object. | [
"Constructs",
"and",
"returns",
"a",
"sequenced",
"item",
"object",
"from",
"given",
"ORM",
"object",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L112-L117 |
247,666 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | ACIDRecordManager.get_pipeline_and_notification_id | def get_pipeline_and_notification_id(self, sequence_id, position):
"""
Returns pipeline ID and notification ID for
event at given position in given sequence.
"""
# Todo: Optimise query by selecting only two columns, pipeline_id and id (notification ID)?
record = self.get_record(sequence_id, position)
notification_id = getattr(record, self.notification_id_name)
return record.pipeline_id, notification_id | python | def get_pipeline_and_notification_id(self, sequence_id, position):
# Todo: Optimise query by selecting only two columns, pipeline_id and id (notification ID)?
record = self.get_record(sequence_id, position)
notification_id = getattr(record, self.notification_id_name)
return record.pipeline_id, notification_id | [
"def",
"get_pipeline_and_notification_id",
"(",
"self",
",",
"sequence_id",
",",
"position",
")",
":",
"# Todo: Optimise query by selecting only two columns, pipeline_id and id (notification ID)?",
"record",
"=",
"self",
".",
"get_record",
"(",
"sequence_id",
",",
"position",
")",
"notification_id",
"=",
"getattr",
"(",
"record",
",",
"self",
".",
"notification_id_name",
")",
"return",
"record",
".",
"pipeline_id",
",",
"notification_id"
] | Returns pipeline ID and notification ID for
event at given position in given sequence. | [
"Returns",
"pipeline",
"ID",
"and",
"notification",
"ID",
"for",
"event",
"at",
"given",
"position",
"in",
"given",
"sequence",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L194-L202 |
247,667 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | SQLRecordManager.insert_select_max | def insert_select_max(self):
"""
SQL statement that inserts records with contiguous IDs,
by selecting max ID from indexed table records.
"""
if self._insert_select_max is None:
if hasattr(self.record_class, 'application_name'):
# Todo: Maybe make it support application_name without pipeline_id?
assert hasattr(self.record_class, 'pipeline_id'), self.record_class
tmpl = self._insert_select_max_tmpl + self._where_application_name_tmpl
else:
tmpl = self._insert_select_max_tmpl
self._insert_select_max = self._prepare_insert(
tmpl=tmpl,
record_class=self.record_class,
field_names=list(self.field_names),
)
return self._insert_select_max | python | def insert_select_max(self):
if self._insert_select_max is None:
if hasattr(self.record_class, 'application_name'):
# Todo: Maybe make it support application_name without pipeline_id?
assert hasattr(self.record_class, 'pipeline_id'), self.record_class
tmpl = self._insert_select_max_tmpl + self._where_application_name_tmpl
else:
tmpl = self._insert_select_max_tmpl
self._insert_select_max = self._prepare_insert(
tmpl=tmpl,
record_class=self.record_class,
field_names=list(self.field_names),
)
return self._insert_select_max | [
"def",
"insert_select_max",
"(",
"self",
")",
":",
"if",
"self",
".",
"_insert_select_max",
"is",
"None",
":",
"if",
"hasattr",
"(",
"self",
".",
"record_class",
",",
"'application_name'",
")",
":",
"# Todo: Maybe make it support application_name without pipeline_id?",
"assert",
"hasattr",
"(",
"self",
".",
"record_class",
",",
"'pipeline_id'",
")",
",",
"self",
".",
"record_class",
"tmpl",
"=",
"self",
".",
"_insert_select_max_tmpl",
"+",
"self",
".",
"_where_application_name_tmpl",
"else",
":",
"tmpl",
"=",
"self",
".",
"_insert_select_max_tmpl",
"self",
".",
"_insert_select_max",
"=",
"self",
".",
"_prepare_insert",
"(",
"tmpl",
"=",
"tmpl",
",",
"record_class",
"=",
"self",
".",
"record_class",
",",
"field_names",
"=",
"list",
"(",
"self",
".",
"field_names",
")",
",",
")",
"return",
"self",
".",
"_insert_select_max"
] | SQL statement that inserts records with contiguous IDs,
by selecting max ID from indexed table records. | [
"SQL",
"statement",
"that",
"inserts",
"records",
"with",
"contiguous",
"IDs",
"by",
"selecting",
"max",
"ID",
"from",
"indexed",
"table",
"records",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L236-L253 |
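
insert_select_max above prepares its SQL statement lazily and caches the compiled result on the instance, so the template is formatted and compiled only once. The pattern in isolation (the cached value here is a placeholder string):

```python
class Manager:
    def __init__(self):
        self._insert_select_max = None
        self.prepare_count = 0

    def _prepare_insert(self):
        # Stands in for template formatting and dialect compilation.
        self.prepare_count += 1
        return 'INSERT INTO events ... SELECT COALESCE(MAX(id), 0) + 1 ...'

    @property
    def insert_select_max(self):
        # Prepare on first access, then reuse the compiled statement.
        if self._insert_select_max is None:
            self._insert_select_max = self._prepare_insert()
        return self._insert_select_max

m = Manager()
first = m.insert_select_max
second = m.insert_select_max
assert first is second and m.prepare_count == 1
```
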
247,668 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | SQLRecordManager.insert_values | def insert_values(self):
"""
SQL statement that inserts records without ID.
"""
if self._insert_values is None:
self._insert_values = self._prepare_insert(
tmpl=self._insert_values_tmpl,
placeholder_for_id=True,
record_class=self.record_class,
field_names=self.field_names,
)
return self._insert_values | python | def insert_values(self):
if self._insert_values is None:
self._insert_values = self._prepare_insert(
tmpl=self._insert_values_tmpl,
placeholder_for_id=True,
record_class=self.record_class,
field_names=self.field_names,
)
return self._insert_values | [
"def",
"insert_values",
"(",
"self",
")",
":",
"if",
"self",
".",
"_insert_values",
"is",
"None",
":",
"self",
".",
"_insert_values",
"=",
"self",
".",
"_prepare_insert",
"(",
"tmpl",
"=",
"self",
".",
"_insert_values_tmpl",
",",
"placeholder_for_id",
"=",
"True",
",",
"record_class",
"=",
"self",
".",
"record_class",
",",
"field_names",
"=",
"self",
".",
"field_names",
",",
")",
"return",
"self",
".",
"_insert_values"
] | SQL statement that inserts records without ID. | [
"SQL",
"statement",
"that",
"inserts",
"records",
"without",
"ID",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L270-L281 |
247,669 | johnbywater/eventsourcing | eventsourcing/infrastructure/base.py | SQLRecordManager.insert_tracking_record | def insert_tracking_record(self):
"""
SQL statement that inserts tracking records.
"""
if self._insert_tracking_record is None:
self._insert_tracking_record = self._prepare_insert(
tmpl=self._insert_values_tmpl,
placeholder_for_id=True,
record_class=self.tracking_record_class,
field_names=self.tracking_record_field_names,
)
return self._insert_tracking_record | python | def insert_tracking_record(self):
if self._insert_tracking_record is None:
self._insert_tracking_record = self._prepare_insert(
tmpl=self._insert_values_tmpl,
placeholder_for_id=True,
record_class=self.tracking_record_class,
field_names=self.tracking_record_field_names,
)
return self._insert_tracking_record | [
"def",
"insert_tracking_record",
"(",
"self",
")",
":",
"if",
"self",
".",
"_insert_tracking_record",
"is",
"None",
":",
"self",
".",
"_insert_tracking_record",
"=",
"self",
".",
"_prepare_insert",
"(",
"tmpl",
"=",
"self",
".",
"_insert_values_tmpl",
",",
"placeholder_for_id",
"=",
"True",
",",
"record_class",
"=",
"self",
".",
"tracking_record_class",
",",
"field_names",
"=",
"self",
".",
"tracking_record_field_names",
",",
")",
"return",
"self",
".",
"_insert_tracking_record"
] | SQL statement that inserts tracking records. | [
"SQL",
"statement",
"that",
"inserts",
"tracking",
"records",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/infrastructure/base.py#L284-L295 |
247,670 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/application.py | PaxosAggregate.start | def start(cls, originator_id, quorum_size, network_uid):
"""
Factory method that returns a new Paxos aggregate.
"""
assert isinstance(quorum_size, int), "Not an integer: {}".format(quorum_size)
return cls.__create__(
event_class=cls.Started,
originator_id=originator_id,
quorum_size=quorum_size,
network_uid=network_uid
) | python | def start(cls, originator_id, quorum_size, network_uid):
assert isinstance(quorum_size, int), "Not an integer: {}".format(quorum_size)
return cls.__create__(
event_class=cls.Started,
originator_id=originator_id,
quorum_size=quorum_size,
network_uid=network_uid
) | [
"def",
"start",
"(",
"cls",
",",
"originator_id",
",",
"quorum_size",
",",
"network_uid",
")",
":",
"assert",
"isinstance",
"(",
"quorum_size",
",",
"int",
")",
",",
"\"Not an integer: {}\"",
".",
"format",
"(",
"quorum_size",
")",
"return",
"cls",
".",
"__create__",
"(",
"event_class",
"=",
"cls",
".",
"Started",
",",
"originator_id",
"=",
"originator_id",
",",
"quorum_size",
"=",
"quorum_size",
",",
"network_uid",
"=",
"network_uid",
")"
] | Factory method that returns a new Paxos aggregate. | [
"Factory",
"method",
"that",
"returns",
"a",
"new",
"Paxos",
"aggregate",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/application.py#L105-L115 |
247,671 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/application.py | PaxosAggregate.propose_value | def propose_value(self, value, assume_leader=False):
"""
Proposes a value to the network.
"""
if value is None:
raise ValueError("Not allowed to propose value None")
paxos = self.paxos_instance
paxos.leader = assume_leader
msg = paxos.propose_value(value)
if msg is None:
msg = paxos.prepare()
self.setattrs_from_paxos(paxos)
self.announce(msg)
return msg | python | def propose_value(self, value, assume_leader=False):
if value is None:
raise ValueError("Not allowed to propose value None")
paxos = self.paxos_instance
paxos.leader = assume_leader
msg = paxos.propose_value(value)
if msg is None:
msg = paxos.prepare()
self.setattrs_from_paxos(paxos)
self.announce(msg)
return msg | [
"def",
"propose_value",
"(",
"self",
",",
"value",
",",
"assume_leader",
"=",
"False",
")",
":",
"if",
"value",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Not allowed to propose value None\"",
")",
"paxos",
"=",
"self",
".",
"paxos_instance",
"paxos",
".",
"leader",
"=",
"assume_leader",
"msg",
"=",
"paxos",
".",
"propose_value",
"(",
"value",
")",
"if",
"msg",
"is",
"None",
":",
"msg",
"=",
"paxos",
".",
"prepare",
"(",
")",
"self",
".",
"setattrs_from_paxos",
"(",
"paxos",
")",
"self",
".",
"announce",
"(",
"msg",
")",
"return",
"msg"
] | Proposes a value to the network. | [
"Proposes",
"a",
"value",
"to",
"the",
"network",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/application.py#L117-L130 |
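
propose_value above either gets an accept-phase message straight from the Paxos instance (when assuming leadership) or falls back to a phase-1 prepare. A control-flow sketch with a stub in place of the real PaxosInstance (the message shapes are illustrative):

```python
class PaxosInstanceStub:
    # Illustrative stand-in for the real PaxosInstance.
    def __init__(self):
        self.leader = False

    def propose_value(self, value):
        # Only a current leader can go straight to the accept phase.
        return ('accept', value) if self.leader else None

    def prepare(self):
        # Phase 1: start a new ballot.
        return ('prepare', None)

def propose(paxos, value, assume_leader=False):
    if value is None:
        raise ValueError("Not allowed to propose value None")
    paxos.leader = assume_leader
    msg = paxos.propose_value(value)
    if msg is None:
        msg = paxos.prepare()
    return msg

assert propose(PaxosInstanceStub(), 'v1')[0] == 'prepare'
assert propose(PaxosInstanceStub(), 'v1', assume_leader=True)[0] == 'accept'
```
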
247,672 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/application.py | PaxosAggregate.receive_message | def receive_message(self, msg):
"""
Responds to messages from other participants.
"""
if isinstance(msg, Resolution):
return
paxos = self.paxos_instance
while msg:
if isinstance(msg, Resolution):
self.print_if_verbose("{} resolved value {}".format(self.network_uid, msg.value))
break
else:
self.print_if_verbose("{} <- {} <- {}".format(self.network_uid, msg.__class__.__name__, msg.from_uid))
msg = paxos.receive(msg)
# Todo: Make it optional not to announce resolution (without which it's hard to see final value).
do_announce_resolution = True
if msg and (do_announce_resolution or not isinstance(msg, Resolution)):
self.announce(msg)
self.setattrs_from_paxos(paxos) | python | def receive_message(self, msg):
if isinstance(msg, Resolution):
return
paxos = self.paxos_instance
while msg:
if isinstance(msg, Resolution):
self.print_if_verbose("{} resolved value {}".format(self.network_uid, msg.value))
break
else:
self.print_if_verbose("{} <- {} <- {}".format(self.network_uid, msg.__class__.__name__, msg.from_uid))
msg = paxos.receive(msg)
# Todo: Make it optional not to announce resolution (without which it's hard to see final value).
do_announce_resolution = True
if msg and (do_announce_resolution or not isinstance(msg, Resolution)):
self.announce(msg)
self.setattrs_from_paxos(paxos) | [
"def",
"receive_message",
"(",
"self",
",",
"msg",
")",
":",
"if",
"isinstance",
"(",
"msg",
",",
"Resolution",
")",
":",
"return",
"paxos",
"=",
"self",
".",
"paxos_instance",
"while",
"msg",
":",
"if",
"isinstance",
"(",
"msg",
",",
"Resolution",
")",
":",
"self",
".",
"print_if_verbose",
"(",
"\"{} resolved value {}\"",
".",
"format",
"(",
"self",
".",
"network_uid",
",",
"msg",
".",
"value",
")",
")",
"break",
"else",
":",
"self",
".",
"print_if_verbose",
"(",
"\"{} <- {} <- {}\"",
".",
"format",
"(",
"self",
".",
"network_uid",
",",
"msg",
".",
"__class__",
".",
"__name__",
",",
"msg",
".",
"from_uid",
")",
")",
"msg",
"=",
"paxos",
".",
"receive",
"(",
"msg",
")",
"# Todo: Make it optional not to announce resolution (without which it's hard to see final value).",
"do_announce_resolution",
"=",
"True",
"if",
"msg",
"and",
"(",
"do_announce_resolution",
"or",
"not",
"isinstance",
"(",
"msg",
",",
"Resolution",
")",
")",
":",
"self",
".",
"announce",
"(",
"msg",
")",
"self",
".",
"setattrs_from_paxos",
"(",
"paxos",
")"
] | Responds to messages from other participants. | [
"Responds",
"to",
"messages",
"from",
"other",
"participants",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/application.py#L132-L151 |
247,673 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/application.py | PaxosAggregate.announce | def announce(self, msg):
"""
Announces a Paxos message.
"""
self.print_if_verbose("{} -> {}".format(self.network_uid, msg.__class__.__name__))
self.__trigger_event__(
event_class=self.MessageAnnounced,
msg=msg,
) | python | def announce(self, msg):
self.print_if_verbose("{} -> {}".format(self.network_uid, msg.__class__.__name__))
self.__trigger_event__(
event_class=self.MessageAnnounced,
msg=msg,
) | [
"def",
"announce",
"(",
"self",
",",
"msg",
")",
":",
"self",
".",
"print_if_verbose",
"(",
"\"{} -> {}\"",
".",
"format",
"(",
"self",
".",
"network_uid",
",",
"msg",
".",
"__class__",
".",
"__name__",
")",
")",
"self",
".",
"__trigger_event__",
"(",
"event_class",
"=",
"self",
".",
"MessageAnnounced",
",",
"msg",
"=",
"msg",
",",
")"
] | Announces a Paxos message. | [
"Announces",
"a",
"Paxos",
"message",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/application.py#L153-L161 |
247,674 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/application.py | PaxosAggregate.setattrs_from_paxos | def setattrs_from_paxos(self, paxos):
"""
Registers changes of attribute value on Paxos instance.
"""
changes = {}
for name in self.paxos_variables:
paxos_value = getattr(paxos, name)
if paxos_value != getattr(self, name, None):
self.print_if_verbose("{} {}: {}".format(self.network_uid, name, paxos_value))
changes[name] = paxos_value
setattr(self, name, paxos_value)
if changes:
self.__trigger_event__(
event_class=self.AttributesChanged,
changes=changes
) | python | def setattrs_from_paxos(self, paxos):
changes = {}
for name in self.paxos_variables:
paxos_value = getattr(paxos, name)
if paxos_value != getattr(self, name, None):
self.print_if_verbose("{} {}: {}".format(self.network_uid, name, paxos_value))
changes[name] = paxos_value
setattr(self, name, paxos_value)
if changes:
self.__trigger_event__(
event_class=self.AttributesChanged,
changes=changes
) | [
"def",
"setattrs_from_paxos",
"(",
"self",
",",
"paxos",
")",
":",
"changes",
"=",
"{",
"}",
"for",
"name",
"in",
"self",
".",
"paxos_variables",
":",
"paxos_value",
"=",
"getattr",
"(",
"paxos",
",",
"name",
")",
"if",
"paxos_value",
"!=",
"getattr",
"(",
"self",
",",
"name",
",",
"None",
")",
":",
"self",
".",
"print_if_verbose",
"(",
"\"{} {}: {}\"",
".",
"format",
"(",
"self",
".",
"network_uid",
",",
"name",
",",
"paxos_value",
")",
")",
"changes",
"[",
"name",
"]",
"=",
"paxos_value",
"setattr",
"(",
"self",
",",
"name",
",",
"paxos_value",
")",
"if",
"changes",
":",
"self",
".",
"__trigger_event__",
"(",
"event_class",
"=",
"self",
".",
"AttributesChanged",
",",
"changes",
"=",
"changes",
")"
] | Registers changes of attribute value on Paxos instance. | [
"Registers",
"changes",
"of",
"attribute",
"value",
"on",
"Paxos",
"instance",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/application.py#L163-L178 |
247,675 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/application.py | PaxosProcess.propose_value | def propose_value(self, key, value, assume_leader=False):
"""
Starts new Paxos aggregate and proposes a value for a key.
Decorated with retry in case of notification log conflict
or operational error.
"""
assert isinstance(key, UUID)
paxos_aggregate = PaxosAggregate.start(
originator_id=key,
quorum_size=self.quorum_size,
network_uid=self.name
)
msg = paxos_aggregate.propose_value(value, assume_leader=assume_leader)
while msg:
msg = paxos_aggregate.receive_message(msg)
new_events = paxos_aggregate.__batch_pending_events__()
self.record_process_event(ProcessEvent(new_events))
self.repository.take_snapshot(paxos_aggregate.id)
self.publish_prompt()
return paxos_aggregate | python | def propose_value(self, key, value, assume_leader=False):
assert isinstance(key, UUID)
paxos_aggregate = PaxosAggregate.start(
originator_id=key,
quorum_size=self.quorum_size,
network_uid=self.name
)
msg = paxos_aggregate.propose_value(value, assume_leader=assume_leader)
while msg:
msg = paxos_aggregate.receive_message(msg)
new_events = paxos_aggregate.__batch_pending_events__()
self.record_process_event(ProcessEvent(new_events))
self.repository.take_snapshot(paxos_aggregate.id)
self.publish_prompt()
return paxos_aggregate | [
"def",
"propose_value",
"(",
"self",
",",
"key",
",",
"value",
",",
"assume_leader",
"=",
"False",
")",
":",
"assert",
"isinstance",
"(",
"key",
",",
"UUID",
")",
"paxos_aggregate",
"=",
"PaxosAggregate",
".",
"start",
"(",
"originator_id",
"=",
"key",
",",
"quorum_size",
"=",
"self",
".",
"quorum_size",
",",
"network_uid",
"=",
"self",
".",
"name",
")",
"msg",
"=",
"paxos_aggregate",
".",
"propose_value",
"(",
"value",
",",
"assume_leader",
"=",
"assume_leader",
")",
"while",
"msg",
":",
"msg",
"=",
"paxos_aggregate",
".",
"receive_message",
"(",
"msg",
")",
"new_events",
"=",
"paxos_aggregate",
".",
"__batch_pending_events__",
"(",
")",
"self",
".",
"record_process_event",
"(",
"ProcessEvent",
"(",
"new_events",
")",
")",
"self",
".",
"repository",
".",
"take_snapshot",
"(",
"paxos_aggregate",
".",
"id",
")",
"self",
".",
"publish_prompt",
"(",
")",
"return",
"paxos_aggregate"
] | Starts new Paxos aggregate and proposes a value for a key.
Decorated with retry in case of notification log conflict
or operational error. | [
"Starts",
"new",
"Paxos",
"aggregate",
"and",
"proposes",
"a",
"value",
"for",
"a",
"key",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/application.py#L203-L223 |
247,676 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | MessageHandler.receive | def receive(self, msg):
'''
Message dispatching function. This function accepts any PaxosMessage subclass and calls
the appropriate handler function
'''
handler = getattr(self, 'receive_' + msg.__class__.__name__.lower(), None)
if handler is None:
raise InvalidMessageError('Receiving class does not support messages of type: ' + msg.__class__.__name__)
return handler(msg) | python | def receive(self, msg):
'''
Message dispatching function. This function accepts any PaxosMessage subclass and calls
the appropriate handler function
'''
handler = getattr(self, 'receive_' + msg.__class__.__name__.lower(), None)
if handler is None:
raise InvalidMessageError('Receiving class does not support messages of type: ' + msg.__class__.__name__)
return handler(msg) | [
"def",
"receive",
"(",
"self",
",",
"msg",
")",
":",
"handler",
"=",
"getattr",
"(",
"self",
",",
"'receive_'",
"+",
"msg",
".",
"__class__",
".",
"__name__",
".",
"lower",
"(",
")",
",",
"None",
")",
"if",
"handler",
"is",
"None",
":",
"raise",
"InvalidMessageError",
"(",
"'Receiving class does not support messages of type: '",
"+",
"msg",
".",
"__class__",
".",
"__name__",
")",
"return",
"handler",
"(",
"msg",
")"
] | Message dispatching function. This function accepts any PaxosMessage subclass and calls
the appropriate handler function | [
"Message",
"dispatching",
"function",
".",
"This",
"function",
"accepts",
"any",
"PaxosMessage",
"subclass",
"and",
"calls",
"the",
"appropriate",
"handler",
"function"
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L176-L184 |
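The getattr-based dispatch in `MessageHandler.receive` above is a small, reusable pattern. A minimal self-contained sketch of the same convention (class and message names here are illustrative, not from the library):

```python
class Handler:
    def receive(self, msg):
        # Route to receive_<classname>, mirroring MessageHandler.receive above.
        handler = getattr(self, 'receive_' + msg.__class__.__name__.lower(), None)
        if handler is None:
            raise ValueError('Unsupported message type: ' + msg.__class__.__name__)
        return handler(msg)

class Ping:
    pass

class PingHandler(Handler):
    def receive_ping(self, msg):
        return 'pong'

assert PingHandler().receive(Ping()) == 'pong'
```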
247,677 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | Proposer.propose_value | def propose_value(self, value):
'''
Sets the proposal value for this node iff this node is not already aware of
a previous proposal value. If the node additionally believes itself to be
the current leader, an Accept message will be returned
'''
if self.proposed_value is None:
self.proposed_value = value
if self.leader:
self.current_accept_msg = Accept(self.network_uid, self.proposal_id, value)
return self.current_accept_msg | python | def propose_value(self, value):
'''
Sets the proposal value for this node iff this node is not already aware of
a previous proposal value. If the node additionally believes itself to be
the current leader, an Accept message will be returned
'''
if self.proposed_value is None:
self.proposed_value = value
if self.leader:
self.current_accept_msg = Accept(self.network_uid, self.proposal_id, value)
return self.current_accept_msg | [
"def",
"propose_value",
"(",
"self",
",",
"value",
")",
":",
"if",
"self",
".",
"proposed_value",
"is",
"None",
":",
"self",
".",
"proposed_value",
"=",
"value",
"if",
"self",
".",
"leader",
":",
"self",
".",
"current_accept_msg",
"=",
"Accept",
"(",
"self",
".",
"network_uid",
",",
"self",
".",
"proposal_id",
",",
"value",
")",
"return",
"self",
".",
"current_accept_msg"
] | Sets the proposal value for this node iff this node is not already aware of
a previous proposal value. If the node additionally believes itself to be
the current leader, an Accept message will be returned | [
"Sets",
"the",
"proposal",
"value",
"for",
"this",
"node",
"iff",
"this",
"node",
"is",
"not",
"already",
"aware",
"of",
"a",
"previous",
"proposal",
"value",
".",
"If",
"the",
"node",
"additionally",
"believes",
"itself",
"to",
"be",
"the",
"current",
"leader",
"an",
"Accept",
"message",
"will",
"be",
"returned"
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L210-L221 |
247,678 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | Proposer.prepare | def prepare(self):
'''
Returns a new Prepare message with a proposal id higher than
that of any observed proposals. A side effect of this method is
to clear the leader flag if it is currently set.
'''
self.leader = False
self.promises_received = set()
self.nacks_received = set()
self.proposal_id = ProposalID(self.highest_proposal_id.number + 1, self.network_uid)
self.highest_proposal_id = self.proposal_id
self.current_prepare_msg = Prepare(self.network_uid, self.proposal_id)
return self.current_prepare_msg | python | def prepare(self):
'''
Returns a new Prepare message with a proposal id higher than
that of any observed proposals. A side effect of this method is
to clear the leader flag if it is currently set.
'''
self.leader = False
self.promises_received = set()
self.nacks_received = set()
self.proposal_id = ProposalID(self.highest_proposal_id.number + 1, self.network_uid)
self.highest_proposal_id = self.proposal_id
self.current_prepare_msg = Prepare(self.network_uid, self.proposal_id)
return self.current_prepare_msg | [
"def",
"prepare",
"(",
"self",
")",
":",
"self",
".",
"leader",
"=",
"False",
"self",
".",
"promises_received",
"=",
"set",
"(",
")",
"self",
".",
"nacks_received",
"=",
"set",
"(",
")",
"self",
".",
"proposal_id",
"=",
"ProposalID",
"(",
"self",
".",
"highest_proposal_id",
".",
"number",
"+",
"1",
",",
"self",
".",
"network_uid",
")",
"self",
".",
"highest_proposal_id",
"=",
"self",
".",
"proposal_id",
"self",
".",
"current_prepare_msg",
"=",
"Prepare",
"(",
"self",
".",
"network_uid",
",",
"self",
".",
"proposal_id",
")",
"return",
"self",
".",
"current_prepare_msg"
] | Returns a new Prepare message with a proposal id higher than
that of any observed proposals. A side effect of this method is
to clear the leader flag if it is currently set. | [
"Returns",
"a",
"new",
"Prepare",
"message",
"with",
"a",
"proposal",
"id",
"higher",
"than",
"that",
"of",
"any",
"observed",
"proposals",
".",
"A",
"side",
"effect",
"of",
"this",
"method",
"is",
"to",
"clear",
"the",
"leader",
"flag",
"if",
"it",
"is",
"currently",
"set",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L223-L237 |
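`prepare()` depends on `ProposalID` ordering: comparisons such as `msg.proposal_id >= self.promised_id` must rank by round number first, with the proposer's uid as tie-breaker. A hedged sketch of that ordering using a namedtuple (the library's actual `ProposalID` type may be defined differently):

```python
from collections import namedtuple

ProposalID = namedtuple('ProposalID', ['number', 'uid'])  # compares element-wise

a = ProposalID(1, 'A')
b = ProposalID(1, 'B')

assert a < b                   # same round: uid breaks the tie
assert ProposalID(2, 'A') > b  # a higher round number always wins
# prepare() outbids any observed id by incrementing the round number:
assert ProposalID(b.number + 1, 'A') > b
```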
247,679 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | Proposer.receive_nack | def receive_nack(self, msg):
'''
Returns a new Prepare message if the number of Nacks received reaches
a quorum.
'''
self.observe_proposal(msg.promised_proposal_id)
if msg.proposal_id == self.proposal_id and self.nacks_received is not None:
self.nacks_received.add(msg.from_uid)
if len(self.nacks_received) == self.quorum_size:
return self.prepare() | python | def receive_nack(self, msg):
'''
Returns a new Prepare message if the number of Nacks received reaches
a quorum.
'''
self.observe_proposal(msg.promised_proposal_id)
if msg.proposal_id == self.proposal_id and self.nacks_received is not None:
self.nacks_received.add(msg.from_uid)
if len(self.nacks_received) == self.quorum_size:
return self.prepare() | [
"def",
"receive_nack",
"(",
"self",
",",
"msg",
")",
":",
"self",
".",
"observe_proposal",
"(",
"msg",
".",
"promised_proposal_id",
")",
"if",
"msg",
".",
"proposal_id",
"==",
"self",
".",
"proposal_id",
"and",
"self",
".",
"nacks_received",
"is",
"not",
"None",
":",
"self",
".",
"nacks_received",
".",
"add",
"(",
"msg",
".",
"from_uid",
")",
"if",
"len",
"(",
"self",
".",
"nacks_received",
")",
"==",
"self",
".",
"quorum_size",
":",
"return",
"self",
".",
"prepare",
"(",
")"
] | Returns a new Prepare message if the number of Nacks received reaches
a quorum. | [
"Returns",
"a",
"new",
"Prepare",
"message",
"if",
"the",
"number",
"of",
"Nacks",
"received",
"reaches",
"a",
"quorum",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L251-L262 |
247,680 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | Proposer.receive_promise | def receive_promise(self, msg):
'''
Returns an Accept message if a quorum of Promise messages is achieved
'''
self.observe_proposal(msg.proposal_id)
if not self.leader and msg.proposal_id == self.proposal_id and msg.from_uid not in self.promises_received:
self.promises_received.add(msg.from_uid)
if self.highest_accepted_id is None or msg.last_accepted_id > self.highest_accepted_id:
self.highest_accepted_id = msg.last_accepted_id
if msg.last_accepted_value is not None:
self.proposed_value = msg.last_accepted_value
if len(self.promises_received) == self.quorum_size:
self.leader = True
if self.proposed_value is not None:
self.current_accept_msg = Accept(self.network_uid, self.proposal_id, self.proposed_value)
return self.current_accept_msg | python | def receive_promise(self, msg):
'''
Returns an Accept message if a quorum of Promise messages is achieved
'''
self.observe_proposal(msg.proposal_id)
if not self.leader and msg.proposal_id == self.proposal_id and msg.from_uid not in self.promises_received:
self.promises_received.add(msg.from_uid)
if self.highest_accepted_id is None or msg.last_accepted_id > self.highest_accepted_id:
self.highest_accepted_id = msg.last_accepted_id
if msg.last_accepted_value is not None:
self.proposed_value = msg.last_accepted_value
if len(self.promises_received) == self.quorum_size:
self.leader = True
if self.proposed_value is not None:
self.current_accept_msg = Accept(self.network_uid, self.proposal_id, self.proposed_value)
return self.current_accept_msg | [
"def",
"receive_promise",
"(",
"self",
",",
"msg",
")",
":",
"self",
".",
"observe_proposal",
"(",
"msg",
".",
"proposal_id",
")",
"if",
"not",
"self",
".",
"leader",
"and",
"msg",
".",
"proposal_id",
"==",
"self",
".",
"proposal_id",
"and",
"msg",
".",
"from_uid",
"not",
"in",
"self",
".",
"promises_received",
":",
"self",
".",
"promises_received",
".",
"add",
"(",
"msg",
".",
"from_uid",
")",
"if",
"self",
".",
"highest_accepted_id",
"is",
"None",
"or",
"msg",
".",
"last_accepted_id",
">",
"self",
".",
"highest_accepted_id",
":",
"self",
".",
"highest_accepted_id",
"=",
"msg",
".",
"last_accepted_id",
"if",
"msg",
".",
"last_accepted_value",
"is",
"not",
"None",
":",
"self",
".",
"proposed_value",
"=",
"msg",
".",
"last_accepted_value",
"if",
"len",
"(",
"self",
".",
"promises_received",
")",
"==",
"self",
".",
"quorum_size",
":",
"self",
".",
"leader",
"=",
"True",
"if",
"self",
".",
"proposed_value",
"is",
"not",
"None",
":",
"self",
".",
"current_accept_msg",
"=",
"Accept",
"(",
"self",
".",
"network_uid",
",",
"self",
".",
"proposal_id",
",",
"self",
".",
"proposed_value",
")",
"return",
"self",
".",
"current_accept_msg"
] | Returns an Accept message if a quorum of Promise messages is achieved | [
"Returns",
"an",
"Accept",
"message",
"if",
"a",
"quorum",
"of",
"Promise",
"messages",
"is",
"achieved"
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L264-L284 |
247,681 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | Acceptor.receive_prepare | def receive_prepare(self, msg):
'''
Returns either a Promise or a Nack in response. The Acceptor's state must be persisted to disk
prior to transmitting the Promise message.
'''
if self.promised_id is None or msg.proposal_id >= self.promised_id:
self.promised_id = msg.proposal_id
return Promise(self.network_uid, msg.from_uid, self.promised_id, self.accepted_id, self.accepted_value)
else:
return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id) | python | def receive_prepare(self, msg):
'''
Returns either a Promise or a Nack in response. The Acceptor's state must be persisted to disk
prior to transmitting the Promise message.
'''
if self.promised_id is None or msg.proposal_id >= self.promised_id:
self.promised_id = msg.proposal_id
return Promise(self.network_uid, msg.from_uid, self.promised_id, self.accepted_id, self.accepted_value)
else:
return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id) | [
"def",
"receive_prepare",
"(",
"self",
",",
"msg",
")",
":",
"if",
"self",
".",
"promised_id",
"is",
"None",
"or",
"msg",
".",
"proposal_id",
">=",
"self",
".",
"promised_id",
":",
"self",
".",
"promised_id",
"=",
"msg",
".",
"proposal_id",
"return",
"Promise",
"(",
"self",
".",
"network_uid",
",",
"msg",
".",
"from_uid",
",",
"self",
".",
"promised_id",
",",
"self",
".",
"accepted_id",
",",
"self",
".",
"accepted_value",
")",
"else",
":",
"return",
"Nack",
"(",
"self",
".",
"network_uid",
",",
"msg",
".",
"from_uid",
",",
"msg",
".",
"proposal_id",
",",
"self",
".",
"promised_id",
")"
] | Returns either a Promise or a Nack in response. The Acceptor's state must be persisted to disk
prior to transmitting the Promise message. | [
"Returns",
"either",
"a",
"Promise",
"or",
"a",
"Nack",
"in",
"response",
".",
"The",
"Acceptor",
"s",
"state",
"must",
"be",
"persisted",
"to",
"disk",
"prior",
"to",
"transmitting",
"the",
"Promise",
"message",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L310-L319 |
247,682 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | Acceptor.receive_accept | def receive_accept(self, msg):
'''
Returns either an Accepted or Nack message in response. The Acceptor's state must be persisted
to disk prior to transmitting the Accepted message.
'''
if self.promised_id is None or msg.proposal_id >= self.promised_id:
self.promised_id = msg.proposal_id
self.accepted_id = msg.proposal_id
self.accepted_value = msg.proposal_value
return Accepted(self.network_uid, msg.proposal_id, msg.proposal_value)
else:
return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id) | python | def receive_accept(self, msg):
'''
Returns either an Accepted or Nack message in response. The Acceptor's state must be persisted
to disk prior to transmitting the Accepted message.
'''
if self.promised_id is None or msg.proposal_id >= self.promised_id:
self.promised_id = msg.proposal_id
self.accepted_id = msg.proposal_id
self.accepted_value = msg.proposal_value
return Accepted(self.network_uid, msg.proposal_id, msg.proposal_value)
else:
return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id) | [
"def",
"receive_accept",
"(",
"self",
",",
"msg",
")",
":",
"if",
"self",
".",
"promised_id",
"is",
"None",
"or",
"msg",
".",
"proposal_id",
">=",
"self",
".",
"promised_id",
":",
"self",
".",
"promised_id",
"=",
"msg",
".",
"proposal_id",
"self",
".",
"accepted_id",
"=",
"msg",
".",
"proposal_id",
"self",
".",
"accepted_value",
"=",
"msg",
".",
"proposal_value",
"return",
"Accepted",
"(",
"self",
".",
"network_uid",
",",
"msg",
".",
"proposal_id",
",",
"msg",
".",
"proposal_value",
")",
"else",
":",
"return",
"Nack",
"(",
"self",
".",
"network_uid",
",",
"msg",
".",
"from_uid",
",",
"msg",
".",
"proposal_id",
",",
"self",
".",
"promised_id",
")"
] | Returns either an Accepted or Nack message in response. The Acceptor's state must be persisted
to disk prior to transmitting the Accepted message. | [
"Returns",
"either",
"an",
"Accepted",
"or",
"Nack",
"message",
"in",
"response",
".",
"The",
"Acceptor",
"s",
"state",
"must",
"be",
"persisted",
"to",
"disk",
"prior",
"to",
"transmitting",
"the",
"Accepted",
"message",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L321-L332 |
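Taken together, `receive_prepare` and `receive_accept` enforce one invariant: an acceptor never honours a proposal id below the highest one it has promised. A standalone sketch of just that invariant (names are illustrative, and the persist-to-disk requirement from the docstrings is omitted):

```python
class MiniAcceptor:
    def __init__(self):
        self.promised_id = None
        self.accepted = None  # (proposal_id, value)

    def prepare(self, proposal_id):
        if self.promised_id is None or proposal_id >= self.promised_id:
            self.promised_id = proposal_id
            return ('promise', self.accepted)
        return ('nack', self.promised_id)

    def accept(self, proposal_id, value):
        if self.promised_id is None or proposal_id >= self.promised_id:
            self.promised_id = proposal_id
            self.accepted = (proposal_id, value)
            return ('accepted', value)
        return ('nack', self.promised_id)

acc = MiniAcceptor()
assert acc.prepare(2) == ('promise', None)
assert acc.accept(1, 'x') == ('nack', 2)        # stale proposal is refused
assert acc.accept(2, 'x') == ('accepted', 'x')  # promised id is honoured
```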
247,683 | johnbywater/eventsourcing | eventsourcing/contrib/paxos/composable.py | Learner.receive_accepted | def receive_accepted(self, msg):
'''
Called when an Accepted message is received from an acceptor. Once the final value
is determined, the return value of this method will be a Resolution message containing
the consensual value. Subsequent calls after the resolution is chosen will continue to add
new Acceptors to the final_acceptors set and return Resolution messages.
'''
if self.final_value is not None:
if msg.proposal_id >= self.final_proposal_id and msg.proposal_value == self.final_value:
self.final_acceptors.add(msg.from_uid)
return Resolution(self.network_uid, self.final_value)
last_pn = self.acceptors.get(msg.from_uid)
if last_pn is not None and msg.proposal_id <= last_pn:
return # Old message
self.acceptors[msg.from_uid] = msg.proposal_id
if last_pn is not None:
# String proposal_key, need string keys for JSON.
proposal_key = str(last_pn)
ps = self.proposals[proposal_key]
ps.retain_count -= 1
ps.acceptors.remove(msg.from_uid)
if ps.retain_count == 0:
del self.proposals[proposal_key]
# String proposal_key, need string keys for JSON.
proposal_key = str(msg.proposal_id)
if not proposal_key in self.proposals:
self.proposals[proposal_key] = ProposalStatus(msg.proposal_value)
ps = self.proposals[proposal_key]
assert msg.proposal_value == ps.value, 'Value mismatch for single proposal!'
ps.accept_count += 1
ps.retain_count += 1
ps.acceptors.add(msg.from_uid)
if ps.accept_count == self.quorum_size:
self.final_proposal_id = msg.proposal_id
self.final_value = msg.proposal_value
self.final_acceptors = ps.acceptors
self.proposals = None
self.acceptors = None
return Resolution(self.network_uid, self.final_value) | python | def receive_accepted(self, msg):
'''
Called when an Accepted message is received from an acceptor. Once the final value
is determined, the return value of this method will be a Resolution message containing
the consensual value. Subsequent calls after the resolution is chosen will continue to add
new Acceptors to the final_acceptors set and return Resolution messages.
'''
if self.final_value is not None:
if msg.proposal_id >= self.final_proposal_id and msg.proposal_value == self.final_value:
self.final_acceptors.add(msg.from_uid)
return Resolution(self.network_uid, self.final_value)
last_pn = self.acceptors.get(msg.from_uid)
if last_pn is not None and msg.proposal_id <= last_pn:
return # Old message
self.acceptors[msg.from_uid] = msg.proposal_id
if last_pn is not None:
# String proposal_key, need string keys for JSON.
proposal_key = str(last_pn)
ps = self.proposals[proposal_key]
ps.retain_count -= 1
ps.acceptors.remove(msg.from_uid)
if ps.retain_count == 0:
del self.proposals[proposal_key]
# String proposal_key, need string keys for JSON.
proposal_key = str(msg.proposal_id)
if not proposal_key in self.proposals:
self.proposals[proposal_key] = ProposalStatus(msg.proposal_value)
ps = self.proposals[proposal_key]
assert msg.proposal_value == ps.value, 'Value mismatch for single proposal!'
ps.accept_count += 1
ps.retain_count += 1
ps.acceptors.add(msg.from_uid)
if ps.accept_count == self.quorum_size:
self.final_proposal_id = msg.proposal_id
self.final_value = msg.proposal_value
self.final_acceptors = ps.acceptors
self.proposals = None
self.acceptors = None
return Resolution(self.network_uid, self.final_value) | [
"def",
"receive_accepted",
"(",
"self",
",",
"msg",
")",
":",
"if",
"self",
".",
"final_value",
"is",
"not",
"None",
":",
"if",
"msg",
".",
"proposal_id",
">=",
"self",
".",
"final_proposal_id",
"and",
"msg",
".",
"proposal_value",
"==",
"self",
".",
"final_value",
":",
"self",
".",
"final_acceptors",
".",
"add",
"(",
"msg",
".",
"from_uid",
")",
"return",
"Resolution",
"(",
"self",
".",
"network_uid",
",",
"self",
".",
"final_value",
")",
"last_pn",
"=",
"self",
".",
"acceptors",
".",
"get",
"(",
"msg",
".",
"from_uid",
")",
"if",
"last_pn",
"is",
"not",
"None",
"and",
"msg",
".",
"proposal_id",
"<=",
"last_pn",
":",
"return",
"# Old message",
"self",
".",
"acceptors",
"[",
"msg",
".",
"from_uid",
"]",
"=",
"msg",
".",
"proposal_id",
"if",
"last_pn",
"is",
"not",
"None",
":",
"# String proposal_key, need string keys for JSON.",
"proposal_key",
"=",
"str",
"(",
"last_pn",
")",
"ps",
"=",
"self",
".",
"proposals",
"[",
"proposal_key",
"]",
"ps",
".",
"retain_count",
"-=",
"1",
"ps",
".",
"acceptors",
".",
"remove",
"(",
"msg",
".",
"from_uid",
")",
"if",
"ps",
".",
"retain_count",
"==",
"0",
":",
"del",
"self",
".",
"proposals",
"[",
"proposal_key",
"]",
"# String proposal_key, need string keys for JSON.",
"proposal_key",
"=",
"str",
"(",
"msg",
".",
"proposal_id",
")",
"if",
"not",
"proposal_key",
"in",
"self",
".",
"proposals",
":",
"self",
".",
"proposals",
"[",
"proposal_key",
"]",
"=",
"ProposalStatus",
"(",
"msg",
".",
"proposal_value",
")",
"ps",
"=",
"self",
".",
"proposals",
"[",
"proposal_key",
"]",
"assert",
"msg",
".",
"proposal_value",
"==",
"ps",
".",
"value",
",",
"'Value mismatch for single proposal!'",
"ps",
".",
"accept_count",
"+=",
"1",
"ps",
".",
"retain_count",
"+=",
"1",
"ps",
".",
"acceptors",
".",
"add",
"(",
"msg",
".",
"from_uid",
")",
"if",
"ps",
".",
"accept_count",
"==",
"self",
".",
"quorum_size",
":",
"self",
".",
"final_proposal_id",
"=",
"msg",
".",
"proposal_id",
"self",
".",
"final_value",
"=",
"msg",
".",
"proposal_value",
"self",
".",
"final_acceptors",
"=",
"ps",
".",
"acceptors",
"self",
".",
"proposals",
"=",
"None",
"self",
".",
"acceptors",
"=",
"None",
"return",
"Resolution",
"(",
"self",
".",
"network_uid",
",",
"self",
".",
"final_value",
")"
] | Called when an Accepted message is received from an acceptor. Once the final value
is determined, the return value of this method will be a Resolution message containing
the consensual value. Subsequent calls after the resolution is chosen will continue to add
new Acceptors to the final_acceptors set and return Resolution messages. | [
"Called",
"when",
"an",
"Accepted",
"message",
"is",
"received",
"from",
"an",
"acceptor",
".",
"Once",
"the",
"final",
"value",
"is",
"determined",
"the",
"return",
"value",
"of",
"this",
"method",
"will",
"be",
"a",
"Resolution",
"message",
"containing",
"the",
"consensual",
"value",
".",
"Subsequent",
"calls",
"after",
"the",
"resolution",
"is",
"chosen",
"will",
"continue",
"to",
"add",
"new",
"Acceptors",
"to",
"the",
"final_acceptors",
"set",
"and",
"return",
"Resolution",
"messages",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/contrib/paxos/composable.py#L360-L408 |
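The core of `receive_accepted` is quorum counting: a value is resolved once `quorum_size` distinct acceptors have accepted the same proposal. A simplified sketch of that rule alone (the real method additionally retires superseded proposals via `retain_count`):

```python
from collections import defaultdict

def learn(accepted_msgs, quorum_size):
    """accepted_msgs: iterable of (acceptor_uid, proposal_id, value) tuples."""
    votes = defaultdict(set)
    for uid, pid, value in accepted_msgs:
        votes[(pid, value)].add(uid)  # sets de-duplicate repeat votes
        if len(votes[(pid, value)]) >= quorum_size:
            return value
    return None

msgs = [('A', 1, 'x'), ('B', 1, 'x'), ('C', 2, 'y'), ('C', 1, 'x')]
assert learn(msgs, quorum_size=3) == 'x'
```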
247,684 | johnbywater/eventsourcing | eventsourcing/utils/topic.py | resolve_topic | def resolve_topic(topic):
"""Return class described by given topic.
Args:
topic: A string describing a class.
Returns:
A class.
Raises:
TopicResolutionError: If there is no such class.
"""
try:
module_name, _, class_name = topic.partition('#')
module = importlib.import_module(module_name)
except ImportError as e:
raise TopicResolutionError("{}: {}".format(topic, e))
try:
cls = resolve_attr(module, class_name)
except AttributeError as e:
raise TopicResolutionError("{}: {}".format(topic, e))
return cls | python | def resolve_topic(topic):
try:
module_name, _, class_name = topic.partition('#')
module = importlib.import_module(module_name)
except ImportError as e:
raise TopicResolutionError("{}: {}".format(topic, e))
try:
cls = resolve_attr(module, class_name)
except AttributeError as e:
raise TopicResolutionError("{}: {}".format(topic, e))
return cls | [
"def",
"resolve_topic",
"(",
"topic",
")",
":",
"try",
":",
"module_name",
",",
"_",
",",
"class_name",
"=",
"topic",
".",
"partition",
"(",
"'#'",
")",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"module_name",
")",
"except",
"ImportError",
"as",
"e",
":",
"raise",
"TopicResolutionError",
"(",
"\"{}: {}\"",
".",
"format",
"(",
"topic",
",",
"e",
")",
")",
"try",
":",
"cls",
"=",
"resolve_attr",
"(",
"module",
",",
"class_name",
")",
"except",
"AttributeError",
"as",
"e",
":",
"raise",
"TopicResolutionError",
"(",
"\"{}: {}\"",
".",
"format",
"(",
"topic",
",",
"e",
")",
")",
"return",
"cls"
] | Return class described by given topic.
Args:
topic: A string describing a class.
Returns:
A class.
Raises:
TopicResolutionError: If there is no such class. | [
"Return",
"class",
"described",
"by",
"given",
"topic",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/utils/topic.py#L18-L39 |
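A usage sketch for the `'module#attribute.path'` topic format handled above, assuming the eventsourcing package is importable (the import path follows the record's file path); the example topics resolve against the standard library only:

```python
from eventsourcing.utils.topic import resolve_topic

from uuid import UUID
import os.path

assert resolve_topic('uuid#UUID') is UUID             # plain class lookup
assert resolve_topic('os.path#join') is os.path.join  # dotted module also works
```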
247,685 | johnbywater/eventsourcing | eventsourcing/utils/topic.py | resolve_attr | def resolve_attr(obj, path):
"""A recursive version of getattr for navigating dotted paths.
Args:
obj: An object for which we want to retrieve a nested attribute.
path: A dot separated string containing zero or more attribute names.
Returns:
The attribute referred to by obj.a1.a2.a3...
Raises:
AttributeError: If there is no such attribute.
"""
if not path:
return obj
head, _, tail = path.partition('.')
head_obj = getattr(obj, head)
return resolve_attr(head_obj, tail) | python | def resolve_attr(obj, path):
if not path:
return obj
head, _, tail = path.partition('.')
head_obj = getattr(obj, head)
return resolve_attr(head_obj, tail) | [
"def",
"resolve_attr",
"(",
"obj",
",",
"path",
")",
":",
"if",
"not",
"path",
":",
"return",
"obj",
"head",
",",
"_",
",",
"tail",
"=",
"path",
".",
"partition",
"(",
"'.'",
")",
"head_obj",
"=",
"getattr",
"(",
"obj",
",",
"head",
")",
"return",
"resolve_attr",
"(",
"head_obj",
",",
"tail",
")"
] | A recursive version of getattr for navigating dotted paths.
Args:
obj: An object for which we want to retrieve a nested attribute.
path: A dot separated string containing zero or more attribute names.
Returns:
The attribute referred to by obj.a1.a2.a3...
Raises:
AttributeError: If there is no such attribute. | [
"A",
"recursive",
"version",
"of",
"getattr",
"for",
"navigating",
"dotted",
"paths",
"."
] | de2c22c653fdccf2f5ee96faea74453ff1847e42 | https://github.com/johnbywater/eventsourcing/blob/de2c22c653fdccf2f5ee96faea74453ff1847e42/eventsourcing/utils/topic.py#L42-L59 |
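The recursion above peels one attribute per call, with the empty path as the base case returning the object itself. Exercised against the standard library:

```python
import os
from eventsourcing.utils.topic import resolve_attr

assert resolve_attr(os, 'path.join') is os.path.join  # two getattr hops
assert resolve_attr(os, '') is os                     # empty path: base case
```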
247,686 | openeventdata/mordecai | mordecai/utilities.py | make_skip_list | def make_skip_list(cts):
"""
Return hand-defined list of place names to skip and not attempt to geolocate. If users would like to exclude
country names, this would be the function to do it with.
"""
# maybe make these non-country searches but don't discard, at least for
# some (esp. bodies of water)
special_terms = ["Europe", "West", "the West", "South Pacific", "Gulf of Mexico", "Atlantic",
"the Black Sea", "Black Sea", "North America", "Mideast", "Middle East",
"the Middle East", "Asia", "the Caucasus", "Africa",
"Central Asia", "Balkans", "Eastern Europe", "Arctic", "Ottoman Empire",
"Asia-Pacific", "East Asia", "Horn of Africa", "Americas",
"North Africa", "the Strait of Hormuz", "Mediterranean", "East", "North",
"South", "Latin America", "Southeast Asia", "Western Pacific", "South Asia",
"Persian Gulf", "Central Europe", "Western Hemisphere", "Western Europe",
"European Union (E.U.)", "EU", "European Union", "E.U.", "Asia-Pacific",
"Europe", "Caribbean", "US", "U.S.", "Persian Gulf", "West Africa", "North", "East",
"South", "West", "Western Countries"
]
# Some words are recurring spacy problems...
spacy_problems = ["Kurd", "Qur'an"]
#skip_list = list(cts.keys()) + special_terms
skip_list = special_terms + spacy_problems
skip_list = set(skip_list)
return skip_list | python | def make_skip_list(cts):
# maybe make these non-country searches but don't discard, at least for
# some (esp. bodies of water)
special_terms = ["Europe", "West", "the West", "South Pacific", "Gulf of Mexico", "Atlantic",
"the Black Sea", "Black Sea", "North America", "Mideast", "Middle East",
"the Middle East", "Asia", "the Caucasus", "Africa",
"Central Asia", "Balkans", "Eastern Europe", "Arctic", "Ottoman Empire",
"Asia-Pacific", "East Asia", "Horn of Africa", "Americas",
"North Africa", "the Strait of Hormuz", "Mediterranean", "East", "North",
"South", "Latin America", "Southeast Asia", "Western Pacific", "South Asia",
"Persian Gulf", "Central Europe", "Western Hemisphere", "Western Europe",
"European Union (E.U.)", "EU", "European Union", "E.U.", "Asia-Pacific",
"Europe", "Caribbean", "US", "U.S.", "Persian Gulf", "West Africa", "North", "East",
"South", "West", "Western Countries"
]
# Some words are recurring spacy problems...
spacy_problems = ["Kurd", "Qur'an"]
#skip_list = list(cts.keys()) + special_terms
skip_list = special_terms + spacy_problems
skip_list = set(skip_list)
return skip_list | [
"def",
"make_skip_list",
"(",
"cts",
")",
":",
"# maybe make these non-country searches but don't discard, at least for",
"# some (esp. bodies of water)",
"special_terms",
"=",
"[",
"\"Europe\"",
",",
"\"West\"",
",",
"\"the West\"",
",",
"\"South Pacific\"",
",",
"\"Gulf of Mexico\"",
",",
"\"Atlantic\"",
",",
"\"the Black Sea\"",
",",
"\"Black Sea\"",
",",
"\"North America\"",
",",
"\"Mideast\"",
",",
"\"Middle East\"",
",",
"\"the Middle East\"",
",",
"\"Asia\"",
",",
"\"the Caucasus\"",
",",
"\"Africa\"",
",",
"\"Central Asia\"",
",",
"\"Balkans\"",
",",
"\"Eastern Europe\"",
",",
"\"Arctic\"",
",",
"\"Ottoman Empire\"",
",",
"\"Asia-Pacific\"",
",",
"\"East Asia\"",
",",
"\"Horn of Africa\"",
",",
"\"Americas\"",
",",
"\"North Africa\"",
",",
"\"the Strait of Hormuz\"",
",",
"\"Mediterranean\"",
",",
"\"East\"",
",",
"\"North\"",
",",
"\"South\"",
",",
"\"Latin America\"",
",",
"\"Southeast Asia\"",
",",
"\"Western Pacific\"",
",",
"\"South Asia\"",
",",
"\"Persian Gulf\"",
",",
"\"Central Europe\"",
",",
"\"Western Hemisphere\"",
",",
"\"Western Europe\"",
",",
"\"European Union (E.U.)\"",
",",
"\"EU\"",
",",
"\"European Union\"",
",",
"\"E.U.\"",
",",
"\"Asia-Pacific\"",
",",
"\"Europe\"",
",",
"\"Caribbean\"",
",",
"\"US\"",
",",
"\"U.S.\"",
",",
"\"Persian Gulf\"",
",",
"\"West Africa\"",
",",
"\"North\"",
",",
"\"East\"",
",",
"\"South\"",
",",
"\"West\"",
",",
"\"Western Countries\"",
"]",
"# Some words are recurring spacy problems...",
"spacy_problems",
"=",
"[",
"\"Kurd\"",
",",
"\"Qur'an\"",
"]",
"#skip_list = list(cts.keys()) + special_terms",
"skip_list",
"=",
"special_terms",
"+",
"spacy_problems",
"skip_list",
"=",
"set",
"(",
"skip_list",
")",
"return",
"skip_list"
] | Return hand-defined list of place names to skip and not attempt to geolocate. If users would like to exclude
country names, this would be the function to do it with. | [
"Return",
"hand",
"-",
"defined",
"list",
"of",
"place",
"names",
"to",
"skip",
"and",
"not",
"attempt",
"to",
"geolocate",
".",
"If",
"users",
"would",
"like",
"to",
"exclude",
"country",
"names",
"this",
"would",
"be",
"the",
"function",
"to",
"do",
"it",
"with",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/utilities.py#L138-L164 |
247,687 | openeventdata/mordecai | mordecai/utilities.py | country_list_nlp | def country_list_nlp(cts):
"""NLP countries so we can use for vector comparisons"""
ct_nlp = []
for i in cts.keys():
nlped = nlp(i)
ct_nlp.append(nlped)
return ct_nlp | python | def country_list_nlp(cts):
ct_nlp = []
for i in cts.keys():
nlped = nlp(i)
ct_nlp.append(nlped)
return ct_nlp | [
"def",
"country_list_nlp",
"(",
"cts",
")",
":",
"ct_nlp",
"=",
"[",
"]",
"for",
"i",
"in",
"cts",
".",
"keys",
"(",
")",
":",
"nlped",
"=",
"nlp",
"(",
"i",
")",
"ct_nlp",
".",
"append",
"(",
"nlped",
")",
"return",
"ct_nlp"
] | NLP countries so we can use for vector comparisons | [
"NLP",
"countries",
"so",
"we",
"can",
"use",
"for",
"vector",
"comparisons"
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/utilities.py#L167-L173 |
247,688 | openeventdata/mordecai | mordecai/utilities.py | make_country_nationality_list | def make_country_nationality_list(cts, ct_file):
"""Combine list of countries and list of nationalities"""
countries = pd.read_csv(ct_file)
nationality = dict(zip(countries.nationality,countries.alpha_3_code))
both_codes = {**nationality, **cts}
return both_codes | python | def make_country_nationality_list(cts, ct_file):
countries = pd.read_csv(ct_file)
nationality = dict(zip(countries.nationality,countries.alpha_3_code))
both_codes = {**nationality, **cts}
return both_codes | [
"def",
"make_country_nationality_list",
"(",
"cts",
",",
"ct_file",
")",
":",
"countries",
"=",
"pd",
".",
"read_csv",
"(",
"ct_file",
")",
"nationality",
"=",
"dict",
"(",
"zip",
"(",
"countries",
".",
"nationality",
",",
"countries",
".",
"alpha_3_code",
")",
")",
"both_codes",
"=",
"{",
"*",
"*",
"nationality",
",",
"*",
"*",
"cts",
"}",
"return",
"both_codes"
] | Combine list of countries and list of nationalities | [
"Combine",
"list",
"of",
"countries",
"and",
"list",
"of",
"nationalities"
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/utilities.py#L176-L181 |
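Note the ordering in `{**nationality, **cts}`: later unpackings overwrite earlier ones, so country-name keys from `cts` win on any collision. A tiny sketch with hypothetical data:

```python
nationality = {'French': 'FRA', 'German': 'DEU'}
cts = {'France': 'FRA', 'German': 'XXX'}  # hypothetical colliding key

both_codes = {**nationality, **cts}
assert both_codes['German'] == 'XXX'  # value from cts wins on collision
assert both_codes['French'] == 'FRA'  # non-colliding keys survive
```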
247,689 | openeventdata/mordecai | mordecai/utilities.py | structure_results | def structure_results(res):
"""Format Elasticsearch result as Python dictionary"""
out = {'hits': {'hits': []}}
keys = [u'admin1_code', u'admin2_code', u'admin3_code', u'admin4_code',
u'alternativenames', u'asciiname', u'cc2', u'coordinates',
u'country_code2', u'country_code3', u'dem', u'elevation',
u'feature_class', u'feature_code', u'geonameid',
u'modification_date', u'name', u'population', u'timezone']
for i in res:
i_out = {}
for k in keys:
i_out[k] = i[k]
out['hits']['hits'].append(i_out)
return out | python | def structure_results(res):
out = {'hits': {'hits': []}}
keys = [u'admin1_code', u'admin2_code', u'admin3_code', u'admin4_code',
u'alternativenames', u'asciiname', u'cc2', u'coordinates',
u'country_code2', u'country_code3', u'dem', u'elevation',
u'feature_class', u'feature_code', u'geonameid',
u'modification_date', u'name', u'population', u'timezone']
for i in res:
i_out = {}
for k in keys:
i_out[k] = i[k]
out['hits']['hits'].append(i_out)
return out | [
"def",
"structure_results",
"(",
"res",
")",
":",
"out",
"=",
"{",
"'hits'",
":",
"{",
"'hits'",
":",
"[",
"]",
"}",
"}",
"keys",
"=",
"[",
"u'admin1_code'",
",",
"u'admin2_code'",
",",
"u'admin3_code'",
",",
"u'admin4_code'",
",",
"u'alternativenames'",
",",
"u'asciiname'",
",",
"u'cc2'",
",",
"u'coordinates'",
",",
"u'country_code2'",
",",
"u'country_code3'",
",",
"u'dem'",
",",
"u'elevation'",
",",
"u'feature_class'",
",",
"u'feature_code'",
",",
"u'geonameid'",
",",
"u'modification_date'",
",",
"u'name'",
",",
"u'population'",
",",
"u'timezone'",
"]",
"for",
"i",
"in",
"res",
":",
"i_out",
"=",
"{",
"}",
"for",
"k",
"in",
"keys",
":",
"i_out",
"[",
"k",
"]",
"=",
"i",
"[",
"k",
"]",
"out",
"[",
"'hits'",
"]",
"[",
"'hits'",
"]",
".",
"append",
"(",
"i_out",
")",
"return",
"out"
] | Format Elasticsearch result as Python dictionary | [
"Format",
"Elasticsearch",
"result",
"as",
"Python",
"dictionary"
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/utilities.py#L218-L231 |
247,690 | openeventdata/mordecai | mordecai/utilities.py | setup_es | def setup_es(hosts, port, use_ssl=False, auth=None):
"""
Setup an Elasticsearch connection
Parameters
----------
hosts: list
Hostnames / IP addresses for elasticsearch cluster
port: string
Port for elasticsearch cluster
use_ssl: boolean
Whether to use SSL for the elasticsearch connection
auth: tuple
(username, password) to use with HTTP auth
Returns
-------
es_conn: an elasticsearch_dsl Search connection object.
"""
kwargs = dict(
hosts=hosts or ['localhost'],
port=port or 9200,
use_ssl=use_ssl,
)
if auth:
kwargs.update(http_auth=auth)
CLIENT = Elasticsearch(**kwargs)
S = Search(using=CLIENT, index="geonames")
return S | python | def setup_es(hosts, port, use_ssl=False, auth=None):
kwargs = dict(
hosts=hosts or ['localhost'],
port=port or 9200,
use_ssl=use_ssl,
)
if auth:
kwargs.update(http_auth=auth)
CLIENT = Elasticsearch(**kwargs)
S = Search(using=CLIENT, index="geonames")
return S | [
"def",
"setup_es",
"(",
"hosts",
",",
"port",
",",
"use_ssl",
"=",
"False",
",",
"auth",
"=",
"None",
")",
":",
"kwargs",
"=",
"dict",
"(",
"hosts",
"=",
"hosts",
"or",
"[",
"'localhost'",
"]",
",",
"port",
"=",
"port",
"or",
"9200",
",",
"use_ssl",
"=",
"use_ssl",
",",
")",
"if",
"auth",
":",
"kwargs",
".",
"update",
"(",
"http_auth",
"=",
"auth",
")",
"CLIENT",
"=",
"Elasticsearch",
"(",
"*",
"*",
"kwargs",
")",
"S",
"=",
"Search",
"(",
"using",
"=",
"CLIENT",
",",
"index",
"=",
"\"geonames\"",
")",
"return",
"S"
] | Setup an Elasticsearch connection
Parameters
----------
hosts: list
Hostnames / IP addresses for elasticsearch cluster
port: string
Port for elasticsearch cluster
use_ssl: boolean
Whether to use SSL for the elasticsearch connection
auth: tuple
(username, password) to use with HTTP auth
Returns
-------
es_conn: an elasticsearch_dsl Search connection object. | [
"Setup",
"an",
"Elasticsearch",
"connection"
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/utilities.py#L233-L261 |
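A hedged usage sketch, assuming a pre-8.x `elasticsearch`/`elasticsearch_dsl` stack (as the `port=`/`use_ssl=` keywords above imply) and a running cluster with the `geonames` index already loaded; the `match` query on `name` is an illustrative query, not part of the mordecai API:

```python
from mordecai.utilities import setup_es

S = setup_es(hosts=['localhost'], port=9200)
# Search objects are lazy; nothing is sent until .execute() is called.
res = S.query('match', name='Berlin').execute()
for hit in res:
    print(hit.name, hit.country_code3)
```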
247,691 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser._feature_country_mentions | def _feature_country_mentions(self, doc):
"""
Given a document, count how many times different country names and adjectives are mentioned.
These are features used in the country picking phase.
Parameters
---------
doc: a spaCy nlp'ed piece of text
Returns
-------
countries: dict
the top two countries (ISO code) and their frequency of mentions.
"""
c_list = []
for i in doc.ents:
try:
country = self._both_codes[i.text]
c_list.append(country)
except KeyError:
pass
count = Counter(c_list).most_common()
try:
top, top_count = count[0]
except:
top = ""
top_count = 0
try:
two, two_count = count[1]
except:
two = ""
two_count = 0
countries = (top, top_count, two, two_count)
return countries | python | def _feature_country_mentions(self, doc):
c_list = []
for i in doc.ents:
try:
country = self._both_codes[i.text]
c_list.append(country)
except KeyError:
pass
count = Counter(c_list).most_common()
try:
top, top_count = count[0]
except:
top = ""
top_count = 0
try:
two, two_count = count[1]
except:
two = ""
two_count = 0
countries = (top, top_count, two, two_count)
return countries | [
"def",
"_feature_country_mentions",
"(",
"self",
",",
"doc",
")",
":",
"c_list",
"=",
"[",
"]",
"for",
"i",
"in",
"doc",
".",
"ents",
":",
"try",
":",
"country",
"=",
"self",
".",
"_both_codes",
"[",
"i",
".",
"text",
"]",
"c_list",
".",
"append",
"(",
"country",
")",
"except",
"KeyError",
":",
"pass",
"count",
"=",
"Counter",
"(",
"c_list",
")",
".",
"most_common",
"(",
")",
"try",
":",
"top",
",",
"top_count",
"=",
"count",
"[",
"0",
"]",
"except",
":",
"top",
"=",
"\"\"",
"top_count",
"=",
"0",
"try",
":",
"two",
",",
"two_count",
"=",
"count",
"[",
"1",
"]",
"except",
":",
"two",
"=",
"\"\"",
"two_count",
"=",
"0",
"countries",
"=",
"(",
"top",
",",
"top_count",
",",
"two",
",",
"two_count",
")",
"return",
"countries"
] | Given a document, count how many times different country names and adjectives are mentioned.
These are features used in the country picking phase.
Parameters
---------
doc: a spaCy nlp'ed piece of text
Returns
-------
countries: dict
the top two countries (ISO code) and their frequency of mentions. | [
"Given",
"a",
"document",
"count",
"how",
"many",
"times",
"different",
"country",
"names",
"and",
"adjectives",
"are",
"mentioned",
".",
"These",
"are",
"features",
"used",
"in",
"the",
"country",
"picking",
"phase",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L78-L112 |
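The `Counter` bookkeeping above, in isolation: `most_common()` yields `(code, count)` pairs sorted by descending frequency, and the bare `try/except` blocks simply default missing ranks to empty values:

```python
from collections import Counter

count = Counter(['IRQ', 'IRQ', 'SYR']).most_common()
assert count == [('IRQ', 2), ('SYR', 1)]
top, top_count = count[0]  # ('IRQ', 2)
# count[2] would raise IndexError, hence the fallback to ("", 0) above.
```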
247,692 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser.clean_entity | def clean_entity(self, ent):
"""
Strip out extra words that often get picked up by spaCy's NER.
To do: preserve info about what got stripped out to help with ES/Geonames
resolution later.
Parameters
---------
ent: a spaCy named entity Span
Returns
-------
new_ent: a spaCy Span, with extra words stripped out.
"""
dump_list = ['province', 'the', 'area', 'airport', 'district', 'square',
'town', 'village', 'prison', "river", "valley", "provincial", "prison",
"region", "municipality", "state", "territory", "of", "in",
"county", "central"]
# maybe have 'city'? Works differently in different countries
# also, "District of Columbia". Might need to use cap/no cap
keep_positions = []
for word in ent:
if word.text.lower() not in dump_list:
keep_positions.append(word.i)
keep_positions = np.asarray(keep_positions)
try:
new_ent = ent.doc[keep_positions.min():keep_positions.max() + 1]
# can't set directly
#new_ent.label_.__set__(ent.label_)
except ValueError:
new_ent = ent
return new_ent | python | def clean_entity(self, ent):
dump_list = ['province', 'the', 'area', 'airport', 'district', 'square',
'town', 'village', 'prison', "river", "valley", "provincial", "prison",
"region", "municipality", "state", "territory", "of", "in",
"county", "central"]
# maybe have 'city'? Works differently in different countries
# also, "District of Columbia". Might need to use cap/no cap
keep_positions = []
for word in ent:
if word.text.lower() not in dump_list:
keep_positions.append(word.i)
keep_positions = np.asarray(keep_positions)
try:
new_ent = ent.doc[keep_positions.min():keep_positions.max() + 1]
# can't set directly
#new_ent.label_.__set__(ent.label_)
except ValueError:
new_ent = ent
return new_ent | [
"def",
"clean_entity",
"(",
"self",
",",
"ent",
")",
":",
"dump_list",
"=",
"[",
"'province'",
",",
"'the'",
",",
"'area'",
",",
"'airport'",
",",
"'district'",
",",
"'square'",
",",
"'town'",
",",
"'village'",
",",
"'prison'",
",",
"\"river\"",
",",
"\"valley\"",
",",
"\"provincial\"",
",",
"\"prison\"",
",",
"\"region\"",
",",
"\"municipality\"",
",",
"\"state\"",
",",
"\"territory\"",
",",
"\"of\"",
",",
"\"in\"",
",",
"\"county\"",
",",
"\"central\"",
"]",
"# maybe have 'city'? Works differently in different countries",
"# also, \"District of Columbia\". Might need to use cap/no cap",
"keep_positions",
"=",
"[",
"]",
"for",
"word",
"in",
"ent",
":",
"if",
"word",
".",
"text",
".",
"lower",
"(",
")",
"not",
"in",
"dump_list",
":",
"keep_positions",
".",
"append",
"(",
"word",
".",
"i",
")",
"keep_positions",
"=",
"np",
".",
"asarray",
"(",
"keep_positions",
")",
"try",
":",
"new_ent",
"=",
"ent",
".",
"doc",
"[",
"keep_positions",
".",
"min",
"(",
")",
":",
"keep_positions",
".",
"max",
"(",
")",
"+",
"1",
"]",
"# can't set directly",
"#new_ent.label_.__set__(ent.label_)",
"except",
"ValueError",
":",
"new_ent",
"=",
"ent",
"return",
"new_ent"
] | Strip out extra words that often get picked up by spaCy's NER.
To do: preserve info about what got stripped out to help with ES/Geonames
resolution later.
Parameters
---------
ent: a spaCy named entity Span
Returns
-------
new_ent: a spaCy Span, with extra words stripped out. | [
"Strip",
"out",
"extra",
"words",
"that",
"often",
"get",
"picked",
"up",
"by",
"spaCy",
"s",
"NER",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L115-L149 |
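The span-trimming idea in `clean_entity`, reduced to plain lists: record the indices of non-filler tokens, then slice from the first to the last kept index (which is why filler words in the interior of a span survive):

```python
import numpy as np

tokens = ['the', 'Anbar', 'province']  # toy stand-in for a spaCy entity span
dump_list = {'the', 'province'}

keep = np.asarray([i for i, t in enumerate(tokens) if t.lower() not in dump_list])
trimmed = tokens[keep.min():keep.max() + 1]
assert trimmed == ['Anbar']
```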
247,693 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser._feature_most_alternative | def _feature_most_alternative(self, results, full_results=False):
"""
Find the placename with the most alternative names and return its country.
More alternative names are a rough measure of importance.
Parameters
----------
results: dict
output of `query_geonames`
Returns
-------
most_alt: str
ISO code of country of place with most alternative names,
or empty string if none
"""
try:
alt_names = [len(i['alternativenames']) for i in results['hits']['hits']]
most_alt = results['hits']['hits'][np.array(alt_names).argmax()]
if full_results:
return most_alt
else:
return most_alt['country_code3']
except (IndexError, ValueError, TypeError):
return "" | python | def _feature_most_alternative(self, results, full_results=False):
try:
alt_names = [len(i['alternativenames']) for i in results['hits']['hits']]
most_alt = results['hits']['hits'][np.array(alt_names).argmax()]
if full_results:
return most_alt
else:
return most_alt['country_code3']
except (IndexError, ValueError, TypeError):
return "" | [
"def",
"_feature_most_alternative",
"(",
"self",
",",
"results",
",",
"full_results",
"=",
"False",
")",
":",
"try",
":",
"alt_names",
"=",
"[",
"len",
"(",
"i",
"[",
"'alternativenames'",
"]",
")",
"for",
"i",
"in",
"results",
"[",
"'hits'",
"]",
"[",
"'hits'",
"]",
"]",
"most_alt",
"=",
"results",
"[",
"'hits'",
"]",
"[",
"'hits'",
"]",
"[",
"np",
".",
"array",
"(",
"alt_names",
")",
".",
"argmax",
"(",
")",
"]",
"if",
"full_results",
":",
"return",
"most_alt",
"else",
":",
"return",
"most_alt",
"[",
"'country_code3'",
"]",
"except",
"(",
"IndexError",
",",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"\"\""
] | Find the placename with the most alternative names and return its country.
More alternative names are a rough measure of importance.
Parameters
----------
results: dict
output of `query_geonames`
Returns
-------
most_alt: str
ISO code of country of place with most alternative names,
or empty string if none | [
"Find",
"the",
"placename",
"with",
"the",
"most",
"alternative",
"names",
"and",
"return",
"its",
"country",
".",
"More",
"alternative",
"names",
"are",
"a",
"rough",
"measure",
"of",
"importance",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L176-L200 |
247,694 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser._feature_most_population | def _feature_most_population(self, results):
"""
Find the placename with the largest population and return its country.
More population is a rough measure of importance.
Parameters
----------
results: dict
output of `query_geonames`
Returns
-------
most_pop: str
ISO code of country of place with largest population,
or empty string if none
"""
try:
populations = [i['population'] for i in results['hits']['hits']]
most_pop = results['hits']['hits'][np.array(populations).astype("int").argmax()]
return most_pop['country_code3']
except Exception as e:
return "" | python | def _feature_most_population(self, results):
try:
populations = [i['population'] for i in results['hits']['hits']]
most_pop = results['hits']['hits'][np.array(populations).astype("int").argmax()]
return most_pop['country_code3']
except Exception as e:
return "" | [
"def",
"_feature_most_population",
"(",
"self",
",",
"results",
")",
":",
"try",
":",
"populations",
"=",
"[",
"i",
"[",
"'population'",
"]",
"for",
"i",
"in",
"results",
"[",
"'hits'",
"]",
"[",
"'hits'",
"]",
"]",
"most_pop",
"=",
"results",
"[",
"'hits'",
"]",
"[",
"'hits'",
"]",
"[",
"np",
".",
"array",
"(",
"populations",
")",
".",
"astype",
"(",
"\"int\"",
")",
".",
"argmax",
"(",
")",
"]",
"return",
"most_pop",
"[",
"'country_code3'",
"]",
"except",
"Exception",
"as",
"e",
":",
"return",
"\"\""
] | Find the placename with the largest population and return its country.
More population is a rough measure of importance.
Parameters
----------
results: dict
output of `query_geonames`
Returns
-------
most_pop: str
ISO code of country of place with largest population,
or empty string if none | [
"Find",
"the",
"placename",
"with",
"the",
"largest",
"population",
"and",
"return",
"its",
"country",
".",
"More",
"population",
"is",
"a",
"rough",
"measure",
"of",
"importance",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L203-L225 |
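The argmax-over-hits pattern shared by the two features above, in isolation; note the `astype('int')` cast, since Geonames populations arrive as strings:

```python
import numpy as np

hits = [{'population': '120', 'country_code3': 'DEU'},
        {'population': '90000', 'country_code3': 'FRA'}]

populations = [h['population'] for h in hits]
winner = hits[np.array(populations).astype('int').argmax()]
assert winner['country_code3'] == 'FRA'
```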
247,695 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser._feature_word_embedding | def _feature_word_embedding(self, text):
"""
Given a word, guess the appropriate country by word vector.
Parameters
---------
text: str
the text to extract locations from.
Returns
-------
country_picking: dict
The top two countries (ISO codes) and two measures of
confidence for the first choice.
"""
try:
simils = np.dot(self._prebuilt_vec, text.vector)
except Exception as e:
#print("Vector problem, ", Exception, e)
return {"country_1" : "",
"confid_a" : 0,
"confid_b" : 0,
"country_2" : ""}
ranks = simils.argsort()[::-1]
confid = simils.max()
confid2 = simils[ranks[0]] - simils[ranks[1]]
if confid == 0 or confid2 == 0:
return ""
country_code = self._cts[str(self._ct_nlp[ranks[0]])]
country_picking = {"country_1" : country_code,
"confid_a" : confid,
"confid_b" : confid2,
"country_2" : self._cts[str(self._ct_nlp[ranks[1]])]}
return country_picking | python | def _feature_word_embedding(self, text):
try:
simils = np.dot(self._prebuilt_vec, text.vector)
except Exception as e:
#print("Vector problem, ", Exception, e)
return {"country_1" : "",
"confid_a" : 0,
"confid_b" : 0,
"country_2" : ""}
ranks = simils.argsort()[::-1]
confid = simils.max()
confid2 = simils[ranks[0]] - simils[ranks[1]]
if confid == 0 or confid2 == 0:
return ""
country_code = self._cts[str(self._ct_nlp[ranks[0]])]
country_picking = {"country_1" : country_code,
"confid_a" : confid,
"confid_b" : confid2,
"country_2" : self._cts[str(self._ct_nlp[ranks[1]])]}
return country_picking | [
"def",
"_feature_word_embedding",
"(",
"self",
",",
"text",
")",
":",
"try",
":",
"simils",
"=",
"np",
".",
"dot",
"(",
"self",
".",
"_prebuilt_vec",
",",
"text",
".",
"vector",
")",
"except",
"Exception",
"as",
"e",
":",
"#print(\"Vector problem, \", Exception, e)",
"return",
"{",
"\"country_1\"",
":",
"\"\"",
",",
"\"confid_a\"",
":",
"0",
",",
"\"confid_b\"",
":",
"0",
",",
"\"country_2\"",
":",
"\"\"",
"}",
"ranks",
"=",
"simils",
".",
"argsort",
"(",
")",
"[",
":",
":",
"-",
"1",
"]",
"confid",
"=",
"simils",
".",
"max",
"(",
")",
"confid2",
"=",
"simils",
"[",
"ranks",
"[",
"0",
"]",
"]",
"-",
"simils",
"[",
"ranks",
"[",
"1",
"]",
"]",
"if",
"confid",
"==",
"0",
"or",
"confid2",
"==",
"0",
":",
"return",
"\"\"",
"country_code",
"=",
"self",
".",
"_cts",
"[",
"str",
"(",
"self",
".",
"_ct_nlp",
"[",
"ranks",
"[",
"0",
"]",
"]",
")",
"]",
"country_picking",
"=",
"{",
"\"country_1\"",
":",
"country_code",
",",
"\"confid_a\"",
":",
"confid",
",",
"\"confid_b\"",
":",
"confid2",
",",
"\"country_2\"",
":",
"self",
".",
"_cts",
"[",
"str",
"(",
"self",
".",
"_ct_nlp",
"[",
"ranks",
"[",
"1",
"]",
"]",
")",
"]",
"}",
"return",
"country_picking"
] | Given a word, guess the appropriate country by word vector.
Parameters
---------
text: str
the word or span whose country should be guessed.
Returns
-------
country_picking: dict
The top two countries (ISO codes) and two measures of
confidence for the first choice. | [
"Given",
"a",
"word",
"guess",
"the",
"appropriate",
"country",
"by",
"word",
"vector",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L228-L261 |
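The embedding feature reduces to one dot product against a matrix of prebuilt country vectors, followed by a ranking. A toy NumPy sketch, using invented 4-dimensional vectors in place of the spaCy vectors the real code relies on:

    import numpy as np

    # Invented stand-ins: one row per country, plus a word vector to classify.
    prebuilt_vec = np.array([[0.9, 0.1, 0.0, 0.0],   # say, "Kenya"
                             [0.1, 0.8, 0.1, 0.0],   # say, "France"
                             [0.0, 0.1, 0.9, 0.0]])  # say, "Japan"
    word_vec = np.array([0.85, 0.2, 0.05, 0.0])

    simils = np.dot(prebuilt_vec, word_vec)        # one score per country
    ranks = simils.argsort()[::-1]                 # best match first
    confid = simils.max()                          # absolute confidence
    confid2 = simils[ranks[0]] - simils[ranks[1]]  # margin over the runner-up
    print(ranks[0], round(confid, 3), round(confid2, 3))  # -> 0 0.785 0.535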
247,696 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser._feature_first_back | def _feature_first_back(self, results):
"""
Get the country of the first two results back from geonames.
Parameters
-----------
results: dict
elasticsearch results
Returns
-------
top: tuple
first and second results' country codes (ISO)
"""
try:
first_back = results['hits']['hits'][0]['country_code3']
except (TypeError, IndexError):
# usually occurs if no Geonames result
first_back = ""
try:
second_back = results['hits']['hits'][1]['country_code3']
except (TypeError, IndexError):
second_back = ""
top = (first_back, second_back)
return top | python | def _feature_first_back(self, results):
try:
first_back = results['hits']['hits'][0]['country_code3']
except (TypeError, IndexError):
# usually occurs if no Geonames result
first_back = ""
try:
second_back = results['hits']['hits'][1]['country_code3']
except (TypeError, IndexError):
second_back = ""
top = (first_back, second_back)
return top | [
"def",
"_feature_first_back",
"(",
"self",
",",
"results",
")",
":",
"try",
":",
"first_back",
"=",
"results",
"[",
"'hits'",
"]",
"[",
"'hits'",
"]",
"[",
"0",
"]",
"[",
"'country_code3'",
"]",
"except",
"(",
"TypeError",
",",
"IndexError",
")",
":",
"# usually occurs if no Geonames result",
"first_back",
"=",
"\"\"",
"try",
":",
"second_back",
"=",
"results",
"[",
"'hits'",
"]",
"[",
"'hits'",
"]",
"[",
"1",
"]",
"[",
"'country_code3'",
"]",
"except",
"(",
"TypeError",
",",
"IndexError",
")",
":",
"second_back",
"=",
"\"\"",
"top",
"=",
"(",
"first_back",
",",
"second_back",
")",
"return",
"top"
] | Get the country of the first two results back from geonames.
Parameters
-----------
results: dict
elasticsearch results
Returns
-------
top: tuple
first and second results' country codes (ISO) | [
"Get",
"the",
"country",
"of",
"the",
"first",
"two",
"results",
"back",
"from",
"geonames",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L264-L288 |
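The same defensive indexing can be exercised directly; the empty hit list below is a hypothetical stand-in for a query that returned no Geonames results:

    results = {"hits": {"hits": []}}  # invented: a query with no hits
    try:
        first_back = results["hits"]["hits"][0]["country_code3"]
    except (TypeError, IndexError):
        first_back = ""  # no result, fall back to an empty string
    print((first_back,))  # -> ('',)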
247,697 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser.is_country | def is_country(self, text):
"""Check if a piece of text is in the list of countries"""
ct_list = self._just_cts.keys()
if text in ct_list:
return True
else:
return False | python | def is_country(self, text):
ct_list = self._just_cts.keys()
if text in ct_list:
return True
else:
return False | [
"def",
"is_country",
"(",
"self",
",",
"text",
")",
":",
"ct_list",
"=",
"self",
".",
"_just_cts",
".",
"keys",
"(",
")",
"if",
"text",
"in",
"ct_list",
":",
"return",
"True",
"else",
":",
"return",
"False"
] | Check if a piece of text is in the list of countries | [
"Check",
"if",
"a",
"piece",
"of",
"text",
"is",
"in",
"the",
"list",
"of",
"countries"
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L291-L297 |
247,698 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser.query_geonames | def query_geonames(self, placename):
"""
Wrap search parameters into an elasticsearch query to the geonames index
and return results.
Parameters
---------
placename: str
the placename text extracted by NER system
(the elasticsearch connection is `self.conn`, the Search object returned by `setup_es()`)
Returns
-------
out: the structured results of the elasticsearch query
"""
# first, try for country name
if self.is_country(placename):
q = {"multi_match": {"query": placename,
"fields": ['name', 'asciiname', 'alternativenames'],
"type" : "phrase"}}
res = self.conn.filter("term", feature_code='PCLI').query(q)[0:5].execute() # always 5
else:
# second, try for an exact phrase match
q = {"multi_match": {"query": placename,
"fields": ['name^5', 'asciiname^5', 'alternativenames'],
"type" : "phrase"}}
res = self.conn.query(q)[0:50].execute()
# if no results, use some fuzziness, but still require all terms to be present.
# Fuzzy is not allowed in "phrase" searches.
if res.hits.total == 0:
# tried wrapping this in a {"constant_score" : {"query": ... but made it worse
q = {"multi_match": {"query": placename,
"fields": ['name', 'asciiname', 'alternativenames'],
"fuzziness" : 1,
"operator": "and"
}
}
res = self.conn.query(q)[0:50].execute()
es_result = utilities.structure_results(res)
return es_result | python | def query_geonames(self, placename):
# first, try for country name
if self.is_country(placename):
q = {"multi_match": {"query": placename,
"fields": ['name', 'asciiname', 'alternativenames'],
"type" : "phrase"}}
res = self.conn.filter("term", feature_code='PCLI').query(q)[0:5].execute() # always 5
else:
# second, try for an exact phrase match
q = {"multi_match": {"query": placename,
"fields": ['name^5', 'asciiname^5', 'alternativenames'],
"type" : "phrase"}}
res = self.conn.query(q)[0:50].execute()
# if no results, use some fuzziness, but still require all terms to be present.
# Fuzzy is not allowed in "phrase" searches.
if res.hits.total == 0:
# tried wrapping this in a {"constant_score" : {"query": ... but made it worse
q = {"multi_match": {"query": placename,
"fields": ['name', 'asciiname', 'alternativenames'],
"fuzziness" : 1,
"operator": "and"
}
}
res = self.conn.query(q)[0:50].execute()
es_result = utilities.structure_results(res)
return es_result | [
"def",
"query_geonames",
"(",
"self",
",",
"placename",
")",
":",
"# first first, try for country name",
"if",
"self",
".",
"is_country",
"(",
"placename",
")",
":",
"q",
"=",
"{",
"\"multi_match\"",
":",
"{",
"\"query\"",
":",
"placename",
",",
"\"fields\"",
":",
"[",
"'name'",
",",
"'asciiname'",
",",
"'alternativenames'",
"]",
",",
"\"type\"",
":",
"\"phrase\"",
"}",
"}",
"res",
"=",
"self",
".",
"conn",
".",
"filter",
"(",
"\"term\"",
",",
"feature_code",
"=",
"'PCLI'",
")",
".",
"query",
"(",
"q",
")",
"[",
"0",
":",
"5",
"]",
".",
"execute",
"(",
")",
"# always 5",
"else",
":",
"# second, try for an exact phrase match",
"q",
"=",
"{",
"\"multi_match\"",
":",
"{",
"\"query\"",
":",
"placename",
",",
"\"fields\"",
":",
"[",
"'name^5'",
",",
"'asciiname^5'",
",",
"'alternativenames'",
"]",
",",
"\"type\"",
":",
"\"phrase\"",
"}",
"}",
"res",
"=",
"self",
".",
"conn",
".",
"query",
"(",
"q",
")",
"[",
"0",
":",
"50",
"]",
".",
"execute",
"(",
")",
"# if no results, use some fuzziness, but still require all terms to be present.",
"# Fuzzy is not allowed in \"phrase\" searches.",
"if",
"res",
".",
"hits",
".",
"total",
"==",
"0",
":",
"# tried wrapping this in a {\"constant_score\" : {\"query\": ... but made it worse",
"q",
"=",
"{",
"\"multi_match\"",
":",
"{",
"\"query\"",
":",
"placename",
",",
"\"fields\"",
":",
"[",
"'name'",
",",
"'asciiname'",
",",
"'alternativenames'",
"]",
",",
"\"fuzziness\"",
":",
"1",
",",
"\"operator\"",
":",
"\"and\"",
"}",
"}",
"res",
"=",
"self",
".",
"conn",
".",
"query",
"(",
"q",
")",
"[",
"0",
":",
"50",
"]",
".",
"execute",
"(",
")",
"es_result",
"=",
"utilities",
".",
"structure_results",
"(",
"res",
")",
"return",
"es_result"
] | Wrap search parameters into an elasticsearch query to the geonames index
and return results.
Parameters
---------
placename: str
the placename text extracted by NER system
(the elasticsearch connection is `self.conn`, the Search object returned by `setup_es()`)
Returns
-------
out: the structured results of the elasticsearch query | [
"Wrap",
"search",
"parameters",
"into",
"an",
"elasticsearch",
"query",
"to",
"the",
"geonames",
"index",
"and",
"return",
"results",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L301-L341 |
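A usage sketch of the two-stage strategy: an exact phrase match over boosted name fields first, then a fuzzy AND query only when nothing comes back. The connection details are assumptions (mordecai's `setup_es()` builds the real connection), and `res.hits.total == 0` assumes the integer total of Elasticsearch 5/6-era clients, as in the code above:

    from elasticsearch import Elasticsearch
    from elasticsearch_dsl import Search

    client = Elasticsearch()  # assumed: a local Geonames index on the default port
    conn = Search(using=client, index="geonames")

    placename = "Aleppo"
    # Stage 1: exact phrase match, boosting the canonical name fields.
    q = {"multi_match": {"query": placename,
                         "fields": ["name^5", "asciiname^5", "alternativenames"],
                         "type": "phrase"}}
    res = conn.query(q)[0:50].execute()

    # Stage 2: fuzziness is not allowed with type="phrase", so fall back to a
    # fuzzy query where "operator": "and" still requires every term to match.
    if res.hits.total == 0:
        q = {"multi_match": {"query": placename,
                             "fields": ["name", "asciiname", "alternativenames"],
                             "fuzziness": 1,
                             "operator": "and"}}
        res = conn.query(q)[0:50].execute()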
247,699 | openeventdata/mordecai | mordecai/geoparse.py | Geoparser.query_geonames_country | def query_geonames_country(self, placename, country):
"""
Like query_geonames, but this time limited to a specified country.
"""
# first, try for an exact phrase match
q = {"multi_match": {"query": placename,
"fields": ['name^5', 'asciiname^5', 'alternativenames'],
"type": "phrase"}}
res = self.conn.filter("term", country_code3=country).query(q)[0:50].execute()
# if no results, use some fuzziness, but still require all terms to be present.
# Fuzzy is not allowed in "phrase" searches.
if res.hits.total == 0:
# tried wrapping this in a {"constant_score" : {"query": ... but made it worse
q = {"multi_match": {"query": placename,
"fields": ['name', 'asciiname', 'alternativenames'],
"fuzziness": 1,
"operator": "and"}}
res = self.conn.filter("term", country_code3=country).query(q)[0:50].execute()
out = utilities.structure_results(res)
return out | python | def query_geonames_country(self, placename, country):
# first, try for an exact phrase match
q = {"multi_match": {"query": placename,
"fields": ['name^5', 'asciiname^5', 'alternativenames'],
"type": "phrase"}}
res = self.conn.filter("term", country_code3=country).query(q)[0:50].execute()
# if no results, use some fuzziness, but still require all terms to be present.
# Fuzzy is not allowed in "phrase" searches.
if res.hits.total == 0:
# tried wrapping this in a {"constant_score" : {"query": ... but made it worse
q = {"multi_match": {"query": placename,
"fields": ['name', 'asciiname', 'alternativenames'],
"fuzziness": 1,
"operator": "and"}}
res = self.conn.filter("term", country_code3=country).query(q)[0:50].execute()
out = utilities.structure_results(res)
return out | [
"def",
"query_geonames_country",
"(",
"self",
",",
"placename",
",",
"country",
")",
":",
"# first, try for an exact phrase match",
"q",
"=",
"{",
"\"multi_match\"",
":",
"{",
"\"query\"",
":",
"placename",
",",
"\"fields\"",
":",
"[",
"'name^5'",
",",
"'asciiname^5'",
",",
"'alternativenames'",
"]",
",",
"\"type\"",
":",
"\"phrase\"",
"}",
"}",
"res",
"=",
"self",
".",
"conn",
".",
"filter",
"(",
"\"term\"",
",",
"country_code3",
"=",
"country",
")",
".",
"query",
"(",
"q",
")",
"[",
"0",
":",
"50",
"]",
".",
"execute",
"(",
")",
"# if no results, use some fuzziness, but still require all terms to be present.",
"# Fuzzy is not allowed in \"phrase\" searches.",
"if",
"res",
".",
"hits",
".",
"total",
"==",
"0",
":",
"# tried wrapping this in a {\"constant_score\" : {\"query\": ... but made it worse",
"q",
"=",
"{",
"\"multi_match\"",
":",
"{",
"\"query\"",
":",
"placename",
",",
"\"fields\"",
":",
"[",
"'name'",
",",
"'asciiname'",
",",
"'alternativenames'",
"]",
",",
"\"fuzziness\"",
":",
"1",
",",
"\"operator\"",
":",
"\"and\"",
"}",
"}",
"res",
"=",
"self",
".",
"conn",
".",
"filter",
"(",
"\"term\"",
",",
"country_code3",
"=",
"country",
")",
".",
"query",
"(",
"q",
")",
"[",
"0",
":",
"50",
"]",
".",
"execute",
"(",
")",
"out",
"=",
"utilities",
".",
"structure_results",
"(",
"res",
")",
"return",
"out"
] | Like query_geonames, but this time limited to a specified country. | [
"Like",
"query_geonames",
"but",
"this",
"time",
"limited",
"to",
"a",
"specified",
"country",
"."
] | bd82b8bcc27621345c57cbe9ec7f8c8552620ffc | https://github.com/openeventdata/mordecai/blob/bd82b8bcc27621345c57cbe9ec7f8c8552620ffc/mordecai/geoparse.py#L345-L365 |
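The country-restricted variant differs only in chaining a term filter ahead of each query stage; a hypothetical one-liner reusing the `conn` object and query `q` sketched above:

    # Restrict every stage of the search to one ISO-3 country code.
    res = conn.filter("term", country_code3="USA").query(q)[0:50].execute()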