INSTRUCTION
stringlengths 1
46.3k
| RESPONSE
stringlengths 75
80.2k
|
---|---|
return [channel] | def _get_initialized_channels_for_service(self, org_id, service_id):
'''return [channel]'''
channels_dict = self._get_initialized_channels_dict_for_service(org_id, service_id)
return list(channels_dict.values()) |
we make sure that MultiPartyEscrow address from metadata is correct | def _check_mpe_address_metadata(self, metadata):
""" we make sure that MultiPartyEscrow address from metadata is correct """
mpe_address = self.get_mpe_address()
if (str(mpe_address).lower() != str(metadata["mpe_address"]).lower()):
raise Exception("MultiPartyEscrow contract address from metadata %s do not correspond to current MultiPartyEscrow address %s"%(metadata["mpe_address"], mpe_address)) |
similar to _init_or_update_service_if_needed but we get service_registraion from registry,
so we can update only registered services | def _init_or_update_registered_service_if_needed(self):
'''
similar to _init_or_update_service_if_needed but we get service_registraion from registry,
so we can update only registered services
'''
if (self.is_service_initialized()):
old_reg = self._read_service_info(self.args.org_id, self.args.service_id)
# metadataURI will be in old_reg only for service which was initilized from registry (not from metadata)
# we do nothing for services which were initilized from metadata
if ("metadataURI" not in old_reg):
return
service_registration = self._get_service_registration()
# if metadataURI hasn't been changed we do nothing
if (not self.is_metadataURI_has_changed(service_registration)):
return
else:
service_registration = self._get_service_registration()
service_metadata = self._get_service_metadata_from_registry()
self._init_or_update_service_if_needed(service_metadata, service_registration) |
read expiration from args.
We allow the following types of expirations
1. "<int>" simple integer defines absolute expiration in blocks
2. "+<int>blocks", where <int> is integer sets expiration as: current_block + <int>
3. "+<int>days", where <int> is integer sets expiration as: current_block + <int>*4*60*24 (we assume 15 sec/block here)
If expiration > current_block + 1036800 (~6 month) we generate an exception if "--force" flag haven't been set | def _get_expiration_from_args(self):
"""
read expiration from args.
We allow the following types of expirations
1. "<int>" simple integer defines absolute expiration in blocks
2. "+<int>blocks", where <int> is integer sets expiration as: current_block + <int>
3. "+<int>days", where <int> is integer sets expiration as: current_block + <int>*4*60*24 (we assume 15 sec/block here)
If expiration > current_block + 1036800 (~6 month) we generate an exception if "--force" flag haven't been set
"""
current_block = self.ident.w3.eth.blockNumber
s = self.args.expiration
if (s.startswith("+") and s.endswith("days")):
rez = current_block + int(s[1:-4]) * 4 * 60 * 24
elif (s.startswith("+") and s.endswith("blocks")):
rez = current_block + int(s[1:-6])
else:
rez = int(s)
if (rez > current_block + 1036800 and not self.args.force):
d = (rez - current_block) // (4 * 60 * 24)
raise Exception("You try to set expiration time too far in the future: approximately %i days. "%d +
"Set --force parameter if your really want to do it.")
return rez |
- filter_by can be sender or signer | def _smart_get_initialized_channel_for_service(self, metadata, filter_by, is_try_initailize = True):
'''
- filter_by can be sender or signer
'''
channels = self._get_initialized_channels_for_service(self.args.org_id, self.args.service_id)
group_id = metadata.get_group_id(self.args.group_name)
channels = [c for c in channels if c[filter_by].lower() == self.ident.address.lower() and c["groupId"] == group_id]
if (len(channels) == 0 and is_try_initailize):
# this will work only in simple case where signer == sender
self._initialize_already_opened_channel(metadata, self.ident.address, self.ident.address)
return self._smart_get_initialized_channel_for_service(metadata, filter_by, is_try_initailize = False)
if (len(channels) == 0):
raise Exception("Cannot find initialized channel for service with org_id=%s service_id=%s and signer=%s"%(self.args.org_id, self.args.service_id, self.ident.address))
if (self.args.channel_id is None):
if (len(channels) > 1):
channel_ids = [channel["channelId"] for channel in channels]
raise Exception("We have several initialized channel: %s. You should use --channel-id to select one"%str(channel_ids))
return channels[0]
for channel in channels:
if (channel["channelId"] == self.args.channel_id):
return channel
raise Exception("Channel %i has not been initialized or your are not the sender/signer of it"%self.args.channel_id) |
return dict of lists rez[(<org_id>, <service_id>)] = [(channel_id, channel_info)] | def _get_all_initialized_channels(self):
""" return dict of lists rez[(<org_id>, <service_id>)] = [(channel_id, channel_info)] """
channels_dict = defaultdict(list)
for service_base_dir in self._get_persistent_mpe_dir().glob("*/*"):
org_id = service_base_dir.parent.name
service_id = service_base_dir.name
channels = self._get_initialized_channels_for_service(org_id, service_id)
if (channels):
channels_dict[(org_id, service_id)] = channels
return channels_dict |
get all filtered chanels from blockchain logs | def _get_all_filtered_channels(self, topics_without_signature):
""" get all filtered chanels from blockchain logs """
mpe_address = self.get_mpe_address()
event_signature = self.ident.w3.sha3(text="ChannelOpen(uint256,uint256,address,address,address,bytes32,uint256,uint256)").hex()
topics = [event_signature] + topics_without_signature
logs = self.ident.w3.eth.getLogs({"fromBlock" : self.args.from_block, "address" : mpe_address, "topics" : topics})
abi = get_contract_def("MultiPartyEscrow")
event_abi = abi_get_element_by_name(abi, "ChannelOpen")
channels_ids = [get_event_data(event_abi, l)["args"]["channelId"] for l in logs]
return channels_ids |
We try to get config address from the different sources.
The order of priorioty is following:
- command line argument (at)
- command line argument (<contract_name>_at)
- current session configuration (current_<contract_name>_at)
- networks/*json | def get_contract_address(cmd, contract_name, error_message = None):
"""
We try to get config address from the different sources.
The order of priorioty is following:
- command line argument (at)
- command line argument (<contract_name>_at)
- current session configuration (current_<contract_name>_at)
- networks/*json
"""
# try to get from command line argument at or contractname_at
a = "at"
if (hasattr(cmd.args, a) and getattr(cmd.args, a)):
return cmd.w3.toChecksumAddress( getattr(cmd.args, a) )
# try to get from command line argument contractname_at
a = "%s_at"%contract_name.lower()
if (hasattr(cmd.args, a) and getattr(cmd.args, a)):
return cmd.w3.toChecksumAddress( getattr(cmd.args, a) )
# try to get from current session configuration
rez = cmd.config.get_session_field("current_%s_at"%(contract_name.lower()), exception_if_not_found = False)
if rez:
return cmd.w3.toChecksumAddress(rez)
error_message = error_message or "Fail to read %s address from \"networks\", you should specify address by yourself via --%s_at parameter"%(contract_name, contract_name.lower())
# try to take address from networks
return read_default_contract_address(w3=cmd.w3, contract_name=contract_name) |
We try to get field_name from diffent sources:
The order of priorioty is following:
- command line argument (--<field_name>)
- current session configuration (default_<filed_name>) | def get_field_from_args_or_session(config, args, field_name):
"""
We try to get field_name from diffent sources:
The order of priorioty is following:
- command line argument (--<field_name>)
- current session configuration (default_<filed_name>)
"""
rez = getattr(args, field_name, None)
#type(rez) can be int in case of wallet-index, so we cannot make simply if(rez)
if (rez != None):
return rez
rez = config.get_session_field("default_%s"%field_name, exception_if_not_found=False)
if (rez):
return rez
raise Exception("Fail to get default_%s from config, should specify %s via --%s parameter"%(field_name, field_name, field_name.replace("_","-"))) |
Return element of abi (return None if fails to find) | def abi_get_element_by_name(abi, name):
""" Return element of abi (return None if fails to find) """
if (abi and "abi" in abi):
for a in abi["abi"]:
if ("name" in a and a["name"] == name):
return a
return None |
Just ensures the url has a scheme (http/https), and a net location (IP or domain name).
Can make more advanced or do on-network tests if needed, but this is really just to catch obvious errors.
>>> is_valid_endpoint("https://34.216.72.29:6206")
True
>>> is_valid_endpoint("blahblah")
False
>>> is_valid_endpoint("blah://34.216.72.29")
False
>>> is_valid_endpoint("http://34.216.72.29:%%%")
False
>>> is_valid_endpoint("http://192.168.0.2:9999")
True | def is_valid_endpoint(url):
"""
Just ensures the url has a scheme (http/https), and a net location (IP or domain name).
Can make more advanced or do on-network tests if needed, but this is really just to catch obvious errors.
>>> is_valid_endpoint("https://34.216.72.29:6206")
True
>>> is_valid_endpoint("blahblah")
False
>>> is_valid_endpoint("blah://34.216.72.29")
False
>>> is_valid_endpoint("http://34.216.72.29:%%%")
False
>>> is_valid_endpoint("http://192.168.0.2:9999")
True
"""
try:
result = urlparse(url)
if result.port:
_port = int(result.port)
return (
all([result.scheme, result.netloc]) and
result.scheme in ['http', 'https']
)
except ValueError:
return False |
open grpc channel:
- for http:// we open insecure_channel
- for https:// we open secure_channel (with default credentials)
- without prefix we open insecure_channel | def open_grpc_channel(endpoint):
"""
open grpc channel:
- for http:// we open insecure_channel
- for https:// we open secure_channel (with default credentials)
- without prefix we open insecure_channel
"""
if (endpoint.startswith("https://")):
return grpc.secure_channel(remove_http_https_prefix(endpoint), grpc.ssl_channel_credentials())
return grpc.insecure_channel(remove_http_https_prefix(endpoint)) |
Creates a new Repo object in PFS with the given name. Repos are the
top level data object in PFS and should be used to store data of a
similar type. For example rather than having a single Repo for an
entire project you might have separate Repos for logs, metrics,
database dumps etc.
Params:
* repo_name: Name of the repo.
* description: Repo description. | def create_repo(self, repo_name, description=None):
"""
Creates a new Repo object in PFS with the given name. Repos are the
top level data object in PFS and should be used to store data of a
similar type. For example rather than having a single Repo for an
entire project you might have separate Repos for logs, metrics,
database dumps etc.
Params:
* repo_name: Name of the repo.
* description: Repo description.
"""
req = proto.CreateRepoRequest(repo=proto.Repo(name=repo_name), description=description)
self.stub.CreateRepo(req, metadata=self.metadata) |
Returns info about a specific Repo.
Params:
* repo_name: Name of the repo. | def inspect_repo(self, repo_name):
"""
Returns info about a specific Repo.
Params:
* repo_name: Name of the repo.
"""
req = proto.InspectRepoRequest(repo=proto.Repo(name=repo_name))
res = self.stub.InspectRepo(req, metadata=self.metadata)
return res |
Returns info about all Repos. | def list_repo(self):
"""
Returns info about all Repos.
"""
req = proto.ListRepoRequest()
res = self.stub.ListRepo(req, metadata=self.metadata)
if hasattr(res, 'repo_info'):
return res.repo_info
return [] |
Deletes a repo and reclaims the storage space it was using.
Params:
* repo_name: The name of the repo.
* force: If set to true, the repo will be removed regardless of
errors. This argument should be used with care.
* all: Delete all repos. | def delete_repo(self, repo_name=None, force=False, all=False):
"""
Deletes a repo and reclaims the storage space it was using.
Params:
* repo_name: The name of the repo.
* force: If set to true, the repo will be removed regardless of
errors. This argument should be used with care.
* all: Delete all repos.
"""
if not all:
if repo_name:
req = proto.DeleteRepoRequest(repo=proto.Repo(name=repo_name), force=force)
self.stub.DeleteRepo(req, metadata=self.metadata)
else:
raise ValueError("Either a repo_name or all=True needs to be provided")
else:
if not repo_name:
req = proto.DeleteRepoRequest(force=force, all=all)
self.stub.DeleteRepo(req, metadata=self.metadata)
else:
raise ValueError("Cannot specify a repo_name if all=True") |
Begins the process of committing data to a Repo. Once started you can
write to the Commit with PutFile and when all the data has been
written you must finish the Commit with FinishCommit. NOTE, data is
not persisted until FinishCommit is called. A Commit object is
returned.
Params:
* repo_name: The name of the repo.
* branch: A more convenient way to build linear chains of commits.
When a commit is started with a non-empty branch the value of branch
becomes an alias for the created Commit. This enables a more intuitive
access pattern. When the commit is started on a branch the previous
head of the branch is used as the parent of the commit.
* parent: Specifies the parent Commit, upon creation the new Commit
will appear identical to the parent Commit, data can safely be added
to the new commit without affecting the contents of the parent Commit.
You may pass "" as parentCommit in which case the new Commit will have
no parent and will initially appear empty.
* description: (optional) explanation of the commit for clarity. | def start_commit(self, repo_name, branch=None, parent=None, description=None):
"""
Begins the process of committing data to a Repo. Once started you can
write to the Commit with PutFile and when all the data has been
written you must finish the Commit with FinishCommit. NOTE, data is
not persisted until FinishCommit is called. A Commit object is
returned.
Params:
* repo_name: The name of the repo.
* branch: A more convenient way to build linear chains of commits.
When a commit is started with a non-empty branch the value of branch
becomes an alias for the created Commit. This enables a more intuitive
access pattern. When the commit is started on a branch the previous
head of the branch is used as the parent of the commit.
* parent: Specifies the parent Commit, upon creation the new Commit
will appear identical to the parent Commit, data can safely be added
to the new commit without affecting the contents of the parent Commit.
You may pass "" as parentCommit in which case the new Commit will have
no parent and will initially appear empty.
* description: (optional) explanation of the commit for clarity.
"""
req = proto.StartCommitRequest(parent=proto.Commit(repo=proto.Repo(name=repo_name), id=parent), branch=branch,
description=description)
res = self.stub.StartCommit(req, metadata=self.metadata)
return res |
Ends the process of committing data to a Repo and persists the
Commit. Once a Commit is finished the data becomes immutable and
future attempts to write to it with PutFile will error.
Params:
* commit: A tuple, string, or Commit object representing the commit. | def finish_commit(self, commit):
"""
Ends the process of committing data to a Repo and persists the
Commit. Once a Commit is finished the data becomes immutable and
future attempts to write to it with PutFile will error.
Params:
* commit: A tuple, string, or Commit object representing the commit.
"""
req = proto.FinishCommitRequest(commit=commit_from(commit))
res = self.stub.FinishCommit(req, metadata=self.metadata)
return res |
A context manager for doing stuff inside a commit. | def commit(self, repo_name, branch=None, parent=None, description=None):
"""A context manager for doing stuff inside a commit."""
commit = self.start_commit(repo_name, branch, parent, description)
try:
yield commit
except Exception as e:
print("An exception occurred during an open commit. "
"Trying to finish it (Currently a commit can't be cancelled)")
raise e
finally:
self.finish_commit(commit) |
Returns info about a specific Commit.
Params:
* commit: A tuple, string, or Commit object representing the commit. | def inspect_commit(self, commit):
"""
Returns info about a specific Commit.
Params:
* commit: A tuple, string, or Commit object representing the commit.
"""
req = proto.InspectCommitRequest(commit=commit_from(commit))
return self.stub.InspectCommit(req, metadata=self.metadata) |
Gets a list of CommitInfo objects.
Params:
* repo_name: If only `repo_name` is given, all commits in the repo are
returned.
* to_commit: Optional. Only the ancestors of `to`, including `to`
itself, are considered.
* from_commit: Optional. Only the descendants of `from`, including
`from` itself, are considered.
* number: Optional. Determines how many commits are returned. If
`number` is 0, all commits that match the aforementioned criteria are
returned. | def list_commit(self, repo_name, to_commit=None, from_commit=None, number=0):
"""
Gets a list of CommitInfo objects.
Params:
* repo_name: If only `repo_name` is given, all commits in the repo are
returned.
* to_commit: Optional. Only the ancestors of `to`, including `to`
itself, are considered.
* from_commit: Optional. Only the descendants of `from`, including
`from` itself, are considered.
* number: Optional. Determines how many commits are returned. If
`number` is 0, all commits that match the aforementioned criteria are
returned.
"""
req = proto.ListCommitRequest(repo=proto.Repo(name=repo_name), number=number)
if to_commit is not None:
req.to.CopyFrom(commit_from(to_commit))
if from_commit is not None:
getattr(req, 'from').CopyFrom(commit_from(from_commit))
res = self.stub.ListCommit(req, metadata=self.metadata)
if hasattr(res, 'commit_info'):
return res.commit_info
return [] |
Deletes a commit.
Params:
* commit: A tuple, string, or Commit object representing the commit. | def delete_commit(self, commit):
"""
Deletes a commit.
Params:
* commit: A tuple, string, or Commit object representing the commit.
"""
req = proto.DeleteCommitRequest(commit=commit_from(commit))
self.stub.DeleteCommit(req, metadata=self.metadata) |
Blocks until all of the commits which have a set of commits as
provenance have finished. For commits to be considered they must have
all of the specified commits as provenance. This in effect waits for
all of the jobs that are triggered by a set of commits to complete.
It returns an error if any of the commits it's waiting on are
cancelled due to one of the jobs encountering an error during runtime.
Note that it's never necessary to call FlushCommit to run jobs,
they'll run no matter what, FlushCommit just allows you to wait for
them to complete and see their output once they do. This returns an
iterator of CommitInfo objects.
Params:
* commits: A commit or a list of commits to wait on.
* repos: Optional. Only the commits up to and including those repos.
will be considered, otherwise all repos are considered. | def flush_commit(self, commits, repos=tuple()):
"""
Blocks until all of the commits which have a set of commits as
provenance have finished. For commits to be considered they must have
all of the specified commits as provenance. This in effect waits for
all of the jobs that are triggered by a set of commits to complete.
It returns an error if any of the commits it's waiting on are
cancelled due to one of the jobs encountering an error during runtime.
Note that it's never necessary to call FlushCommit to run jobs,
they'll run no matter what, FlushCommit just allows you to wait for
them to complete and see their output once they do. This returns an
iterator of CommitInfo objects.
Params:
* commits: A commit or a list of commits to wait on.
* repos: Optional. Only the commits up to and including those repos.
will be considered, otherwise all repos are considered.
"""
req = proto.FlushCommitRequest(commit=[commit_from(c) for c in commits],
to_repo=[proto.Repo(name=r) for r in repos])
res = self.stub.FlushCommit(req, metadata=self.metadata)
return res |
SubscribeCommit is like ListCommit but it keeps listening for commits as
they come in. This returns an iterator Commit objects.
Params:
* repo_name: Name of the repo.
* branch: Branch to subscribe to.
* from_commit_id: Optional. Only commits created since this commit
are returned. | def subscribe_commit(self, repo_name, branch, from_commit_id=None):
"""
SubscribeCommit is like ListCommit but it keeps listening for commits as
they come in. This returns an iterator Commit objects.
Params:
* repo_name: Name of the repo.
* branch: Branch to subscribe to.
* from_commit_id: Optional. Only commits created since this commit
are returned.
"""
repo = proto.Repo(name=repo_name)
req = proto.SubscribeCommitRequest(repo=repo, branch=branch)
if from_commit_id is not None:
getattr(req, 'from').CopyFrom(proto.Commit(repo=repo, id=from_commit_id))
res = self.stub.SubscribeCommit(req, metadata=self.metadata)
return res |
Lists the active Branch objects on a Repo.
Params:
* repo_name: The name of the repo. | def list_branch(self, repo_name):
"""
Lists the active Branch objects on a Repo.
Params:
* repo_name: The name of the repo.
"""
req = proto.ListBranchRequest(repo=proto.Repo(name=repo_name))
res = self.stub.ListBranch(req, metadata=self.metadata)
if hasattr(res, 'branch_info'):
return res.branch_info
return [] |
Sets a commit and its ancestors as a branch.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* branch_name: The name for the branch to set. | def set_branch(self, commit, branch_name):
"""
Sets a commit and its ancestors as a branch.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* branch_name: The name for the branch to set.
"""
res = proto.SetBranchRequest(commit=commit_from(commit), branch=branch_name)
self.stub.SetBranch(res, metadata=self.metadata) |
Deletes a branch, but leaves the commits themselves intact. In other
words, those commits can still be accessed via commit IDs and other
branches they happen to be on.
Params:
* repo_name: The name of the repo.
* branch_name: The name of the branch to delete. | def delete_branch(self, repo_name, branch_name):
"""
Deletes a branch, but leaves the commits themselves intact. In other
words, those commits can still be accessed via commit IDs and other
branches they happen to be on.
Params:
* repo_name: The name of the repo.
* branch_name: The name of the branch to delete.
"""
res = proto.DeleteBranchRequest(repo=Repo(name=repo_name), branch=branch_name)
self.stub.DeleteBranch(res, metadata=self.metadata) |
Uploads a binary bytes array as file(s) in a certain path.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: Path in the repo the file(s) will be written to.
* value: The file contents as bytes, represented as a file-like
object, bytestring, or iterator of bytestrings.
* delimiter: Optional. causes data to be broken up into separate files
with `path` as a prefix.
* target_file_datums: Optional. Specifies the target number of datums
in each written file. It may be lower if data does not split evenly,
but will never be higher, unless the value is 0.
* target_file_bytes: Specifies the target number of bytes in each
written file, files may have more or fewer bytes than the target. | def put_file_bytes(self, commit, path, value, delimiter=proto.NONE,
target_file_datums=0, target_file_bytes=0, overwrite_index=None):
"""
Uploads a binary bytes array as file(s) in a certain path.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: Path in the repo the file(s) will be written to.
* value: The file contents as bytes, represented as a file-like
object, bytestring, or iterator of bytestrings.
* delimiter: Optional. causes data to be broken up into separate files
with `path` as a prefix.
* target_file_datums: Optional. Specifies the target number of datums
in each written file. It may be lower if data does not split evenly,
but will never be higher, unless the value is 0.
* target_file_bytes: Specifies the target number of bytes in each
written file, files may have more or fewer bytes than the target.
"""
overwrite_index_proto = proto.OverwriteIndex(index=overwrite_index) if overwrite_index is not None else None
if hasattr(value, "read"):
def wrap(value):
for i in itertools.count():
chunk = value.read(BUFFER_SIZE)
if len(chunk) == 0:
return
if i == 0:
yield proto.PutFileRequest(
file=proto.File(commit=commit_from(commit), path=path),
value=chunk,
delimiter=delimiter,
target_file_datums=target_file_datums,
target_file_bytes=target_file_bytes,
overwrite_index=overwrite_index_proto
)
else:
yield proto.PutFileRequest(value=chunk)
elif isinstance(value, collections.Iterable) and not isinstance(value, (six.string_types, six.binary_type)):
def wrap(value):
for i, chunk in enumerate(value):
if i == 0:
yield proto.PutFileRequest(
file=proto.File(commit=commit_from(commit), path=path),
value=chunk,
delimiter=delimiter,
target_file_datums=target_file_datums,
target_file_bytes=target_file_bytes,
overwrite_index=overwrite_index_proto
)
else:
yield proto.PutFileRequest(value=chunk)
else:
def wrap(value):
yield proto.PutFileRequest(
file=proto.File(commit=commit_from(commit), path=path),
value=value[:BUFFER_SIZE],
delimiter=delimiter,
target_file_datums=target_file_datums,
target_file_bytes=target_file_bytes,
overwrite_index=overwrite_index_proto
)
for i in range(BUFFER_SIZE, len(value), BUFFER_SIZE):
yield proto.PutFileRequest(
value=value[i:i + BUFFER_SIZE],
overwrite_index=overwrite_index_proto
)
self.stub.PutFile(wrap(value), metadata=self.metadata) |
Puts a file using the content found at a URL. The URL is sent to the
server which performs the request.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the file.
* url: The url of the file to put.
* recursive: allow for recursive scraping of some types URLs for
example on s3:// urls. | def put_file_url(self, commit, path, url, recursive=False):
"""
Puts a file using the content found at a URL. The URL is sent to the
server which performs the request.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the file.
* url: The url of the file to put.
* recursive: allow for recursive scraping of some types URLs for
example on s3:// urls.
"""
req = iter([
proto.PutFileRequest(
file=proto.File(commit=commit_from(commit), path=path),
url=url,
recursive=recursive
)
])
self.stub.PutFile(req, metadata=self.metadata) |
Returns an iterator of the contents contents of a file at a specific Commit.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path of the file.
* offset_bytes: Optional. specifies a number of bytes that should be
skipped in the beginning of the file.
* size_bytes: Optional. limits the total amount of data returned, note
you will get fewer bytes than size if you pass a value larger than the
size of the file. If size is set to 0 then all of the data will be
returned.
* extract_value: If True, then an ExtractValueIterator will be return,
which will iterate over the bytes of the file. If False, then the
protobuf response iterator will return. | def get_file(self, commit, path, offset_bytes=0, size_bytes=0, extract_value=True):
"""
Returns an iterator of the contents contents of a file at a specific Commit.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path of the file.
* offset_bytes: Optional. specifies a number of bytes that should be
skipped in the beginning of the file.
* size_bytes: Optional. limits the total amount of data returned, note
you will get fewer bytes than size if you pass a value larger than the
size of the file. If size is set to 0 then all of the data will be
returned.
* extract_value: If True, then an ExtractValueIterator will be return,
which will iterate over the bytes of the file. If False, then the
protobuf response iterator will return.
"""
req = proto.GetFileRequest(
file=proto.File(commit=commit_from(commit), path=path),
offset_bytes=offset_bytes,
size_bytes=size_bytes
)
res = self.stub.GetFile(req, metadata=self.metadata)
if extract_value:
return ExtractValueIterator(res)
return res |
Returns the contents of a list of files at a specific Commit as a
dictionary of file paths to data.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* paths: A list of paths to retrieve.
* recursive: If True, will go into each directory in the list
recursively. | def get_files(self, commit, paths, recursive=False):
"""
Returns the contents of a list of files at a specific Commit as a
dictionary of file paths to data.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* paths: A list of paths to retrieve.
* recursive: If True, will go into each directory in the list
recursively.
"""
filtered_file_infos = []
for path in paths:
fi = self.inspect_file(commit, path)
if fi.file_type == proto.FILE:
filtered_file_infos.append(fi)
else:
filtered_file_infos += self.list_file(commit, path, recursive=recursive)
filtered_paths = [fi.file.path for fi in filtered_file_infos if fi.file_type == proto.FILE]
return {path: b''.join(self.get_file(commit, path)) for path in filtered_paths} |
Returns info about a specific file.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: Path to file. | def inspect_file(self, commit, path):
"""
Returns info about a specific file.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: Path to file.
"""
req = proto.InspectFileRequest(file=proto.File(commit=commit_from(commit), path=path))
res = self.stub.InspectFile(req, metadata=self.metadata)
return res |
Lists the files in a directory.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the directory.
* recursive: If True, continue listing the files for sub-directories. | def list_file(self, commit, path, recursive=False):
"""
Lists the files in a directory.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the directory.
* recursive: If True, continue listing the files for sub-directories.
"""
req = proto.ListFileRequest(
file=proto.File(commit=commit_from(commit), path=path)
)
res = self.stub.ListFile(req, metadata=self.metadata)
file_infos = res.file_info
if recursive:
dirs = [f for f in file_infos if f.file_type == proto.DIR]
files = [f for f in file_infos if f.file_type == proto.FILE]
return sum([self.list_file(commit, d.file.path, recursive) for d in dirs], files)
return list(file_infos) |
Deletes a file from a Commit. DeleteFile leaves a tombstone in the
Commit, assuming the file isn't written to later attempting to get the
file from the finished commit will result in not found error. The file
will of course remain intact in the Commit's parent.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the file. | def delete_file(self, commit, path):
"""
Deletes a file from a Commit. DeleteFile leaves a tombstone in the
Commit, assuming the file isn't written to later attempting to get the
file from the finished commit will result in not found error. The file
will of course remain intact in the Commit's parent.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* path: The path to the file.
"""
req = proto.DeleteFileRequest(file=proto.File(commit=commit_from(commit), path=path))
self.stub.DeleteFile(req, metadata=self.metadata) |
See super class method satosa.frontends.base.FrontendModule#handle_authn_response
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype satosa.response.Response | def handle_authn_response(self, context, internal_response):
"""
See super class method satosa.frontends.base.FrontendModule#handle_authn_response
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype satosa.response.Response
"""
return self._handle_authn_response(context, internal_response, self.idp) |
This method is bound to the starting endpoint of the authentication.
:type context: satosa.context.Context
:type binding_in: str
:rtype: satosa.response.Response
:param context: The current context
:param binding_in: The binding type (http post, http redirect, ...)
:return: response | def handle_authn_request(self, context, binding_in):
"""
This method is bound to the starting endpoint of the authentication.
:type context: satosa.context.Context
:type binding_in: str
:rtype: satosa.response.Response
:param context: The current context
:param binding_in: The binding type (http post, http redirect, ...)
:return: response
"""
return self._handle_authn_request(context, binding_in, self.idp) |
See super class satosa.frontends.base.FrontendModule
:type backend_names: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))] | def register_endpoints(self, backend_names):
"""
See super class satosa.frontends.base.FrontendModule
:type backend_names: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
"""
self.idp_config = self._build_idp_config_endpoints(
self.config[self.KEY_IDP_CONFIG], backend_names)
# Create the idp
idp_config = IdPConfig().load(copy.deepcopy(self.idp_config), metadata_construction=False)
self.idp = Server(config=idp_config)
return self._register_endpoints(backend_names) |
Returns a dict containing the state needed in the response flow.
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, dict[str, str] | str]
:param context: The current context
:param resp_args: Response arguments
:param relay_state: Request relay state
:return: A state as a dict | def _create_state_data(self, context, resp_args, relay_state):
"""
Returns a dict containing the state needed in the response flow.
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, dict[str, str] | str]
:param context: The current context
:param resp_args: Response arguments
:param relay_state: Request relay state
:return: A state as a dict
"""
if "name_id_policy" in resp_args and resp_args["name_id_policy"] is not None:
resp_args["name_id_policy"] = resp_args["name_id_policy"].to_string().decode("utf-8")
return {"resp_args": resp_args, "relay_state": relay_state} |
Loads a state from state
:type state: satosa.state.State
:rtype: dict[str, Any]
:param state: The current state
:return: The dictionary given by the save_state function | def load_state(self, state):
"""
Loads a state from state
:type state: satosa.state.State
:rtype: dict[str, Any]
:param state: The current state
:return: The dictionary given by the save_state function
"""
state_data = state[self.name]
if isinstance(state_data["resp_args"]["name_id_policy"], str):
state_data["resp_args"]["name_id_policy"] = name_id_policy_from_string(
state_data["resp_args"]["name_id_policy"])
return state_data |
Validates some parts of the module config
:type config: dict[str, dict[str, Any] | str]
:param config: The module config | def _validate_config(self, config):
"""
Validates some parts of the module config
:type config: dict[str, dict[str, Any] | str]
:param config: The module config
"""
required_keys = [
self.KEY_IDP_CONFIG,
self.KEY_ENDPOINTS,
]
if not config:
raise ValueError("No configuration given")
for key in required_keys:
try:
_val = config[key]
except KeyError as e:
raise ValueError("Missing configuration key: %s" % key) from e |
See doc for handle_authn_request method.
:type context: satosa.context.Context
:type binding_in: str
:type idp: saml.server.Server
:rtype: satosa.response.Response
:param context: The current context
:param binding_in: The pysaml binding type
:param idp: The saml frontend idp server
:return: response | def _handle_authn_request(self, context, binding_in, idp):
"""
See doc for handle_authn_request method.
:type context: satosa.context.Context
:type binding_in: str
:type idp: saml.server.Server
:rtype: satosa.response.Response
:param context: The current context
:param binding_in: The pysaml binding type
:param idp: The saml frontend idp server
:return: response
"""
req_info = idp.parse_authn_request(context.request["SAMLRequest"], binding_in)
authn_req = req_info.message
satosa_logging(logger, logging.DEBUG, "%s" % authn_req, context.state)
try:
resp_args = idp.response_args(authn_req)
except SAMLError as e:
satosa_logging(logger, logging.ERROR, "Could not find necessary info about entity: %s" % e, context.state)
return ServiceError("Incorrect request from requester: %s" % e)
requester = resp_args["sp_entity_id"]
context.state[self.name] = self._create_state_data(context, idp.response_args(authn_req),
context.request.get("RelayState"))
subject = authn_req.subject
name_id_value = subject.name_id.text if subject else None
nameid_formats = {
"from_policy": authn_req.name_id_policy and authn_req.name_id_policy.format,
"from_response": subject and subject.name_id and subject.name_id.format,
"from_metadata": (
idp.metadata[requester]
.get("spsso_descriptor", [{}])[0]
.get("name_id_format", [{}])[0]
.get("text")
),
"default": NAMEID_FORMAT_TRANSIENT,
}
name_id_format = (
nameid_formats["from_policy"]
or (
nameid_formats["from_response"] != NAMEID_FORMAT_UNSPECIFIED
and nameid_formats["from_response"]
)
or nameid_formats["from_metadata"]
or nameid_formats["from_response"]
or nameid_formats["default"]
)
requester_name = self._get_sp_display_name(idp, requester)
internal_req = InternalData(
subject_id=name_id_value,
subject_type=name_id_format,
requester=requester,
requester_name=requester_name,
)
idp_policy = idp.config.getattr("policy", "idp")
if idp_policy:
internal_req.attributes = self._get_approved_attributes(
idp, idp_policy, requester, context.state
)
return self.auth_req_callback_func(context, internal_req) |
Returns a list of approved attributes
:type idp: saml.server.Server
:type idp_policy: saml2.assertion.Policy
:type sp_entity_id: str
:type state: satosa.state.State
:rtype: list[str]
:param idp: The saml frontend idp server
:param idp_policy: The idp policy
:param sp_entity_id: The requesting sp entity id
:param state: The current state
:return: A list containing approved attributes | def _get_approved_attributes(self, idp, idp_policy, sp_entity_id, state):
"""
Returns a list of approved attributes
:type idp: saml.server.Server
:type idp_policy: saml2.assertion.Policy
:type sp_entity_id: str
:type state: satosa.state.State
:rtype: list[str]
:param idp: The saml frontend idp server
:param idp_policy: The idp policy
:param sp_entity_id: The requesting sp entity id
:param state: The current state
:return: A list containing approved attributes
"""
name_format = idp_policy.get_name_form(sp_entity_id)
attrconvs = idp.config.attribute_converters
idp_policy.acs = attrconvs
attribute_filter = []
for aconv in attrconvs:
if aconv.name_format == name_format:
all_attributes = {v: None for v in aconv._fro.values()}
attribute_filter = list(idp_policy.restrict(all_attributes, sp_entity_id, idp.metadata).keys())
break
attribute_filter = self.converter.to_internal_filter(self.attribute_profile, attribute_filter)
satosa_logging(logger, logging.DEBUG, "Filter: %s" % attribute_filter, state)
return attribute_filter |
See super class satosa.frontends.base.FrontendModule
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:type idp: saml.server.Server
:param context: The current context
:param internal_response: The internal response
:param idp: The saml frontend idp server
:return: A saml response | def _handle_authn_response(self, context, internal_response, idp):
"""
See super class satosa.frontends.base.FrontendModule
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:type idp: saml.server.Server
:param context: The current context
:param internal_response: The internal response
:param idp: The saml frontend idp server
:return: A saml response
"""
request_state = self.load_state(context.state)
resp_args = request_state["resp_args"]
sp_entity_id = resp_args["sp_entity_id"]
internal_response.attributes = self._filter_attributes(
idp, internal_response, context)
ava = self.converter.from_internal(
self.attribute_profile, internal_response.attributes)
auth_info = {}
if self.acr_mapping:
auth_info["class_ref"] = self.acr_mapping.get(
internal_response.auth_info.issuer, self.acr_mapping[""])
else:
auth_info["class_ref"] = internal_response.auth_info.auth_class_ref
auth_info["authn_auth"] = internal_response.auth_info.issuer
if self.custom_attribute_release:
custom_release = util.get_dict_defaults(
self.custom_attribute_release,
internal_response.auth_info.issuer,
sp_entity_id)
attributes_to_remove = custom_release.get("exclude", [])
for k in attributes_to_remove:
ava.pop(k, None)
nameid_value = internal_response.subject_id
nameid_format = subject_type_to_saml_nameid_format(
internal_response.subject_type
)
# If the backend did not receive a SAML <NameID> and so
# name_id is set to None then do not create a NameID instance.
# Instead pass None as the name name_id to the IdP server
# instance and it will use its configured policy to construct
# a <NameID>, with the default to create a transient <NameID>.
name_id = None if not nameid_value else NameID(
text=nameid_value,
format=nameid_format,
sp_name_qualifier=None,
name_qualifier=None,
)
dbgmsg = "returning attributes %s" % json.dumps(ava)
satosa_logging(logger, logging.DEBUG, dbgmsg, context.state)
policies = self.idp_config.get(
'service', {}).get('idp', {}).get('policy', {})
sp_policy = policies.get('default', {})
sp_policy.update(policies.get(sp_entity_id, {}))
sign_assertion = sp_policy.get('sign_assertion', False)
sign_response = sp_policy.get('sign_response', True)
sign_alg = sp_policy.get('sign_alg', 'SIG_RSA_SHA256')
digest_alg = sp_policy.get('digest_alg', 'DIGEST_SHA256')
# Construct arguments for method create_authn_response
# on IdP Server instance
args = {
'identity' : ava,
'name_id' : name_id,
'authn' : auth_info,
'sign_response' : sign_response,
'sign_assertion': sign_assertion,
}
# Add the SP details
args.update(**resp_args)
try:
args['sign_alg'] = getattr(xmldsig, sign_alg)
except AttributeError as e:
errmsg = "Unsupported sign algorithm %s" % sign_alg
satosa_logging(logger, logging.ERROR, errmsg, context.state)
raise Exception(errmsg) from e
else:
dbgmsg = "signing with algorithm %s" % args['sign_alg']
satosa_logging(logger, logging.DEBUG, dbgmsg, context.state)
try:
args['digest_alg'] = getattr(xmldsig, digest_alg)
except AttributeError as e:
errmsg = "Unsupported digest algorithm %s" % digest_alg
satosa_logging(logger, logging.ERROR, errmsg, context.state)
raise Exception(errmsg) from e
else:
dbgmsg = "using digest algorithm %s" % args['digest_alg']
satosa_logging(logger, logging.DEBUG, dbgmsg, context.state)
resp = idp.create_authn_response(**args)
http_args = idp.apply_binding(
resp_args["binding"], str(resp), resp_args["destination"],
request_state["relay_state"], response=True)
# Set the common domain cookie _saml_idp if so configured.
if self.config.get('common_domain_cookie'):
self._set_common_domain_cookie(internal_response, http_args, context)
del context.state[self.name]
return make_saml_response(resp_args["binding"], http_args) |
See super class satosa.frontends.base.FrontendModule
:type exception: satosa.exception.SATOSAAuthenticationError
:type idp: saml.server.Server
:rtype: satosa.response.Response
:param exception: The SATOSAAuthenticationError
:param idp: The saml frontend idp server
:return: A response | def _handle_backend_error(self, exception, idp):
"""
See super class satosa.frontends.base.FrontendModule
:type exception: satosa.exception.SATOSAAuthenticationError
:type idp: saml.server.Server
:rtype: satosa.response.Response
:param exception: The SATOSAAuthenticationError
:param idp: The saml frontend idp server
:return: A response
"""
loaded_state = self.load_state(exception.state)
relay_state = loaded_state["relay_state"]
resp_args = loaded_state["resp_args"]
error_resp = idp.create_error_response(resp_args["in_response_to"],
resp_args["destination"],
Exception(exception.message))
http_args = idp.apply_binding(resp_args["binding"], str(error_resp), resp_args["destination"], relay_state,
response=True)
satosa_logging(logger, logging.DEBUG, "HTTPargs: %s" % http_args, exception.state)
return make_saml_response(resp_args["binding"], http_args) |
Register methods to endpoints
:type providers: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
:param providers: A list of backend providers
:return: A list of endpoint/method pairs | def _register_endpoints(self, providers):
"""
Register methods to endpoints
:type providers: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
:param providers: A list of backend providers
:return: A list of endpoint/method pairs
"""
url_map = []
for endp_category in self.endpoints:
for binding, endp in self.endpoints[endp_category].items():
valid_providers = ""
for provider in providers:
valid_providers = "{}|^{}".format(valid_providers, provider)
valid_providers = valid_providers.lstrip("|")
parsed_endp = urlparse(endp)
url_map.append(("(%s)/%s$" % (valid_providers, parsed_endp.path),
functools.partial(self.handle_authn_request, binding_in=binding)))
if self.expose_entityid_endpoint():
parsed_entity_id = urlparse(self.idp.config.entityid)
url_map.append(("^{0}".format(parsed_entity_id.path[1:]),
self._metadata_endpoint))
return url_map |
Builds the final frontend module config
:type config: dict[str, Any]
:type providers: list[str]
:rtype: dict[str, Any]
:param config: The module config
:param providers: A list of backend names
:return: The final config | def _build_idp_config_endpoints(self, config, providers):
"""
Builds the final frontend module config
:type config: dict[str, Any]
:type providers: list[str]
:rtype: dict[str, Any]
:param config: The module config
:param providers: A list of backend names
:return: The final config
"""
# Add an endpoint to each provider
idp_endpoints = []
for endp_category in self.endpoints:
for func, endpoint in self.endpoints[endp_category].items():
for provider in providers:
_endpoint = "{base}/{provider}/{endpoint}".format(
base=self.base_url, provider=provider, endpoint=endpoint)
idp_endpoints.append((_endpoint, func))
config["service"]["idp"]["endpoints"][endp_category] = idp_endpoints
return config |
Loads approved endpoints to the config.
:type url_base: str
:type provider: str
:type target_entity_id: str
:rtype: dict[str, Any]
:param url_base: The proxy base url
:param provider: target backend name
:param target_entity_id: frontend target entity id
:return: IDP config with endpoints | def _load_endpoints_to_config(self, provider, target_entity_id, config=None):
"""
Loads approved endpoints to the config.
:type url_base: str
:type provider: str
:type target_entity_id: str
:rtype: dict[str, Any]
:param url_base: The proxy base url
:param provider: target backend name
:param target_entity_id: frontend target entity id
:return: IDP config with endpoints
"""
idp_conf = copy.deepcopy(config or self.idp_config)
for service, endpoint in self.endpoints.items():
idp_endpoints = []
for binding, path in endpoint.items():
url = "{base}/{provider}/{target_id}/{path}".format(
base=self.base_url, provider=provider,
target_id=target_entity_id, path=path)
idp_endpoints.append((url, binding))
idp_conf["service"]["idp"]["endpoints"][service] = idp_endpoints
return idp_conf |
Loads an idp server that accepts the target backend name in the endpoint url
ex: /<backend_name>/sso/redirect
:type context: The current context
:rtype: saml.server.Server
:param context:
:return: An idp server | def _load_idp_dynamic_endpoints(self, context):
"""
Loads an idp server that accepts the target backend name in the endpoint url
ex: /<backend_name>/sso/redirect
:type context: The current context
:rtype: saml.server.Server
:param context:
:return: An idp server
"""
target_entity_id = context.target_entity_id_from_path()
idp_conf_file = self._load_endpoints_to_config(context.target_backend, target_entity_id)
idp_config = IdPConfig().load(idp_conf_file, metadata_construction=False)
return Server(config=idp_config) |
Loads an idp server with the entity id saved in state
:type state: satosa.state.State
:rtype: saml.server.Server
:param state: The current state
:return: An idp server | def _load_idp_dynamic_entity_id(self, state):
"""
Loads an idp server with the entity id saved in state
:type state: satosa.state.State
:rtype: saml.server.Server
:param state: The current state
:return: An idp server
"""
# Change the idp entity id dynamically
idp_config_file = copy.deepcopy(self.idp_config)
idp_config_file["entityid"] = "{}/{}".format(self.idp_config["entityid"], state[self.name]["target_entity_id"])
idp_config = IdPConfig().load(idp_config_file, metadata_construction=False)
return Server(config=idp_config) |
Loads approved endpoints dynamically
See super class satosa.frontends.saml2.SAMLFrontend#handle_authn_request
:type context: satosa.context.Context
:type binding_in: str
:rtype: satosa.response.Response | def handle_authn_request(self, context, binding_in):
"""
Loads approved endpoints dynamically
See super class satosa.frontends.saml2.SAMLFrontend#handle_authn_request
:type context: satosa.context.Context
:type binding_in: str
:rtype: satosa.response.Response
"""
target_entity_id = context.target_entity_id_from_path()
target_entity_id = urlsafe_b64decode(target_entity_id).decode()
context.decorate(Context.KEY_TARGET_ENTITYID, target_entity_id)
idp = self._load_idp_dynamic_endpoints(context)
return self._handle_authn_request(context, binding_in, idp) |
Adds the frontend idp entity id to state
See super class satosa.frontends.saml2.SAMLFrontend#save_state
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, dict[str, str] | str] | def _create_state_data(self, context, resp_args, relay_state):
"""
Adds the frontend idp entity id to state
See super class satosa.frontends.saml2.SAMLFrontend#save_state
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, dict[str, str] | str]
"""
state = super()._create_state_data(context, resp_args, relay_state)
state["target_entity_id"] = context.target_entity_id_from_path()
return state |
Loads the frontend entity id dynamically.
See super class satosa.frontends.saml2.SAMLFrontend#handle_backend_error
:type exception: satosa.exception.SATOSAAuthenticationError
:rtype: satosa.response.Response | def handle_backend_error(self, exception):
"""
Loads the frontend entity id dynamically.
See super class satosa.frontends.saml2.SAMLFrontend#handle_backend_error
:type exception: satosa.exception.SATOSAAuthenticationError
:rtype: satosa.response.Response
"""
idp = self._load_idp_dynamic_entity_id(exception.state)
return self._handle_backend_error(exception, idp) |
See super class satosa.frontends.base.FrontendModule#handle_authn_response
:param context:
:param internal_response:
:return: | def handle_authn_response(self, context, internal_response):
"""
See super class satosa.frontends.base.FrontendModule#handle_authn_response
:param context:
:param internal_response:
:return:
"""
idp = self._load_idp_dynamic_entity_id(context.state)
return self._handle_authn_response(context, internal_response, idp) |
See super class satosa.frontends.base.FrontendModule#register_endpoints
:type providers: list[str]
:rtype list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))] |
list[(str, (satosa.context.Context) -> satosa.response.Response)]
:param providers: A list with backend names
:return: A list of url and endpoint function pairs | def _register_endpoints(self, providers):
"""
See super class satosa.frontends.base.FrontendModule#register_endpoints
:type providers: list[str]
:rtype list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))] |
list[(str, (satosa.context.Context) -> satosa.response.Response)]
:param providers: A list with backend names
:return: A list of url and endpoint function pairs
"""
url_map = []
for endp_category in self.endpoints:
for binding, endp in self.endpoints[endp_category].items():
valid_providers = "|^".join(providers)
parsed_endp = urlparse(endp)
url_map.append(("(^%s)/\S+/%s" % (valid_providers, parsed_endp.path),
functools.partial(self.handle_authn_request, binding_in=binding)))
return url_map |
Adds the CO name to state
See super class satosa.frontends.saml2.SAMLFrontend#save_state
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, dict[str, str] | str] | def _create_state_data(self, context, resp_args, relay_state):
"""
Adds the CO name to state
See super class satosa.frontends.saml2.SAMLFrontend#save_state
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, dict[str, str] | str]
"""
state = super()._create_state_data(context, resp_args, relay_state)
state[self.KEY_CO_NAME] = context.get_decoration(self.KEY_CO_NAME)
return state |
The CO name is URL encoded and obtained from the request path
for a request coming into one of the standard binding endpoints.
For example the HTTP-Redirect binding request path will have the
format
{base}/{backend}/{co_name}/sso/redirect
:type context: satosa.context.Context
:rtype: str
:param context: | def _get_co_name_from_path(self, context):
"""
The CO name is URL encoded and obtained from the request path
for a request coming into one of the standard binding endpoints.
For example the HTTP-Redirect binding request path will have the
format
{base}/{backend}/{co_name}/sso/redirect
:type context: satosa.context.Context
:rtype: str
:param context:
"""
url_encoded_co_name = context.path.split("/")[1]
co_name = unquote_plus(url_encoded_co_name)
return co_name |
Obtain the CO name previously saved in the request state, or if not set
use the request path obtained from the current context to determine
the target CO.
:type context: The current context
:rtype: string
:param context: The current context
:return: CO name | def _get_co_name(self, context):
"""
Obtain the CO name previously saved in the request state, or if not set
use the request path obtained from the current context to determine
the target CO.
:type context: The current context
:rtype: string
:param context: The current context
:return: CO name
"""
try:
co_name = context.state[self.name][self.KEY_CO_NAME]
logger.debug("Found CO {} from state".format(co_name))
except KeyError:
co_name = self._get_co_name_from_path(context)
logger.debug("Found CO {} from request path".format(co_name))
return co_name |
Use the request path from the context to determine the target backend,
then construct mappings from bindings to endpoints for the virtual
IdP for the CO.
The endpoint URLs have the form
{base}/{backend}/{co_name}/{path}
:type config: satosa.satosa_config.SATOSAConfig
:type co_name: str
:type backend_name: str
:rtype: satosa.satosa_config.SATOSAConfig
:param config: satosa proxy config
:param co_name: CO name
:param backend_name: The target backend name
:return: config with mappings for CO IdP | def _add_endpoints_to_config(self, config, co_name, backend_name):
"""
Use the request path from the context to determine the target backend,
then construct mappings from bindings to endpoints for the virtual
IdP for the CO.
The endpoint URLs have the form
{base}/{backend}/{co_name}/{path}
:type config: satosa.satosa_config.SATOSAConfig
:type co_name: str
:type backend_name: str
:rtype: satosa.satosa_config.SATOSAConfig
:param config: satosa proxy config
:param co_name: CO name
:param backend_name: The target backend name
:return: config with mappings for CO IdP
"""
for service, endpoint in self.endpoints.items():
idp_endpoints = []
for binding, path in endpoint.items():
url = "{base}/{backend}/{co_name}/{path}".format(
base=self.base_url,
backend=backend_name,
co_name=quote_plus(co_name),
path=path)
mapping = (url, binding)
idp_endpoints.append(mapping)
# Overwrite the IdP config with the CO specific mappings between
# SAML binding and URL endpoints.
config["service"]["idp"]["endpoints"][service] = idp_endpoints
return config |
Use the CO name to construct the entity ID for the virtual IdP
for the CO.
The entity ID has the form
{base_entity_id}/{co_name}
:type config: satosa.satosa_config.SATOSAConfig
:type co_name: str
:rtype: satosa.satosa_config.SATOSAConfig
:param config: satosa proxy config
:param co_name: CO name
:return: config with updated entity ID | def _add_entity_id(self, config, co_name):
"""
Use the CO name to construct the entity ID for the virtual IdP
for the CO.
The entity ID has the form
{base_entity_id}/{co_name}
:type config: satosa.satosa_config.SATOSAConfig
:type co_name: str
:rtype: satosa.satosa_config.SATOSAConfig
:param config: satosa proxy config
:param co_name: CO name
:return: config with updated entity ID
"""
base_entity_id = config['entityid']
co_entity_id = "{}/{}".format(base_entity_id, quote_plus(co_name))
config['entityid'] = co_entity_id
return config |
Overlay configuration details like organization and contact person
from the front end configuration onto the IdP configuration to
support SAML metadata generation.
:type config: satosa.satosa_config.SATOSAConfig
:type co_name: str
:rtype: satosa.satosa_config.SATOSAConfig
:param config: satosa proxy config
:param co_name: CO name
:return: config with updated details for SAML metadata | def _overlay_for_saml_metadata(self, config, co_name):
"""
Overlay configuration details like organization and contact person
from the front end configuration onto the IdP configuration to
support SAML metadata generation.
:type config: satosa.satosa_config.SATOSAConfig
:type co_name: str
:rtype: satosa.satosa_config.SATOSAConfig
:param config: satosa proxy config
:param co_name: CO name
:return: config with updated details for SAML metadata
"""
for co in self.config[self.KEY_CO]:
if co[self.KEY_ENCODEABLE_NAME] == co_name:
break
key = self.KEY_ORGANIZATION
if key in co:
if key not in config:
config[key] = {}
for org_key in self.KEY_ORGANIZATION_KEYS:
if org_key in co[key]:
config[key][org_key] = co[key][org_key]
key = self.KEY_CONTACT_PERSON
if key in co:
config[key] = co[key]
return config |
Parse the configuration for the names of the COs for which to
construct virtual IdPs.
:rtype: [str]
:return: list of CO names | def _co_names_from_config(self):
"""
Parse the configuration for the names of the COs for which to
construct virtual IdPs.
:rtype: [str]
:return: list of CO names
"""
co_names = [co[self.KEY_ENCODEABLE_NAME] for
co in self.config[self.KEY_CO]]
return co_names |
Create a virtual IdP to represent the CO.
:type context: The current context
:rtype: saml.server.Server
:param context:
:return: An idp server | def _create_co_virtual_idp(self, context):
"""
Create a virtual IdP to represent the CO.
:type context: The current context
:rtype: saml.server.Server
:param context:
:return: An idp server
"""
co_name = self._get_co_name(context)
context.decorate(self.KEY_CO_NAME, co_name)
# Verify that we are configured for this CO. If the CO was not
# configured most likely the endpoint used was not registered and
# SATOSA core code threw an exception before getting here, but we
# include this check in case later the regex used to register the
# endpoints is relaxed.
co_names = self._co_names_from_config()
if co_name not in co_names:
msg = "CO {} not in configured list of COs {}".format(co_name,
co_names)
satosa_logging(logger, logging.WARN, msg, context.state)
raise SATOSAError(msg)
# Make a copy of the general IdP config that we will then overwrite
# with mappings between SAML bindings and CO specific URL endpoints,
# and the entityID for the CO virtual IdP.
backend_name = context.target_backend
idp_config = copy.deepcopy(self.idp_config)
idp_config = self._add_endpoints_to_config(idp_config,
co_name,
backend_name)
idp_config = self._add_entity_id(idp_config, co_name)
# Use the overwritten IdP config to generate a pysaml2 config object
# and from it a server object.
pysaml2_idp_config = IdPConfig().load(idp_config,
metadata_construction=False)
server = Server(config=pysaml2_idp_config)
return server |
See super class satosa.frontends.base.FrontendModule#register_endpoints
Endpoints have the format
{base}/{backend}/{co_name}/{binding path}
For example the HTTP-Redirect binding request path will have the
format
{base}/{backend}/{co_name}/sso/redirect
:type providers: list[str]
:rtype list[(str, ((satosa.context.Context, Any) ->
satosa.response.Response, Any))] |
list[(str, (satosa.context.Context) ->
satosa.response.Response)]
:param backend_names: A list of backend names
:return: A list of url and endpoint function pairs | def _register_endpoints(self, backend_names):
"""
See super class satosa.frontends.base.FrontendModule#register_endpoints
Endpoints have the format
{base}/{backend}/{co_name}/{binding path}
For example the HTTP-Redirect binding request path will have the
format
{base}/{backend}/{co_name}/sso/redirect
:type providers: list[str]
:rtype list[(str, ((satosa.context.Context, Any) ->
satosa.response.Response, Any))] |
list[(str, (satosa.context.Context) ->
satosa.response.Response)]
:param backend_names: A list of backend names
:return: A list of url and endpoint function pairs
"""
# Create a regex pattern that will match any of the CO names. We
# escape special characters like '+' and '.' that are valid
# characters in an URL encoded string.
co_names = self._co_names_from_config()
url_encoded_co_names = [re.escape(quote_plus(name)) for name in
co_names]
co_name_pattern = "|".join(url_encoded_co_names)
# Create a regex pattern that will match any of the backend names.
backend_url_pattern = "|^".join(backend_names)
logger.debug("Input backend names are {}".format(backend_names))
logger.debug("Created backend regex '{}'".format(backend_url_pattern))
# Hold a list of tuples containing URL regex patterns and the callables
# that handle them.
url_to_callable_mappings = []
# Loop over IdP endpoint categories, e.g., single_sign_on_service.
for endpoint_category in self.endpoints:
logger.debug("Examining endpoint category {}".format(
endpoint_category))
# For each endpoint category loop of the bindings and their
# assigned endpoints.
for binding, endpoint in self.endpoints[endpoint_category].items():
logger.debug("Found binding {} and endpoint {}".format(binding,
endpoint))
# Parse out the path from the endpoint.
endpoint_path = urlparse(endpoint).path
logger.debug("Using path {}".format(endpoint_path))
# Use the backend URL pattern and the endpoint path to create
# a regex that will match and that includes a pattern for
# matching the URL encoded CO name.
regex_pattern = "(^{})/({})/{}".format(
backend_url_pattern,
co_name_pattern,
endpoint_path)
logger.debug("Created URL regex {}".format(regex_pattern))
# Map the regex pattern to a callable.
the_callable = functools.partial(self.handle_authn_request,
binding_in=binding)
logger.debug("Created callable {}".format(the_callable))
mapping = (regex_pattern, the_callable)
url_to_callable_mappings.append(mapping)
logger.debug("Adding mapping {}".format(mapping))
return url_to_callable_mappings |
:param get_state: Generates a state to be used in authentication call
:type get_state: Callable[[str, bytes], str]
:type context: satosa.context.Context
:type internal_request: satosa.internal.InternalData
:rtype satosa.response.Redirect | def start_auth(self, context, internal_request, get_state=stateID):
"""
:param get_state: Generates a state to be used in authentication call
:type get_state: Callable[[str, bytes], str]
:type context: satosa.context.Context
:type internal_request: satosa.internal.InternalData
:rtype satosa.response.Redirect
"""
request_args = dict(
client_id=self.config['client_config']['client_id'],
redirect_uri=self.redirect_url,
scope=' '.join(self.config['scope']), )
cis = self.consumer.construct_AuthorizationRequest(
request_args=request_args)
return Redirect(cis.request(self.consumer.authorization_endpoint)) |
Returns a SAML metadata entity (IdP) descriptor for a configured OAuth/OpenID Connect Backend.
:param entity_id: If entity_id is None, the id will be retrieved from the config
:type entity_id: str
:param config: The backend module config
:type config: dict[str, Any]
:return: metadata description
:rtype: satosa.metadata_creation.description.MetadataDescription | def get_metadata_desc_for_oauth_backend(entity_id, config):
"""
Returns a SAML metadata entity (IdP) descriptor for a configured OAuth/OpenID Connect Backend.
:param entity_id: If entity_id is None, the id will be retrieved from the config
:type entity_id: str
:param config: The backend module config
:type config: dict[str, Any]
:return: metadata description
:rtype: satosa.metadata_creation.description.MetadataDescription
"""
metadata_description = []
entity_id = urlsafe_b64encode(entity_id.encode("utf-8")).decode("utf-8")
description = MetadataDescription(entity_id)
if "entity_info" in config:
entity_info = config["entity_info"]
# Add contact person information
for contact_person in entity_info.get("contact_person", []):
person = ContactPersonDesc()
if 'contact_type' in contact_person:
person.contact_type = contact_person['contact_type']
for address in contact_person.get('email_address', []):
person.add_email_address(address)
if 'given_name' in contact_person:
person.given_name = contact_person['given_name']
if 'sur_name' in contact_person:
person.sur_name = contact_person['sur_name']
description.add_contact_person(person)
# Add organization information
if "organization" in entity_info:
organization_info = entity_info["organization"]
organization = OrganizationDesc()
for name_info in organization_info.get("organization_name", []):
organization.add_name(name_info[0], name_info[1])
for display_name_info in organization_info.get("organization_display_name", []):
organization.add_display_name(display_name_info[0], display_name_info[1])
for url_info in organization_info.get("organization_url", []):
organization.add_url(url_info[0], url_info[1])
description.organization = organization
# Add ui information
if "ui_info" in entity_info:
ui_info = entity_info["ui_info"]
ui_description = UIInfoDesc()
for desc in ui_info.get("description", []):
ui_description.add_description(desc[0], desc[1])
for name in ui_info.get("display_name", []):
ui_description.add_display_name(name[0], name[1])
for logo in ui_info.get("logo", []):
ui_description.add_logo(logo["image"], logo["width"], logo["height"], logo["lang"])
description.ui_info = ui_description
metadata_description.append(description)
return metadata_description |
See super class method satosa.backends.base#start_auth
:param get_state: Generates a state to be used in the authentication call.
:type get_state: Callable[[str, bytes], str]
:type context: satosa.context.Context
:type internal_request: satosa.internal.InternalData
:rtype satosa.response.Redirect | def start_auth(self, context, internal_request, get_state=stateID):
"""
See super class method satosa.backends.base#start_auth
:param get_state: Generates a state to be used in the authentication call.
:type get_state: Callable[[str, bytes], str]
:type context: satosa.context.Context
:type internal_request: satosa.internal.InternalData
:rtype satosa.response.Redirect
"""
oauth_state = get_state(self.config["base_url"], rndstr().encode())
state_data = dict(state=oauth_state)
context.state[self.name] = state_data
request_args = {"redirect_uri": self.redirect_url, "state": oauth_state}
cis = self.consumer.construct_AuthorizationRequest(request_args=request_args)
return Redirect(cis.request(self.consumer.authorization_endpoint)) |
Will verify the state and throw and error if the state is invalid.
:type resp: AuthorizationResponse
:type state_data: dict[str, str]
:type state: satosa.state.State
:param resp: The authorization response from the AS, created by pyoidc.
:param state_data: The state data for this backend.
:param state: The current state for the proxy and this backend.
Only used for raising errors. | def _verify_state(self, resp, state_data, state):
"""
Will verify the state and throw and error if the state is invalid.
:type resp: AuthorizationResponse
:type state_data: dict[str, str]
:type state: satosa.state.State
:param resp: The authorization response from the AS, created by pyoidc.
:param state_data: The state data for this backend.
:param state: The current state for the proxy and this backend.
Only used for raising errors.
"""
is_known_state = "state" in resp and "state" in state_data and resp["state"] == state_data["state"]
if not is_known_state:
received_state = resp.get("state", "")
satosa_logging(logger, logging.DEBUG,
"Missing or invalid state [%s] in response!" % received_state, state)
raise SATOSAAuthenticationError(state,
"Missing or invalid state [%s] in response!" %
received_state) |
Handles the authentication response from the AS.
:type context: satosa.context.Context
:rtype: satosa.response.Response
:param context: The context in SATOSA
:return: A SATOSA response. This method is only responsible to call the callback function
which generates the Response object. | def _authn_response(self, context):
"""
Handles the authentication response from the AS.
:type context: satosa.context.Context
:rtype: satosa.response.Response
:param context: The context in SATOSA
:return: A SATOSA response. This method is only responsible to call the callback function
which generates the Response object.
"""
state_data = context.state[self.name]
aresp = self.consumer.parse_response(AuthorizationResponse, info=json.dumps(context.request))
self._verify_state(aresp, state_data, context.state)
rargs = {"code": aresp["code"], "redirect_uri": self.redirect_url,
"state": state_data["state"]}
atresp = self.consumer.do_access_token_request(request_args=rargs, state=aresp["state"])
if "verify_accesstoken_state" not in self.config or self.config["verify_accesstoken_state"]:
self._verify_state(atresp, state_data, context.state)
user_info = self.user_information(atresp["access_token"])
internal_response = InternalData(auth_info=self.auth_info(context.request))
internal_response.attributes = self.converter.to_internal(self.external_type, user_info)
internal_response.subject_id = user_info[self.user_id_attr]
del context.state[self.name]
return self.auth_callback_func(context, internal_response) |
Will retrieve the user information data for the authenticated user.
:type access_token: str
:rtype: dict[str, str]
:param access_token: The access token to be used to retrieve the data.
:return: Dictionary with attribute name as key and attribute value as value. | def user_information(self, access_token):
"""
Will retrieve the user information data for the authenticated user.
:type access_token: str
:rtype: dict[str, str]
:param access_token: The access token to be used to retrieve the data.
:return: Dictionary with attribute name as key and attribute value as value.
"""
payload = {'access_token': access_token}
url = "https://graph.facebook.com/v2.5/me"
if self.config["fields"]:
payload["fields"] = ",".join(self.config["fields"])
resp = requests.get(url, params=payload)
data = json.loads(resp.text)
try:
picture_url = data["picture"]["data"]["url"]
data["picture"] = picture_url
except KeyError as e:
pass
return data |
Creates a SAML response.
:param binding: SAML response binding
:param http_args: http arguments
:return: response.Response | def make_saml_response(binding, http_args):
"""
Creates a SAML response.
:param binding: SAML response binding
:param http_args: http arguments
:return: response.Response
"""
if binding == BINDING_HTTP_REDIRECT:
headers = dict(http_args["headers"])
return SeeOther(str(headers["Location"]))
return Response(http_args["data"], headers=http_args["headers"]) |
Hashes a value together with a salt with the given hash algorithm.
:type salt: str
:type hash_alg: str
:type value: str
:param salt: hash salt
:param hash_alg: the hash algorithm to use (default: SHA512)
:param value: value to hash together with the salt
:return: hashed value | def hash_data(salt, value, hash_alg=None):
"""
Hashes a value together with a salt with the given hash algorithm.
:type salt: str
:type hash_alg: str
:type value: str
:param salt: hash salt
:param hash_alg: the hash algorithm to use (default: SHA512)
:param value: value to hash together with the salt
:return: hashed value
"""
hash_alg = hash_alg or 'sha512'
hasher = hashlib.new(hash_alg)
hasher.update(value.encode('utf-8'))
hasher.update(salt.encode('utf-8'))
value_hashed = hasher.hexdigest()
return value_hashed |
Returns a string of random ascii characters or digits
:type size: int
:type alphabet: str
:param size: The length of the string
:param alphabet: A string with characters.
:return: string | def rndstr(size=16, alphabet=""):
"""
Returns a string of random ascii characters or digits
:type size: int
:type alphabet: str
:param size: The length of the string
:param alphabet: A string with characters.
:return: string
"""
rng = random.SystemRandom()
if not alphabet:
alphabet = string.ascii_letters[0:52] + string.digits
return type(alphabet)().join(rng.choice(alphabet) for _ in range(size)) |
Construct and return a LDAP directory search filter value from the
candidate identifier.
Argument 'canidate' is a dictionary with one required key and
two optional keys:
key required value
--------------- -------- ---------------------------------
attribute_names Y list of identifier names
name_id_format N NameID format (string)
add_scope N "issuer_entityid" or other string
Argument 'data' is that object passed into the microservice
method process().
If the attribute_names list consists of more than one identifier
name then the values of the identifiers will be concatenated together
to create the filter value.
If one of the identifier names in the attribute_names is the string
'name_id' then the NameID value with format name_id_format
will be concatenated to the filter value.
If the add_scope key is present with value 'issuer_entityid' then the
entityID for the IdP will be concatenated to "scope" the value. If the
string is any other value it will be directly concatenated. | def _construct_filter_value(self, candidate, data):
"""
Construct and return a LDAP directory search filter value from the
candidate identifier.
Argument 'canidate' is a dictionary with one required key and
two optional keys:
key required value
--------------- -------- ---------------------------------
attribute_names Y list of identifier names
name_id_format N NameID format (string)
add_scope N "issuer_entityid" or other string
Argument 'data' is that object passed into the microservice
method process().
If the attribute_names list consists of more than one identifier
name then the values of the identifiers will be concatenated together
to create the filter value.
If one of the identifier names in the attribute_names is the string
'name_id' then the NameID value with format name_id_format
will be concatenated to the filter value.
If the add_scope key is present with value 'issuer_entityid' then the
entityID for the IdP will be concatenated to "scope" the value. If the
string is any other value it will be directly concatenated.
"""
context = self.context
attributes = data.attributes
satosa_logging(logger, logging.DEBUG, "Input attributes {}".format(attributes), context.state)
# Get the values configured list of identifier names for this candidate
# and substitute None if there are no values for a configured identifier.
values = []
for identifier_name in candidate['attribute_names']:
v = attributes.get(identifier_name, None)
if isinstance(v, list):
v = v[0]
values.append(v)
satosa_logging(logger, logging.DEBUG, "Found candidate values {}".format(values), context.state)
# If one of the configured identifier names is name_id then if there is also a configured
# name_id_format add the value for the NameID of that format if it was asserted by the IdP
# or else add the value None.
if 'name_id' in candidate['attribute_names']:
candidate_nameid_value = None
candidate_name_id_format = candidate.get('name_id_format')
name_id_value = data.subject_id
name_id_format = data.subject_type
if (
name_id_value
and candidate_name_id_format
and candidate_name_id_format == name_id_format
):
satosa_logging(logger, logging.DEBUG, "IdP asserted NameID {}".format(name_id_value), context.state)
candidate_nameid_value = name_id_value
# Only add the NameID value asserted by the IdP if it is not already
# in the list of values. This is necessary because some non-compliant IdPs
# have been known, for example, to assert the value of eduPersonPrincipalName
# in the value for SAML2 persistent NameID as well as asserting
# eduPersonPrincipalName.
if candidate_nameid_value not in values:
satosa_logging(logger, logging.DEBUG, "Added NameID {} to candidate values".format(candidate_nameid_value), context.state)
values.append(candidate_nameid_value)
else:
satosa_logging(logger, logging.WARN, "NameID {} value also asserted as attribute value".format(candidate_nameid_value), context.state)
# If no value was asserted by the IdP for one of the configured list of identifier names
# for this candidate then go onto the next candidate.
if None in values:
satosa_logging(logger, logging.DEBUG, "Candidate is missing value so skipping", context.state)
return None
# All values for the configured list of attribute names are present
# so we can create a value. Add a scope if configured
# to do so.
if 'add_scope' in candidate:
if candidate['add_scope'] == 'issuer_entityid':
scope = data.auth_info.issuer
else:
scope = candidate['add_scope']
satosa_logging(logger, logging.DEBUG, "Added scope {} to values".format(scope), context.state)
values.append(scope)
# Concatenate all values to create the filter value.
value = ''.join(values)
satosa_logging(logger, logging.DEBUG, "Constructed filter value {}".format(value), context.state)
return value |
Filter sensitive details like passwords from a configuration
dictionary. | def _filter_config(self, config, fields=None):
"""
Filter sensitive details like passwords from a configuration
dictionary.
"""
filter_fields_default = [
'bind_password',
'connection'
]
filter_fields = fields or filter_fields_default
return dict(
map(
lambda key: (key, '<hidden>' if key in filter_fields else config[key]),
config.keys()
)
) |
Use the input configuration to instantiate and return
a ldap3 Connection object. | def _ldap_connection_factory(self, config):
"""
Use the input configuration to instantiate and return
a ldap3 Connection object.
"""
ldap_url = config['ldap_url']
bind_dn = config['bind_dn']
bind_password = config['bind_password']
if not ldap_url:
raise LdapAttributeStoreError("ldap_url is not configured")
if not bind_dn:
raise LdapAttributeStoreError("bind_dn is not configured")
if not bind_password:
raise LdapAttributeStoreError("bind_password is not configured")
pool_size = config['pool_size']
pool_keepalive = config['pool_keepalive']
server = ldap3.Server(config['ldap_url'])
satosa_logging(logger, logging.DEBUG, "Creating a new LDAP connection", None)
satosa_logging(logger, logging.DEBUG, "Using LDAP URL {}".format(ldap_url), None)
satosa_logging(logger, logging.DEBUG, "Using bind DN {}".format(bind_dn), None)
satosa_logging(logger, logging.DEBUG, "Using pool size {}".format(pool_size), None)
satosa_logging(logger, logging.DEBUG, "Using pool keep alive {}".format(pool_keepalive), None)
try:
connection = ldap3.Connection(
server,
bind_dn,
bind_password,
auto_bind=True,
client_strategy=ldap3.REUSABLE,
pool_size=pool_size,
pool_keepalive=pool_keepalive
)
except LDAPException as e:
msg = "Caught exception when connecting to LDAP server: {}".format(e)
satosa_logging(logger, logging.ERROR, msg, None)
raise LdapAttributeStoreError(msg)
satosa_logging(logger, logging.DEBUG, "Successfully connected to LDAP server", None)
return connection |
Use a record found in LDAP to populate attributes. | def _populate_attributes(self, config, record, context, data):
"""
Use a record found in LDAP to populate attributes.
"""
search_return_attributes = config['search_return_attributes']
for attr in search_return_attributes.keys():
if attr in record["attributes"]:
if record["attributes"][attr]:
data.attributes[search_return_attributes[attr]] = record["attributes"][attr]
satosa_logging(
logger,
logging.DEBUG,
"Setting internal attribute {} with values {}".format(
search_return_attributes[attr],
record["attributes"][attr]
),
context.state
)
else:
satosa_logging(
logger,
logging.DEBUG,
"Not setting internal attribute {} because value {} is null or empty".format(
search_return_attributes[attr],
record["attributes"][attr]
),
context.state
) |
Use a record found in LDAP to populate input for
NameID generation. | def _populate_input_for_name_id(self, config, record, context, data):
"""
Use a record found in LDAP to populate input for
NameID generation.
"""
user_id = ""
user_id_from_attrs = config['user_id_from_attrs']
for attr in user_id_from_attrs:
if attr in record["attributes"]:
value = record["attributes"][attr]
if isinstance(value, list):
# Use a default sort to ensure some predictability since the
# LDAP directory server may return multi-valued attributes
# in any order.
value.sort()
user_id += "".join(value)
satosa_logging(
logger,
logging.DEBUG,
"Added attribute {} with values {} to input for NameID".format(attr, value),
context.state
)
else:
user_id += value
satosa_logging(
logger,
logging.DEBUG,
"Added attribute {} with value {} to input for NameID".format(attr, value),
context.state
)
if not user_id:
satosa_logging(
logger,
logging.WARNING,
"Input for NameID is empty so not overriding default",
context.state
)
else:
data.subject_id = user_id
satosa_logging(
logger,
logging.DEBUG,
"Input for NameID is {}".format(data.subject_id),
context.state
) |
Default interface for microservices. Process the input data for
the input context. | def process(self, context, data):
"""
Default interface for microservices. Process the input data for
the input context.
"""
self.context = context
# Find the entityID for the SP that initiated the flow.
try:
sp_entity_id = context.state.state_dict['SATOSA_BASE']['requester']
except KeyError as err:
satosa_logging(logger, logging.ERROR, "Unable to determine the entityID for the SP requester", context.state)
return super().process(context, data)
satosa_logging(logger, logging.DEBUG, "entityID for the SP requester is {}".format(sp_entity_id), context.state)
# Get the configuration for the SP.
if sp_entity_id in self.config.keys():
config = self.config[sp_entity_id]
else:
config = self.config['default']
satosa_logging(logger, logging.DEBUG, "Using config {}".format(self._filter_config(config)), context.state)
# Ignore this SP entirely if so configured.
if config['ignore']:
satosa_logging(logger, logging.INFO, "Ignoring SP {}".format(sp_entity_id), None)
return super().process(context, data)
# The list of values for the LDAP search filters that will be tried in order to find the
# LDAP directory record for the user.
filter_values = []
# Loop over the configured list of identifiers from the IdP to consider and find
# asserted values to construct the ordered list of values for the LDAP search filters.
for candidate in config['ordered_identifier_candidates']:
value = self._construct_filter_value(candidate, data)
# If we have constructed a non empty value then add it as the next filter value
# to use when searching for the user record.
if value:
filter_values.append(value)
satosa_logging(logger, logging.DEBUG, "Added search filter value {} to list of search filters".format(value), context.state)
# Initialize an empty LDAP record. The first LDAP record found using the ordered
# list of search filter values will be the record used.
record = None
try:
connection = config['connection']
for filter_val in filter_values:
if record:
break
search_filter = '({0}={1})'.format(config['ldap_identifier_attribute'], filter_val)
satosa_logging(logger, logging.DEBUG, "Constructed search filter {}".format(search_filter), context.state)
satosa_logging(logger, logging.DEBUG, "Querying LDAP server...", context.state)
message_id = connection.search(config['search_base'], search_filter, attributes=config['search_return_attributes'].keys())
responses = connection.get_response(message_id)[0]
satosa_logging(logger, logging.DEBUG, "Done querying LDAP server", context.state)
satosa_logging(logger, logging.DEBUG, "LDAP server returned {} records".format(len(responses)), context.state)
# for now consider only the first record found (if any)
if len(responses) > 0:
if len(responses) > 1:
satosa_logging(logger, logging.WARN, "LDAP server returned {} records using search filter value {}".format(len(responses), filter_val), context.state)
record = responses[0]
break
except LDAPException as err:
satosa_logging(logger, logging.ERROR, "Caught LDAP exception: {}".format(err), context.state)
except LdapAttributeStoreError as err:
satosa_logging(logger, logging.ERROR, "Caught LDAP Attribute Store exception: {}".format(err), context.state)
except Exception as err:
satosa_logging(logger, logging.ERROR, "Caught unhandled exception: {}".format(err), context.state)
else:
err = None
finally:
if err:
return super().process(context, data)
# Before using a found record, if any, to populate attributes
# clear any attributes incoming to this microservice if so configured.
if config['clear_input_attributes']:
satosa_logging(logger, logging.DEBUG, "Clearing values for these input attributes: {}".format(data.attributes), context.state)
data.attributes = {}
# Use a found record, if any, to populate attributes and input for NameID
if record:
satosa_logging(logger, logging.DEBUG, "Using record with DN {}".format(record["dn"]), context.state)
satosa_logging(logger, logging.DEBUG, "Record with DN {} has attributes {}".format(record["dn"], record["attributes"]), context.state)
# Populate attributes as configured.
self._populate_attributes(config, record, context, data)
# Populate input for NameID if configured. SATOSA core does the hashing of input
# to create a persistent NameID.
self._populate_input_for_name_id(config, record, context, data)
else:
satosa_logging(logger, logging.WARN, "No record found in LDAP so no attributes will be added", context.state)
on_ldap_search_result_empty = config['on_ldap_search_result_empty']
if on_ldap_search_result_empty:
# Redirect to the configured URL with
# the entityIDs for the target SP and IdP used by the user
# as query string parameters (URL encoded).
encoded_sp_entity_id = urllib.parse.quote_plus(sp_entity_id)
encoded_idp_entity_id = urllib.parse.quote_plus(data.auth_info.issuer)
url = "{}?sp={}&idp={}".format(on_ldap_search_result_empty, encoded_sp_entity_id, encoded_idp_entity_id)
satosa_logging(logger, logging.INFO, "Redirecting to {}".format(url), context.state)
return Redirect(url)
satosa_logging(logger, logging.DEBUG, "Returning data.attributes {}".format(str(data.attributes)), context.state)
return super().process(context, data) |
:type data: dict[str, str]
:rtype: satosa.internal.AuthenticationInformation
:param data: A dict representation of an AuthenticationInformation object
:return: An AuthenticationInformation object | def from_dict(cls, data):
"""
:type data: dict[str, str]
:rtype: satosa.internal.AuthenticationInformation
:param data: A dict representation of an AuthenticationInformation object
:return: An AuthenticationInformation object
"""
return cls(
auth_class_ref=data.get("auth_class_ref"),
timestamp=data.get("timestamp"),
issuer=data.get("issuer"),
) |
Converts an InternalData object to a dict
:rtype: dict[str, str]
:return: A dict representation of the object | def to_dict(self):
"""
Converts an InternalData object to a dict
:rtype: dict[str, str]
:return: A dict representation of the object
"""
data = {
"auth_info": self.auth_info.to_dict(),
"requester": self.requester,
"requester_name": self.requester_name,
"attributes": self.attributes,
"subject_id": self.subject_id,
"subject_type": self.subject_type,
}
data.update(
{
"user_id": self.subject_id,
"hash_type": self.subject_type,
"name_id": self.subject_id,
"approved_attributes": self.attributes,
}
)
return data |
:type data: dict[str, str]
:rtype: satosa.internal.InternalData
:param data: A dict representation of an InternalData object
:return: An InternalData object | def from_dict(cls, data):
"""
:type data: dict[str, str]
:rtype: satosa.internal.InternalData
:param data: A dict representation of an InternalData object
:return: An InternalData object
"""
auth_info = AuthenticationInformation.from_dict(
data.get("auth_info", {})
)
instance = cls(
auth_info=auth_info,
requester=data.get("requester"),
requester_name=data.get("requester_name"),
subject_id=data.get("subject_id"),
subject_type=data.get("subject_type"),
attributes=data.get("attributes"),
user_id=data.get("user_id"),
user_id_hash_type=data.get("hash_type"),
name_id=data.get("name_id"),
approved_attributes=data.get("approved_attributes"),
)
return instance |
Check that the configuration contains all necessary keys.
:type conf: dict
:rtype: None
:raise SATOSAConfigurationError: if the configuration is incorrect
:param conf: config to verify
:return: None | def _verify_dict(self, conf):
"""
Check that the configuration contains all necessary keys.
:type conf: dict
:rtype: None
:raise SATOSAConfigurationError: if the configuration is incorrect
:param conf: config to verify
:return: None
"""
if not conf:
raise SATOSAConfigurationError("Missing configuration or unknown format")
for key in SATOSAConfig.mandatory_dict_keys:
if key not in conf:
raise SATOSAConfigurationError("Missing key '%s' in config" % key)
for key in SATOSAConfig.sensitive_dict_keys:
if key not in conf and "SATOSA_{key}".format(key=key) not in os.environ:
raise SATOSAConfigurationError("Missing key '%s' from config and ENVIRONMENT" % key) |
Load config from yaml file or string
:type config_file: str
:rtype: dict
:param config_file: config to load. Can be file path or yaml string
:return: Loaded config | def _load_yaml(self, config_file):
"""
Load config from yaml file or string
:type config_file: str
:rtype: dict
:param config_file: config to load. Can be file path or yaml string
:return: Loaded config
"""
try:
with open(config_file) as f:
return yaml.safe_load(f.read())
except yaml.YAMLError as exc:
logger.error("Could not parse config as YAML: {}", str(exc))
if hasattr(exc, 'problem_mark'):
mark = exc.problem_mark
logger.error("Error position: (%s:%s)" % (mark.line + 1, mark.column + 1))
except IOError as e:
logger.debug("Could not open config file: {}", str(e))
return None |
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log | def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
if state is None:
session_id = "UNKNOWN"
else:
try:
session_id = state[LOGGER_STATE_KEY]
except KeyError:
session_id = uuid4().urn
state[LOGGER_STATE_KEY] = session_id
logger.log(level, "[{id}] {msg}".format(id=session_id, msg=message), **kwargs) |
Will modify the context.target_backend attribute based on the requester identifier.
:param context: request context
:param data: the internal request | def process(self, context, data):
"""
Will modify the context.target_backend attribute based on the requester identifier.
:param context: request context
:param data: the internal request
"""
context.target_backend = self.requester_mapping[data.requester]
return super().process(context, data) |
Endpoint for handling consent service response
:type context: satosa.context.Context
:rtype: satosa.response.Response
:param context: response context
:return: response | def _handle_consent_response(self, context):
"""
Endpoint for handling consent service response
:type context: satosa.context.Context
:rtype: satosa.response.Response
:param context: response context
:return: response
"""
consent_state = context.state[STATE_KEY]
saved_resp = consent_state["internal_resp"]
internal_response = InternalData.from_dict(saved_resp)
hash_id = self._get_consent_id(internal_response.requester, internal_response.subject_id,
internal_response.attributes)
try:
consent_attributes = self._verify_consent(hash_id)
except ConnectionError as e:
satosa_logging(logger, logging.ERROR,
"Consent service is not reachable, no consent given.", context.state)
# Send an internal_response without any attributes
consent_attributes = None
if consent_attributes is None:
satosa_logging(logger, logging.INFO, "Consent was NOT given", context.state)
# If consent was not given, then don't send any attributes
consent_attributes = []
else:
satosa_logging(logger, logging.INFO, "Consent was given", context.state)
internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_attributes)
return self._end_consent(context, internal_response) |
Manage consent and attribute filtering
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype: satosa.response.Response
:param context: response context
:param internal_response: the response
:return: response | def process(self, context, internal_response):
"""
Manage consent and attribute filtering
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype: satosa.response.Response
:param context: response context
:param internal_response: the response
:return: response
"""
consent_state = context.state[STATE_KEY]
internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_state["filter"])
id_hash = self._get_consent_id(internal_response.requester, internal_response.subject_id,
internal_response.attributes)
try:
# Check if consent is already given
consent_attributes = self._verify_consent(id_hash)
except requests.exceptions.ConnectionError as e:
satosa_logging(logger, logging.ERROR,
"Consent service is not reachable, no consent given.", context.state)
# Send an internal_response without any attributes
internal_response.attributes = {}
return self._end_consent(context, internal_response)
# Previous consent was given
if consent_attributes is not None:
satosa_logging(logger, logging.DEBUG, "Previous consent was given", context.state)
internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_attributes)
return self._end_consent(context, internal_response)
# No previous consent, request consent by user
return self._approve_new_consent(context, internal_response, id_hash) |
Get a hashed id based on requester, user id and filtered attributes
:type requester: str
:type user_id: str
:type filtered_attr: dict[str, str]
:param requester: The calling requester
:param user_id: The authorized user id
:param filtered_attr: a list containing all attributes to be sent
:return: an id | def _get_consent_id(self, requester, user_id, filtered_attr):
"""
Get a hashed id based on requester, user id and filtered attributes
:type requester: str
:type user_id: str
:type filtered_attr: dict[str, str]
:param requester: The calling requester
:param user_id: The authorized user id
:param filtered_attr: a list containing all attributes to be sent
:return: an id
"""
filtered_attr_key_list = sorted(filtered_attr.keys())
hash_str = ""
for key in filtered_attr_key_list:
_hash_value = "".join(sorted(filtered_attr[key]))
hash_str += key + _hash_value
id_string = "%s%s%s" % (requester, user_id, hash_str)
return urlsafe_b64encode(hashlib.sha512(id_string.encode("utf-8")).hexdigest().encode("utf-8")).decode("utf-8") |
Register a request at the consent service
:type consent_args: dict
:rtype: str
:param consent_args: All necessary parameters for the consent request
:return: Ticket received from the consent service | def _consent_registration(self, consent_args):
"""
Register a request at the consent service
:type consent_args: dict
:rtype: str
:param consent_args: All necessary parameters for the consent request
:return: Ticket received from the consent service
"""
jws = JWS(json.dumps(consent_args), alg=self.signing_key.alg).sign_compact([self.signing_key])
request = "{}/creq/{}".format(self.api_url, jws)
res = requests.get(request)
if res.status_code != 200:
raise UnexpectedResponseError("Consent service error: %s %s", res.status_code, res.text)
return res.text |
Connects to the consent service using the REST api and checks if the user has given consent
:type consent_id: str
:rtype: Optional[List[str]]
:param consent_id: An id associated to the authenticated user, the calling requester and
attributes to be sent.
:return: list attributes given which have been approved by user consent | def _verify_consent(self, consent_id):
"""
Connects to the consent service using the REST api and checks if the user has given consent
:type consent_id: str
:rtype: Optional[List[str]]
:param consent_id: An id associated to the authenticated user, the calling requester and
attributes to be sent.
:return: list attributes given which have been approved by user consent
"""
request = "{}/verify/{}".format(self.api_url, consent_id)
res = requests.get(request)
if res.status_code == 200:
return json.loads(res.text)
return None |
Clear the state for consent and end the consent step
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype: satosa.response.Response
:param context: response context
:param internal_response: the response
:return: response | def _end_consent(self, context, internal_response):
"""
Clear the state for consent and end the consent step
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype: satosa.response.Response
:param context: response context
:param internal_response: the response
:return: response
"""
del context.state[STATE_KEY]
return super().process(context, internal_response) |
Construct and return a primary identifier value from the
data asserted by the IdP using the ordered list of candidates
from the configuration. | def constructPrimaryIdentifier(self, data, ordered_identifier_candidates):
"""
Construct and return a primary identifier value from the
data asserted by the IdP using the ordered list of candidates
from the configuration.
"""
logprefix = PrimaryIdentifier.logprefix
context = self.context
attributes = data.attributes
satosa_logging(logger, logging.DEBUG, "{} Input attributes {}".format(logprefix, attributes), context.state)
value = None
for candidate in ordered_identifier_candidates:
satosa_logging(logger, logging.DEBUG, "{} Considering candidate {}".format(logprefix, candidate), context.state)
# Get the values asserted by the IdP for the configured list of attribute names for this candidate
# and substitute None if the IdP did not assert any value for a configured attribute.
values = [ attributes.get(attribute_name, [None])[0] for attribute_name in candidate['attribute_names'] ]
satosa_logging(logger, logging.DEBUG, "{} Found candidate values {}".format(logprefix, values), context.state)
# If one of the configured attribute names is name_id then if there is also a configured
# name_id_format add the value for the NameID of that format if it was asserted by the IdP
# or else add the value None.
if 'name_id' in candidate['attribute_names']:
candidate_nameid_value = None
candidate_nameid_value = None
candidate_name_id_format = candidate.get('name_id_format')
name_id_value = data.subject_id
name_id_format = data.subject_type
if (
name_id_value
and candidate_name_id_format
and candidate_name_id_format == name_id_format
):
satosa_logging(logger, logging.DEBUG, "{} IdP asserted NameID {}".format(logprefix, name_id_value), context.state)
candidate_nameid_value = name_id_value
# Only add the NameID value asserted by the IdP if it is not already
# in the list of values. This is necessary because some non-compliant IdPs
# have been known, for example, to assert the value of eduPersonPrincipalName
# in the value for SAML2 persistent NameID as well as asserting
# eduPersonPrincipalName.
if candidate_nameid_value not in values:
satosa_logging(logger, logging.DEBUG, "{} Added NameID {} to candidate values".format(logprefix, candidate_nameid_value), context.state)
values.append(candidate_nameid_value)
else:
satosa_logging(logger, logging.WARN, "{} NameID {} value also asserted as attribute value".format(logprefix, candidate_nameid_value), context.state)
# If no value was asserted by the IdP for one of the configured list of attribute names
# for this candidate then go onto the next candidate.
if None in values:
satosa_logging(logger, logging.DEBUG, "{} Candidate is missing value so skipping".format(logprefix), context.state)
continue
# All values for the configured list of attribute names are present
# so we can create a primary identifer. Add a scope if configured
# to do so.
if 'add_scope' in candidate:
if candidate['add_scope'] == 'issuer_entityid':
scope = data.auth_info.issuer
else:
scope = candidate['add_scope']
satosa_logging(logger, logging.DEBUG, "{} Added scope {} to values".format(logprefix, scope), context.state)
values.append(scope)
# Concatenate all values to create the primary identifier.
value = ''.join(values)
break
return value |
Saves a state to a cookie
:type state: satosa.state.State
:type name: str
:type path: str
:type encryption_key: str
:rtype: http.cookies.SimpleCookie
:param state: The state to save
:param name: Name identifier of the cookie
:param path: Endpoint path the cookie will be associated to
:param encryption_key: Key to encrypt the state information
:return: A cookie | def state_to_cookie(state, name, path, encryption_key):
"""
Saves a state to a cookie
:type state: satosa.state.State
:type name: str
:type path: str
:type encryption_key: str
:rtype: http.cookies.SimpleCookie
:param state: The state to save
:param name: Name identifier of the cookie
:param path: Endpoint path the cookie will be associated to
:param encryption_key: Key to encrypt the state information
:return: A cookie
"""
cookie_data = "" if state.delete else state.urlstate(encryption_key)
max_age = 0 if state.delete else STATE_COOKIE_MAX_AGE
satosa_logging(logger, logging.DEBUG,
"Saving state as cookie, secure: %s, max-age: %s, path: %s" %
(STATE_COOKIE_SECURE, STATE_COOKIE_MAX_AGE, path), state)
cookie = SimpleCookie()
cookie[name] = cookie_data
cookie[name]["secure"] = STATE_COOKIE_SECURE
cookie[name]["path"] = path
cookie[name]["max-age"] = max_age
return cookie |
Loads a state from a cookie
:type cookie_str: str
:type name: str
:type encryption_key: str
:rtype: satosa.state.State
:param cookie_str: string representation of cookie/s
:param name: Name identifier of the cookie
:param encryption_key: Key to encrypt the state information
:return: A state | def cookie_to_state(cookie_str, name, encryption_key):
"""
Loads a state from a cookie
:type cookie_str: str
:type name: str
:type encryption_key: str
:rtype: satosa.state.State
:param cookie_str: string representation of cookie/s
:param name: Name identifier of the cookie
:param encryption_key: Key to encrypt the state information
:return: A state
"""
try:
cookie = SimpleCookie(cookie_str)
state = State(cookie[name].value, encryption_key)
except KeyError as e:
msg_tmpl = 'No cookie named {name} in {data}'
msg = msg_tmpl.format(name=name, data=cookie_str)
logger.exception(msg)
raise SATOSAStateError(msg) from e
except ValueError as e:
msg_tmpl = 'Failed to process {name} from {data}'
msg = msg_tmpl.format(name=name, data=cookie_str)
logger.exception(msg)
raise SATOSAStateError(msg) from e
else:
msg_tmpl = 'Loading state from cookie {data}'
msg = msg_tmpl.format(data=cookie_str)
satosa_logging(logger, logging.DEBUG, msg, state)
return state |
Encryptes the parameter raw.
:type raw: bytes
:rtype: str
:param: bytes to be encrypted.
:return: A base 64 encoded string. | def encrypt(self, raw):
"""
Encryptes the parameter raw.
:type raw: bytes
:rtype: str
:param: bytes to be encrypted.
:return: A base 64 encoded string.
"""
raw = self._pad(raw)
iv = Random.new().read(AES.block_size)
cipher = AES.new(self.key, AES.MODE_CBC, iv)
return base64.urlsafe_b64encode(iv + cipher.encrypt(raw)) |
Will padd the param to be of the correct length for the encryption alg.
:type b: bytes
:rtype: bytes | def _pad(self, b):
"""
Will padd the param to be of the correct length for the encryption alg.
:type b: bytes
:rtype: bytes
"""
return b + (self.bs - len(b) % self.bs) * chr(self.bs - len(b) % self.bs).encode("UTF-8") |
Will return a url safe representation of the state.
:type encryption_key: Key used for encryption.
:rtype: str
:return: Url representation av of the state. | def urlstate(self, encryption_key):
"""
Will return a url safe representation of the state.
:type encryption_key: Key used for encryption.
:rtype: str
:return: Url representation av of the state.
"""
lzma = LZMACompressor()
urlstate_data = json.dumps(self._state_dict)
urlstate_data = lzma.compress(urlstate_data.encode("UTF-8"))
urlstate_data += lzma.flush()
urlstate_data = _AESCipher(encryption_key).encrypt(urlstate_data)
lzma = LZMACompressor()
urlstate_data = lzma.compress(urlstate_data)
urlstate_data += lzma.flush()
urlstate_data = base64.urlsafe_b64encode(urlstate_data)
return urlstate_data.decode("utf-8") |
Returns a deepcopy of the state
:rtype: satosa.state.State
:return: A copy of the state | def copy(self):
"""
Returns a deepcopy of the state
:rtype: satosa.state.State
:return: A copy of the state
"""
state_copy = State()
state_copy._state_dict = copy.deepcopy(self._state_dict)
return state_copy |
Translate pySAML2 name format to satosa format
:type name_format: str
:rtype: satosa.internal_data.UserIdHashType
:param name_format: SAML2 name format
:return: satosa format | def saml_name_id_format_to_hash_type(name_format):
"""
Translate pySAML2 name format to satosa format
:type name_format: str
:rtype: satosa.internal_data.UserIdHashType
:param name_format: SAML2 name format
:return: satosa format
"""
msg = "saml_name_id_format_to_hash_type is deprecated and will be removed."
_warnings.warn(msg, DeprecationWarning)
name_id_format_to_hash_type = {
NAMEID_FORMAT_TRANSIENT: UserIdHashType.transient,
NAMEID_FORMAT_PERSISTENT: UserIdHashType.persistent,
NAMEID_FORMAT_EMAILADDRESS: UserIdHashType.emailaddress,
NAMEID_FORMAT_UNSPECIFIED: UserIdHashType.unspecified,
}
return name_id_format_to_hash_type.get(
name_format, UserIdHashType.transient
) |
Translate satosa format to pySAML2 name format
:type hash_type: satosa.internal_data.UserIdHashType
:rtype: str
:param hash_type: satosa format
:return: pySAML2 name format | def hash_type_to_saml_name_id_format(hash_type):
"""
Translate satosa format to pySAML2 name format
:type hash_type: satosa.internal_data.UserIdHashType
:rtype: str
:param hash_type: satosa format
:return: pySAML2 name format
"""
msg = "hash_type_to_saml_name_id_format is deprecated and will be removed."
_warnings.warn(msg, DeprecationWarning)
hash_type_to_name_id_format = {
UserIdHashType.transient: NAMEID_FORMAT_TRANSIENT,
UserIdHashType.persistent: NAMEID_FORMAT_PERSISTENT,
UserIdHashType.emailaddress: NAMEID_FORMAT_EMAILADDRESS,
UserIdHashType.unspecified: NAMEID_FORMAT_UNSPECIFIED,
}
return hash_type_to_name_id_format.get(hash_type, NAMEID_FORMAT_PERSISTENT) |