sentence1 | sentence2 | label
---|---|---
def read(path, mode='tsv'):
''' Helper function to read Document in TTL-TXT format (i.e. ${docname}_*.txt)
E.g. read('~/data/myfile') is the same as Document('myfile', '~/data/').read()
'''
if mode == 'tsv':
return TxtReader.from_path(path).read()
elif mode == 'json':
return read_json(path)
else:
raise Exception("Invalid mode - [{}] was provided".format(mode)) | Helper function to read Document in TTL-TXT format (i.e. ${docname}_*.txt)
E.g. read('~/data/myfile') is the same as Document('myfile', '~/data/').read() | entailment |
def write(path, doc, mode=MODE_TSV, **kwargs):
''' Helper function to write doc to TTL-TXT format '''
if mode == MODE_TSV:
with TxtWriter.from_path(path) as writer:
writer.write_doc(doc)
elif mode == MODE_JSON:
write_json(path, doc, **kwargs) | Helper function to write doc to TTL-TXT format | entailment |
def tcmap(self):
''' Create a tokens-concepts map '''
tcmap = dd(list)
for concept in self.__concept_map.values():
for w in concept.tokens:
tcmap[w].append(concept)
return tcmap | Create a tokens-concepts map | entailment |
def msw(self):
''' Return a generator of tokens with more than one sense. '''
return (t for t, c in self.tcmap().items() if len(c) > 1) | Return a generator of tokens with more than one sense. | entailment |
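A usage sketch of how tcmap() and msw() interact; the sentence text, the synset-style tags, and the token index are illustrative, while Sentence, import_tokens(), new_concept(), and surface() are the helpers defined in this module:

# Sketch: two concepts attached to the same token make it a "msw" token
sent = Sentence("I saw a dog", ID=1)
sent.import_tokens(["I", "saw", "a", "dog"])
sent.new_concept("02084071-n", clemma="dog", tokens=[3])  # animal sense
sent.new_concept("10114209-n", clemma="dog", tokens=[3])  # person sense
for token in sent.msw():              # only "dog" has more than one sense
    print(sent.surface(token), [c.tag for c in sent.tcmap()[token]])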
def surface(self, tag):
''' Get surface string that is associated with a tag object '''
if tag.cfrom is not None and tag.cto is not None and tag.cfrom >= 0 and tag.cto >= 0:
return self.text[tag.cfrom:tag.cto]
else:
return '' | Get surface string that is associated with a tag object | entailment |
def new_tag(self, label, cfrom=-1, cto=-1, tagtype='', **kwargs):
''' Create a sentence-level tag '''
tag_obj = Tag(label, cfrom, cto, tagtype=tagtype, **kwargs)
return self.add_tag(tag_obj) | Create a sentence-level tag | entailment |
def get_tag(self, tagtype):
''' Get the first tag of a particular type'''
for tag in self.__tags:
if tag.tagtype == tagtype:
return tag
return None | Get the first tag of a particular type | entailment |
def get_tags(self, tagtype):
''' Get all tags of a type '''
return [t for t in self.__tags if t.tagtype == tagtype] | Get all tags of a type | entailment |
def add_token_object(self, token):
''' Add a token object into this sentence '''
token.sent = self # take ownership of given token
self.__tokens.append(token)
return token | Add a token object into this sentence | entailment |
def new_concept(self, tag, clemma="", tokens=None, cidx=None, **kwargs):
''' Create a new concept object and add it to concept list
tokens can be a list of Token objects or token indices
'''
if cidx is None:
cidx = self.new_concept_id()
if tokens:
tokens = (t if isinstance(t, Token) else self[t] for t in tokens)
c = Concept(cidx=cidx, tag=tag, clemma=clemma, sent=self, tokens=tokens, **kwargs)
return self.add_concept(c) | Create a new concept object and add it to concept list
tokens can be a list of Token objects or token indices | entailment |
def add_concept(self, concept_obj):
''' Add a concept to current concept list '''
if concept_obj is None:
raise Exception("Concept object cannot be None")
elif concept_obj in self.__concepts:
raise Exception("Concept object is already inside")
elif concept_obj.cidx in self.__concept_map:
raise Exception("Duplicated concept ID ({})".format(concept_obj.cidx))
self.__concepts.append(concept_obj)
self.__concept_map[concept_obj.cidx] = concept_obj
concept_obj.sent = self
return concept_obj | Add a concept to current concept list | entailment |
def concept(self, cid, **kwargs):
''' Get concept by concept ID '''
if cid not in self.__concept_map:
if 'default' in kwargs:
return kwargs['default']
else:
raise KeyError("Invalid cid")
else:
return self.__concept_map[cid] | Get concept by concept ID | entailment |
def import_tokens(self, tokens, import_hook=None, ignorecase=True):
    ''' Import a list of strings as tokens '''
text = self.text.lower() if ignorecase else self.text
has_hooker = import_hook and callable(import_hook)
cfrom = 0
if self.__tokens:
for tk in self.__tokens:
if tk.cfrom and tk.cfrom > cfrom:
cfrom = tk.cfrom
for token in tokens:
if has_hooker:
import_hook(token)
to_find = token.lower() if ignorecase else token
start = text.find(to_find, cfrom)
        # normalize Stanford-parser quote tokens (``/'' and `/') to
        # the plain quote characters that may appear in the text
if to_find == '``' or to_find == "''":
start_dq = text.find('"', cfrom)
if start_dq > -1 and (start == -1 or start > start_dq):
to_find = '"'
start = start_dq
if to_find == '`' or to_find == "'":
start_dq = text.find("'", cfrom)
if start_dq > -1 and (start == -1 or start > start_dq):
to_find = "'"
start = start_dq
if start == -1:
raise LookupError('Cannot find token `{t}` in sent `{s}`({l}) from {i} ({p})'.format(t=token, s=self.text, l=len(self.text), i=cfrom, p=self.text[cfrom:cfrom + 20]))
cfrom = start
cto = cfrom + len(to_find)
self.new_token(token, cfrom, cto)
        cfrom = cto - 1 | Import a list of strings as tokens | entailment |
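A short sketch of the offset alignment performed above, including the Stanford-parser quote fallback; the sentence content is made up for illustration:

sent = Sentence('He said "go" now.', ID=2)
sent.import_tokens(["He", "said", "``", "go", "''", "now", "."])
# `` and '' do not occur in the text, so the fallback matches the plain "
for tk in sent.tokens:
    print(tk.cfrom, tk.cto, sent.surface(tk))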
def tag_map(self):
''' Build a map from tagtype to list of tags '''
tm = dd(list)
for tag in self.__tags:
tm[tag.tagtype].append(tag)
return tm | Build a map from tagtype to list of tags | entailment |
def find(self, tagtype, **kwargs):
'''Get the first tag with a type in this token '''
for t in self.__tags:
if t.tagtype == tagtype:
return t
if 'default' in kwargs:
return kwargs['default']
else:
raise LookupError("Token {} is not tagged with the speficied tagtype ({})".format(self, tagtype)) | Get the first tag with a type in this token | entailment |
def find_all(self, tagtype):
''' Find all token-level tags with the specified tagtype '''
return [t for t in self.__tags if t.tagtype == tagtype] | Find all token-level tags with the specified tagtype | entailment |
def new_tag(self, label, cfrom=None, cto=None, tagtype=None, **kwargs):
''' Create a new tag on this token '''
if cfrom is None:
cfrom = self.cfrom
if cto is None:
cto = self.cto
tag = Tag(label=label, cfrom=cfrom, cto=cto, tagtype=tagtype, **kwargs)
return self.add_tag(tag) | Create a new tag on this token | entailment |
def get(self, sent_id, **kwargs):
    ''' If sent_id exists, return the associated sentence object, else return default.
    If no default is provided, KeyError will be raised.'''
if sent_id is not None and not isinstance(sent_id, int):
sent_id = int(sent_id)
if sent_id is None or not self.has_id(sent_id):
if 'default' in kwargs:
return kwargs['default']
else:
raise KeyError("Invalid sentence ID ({})".format(sent_id))
    return self.__sent_map[sent_id] | If sent_id exists, return the associated sentence object, else return default.
If no default is provided, KeyError will be raised. | entailment |
def add_sent(self, sent_obj):
''' Add a ttl.Sentence object to this document '''
if sent_obj is None:
raise Exception("Sentence object cannot be None")
elif sent_obj.ID is None:
# if sentID is None, create a new ID
sent_obj.ID = next(self.__idgen)
elif self.has_id(sent_obj.ID):
raise Exception("Sentence ID {} exists".format(sent_obj.ID))
self.__sent_map[sent_obj.ID] = sent_obj
self.__sents.append(sent_obj)
return sent_obj | Add a ttl.Sentence object to this document | entailment |
def new_sent(self, text, ID=None, **kwargs):
''' Create a new sentence and add it to this Document '''
if ID is None:
ID = next(self.__idgen)
return self.add_sent(Sentence(text, ID=ID, **kwargs)) | Create a new sentence and add it to this Document | entailment |
def pop(self, sent_id, **kwargs):
''' If sent_id exists, remove and return the associated sentence object else return default.
If no default is provided, KeyError will be raised.'''
if sent_id is not None and not isinstance(sent_id, int):
sent_id = int(sent_id)
if not self.has_id(sent_id):
if 'default' in kwargs:
return kwargs['default']
else:
raise KeyError("Sentence ID {} does not exist".format(sent_id))
else:
# sentence exists ...
sent_obj = self.get(sent_id)
self.__sent_map.pop(sent_id)
self.__sents.remove(sent_obj)
return sent_obj | If sent_id exists, remove and return the associated sentence object else return default.
If no default is provided, KeyError will be raised. | entailment |
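A quick sketch contrasting get() and pop(); the document name, path, and sentence IDs are made up for illustration:

doc = Document("demo", "/tmp/")
doc.new_sent("Hello world.", ID=10)
doc.get(10)                 # look up without removing
doc.get(99, default=None)   # default avoids the KeyError
doc.pop(10)                 # removes the sentence and returns it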
def read(self):
    ''' Read tagged doc from multiple files (sents, tokens, concepts, links, tags) '''
    warnings.warn("Document.read() is deprecated and will be removed in the near future.", DeprecationWarning)
with TxtReader.from_doc(self) as reader:
reader.read(self)
    return self | Read tagged doc from multiple files (sents, tokens, concepts, links, tags) | entailment |
def read_ttl(path):
''' Helper function to read Document in TTL-TXT format (i.e. ${docname}_*.txt)
E.g. Document.read_ttl('~/data/myfile') is the same as Document('myfile', '~/data/').read()
'''
warnings.warn("Document.read_ttl() is deprecated and will be removed in near future. Use read() instead", DeprecationWarning)
doc_path = os.path.dirname(path)
doc_name = os.path.basename(path)
return Document(doc_name, doc_path).read() | Helper function to read Document in TTL-TXT format (i.e. ${docname}_*.txt)
E.g. Document.read_ttl('~/data/myfile') is the same as Document('myfile', '~/data/').read() | entailment |
def read(self, doc=None):
    ''' Read tagged doc from multiple files (sents, tokens, concepts, links, tags) '''
if not self.sent_stream:
raise Exception("There is no sentence data stream available")
if doc is None:
doc = Document(name=self.doc_name, path=self.doc_path)
for row in self.sent_reader():
if len(row) == 2:
sid, text = row
doc.new_sent(text.strip(), ID=sid)
elif len(row) == 4:
sid, text, flag, comment = row
sent = doc.new_sent(text.strip(), ID=sid)
sent.flag = flag
sent.comment = comment
# Read tokens if available
if self.token_stream:
# read all tokens first
sent_tokens_map = dd(list)
for token_row in self.token_reader():
if len(token_row) == 6:
sid, wid, token, lemma, pos, comment = token_row
else:
sid, wid, token, lemma, pos = token_row
comment = ''
sid = int(sid)
sent_tokens_map[sid].append((token, lemma, pos.strip(), wid, comment))
# TODO: verify wid?
# import tokens
for sent in doc:
sent_tokens = sent_tokens_map[sent.ID]
sent.import_tokens([x[0] for x in sent_tokens])
for ((tk, lemma, pos, wid, comment), token) in zip(sent_tokens, sent.tokens):
token.pos = pos
token.lemma = lemma
token.comment = comment
# only read concepts if tokens are available
if self.concept_stream:
# read concepts
for concept_row in self.concept_reader():
if len(concept_row) == 5:
sid, cid, clemma, tag, comment = concept_row
else:
sid, cid, clemma, tag = concept_row
comment = ''
cid = int(cid)
doc.get(sid).new_concept(tag.strip(), clemma=clemma, cidx=cid, comment=comment)
# only read concept-token links if tokens and concepts are available
for sid, cid, wid in self.link_reader():
sent = doc.get(sid)
cid = int(cid)
wid = int(wid.strip())
sent.concept(cid).add_token(sent[wid])
# read sentence level tags
if self.tag_stream:
for row in self.tag_reader():
if len(row) == 5:
sid, cfrom, cto, label, tagtype = row
wid = None
if len(row) == 6:
sid, cfrom, cto, label, tagtype, wid = row
if cfrom:
cfrom = int(cfrom)
if cto:
cto = int(cto)
if wid is None or wid == '':
doc.get(sid).new_tag(label, cfrom, cto, tagtype=tagtype)
else:
doc.get(sid)[int(wid)].new_tag(label, cfrom, cto, tagtype=tagtype)
    return doc | Read tagged doc from multiple files (sents, tokens, concepts, links, tags) | entailment |
def format_page(text):
"""Format the text for output adding ASCII frame around the text.
Args:
text (str): Text that needs to be formatted.
Returns:
str: Formatted string.
"""
width = max(map(len, text.splitlines()))
page = "+-" + "-" * width + "-+\n"
for line in text.splitlines():
page += "| " + line.ljust(width) + " |\n"
page += "+-" + "-" * width + "-+\n"
    return page | Format the text for output, adding an ASCII frame around it.
Args:
text (str): Text that needs to be formatted.
Returns:
str: Formatted string. | entailment |
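For example, the frame produced for a two-line input:

print(format_page("Hello\nWorld"))
# +-------+
# | Hello |
# | World |
# +-------+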
def table(text):
"""Format the text as a table.
Text in format:
first | second
row 2 col 1 | 4
Will be formatted as::
+-------------+--------+
| first | second |
+-------------+--------+
| row 2 col 1 | 4 |
+-------------+--------+
Args:
text (str): Text that needs to be formatted.
Returns:
str: Formatted string.
"""
def table_bar(col_lengths):
return "+-%s-+%s" % (
"-+-".join(["-" * length for length in col_lengths]),
os.linesep,
)
rows = []
for line in text.splitlines():
rows.append([part.strip() for part in line.split("|")])
max_cols = max(map(len, rows))
col_lengths = [0] * max_cols
for row in rows:
cols = len(row)
if cols < max_cols:
row.extend([""] * (max_cols - cols))
for i, col in enumerate(row):
col_length = len(col)
if col_length > col_lengths[i]:
col_lengths[i] = col_length
text = table_bar(col_lengths)
    for i, row in enumerate(rows):
        cols = []
        for j, col in enumerate(row):
            cols.append(col.ljust(col_lengths[j]))
        text += "| %s |%s" % (" | ".join(cols), os.linesep)
        if i == 0:
            # bar below the header row, matching the docstring example
            text += table_bar(col_lengths)
    text += table_bar(col_lengths)
return text | Format the text as a table.
Text in format:
first | second
row 2 col 1 | 4
Will be formatted as::
+-------------+--------+
| first | second |
+-------------+--------+
| row 2 col 1 | 4 |
+-------------+--------+
Args:
text (str): Text that needs to be formatted.
Returns:
str: Formatted string. | entailment |
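With the header bar emitted after the first row, the example from the docstring renders as:

print(table("first | second\nrow 2 col 1 | 4"))
# +-------------+--------+
# | first       | second |
# +-------------+--------+
# | row 2 col 1 | 4      |
# +-------------+--------+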
def print_page(text):
"""Format the text and prints it on stdout.
    Text is formatted by adding an ASCII frame around it and coloring the text.
Colors can be added to text using color tags, for example:
My [FG_BLUE]blue[NORMAL] text.
My [BG_BLUE]blue background[NORMAL] text.
"""
color_re = re.compile(r"\[(?P<color>[FB]G_[A-Z_]+|NORMAL)\]")
width = max([len(strip_colors(x)) for x in text.splitlines()])
print("\n" + hbar(width))
for line in text.splitlines():
if line == "[HBAR]":
print(hbar(width))
continue
tail = width - len(strip_colors(line))
sys.stdout.write("| ")
previous = 0
end = len(line)
for match in color_re.finditer(line):
sys.stdout.write(line[previous : match.start()])
set_color(match.groupdict()["color"])
previous = match.end()
sys.stdout.write(line[previous:end])
sys.stdout.write(" " * tail + " |\n")
print(hbar(width)) | Format the text and prints it on stdout.
Text is formatted by adding an ASCII frame around it and coloring the text.
Colors can be added to text using color tags, for example:
My [FG_BLUE]blue[NORMAL] text.
My [BG_BLUE]blue background[NORMAL] text. | entailment |
def wrap_text(text, width=80):
"""Wrap text lines to maximum *width* characters.
Wrapped text is aligned against the left text border.
Args:
text (str): Text to wrap.
width (int): Maximum number of characters per line.
Returns:
str: Wrapped text.
"""
text = re.sub(r"\s+", " ", text).strip()
wrapper = TextWrapper(
width=width, break_long_words=False, replace_whitespace=True
)
return wrapper.fill(text) | Wrap text lines to maximum *width* characters.
Wrapped text is aligned against the left text border.
Args:
text (str): Text to wrap.
width (int): Maximum number of characters per line.
Returns:
str: Wrapped text. | entailment |
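For instance, runs of whitespace are collapsed before wrapping:

wrap_text("lorem  ipsum   dolor sit amet", width=11)
# -> 'lorem ipsum\ndolor sit\namet'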
def rjust_text(text, width=80, indent=0, subsequent=None):
"""Wrap text and adjust it to right border.
Same as L{wrap_text} with the difference that the text is aligned against
the right text border.
Args:
text (str): Text to wrap and align.
width (int): Maximum number of characters per line.
indent (int): Indentation of the first line.
subsequent (int or None): Indentation of all other lines, if it is
``None``, then the indentation will be same as for the first line.
"""
text = re.sub(r"\s+", " ", text).strip()
if subsequent is None:
subsequent = indent
wrapper = TextWrapper(
width=width,
break_long_words=False,
replace_whitespace=True,
initial_indent=" " * (indent + subsequent),
subsequent_indent=" " * subsequent,
)
return wrapper.fill(text)[subsequent:] | Wrap text and adjust it to right border.
Same as L{wrap_text} with the difference that the text is aligned against
the right text border.
Args:
text (str): Text to wrap and align.
width (int): Maximum number of characters per line.
indent (int): Indentation of the first line.
subsequent (int or None): Indentation of all other lines, if it is
``None``, then the indentation will be same as for the first line. | entailment |
def center_text(text, width=80):
"""Center all lines of the text.
    It is assumed that each line's width is smaller than B{width}, because the
    line width will not be checked.
Args:
text (str): Text to wrap.
width (int): Maximum number of characters per line.
Returns:
str: Centered text.
"""
centered = []
for line in text.splitlines():
centered.append(line.center(width))
return "\n".join(centered) | Center all lines of the text.
It is assumed that each line's width is smaller than B{width}, because the
line width will not be checked.
Args:
text (str): Text to wrap.
width (int): Maximum number of characters per line.
Returns:
str: Centered text. | entailment |
def check(qpi_or_h5file, checks=["attributes", "background"]):
"""Checks various properties of a :class:`qpimage.core.QPImage` instance
Parameters
----------
qpi_or_h5file: qpimage.core.QPImage or str
A QPImage object or a path to an hdf5 file
checks: list of str
Which checks to perform ("attributes" and/or "background")
Raises
------
IntegrityCheckError
if the checks fail
"""
if isinstance(checks, str):
checks = [checks]
for ch in checks:
if ch not in ["attributes", "background"]:
raise ValueError("Unknown check: {}".format(check))
if isinstance(qpi_or_h5file, QPImage):
qpi = qpi_or_h5file
else:
qpi = QPImage(h5file=qpi_or_h5file, h5mode="r")
# check attributes
if "attributes" in checks:
check_attributes(qpi)
# check background estimation
if "background" in checks:
check_background(qpi) | Checks various properties of a :class:`qpimage.core.QPImage` instance
Parameters
----------
qpi_or_h5file: qpimage.core.QPImage or str
A QPImage object or a path to an hdf5 file
checks: list of str
Which checks to perform ("attributes" and/or "background")
Raises
------
IntegrityCheckError
if the checks fail | entailment |
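A usage sketch; the file path is illustrative and IntegrityCheckError comes from the surrounding module:

try:
    check("/path/to/qpimage.h5", checks=["attributes", "background"])
except IntegrityCheckError as err:
    print("Integrity check failed:", err)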
def check_attributes(qpi):
"""Check QPimage attributes
Parameters
----------
qpi: qpimage.core.QPImage
Raises
------
IntegrityCheckError
if the check fails
"""
missing_attrs = []
for key in DATA_KEYS:
if key not in qpi.meta:
missing_attrs.append(key)
if missing_attrs:
msg = "Attributes are missing: {} ".format(missing_attrs) \
+ "in {}!".format(qpi)
        raise IntegrityCheckError(msg) | Check QPImage attributes
Parameters
----------
qpi: qpimage.core.QPImage
Raises
------
IntegrityCheckError
if the check fails | entailment |
def check_background(qpi):
"""Check QPimage background data
Parameters
----------
qpi: qpimage.core.QPImage
Raises
------
IntegrityCheckError
if the check fails
"""
for imdat in [qpi._amp, qpi._pha]:
try:
fit, attrs = imdat.get_bg(key="fit", ret_attrs=True)
except KeyError:
# No bg correction performed
pass
else:
kwargs = dict(attrs)
# check if we have a user-defined mask image
binkey = "estimate_bg_from_mask"
if binkey in imdat.h5:
kwargs["from_mask"] = imdat.h5[binkey][:]
else:
kwargs["from_mask"] = None
# compute background correction
with h5py.File("check.h5",
driver="core",
backing_store=False) as h5:
# imdat.__class__ is "Amplitude" or "Phase"
testimdat = imdat.__class__(h5)
testimdat["raw"] = imdat.raw
# Set experimental bg data if given
try:
bg = imdat.get_bg("data")
except KeyError:
pass
else:
testimdat.set_bg(bg, key="data")
# fit bg
testimdat.estimate_bg(**kwargs)
# compare
if not np.allclose(testimdat.get_bg(key="fit"), fit):
msg = "Wrong estimated (fitted) background!"
                    raise IntegrityCheckError(msg) | Check QPImage background data
Parameters
----------
qpi: qpimage.core.QPImage
Raises
------
IntegrityCheckError
if the check fails | entailment |
def write_image_dataset(group, key, data, h5dtype=None):
"""Write an image to an hdf5 group as a dataset
This convenience function sets all attributes such that the image
can be visualized with HDFView, sets the compression and fletcher32
filters, and sets the chunk size to the image shape.
Parameters
----------
group: h5py.Group
HDF5 group to store data to
key: str
Dataset identifier
data: np.ndarray of shape (M,N)
Image data to store
h5dtype: str
The datatype in which to store the image data. The default
is the datatype of `data`.
Returns
-------
dataset: h5py.Dataset
The created HDF5 dataset object
"""
if h5dtype is None:
h5dtype = data.dtype
if key in group:
del group[key]
if group.file.driver == "core":
kwargs = {}
else:
kwargs = {"fletcher32": True,
"chunks": data.shape}
kwargs.update(COMPRESSION)
dset = group.create_dataset(key,
data=data.astype(h5dtype),
**kwargs)
# Create and Set image attributes
# HDFView recognizes this as a series of images
dset.attrs.create('CLASS', b'IMAGE')
dset.attrs.create('IMAGE_VERSION', b'1.2')
dset.attrs.create('IMAGE_SUBCLASS', b'IMAGE_GRAYSCALE')
return dset | Write an image to an hdf5 group as a dataset
This convenience function sets all attributes such that the image
can be visualized with HDFView, sets the compression and fletcher32
filters, and sets the chunk size to the image shape.
Parameters
----------
group: h5py.Group
HDF5 group to store data to
key: str
Dataset identifier
data: np.ndarray of shape (M,N)
Image data to store
h5dtype: str
The datatype in which to store the image data. The default
is the datatype of `data`.
Returns
-------
dataset: h5py.Dataset
The created HDF5 dataset object | entailment |
def info(self):
"""list of background correction parameters"""
info = []
name = self.__class__.__name__.lower()
# get bg information
for key in VALID_BG_KEYS:
if key in self.h5["bg_data"]:
attrs = self.h5["bg_data"][key].attrs
for akey in attrs:
atr = attrs[akey]
var = "{} background {}".format(name, akey)
info.append((var, atr))
if "fit" in self.h5["bg_data"]:
# mask background
var_mask = "{} background from mask".format(name)
if ("estimate_bg_from_mask" in self.h5
and self.h5["estimate_bg_from_mask"] is not None):
# bg was computed from mask image
info.append((var_mask, True))
elif ("estimate_bg_from_binary" in self.h5
and self.h5["estimate_bg_from_binary"] is not None):
# bg was computed from mask image (old notation)
warnings.warn("Old file format detected!", DeprecationWarning)
info.append((var_mask, True))
else:
info.append((var_mask, False))
return info | list of background correction parameters | entailment |
def del_bg(self, key):
"""Remove the background image data
Parameters
----------
key: str
One of :const:`VALID_BG_KEYS`
"""
if key not in VALID_BG_KEYS:
raise ValueError("Invalid bg key: {}".format(key))
if key in self.h5["bg_data"]:
del self.h5["bg_data"][key]
else:
msg = "No bg data to clear for '{}' in {}.".format(key, self)
warnings.warn(msg) | Remove the background image data
Parameters
----------
key: str
One of :const:`VALID_BG_KEYS` | entailment |
def estimate_bg(self, fit_offset="mean", fit_profile="tilt",
border_px=0, from_mask=None, ret_mask=False):
"""Estimate image background
Parameters
----------
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The mask image must have the same shape as the
        input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the mask image used to compute the background.
Notes
-----
If both `border_px` and `from_mask` are given, the
intersection of the two resulting mask images is used.
The arguments passed to this method are stored in the
hdf5 file `self.h5` and are used for optional integrity
checking using `qpimage.integrity_check.check`.
See Also
--------
qpimage.bg_estimate.estimate
"""
# remove existing bg before accessing imdat.image
self.set_bg(bg=None, key="fit")
# compute bg
bgimage, mask = bg_estimate.estimate(data=self.image,
fit_offset=fit_offset,
fit_profile=fit_profile,
border_px=border_px,
from_mask=from_mask,
ret_mask=True)
attrs = {"fit_offset": fit_offset,
"fit_profile": fit_profile,
"border_px": border_px}
self.set_bg(bg=bgimage, key="fit", attrs=attrs)
# save `from_mask` separately (arrays vs. h5 attributes)
# (if `from_mask` is `None`, this will remove the array)
self["estimate_bg_from_mask"] = from_mask
# return mask image
if ret_mask:
return mask | Estimate image background
Parameters
----------
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The mask image must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the mask image used to compute the background.
Notes
-----
If both `border_px` and `from_mask` are given, the
intersection of the two resulting mask images is used.
The arguments passed to this method are stored in the
hdf5 file `self.h5` and are used for optional integrity
checking using `qpimage.integrity_check.check`.
See Also
--------
qpimage.bg_estimate.estimate | entailment |
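A sketch of a typical call, assuming `pha` is the internal Phase image-data instance of a QPImage (the variable name is illustrative):

mask = pha.estimate_bg(fit_offset="mean",
                       fit_profile="tilt",
                       border_px=10,
                       ret_mask=True)
# the returned boolean mask shows which pixels entered the fit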
def get_bg(self, key=None, ret_attrs=False):
"""Get the background data
Parameters
----------
key: None or str
A user-defined key that identifies the background data.
Examples are "data" for experimental data, or "fit"
for an estimated background correction
(see :const:`VALID_BG_KEYS`). If set to `None`,
returns the combined background image (:const:`ImageData.bg`).
ret_attrs: bool
Also returns the attributes of the background data.
"""
if key is None:
if ret_attrs:
raise ValueError("No attributes for combined background!")
return self.bg
else:
if key not in VALID_BG_KEYS:
raise ValueError("Invalid bg key: {}".format(key))
if key in self.h5["bg_data"]:
data = self.h5["bg_data"][key][:]
if ret_attrs:
attrs = dict(self.h5["bg_data"][key].attrs)
# remove keys for image visualization in hdf5 files
for h5k in ["CLASS", "IMAGE_VERSION", "IMAGE_SUBCLASS"]:
if h5k in attrs:
attrs.pop(h5k)
ret = (data, attrs)
else:
ret = data
else:
raise KeyError("No background data for {}!".format(key))
return ret | Get the background data
Parameters
----------
key: None or str
A user-defined key that identifies the background data.
Examples are "data" for experimental data, or "fit"
for an estimated background correction
(see :const:`VALID_BG_KEYS`). If set to `None`,
returns the combined background image (:const:`ImageData.bg`).
ret_attrs: bool
Also returns the attributes of the background data. | entailment |
def set_bg(self, bg, key="data", attrs={}):
"""Set the background data
Parameters
----------
bg: numbers.Real, 2d ndarray, ImageData, or h5py.Dataset
The background data. If `bg` is an `h5py.Dataset` object,
it must exist in the same hdf5 file (a hard link is created).
If set to `None`, the data will be removed.
key: str
        One of :const:`VALID_BG_KEYS`
    attrs: dict
        Dictionary of background attributes
See Also
--------
del_bg: removing background data
"""
if key not in VALID_BG_KEYS:
raise ValueError("Invalid bg key: {}".format(key))
# remove previous background key
if key in self.h5["bg_data"]:
del self.h5["bg_data"][key]
# set background
if isinstance(bg, (numbers.Real, np.ndarray)):
dset = write_image_dataset(group=self.h5["bg_data"],
key=key,
data=bg,
h5dtype=self.h5dtype)
for kw in attrs:
dset.attrs[kw] = attrs[kw]
elif isinstance(bg, h5py.Dataset):
# Create a hard link
# (This functionality was intended for saving memory when storing
# large QPSeries with universal background data, i.e. when using
# `QPSeries.add_qpimage` with the `bg_from_idx` keyword.)
self.h5["bg_data"][key] = bg
elif bg is not None:
msg = "Unknown background data type: {}".format(bg)
raise ValueError(msg) | Set the background data
Parameters
----------
bg: numbers.Real, 2d ndarray, ImageData, or h5py.Dataset
The background data. If `bg` is an `h5py.Dataset` object,
it must exist in the same hdf5 file (a hard link is created).
If set to `None`, the data will be removed.
key: str
One of :const:`VALID_BG_KEYS`
attrs: dict
Dictionary of background attributes
See Also
--------
del_bg: removing background data | entailment |
def _bg_combine(self, bgs):
"""Combine several background amplitude images"""
out = np.ones(self.h5["raw"].shape, dtype=float)
# bg is an h5py.DataSet
for bg in bgs:
out *= bg[:]
return out | Combine several background amplitude images | entailment |
def git_tags() -> str:
"""
Calls ``git tag -l --sort=-v:refname`` (sorts output) and returns the
output as a UTF-8 encoded string. Raises a NoGitTagsException if the
repository doesn't contain any Git tags.
"""
try:
subprocess.check_call(['git', 'fetch', '--tags'])
except CalledProcessError:
pass
cmd = ['git', 'tag', '--list', '--sort=-v:refname']
rv = subprocess.check_output(cmd).decode('utf-8')
if rv == '':
raise NoGitTagsException('No Git tags are present in current repo.')
return rv | Calls ``git tag -l --sort=-v:refname`` (sorts output) and returns the
output as a UTF-8 encoded string. Raises a NoGitTagsException if the
repository doesn't contain any Git tags. | entailment |
def git_tag_to_semver(git_tag: str) -> SemVer:
"""
:git_tag: A string representation of a Git tag.
Searches a Git tag's string representation for a SemVer, and returns that
as a SemVer object.
"""
pattern = re.compile(r'[0-9]+\.[0-9]+\.[0-9]+$')
match = pattern.search(git_tag)
if match:
version = match.group(0)
else:
raise InvalidTagFormatException('Tag passed contains no SemVer.')
return SemVer.from_str(version) | :git_tag: A string representation of a Git tag.
Searches a Git tag's string representation for a SemVer, and returns that
as a SemVer object. | entailment |
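For example (the tag names are made up):

git_tag_to_semver("release-1.4.2")   # -> SemVer for "1.4.2"
git_tag_to_semver("nightly-build")   # raises InvalidTagFormatException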
def last_git_release_tag(git_tags: str) -> str:
"""
:git_tags: chronos.helpers.git_tags() function output.
Returns the latest Git tag ending with a SemVer as a string.
"""
semver_re = re.compile(r'[0-9]+\.[0-9]+\.[0-9]+$')
str_ver = []
for i in git_tags.split():
if semver_re.search(i):
str_ver.append(i)
try:
return str_ver[0]
except IndexError:
raise NoGitTagsException | :git_tags: chronos.helpers.git_tags() function output.
Returns the latest Git tag ending with a SemVer as a string. | entailment |
def git_commits_since_last_tag(last_tag: str) -> dict:
"""
:last_tag: The Git tag that should serve as the starting point for the
commit log lookup.
Calls ``git log <last_tag>.. --format='%H %s'`` and returns the output as a
dict of hash-message pairs.
"""
try:
        cmd = ['git', 'log', last_tag + '..', '--format=%H %s']
commit_log = subprocess.check_output(cmd).decode('utf-8')
except CalledProcessError:
raise GitTagDoesNotExistError('No such tag:', last_tag)
if not commit_log:
raise NoGitCommitSinceLastTagException('No commits since last tag.')
pattern = re.compile(r'([a-f0-9]{40})\ (.*)')
rv = {}
for line in commit_log.split('\n'):
match = pattern.search(line)
if match:
commit_hash = match.group(1)
commit_msg = match.group(2)
rv[commit_hash] = commit_msg
return rv | :last_tag: The Git tag that should serve as the starting point for the
commit log lookup.
Calls ``git log <last_tag>.. --format='%H %s'`` and returns the output as a
dict of hash-message pairs. | entailment |
def parse_commit_log(commit_log: dict) -> str:
"""
:commit_log: chronos.helpers.git_commits_since_last_tag() output.
Parse Git log and return either 'maj', 'min', or 'pat'.
"""
rv = 'pat'
cc_patterns = patterns()
for value in commit_log.values():
if re.search(cc_patterns['feat'], value):
rv = 'min'
if re.search(cc_patterns['BREAKING CHANGE'], value):
rv = 'maj'
return rv | :commit_log: chronos.helpers.git_commits_since_last_tag() output.
Parse Git log and return either 'maj', 'min', or 'pat'. | entailment |
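A sketch with fabricated hashes, assuming patterns() matches conventional-commit prefixes such as "feat:" and the "BREAKING CHANGE" footer:

log = {"a" * 40: "feat: add --json output",
       "b" * 40: "fix: handle empty tag list"}
parse_commit_log(log)   # -> 'min'; would be 'maj' with a BREAKING CHANGE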
def from_str(cls, version_str: str):
"""
Alternate constructor that accepts a string SemVer.
"""
o = cls()
o.version = version_str
return o | Alternate constructor that accepts a string SemVer. | entailment |
def major(self, major: int) -> None:
"""
param major
Major version number property. Must be a non-negative integer.
"""
self.filter_negatives(major)
self._major = major | param major
Major version number property. Must be a non-negative integer. | entailment |
def minor(self, minor: int) -> None:
"""
param minor
Minor version number property. Must be a non-negative integer.
"""
self.filter_negatives(minor)
self._minor = minor | param minor
Minor version number property. Must be a non-negative integer. | entailment |
def patch(self, patch: int) -> None:
"""
param patch
Patch version number property. Must be a non-negative integer.
"""
self.filter_negatives(patch)
self._patch = patch | param patch
Patch version number property. Must be a non-negative integer. | entailment |
def version(self) -> str:
"""
    Version string property. Must be a string consisting of three
    non-negative integers delimited by periods (e.g. '1.0.1').
"""
version: str = (
str(self._major) + '.' +
str(self._minor) + '.' +
str(self._patch)
)
    return version | Version string property. Must be a string consisting of three
non-negative integers delimited by periods (e.g. '1.0.1'). | entailment |
def version(self, version_str: str) -> None:
"""
param version
    Version string property. Must be a string consisting of three
    non-negative integers delimited by periods (e.g. '1.0.1').
"""
ver = []
for i in version_str.split('.'):
ver.append(int(i))
self.filter_negatives(int(i))
self._major, self._minor, self._patch = ver[0], ver[1], ver[2] | param version
Version string property. Must be a string consisting of three
non-negative integers delimited by periods (e.g. '1.0.1'). | entailment |
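Round-tripping a version string through the property, as a sketch:

v = SemVer.from_str("2.7.1")
v.version             # -> '2.7.1'
v.version = "3.0.0"   # splits, validates, and stores the three components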
def estimate(data, fit_offset="mean", fit_profile="tilt",
border_px=0, from_mask=None, ret_mask=False):
"""Estimate the background value of an image
Parameters
----------
data: np.ndarray
Data from which to compute the background value
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The boolean mask must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the boolean mask used to compute the background.
Notes
-----
If both `border_px` and `from_mask` are given, the
intersection of the two is used, i.e. the positions
where both, the frame mask and `from_mask`, are
`True`.
"""
if fit_profile not in VALID_FIT_PROFILES:
msg = "`fit_profile` must be one of {}, got '{}'".format(
VALID_FIT_PROFILES,
fit_profile)
raise ValueError(msg)
if fit_offset not in VALID_FIT_OFFSETS:
msg = "`fit_offset` must be one of {}, got '{}'".format(
VALID_FIT_OFFSETS,
fit_offset)
raise ValueError(msg)
# initial mask image
if from_mask is not None:
assert isinstance(from_mask, np.ndarray)
mask = from_mask.copy()
else:
mask = np.ones_like(data, dtype=bool)
# multiply with border mask image (intersection)
if border_px > 0:
border_px = int(np.round(border_px))
mask_px = np.zeros_like(mask)
mask_px[:border_px, :] = True
mask_px[-border_px:, :] = True
mask_px[:, :border_px] = True
mask_px[:, -border_px:] = True
# intersection
np.logical_and(mask, mask_px, out=mask)
# compute background image
if fit_profile == "tilt":
bgimg = profile_tilt(data, mask)
elif fit_profile == "poly2o":
bgimg = profile_poly2o(data, mask)
else:
bgimg = np.zeros_like(data, dtype=float)
# add offsets
if fit_offset == "fit":
if fit_profile == "offset":
msg = "`fit_offset=='fit'` only valid when `fit_profile!='offset`"
raise ValueError(msg)
# nothing else to do here, using offset from fit
elif fit_offset == "gauss":
bgimg += offset_gaussian((data - bgimg)[mask])
elif fit_offset == "mean":
bgimg += np.mean((data - bgimg)[mask])
elif fit_offset == "mode":
bgimg += offset_mode((data - bgimg)[mask])
if ret_mask:
ret = (bgimg, mask)
else:
ret = bgimg
return ret | Estimate the background value of an image
Parameters
----------
data: np.ndarray
Data from which to compute the background value
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The boolean mask must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the boolean mask used to compute the background.
Notes
-----
If both `border_px` and `from_mask` are given, the
intersection of the two is used, i.e. the positions
where both, the frame mask and `from_mask`, are
`True`. | entailment |
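A minimal self-contained sketch: a pure synthetic tilt is recovered from a 5-pixel border (numpy is assumed to be imported as np, as elsewhere in the module):

x = np.arange(64).reshape(-1, 1)
y = np.arange(64).reshape(1, -1)
data = 0.1 * x + 0.05 * y + 1.5        # tilt plus offset, no specimen
bg = estimate(data, fit_profile="tilt", fit_offset="mean", border_px=5)
assert np.allclose(bg, data, atol=1e-6)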
def offset_gaussian(data):
"""Fit a gaussian model to `data` and return its center"""
nbins = 2 * int(np.ceil(np.sqrt(data.size)))
mind, maxd = data.min(), data.max()
drange = (mind - (maxd - mind) / 2, maxd + (maxd - mind) / 2)
histo = np.histogram(data, nbins, density=True, range=drange)
dx = abs(histo[1][1] - histo[1][2]) / 2
hx = histo[1][1:] - dx
hy = histo[0]
# fit gaussian
gauss = lmfit.models.GaussianModel()
pars = gauss.guess(hy, x=hx)
out = gauss.fit(hy, pars, x=hx)
return out.params["center"] | Fit a gaussian model to `data` and return its center | entailment |
def offset_mode(data):
"""Compute Mode using a histogram with `sqrt(data.size)` bins"""
nbins = int(np.ceil(np.sqrt(data.size)))
mind, maxd = data.min(), data.max()
histo = np.histogram(data, nbins, density=True, range=(mind, maxd))
dx = abs(histo[1][1] - histo[1][2]) / 2
hx = histo[1][1:] - dx
hy = histo[0]
idmax = np.argmax(hy)
return hx[idmax] | Compute Mode using a histogram with `sqrt(data.size)` bins | entailment |
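As a sketch, for a skewed sample the mode sits near the histogram peak rather than the mean:

rng = np.random.default_rng(47)
sample = rng.exponential(scale=1.0, size=10000)
offset_mode(sample)   # close to 0, whereas sample.mean() is close to 1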
def profile_tilt(data, mask):
"""Fit a 2D tilt to `data[mask]`"""
params = lmfit.Parameters()
params.add(name="mx", value=0)
params.add(name="my", value=0)
params.add(name="off", value=np.average(data[mask]))
fr = lmfit.minimize(tilt_residual, params, args=(data, mask))
bg = tilt_model(fr.params, data.shape)
return bg | Fit a 2D tilt to `data[mask]` | entailment |
def profile_poly2o(data, mask):
"""Fit a 2D 2nd order polynomial to `data[mask]`"""
# lmfit
params = lmfit.Parameters()
params.add(name="mx", value=0)
params.add(name="my", value=0)
params.add(name="mxy", value=0)
params.add(name="ax", value=0)
params.add(name="ay", value=0)
params.add(name="off", value=np.average(data[mask]))
fr = lmfit.minimize(poly2o_residual, params, args=(data, mask))
bg = poly2o_model(fr.params, data.shape)
return bg | Fit a 2D 2nd order polynomial to `data[mask]` | entailment |
def poly2o_model(params, shape):
"""lmfit 2nd order polynomial model"""
mx = params["mx"].value
my = params["my"].value
mxy = params["mxy"].value
ax = params["ax"].value
ay = params["ay"].value
off = params["off"].value
bg = np.zeros(shape, dtype=float) + off
x = np.arange(bg.shape[0]) - bg.shape[0] // 2
y = np.arange(bg.shape[1]) - bg.shape[1] // 2
x = x.reshape(-1, 1)
y = y.reshape(1, -1)
bg += ax * x**2 + ay * y**2 + mx * x + my * y + mxy * x * y
return bg | lmfit 2nd order polynomial model | entailment |
def poly2o_residual(params, data, mask):
"""lmfit 2nd order polynomial residuals"""
bg = poly2o_model(params, shape=data.shape)
res = (data - bg)[mask]
return res.flatten() | lmfit 2nd order polynomial residuals | entailment |
def tilt_model(params, shape):
"""lmfit tilt model"""
mx = params["mx"].value
my = params["my"].value
off = params["off"].value
bg = np.zeros(shape, dtype=float) + off
x = np.arange(bg.shape[0]) - bg.shape[0] // 2
y = np.arange(bg.shape[1]) - bg.shape[1] // 2
x = x.reshape(-1, 1)
y = y.reshape(1, -1)
bg += mx * x + my * y
return bg | lmfit tilt model | entailment |
def tilt_residual(params, data, mask):
"""lmfit tilt residuals"""
bg = tilt_model(params, shape=data.shape)
res = (data - bg)[mask]
return res.flatten() | lmfit tilt residuals | entailment |
def main(cmd_args: list = None) -> None:
"""
:cmd_args: An optional list of command line arguments.
    Main function of the chronos CLI tool.
"""
parser = argparse.ArgumentParser(description='Auto-versioning utility.')
subparsers = parser.add_subparsers()
infer_parser = subparsers.add_parser('infer', help='Infers next version.')
infer_parser.set_defaults(func=infer)
commit_parser = subparsers.add_parser('commit',
help='Makes release commit.')
commit_parser.set_defaults(func=commit)
bump_parser = subparsers.add_parser('bump', help='Bumps version.')
bump_parser.add_argument('type', nargs='?', default='patch',
choices=['patch', 'minor', 'major'],
help='The type of version to bump.')
bump_parser.set_defaults(func=bump)
try:
if cmd_args:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
args.func(args)
except AttributeError:
parser.print_help() | :cmd_args: An optional list of command line arguments.
Main function of the chronos CLI tool. | entailment |
def infer(args: argparse.Namespace) -> None:
"""
:args: An argparse.Namespace object.
This is the function called when the 'infer' sub-command is passed as an
argument to the CLI.
"""
try:
last_tag = last_git_release_tag(git_tags())
except NoGitTagsException:
print(SemVer(0, 1, 0))
exit(0)
commit_log = git_commits_since_last_tag(last_tag)
action = parse_commit_log(commit_log)
last_ver = git_tag_to_semver(last_tag)
if action == 'min':
new_ver = last_ver.bump_minor()
elif action == 'maj':
new_ver = last_ver.bump_major()
else:
new_ver = last_ver.bump_patch()
print(new_ver) | :args: An argparse.Namespace object.
This is the function called when the 'infer' sub-command is passed as an
argument to the CLI. | entailment |
def bump(args: argparse.Namespace) -> None:
"""
:args: An argparse.Namespace object.
This function is bound to the 'bump' sub-command. It increments the version
integer of the user's choice ('major', 'minor', or 'patch').
"""
try:
last_tag = last_git_release_tag(git_tags())
except NoGitTagsException:
print(SemVer(0, 1, 0))
exit(0)
last_ver = git_tag_to_semver(last_tag)
if args.type == 'patch':
print(last_ver.bump_patch())
elif args.type == 'minor':
print(last_ver.bump_minor())
elif args.type == 'major':
print(last_ver.bump_major()) | :args: An argparse.Namespace object.
This function is bound to the 'bump' sub-command. It increments the version
integer of the user's choice ('major', 'minor', or 'patch'). | entailment |
def find_sideband(ft_data, which=+1, copy=True):
"""Find the side band position of a hologram
The hologram is Fourier-transformed and the side band
is determined by finding the maximum amplitude in
Fourier space.
Parameters
----------
ft_data: 2d ndarray
Fourier transform of the hologram image
which: +1 or -1
which sideband to search for:
- +1: upper half
- -1: lower half
copy: bool
copy `ft_data` before modification
Returns
-------
fsx, fsy : tuple of floats
coordinates of the side band in Fourier space frequencies
"""
if copy:
ft_data = ft_data.copy()
if which not in [+1, -1]:
raise ValueError("`which` must be +1 or -1!")
ox, oy = ft_data.shape
cx = ox // 2
cy = oy // 2
minlo = max(int(np.ceil(ox / 42)), 5)
if which == +1:
# remove lower part
ft_data[cx - minlo:] = 0
else:
ft_data[:cx + minlo] = 0
# remove values around axes
ft_data[cx - 3:cx + 3, :] = 0
ft_data[:, cy - 3:cy + 3] = 0
# find maximum
am = np.argmax(np.abs(ft_data))
iy = am % oy
ix = int((am - iy) / oy)
fx = np.fft.fftshift(np.fft.fftfreq(ft_data.shape[0]))[ix]
fy = np.fft.fftshift(np.fft.fftfreq(ft_data.shape[1]))[iy]
return (fx, fy) | Find the side band position of a hologram
The hologram is Fourier-transformed and the side band
is determined by finding the maximum amplitude in
Fourier space.
Parameters
----------
ft_data: 2d ndarray
Fourier transform of the hologram image
which: +1 or -1
which sideband to search for:
- +1: upper half
- -1: lower half
copy: bool
copy `ft_data` before modification
Returns
-------
fsx, fsy : tuple of floats
coordinates of the side band in Fourier space frequencies | entailment |
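A sketch with a synthetic diagonal fringe; note that the index-to-frequency mapping above implies `ft_data` is an fftshifted Fourier transform:

x = np.arange(128).reshape(-1, 1)
y = np.arange(128).reshape(1, -1)
holo = 1 + np.cos(2 * np.pi * (0.2 * x + 0.15 * y))
ft = np.fft.fftshift(np.fft.fft2(holo))
fsx, fsy = find_sideband(ft, which=+1)   # approx. (-0.2, -0.15) here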
def fourier2dpad(data, zero_pad=True):
"""Compute the 2D Fourier transform with zero padding
Parameters
----------
    data: 2d float ndarray
real-valued image data
zero_pad: bool
perform zero-padding to next order of 2
"""
if zero_pad:
# zero padding size is next order of 2
(N, M) = data.shape
        order = int(max(64., 2**np.ceil(np.log(2 * max(N, M)) / np.log(2))))
# this is faster than np.pad
datapad = np.zeros((order, order), dtype=float)
datapad[:data.shape[0], :data.shape[1]] = data
else:
datapad = data
# Fourier transform
fft = np.fft.fftshift(np.fft.fft2(datapad))
return fft | Compute the 2D Fourier transform with zero padding
Parameters
----------
data: 2d float ndarray
real-valued image data
zero_pad: bool
perform zero-padding to next order of 2 | entailment |
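The padded size is the next power of two of twice the larger axis (at least 64), e.g.:

img = np.ones((100, 120))
fourier2dpad(img).shape   # (256, 256): 2 * 120 = 240 -> next power of two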
def get_field(hologram, sideband=+1, filter_name="disk", filter_size=1 / 3,
subtract_mean=True, zero_pad=True, copy=True):
"""Compute the complex field from a hologram using Fourier analysis
Parameters
----------
hologram: real-valued 2d ndarray
hologram data
sideband: +1, -1, or tuple of (float, float)
specifies the location of the sideband:
- +1: sideband in the upper half in Fourier space,
exact location is found automatically
- -1: sideband in the lower half in Fourier space,
exact location is found automatically
- (float, float): sideband coordinates in
frequencies in interval [1/"axes size", .5]
filter_name: str
specifies the filter to use, one of
- "disk": binary disk with radius `filter_size`
- "smooth disk": disk with radius `filter_size` convolved
with a radial gaussian (`sigma=filter_size/5`)
- "gauss": radial gaussian (`sigma=0.6*filter_size`)
- "square": binary square with side length `filter_size`
- "smooth square": square with side length `filter_size`
convolved with square gaussian (`sigma=filter_size/5`)
- "tukey": a square tukey window of width `2*filter_size` and
`alpha=0.1`
filter_size: float
Size of the filter in Fourier space in fractions of the
distance between central band and sideband.
See `filter_shape` for interpretation of `filter_size`.
subtract_mean: bool
If True, remove the mean of the hologram before performing
the Fourier transform. This setting is recommended as it
can reduce artifacts from frequencies around the central
band.
zero_pad: bool
Perform zero-padding before applying the FFT. Setting
`zero_pad` to `False` increases speed but might
introduce image distortions such as tilts in the phase
and amplitude data or dark borders in the amplitude data.
copy: bool
If set to True, input `data` is not edited.
Notes
-----
Even though the size of the "gauss" filter approximately matches
the frequencies of the "disk" filter, it takes into account
higher frequencies as well and thus suppresses ringing artifacts
for data that contain jumps in the phase image.
"""
if copy:
        hologram = hologram.astype(dtype=float, copy=True)
if subtract_mean:
# remove contributions of the central band
# (this affects more than one pixel in the FFT
# because of zero-padding)
        if issubclass(hologram.dtype.type, np.integer):
            hologram = hologram.astype(float)
hologram -= hologram.mean()
# Fourier transform
fft = fourier2dpad(hologram, zero_pad=zero_pad)
if sideband in [+1, -1]:
fsx, fsy = find_sideband(fft, which=sideband)
else:
fsx, fsy = sideband
# shift fft to sideband location
shifted = np.roll(np.roll(fft, -int(fsx * fft.shape[0]), axis=0),
-int(fsy * fft.shape[1]), axis=1)
# coordinates in Fourier space
fx = np.fft.fftshift(np.fft.fftfreq(fft.shape[0])).reshape(-1, 1)
fy = np.fft.fftshift(np.fft.fftfreq(fft.shape[1])).reshape(1, -1)
# filter size based on central band - sideband - distance
if filter_size >= 1:
raise ValueError("`filter_size` must be < 1!")
fsize = np.sqrt(fsx**2 + fsy**2) * filter_size
if filter_name == "disk":
afilter = (fx**2 + fy**2) < fsize**2
elif filter_name == "smooth disk":
sigma = fsize / 5
tau = 2 * sigma**2
radsq = fx**2 + fy**2
disk = radsq < fsize**2
gauss = np.exp(-radsq / tau)
afilter = signal.convolve(gauss, disk, mode="same")
afilter /= afilter.max()
elif filter_name == "gauss":
sigma = fsize * .6
tau = 2 * sigma**2
afilter = np.exp(-(fx**2 + fy**2) / tau)
afilter /= afilter.max()
elif filter_name == "square":
afilter = (np.abs(fx) < fsize) * (np.abs(fy) < fsize)
elif filter_name == "smooth square":
blur = fsize / 5
tau = 2 * blur**2
square = (np.abs(fx) < fsize) * (np.abs(fy) < fsize)
        gauss = np.exp(-(fx**2) / tau) * np.exp(-(fy**2) / tau)
afilter = signal.convolve(square, gauss, mode="same")
afilter /= afilter.max()
elif filter_name == "tukey":
alpha = 0.1
        rsize = int(min(fx.size, fy.size) * fsize) * 2
tukey_window_x = signal.tukey(rsize, alpha=alpha).reshape(-1, 1)
tukey_window_y = signal.tukey(rsize, alpha=alpha).reshape(1, -1)
tukey = tukey_window_x * tukey_window_y
afilter = np.zeros(shifted.shape)
s1 = (np.array(shifted.shape) - rsize)//2
s2 = (np.array(shifted.shape) + rsize)//2
afilter[s1[0]:s2[0], s1[1]:s2[1]] = tukey
else:
raise ValueError("Unknown filter: {}".format(filter_name))
# apply filter
fft_filt = afilter * shifted
# inverse Fourier transform
field = np.fft.ifft2(np.fft.ifftshift(fft_filt))
return field[:hologram.shape[0], :hologram.shape[1]] | Compute the complex field from a hologram using Fourier analysis
Parameters
----------
hologram: real-valued 2d ndarray
hologram data
sideband: +1, -1, or tuple of (float, float)
specifies the location of the sideband:
- +1: sideband in the upper half in Fourier space,
exact location is found automatically
- -1: sideband in the lower half in Fourier space,
exact location is found automatically
- (float, float): sideband coordinates in
frequencies in interval [1/"axes size", .5]
filter_name: str
specifies the filter to use, one of
- "disk": binary disk with radius `filter_size`
- "smooth disk": disk with radius `filter_size` convolved
with a radial gaussian (`sigma=filter_size/5`)
- "gauss": radial gaussian (`sigma=0.6*filter_size`)
- "square": binary square with side length `filter_size`
- "smooth square": square with side length `filter_size`
convolved with square gaussian (`sigma=filter_size/5`)
- "tukey": a square tukey window of width `2*filter_size` and
`alpha=0.1`
filter_size: float
Size of the filter in Fourier space in fractions of the
distance between central band and sideband.
See `filter_shape` for interpretation of `filter_size`.
subtract_mean: bool
If True, remove the mean of the hologram before performing
the Fourier transform. This setting is recommended as it
can reduce artifacts from frequencies around the central
band.
zero_pad: bool
Perform zero-padding before applying the FFT. Setting
`zero_pad` to `False` increases speed but might
introduce image distortions such as tilts in the phase
and amplitude data or dark borders in the amplitude data.
copy: bool
If set to True, input `data` is not edited.
Notes
-----
Even though the size of the "gauss" filter approximately matches
the frequencies of the "disk" filter, it takes into account
higher frequencies as well and thus suppresses ringing artifacts
for data that contain jumps in the phase image. | entailment |
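A usage sketch, reusing the synthetic fringe `holo` from the find_sideband sketch above:

field = get_field(holo, sideband=+1, filter_name="disk", filter_size=1 / 3)
pha = np.angle(field)   # phase of the complex field
amp = np.abs(field)     # amplitude of the complex field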
def copyh5(inh5, outh5):
"""Recursively copy all hdf5 data from one group to another
Data from links is copied.
Parameters
----------
inh5: str, h5py.File, or h5py.Group
The input hdf5 data. This can be either a file name or
an hdf5 object.
outh5: str, h5py.File, h5py.Group, or None
The output hdf5 data. This can be either a file name or
an hdf5 object. If set to `None`, a new hdf5 object is
created in memory.
Notes
-----
All data in outh5 are overridden by the inh5 data.
"""
if not isinstance(inh5, h5py.Group):
inh5 = h5py.File(inh5, mode="r")
if outh5 is None:
# create file in memory
h5kwargs = {"name": "qpimage{}.h5".format(QPImage._instances),
"driver": "core",
"backing_store": False,
"mode": "a"}
outh5 = h5py.File(**h5kwargs)
return_h5obj = True
QPImage._instances += 1
elif not isinstance(outh5, h5py.Group):
# create new file
outh5 = h5py.File(outh5, mode="w")
return_h5obj = False
else:
return_h5obj = True
# begin iteration
for key in inh5:
if key in outh5:
del outh5[key]
if isinstance(inh5[key], h5py.Group):
outh5.create_group(key)
copyh5(inh5[key], outh5[key])
else:
dset = write_image_dataset(group=outh5,
key=key,
data=inh5[key][:],
h5dtype=inh5[key].dtype)
dset.attrs.update(inh5[key].attrs)
outh5.attrs.update(inh5.attrs)
if return_h5obj:
# in-memory or previously created instance of h5py.File
return outh5
else:
# properly close the file and return its name
fn = outh5.filename
outh5.flush()
outh5.close()
return fn | Recursively copy all hdf5 data from one group to another
Data from links is copied.
Parameters
----------
inh5: str, h5py.File, or h5py.Group
The input hdf5 data. This can be either a file name or
an hdf5 object.
outh5: str, h5py.File, h5py.Group, or None
The output hdf5 data. This can be either a file name or
an hdf5 object. If set to `None`, a new hdf5 object is
created in memory.
Notes
-----
All data in outh5 are overridden by the inh5 data. | entailment |
def _conv_which_data(which_data):
"""Convert which data to string or tuple
This function improves user convenience,
as `which_data` may be of several types
    (str, str with spaces and commas, list, tuple) which
is internally handled by this method.
"""
if isinstance(which_data, str):
which_data = which_data.lower().strip()
if which_data.count(","):
# convert comma string to list
which_data = [w.strip() for w in which_data.split(",")]
# remove empty strings
which_data = [w for w in which_data if w]
if len(which_data) == 1:
return which_data[0]
else:
# convert to tuple
return tuple(which_data)
else:
return which_data
elif isinstance(which_data, (list, tuple)):
which_data = [w.lower().strip() for w in which_data]
return tuple(which_data)
elif which_data is None:
return None
else:
msg = "unknown type for `which_data`: {}".format(which_data)
        raise ValueError(msg) | Convert `which_data` to a string or tuple
This function improves user convenience,
as `which_data` may be of several types
(str, str with spaces and commas, list, tuple) which
is internally handled by this method. | entailment |
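The normalization accepts several spellings, e.g.:

QPImage._conv_which_data("Phase")                  # -> "phase"
QPImage._conv_which_data("phase, amplitude")       # -> ("phase", "amplitude")
QPImage._conv_which_data(["PHASE", " intensity"])  # -> ("phase", "intensity")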
def _get_amp_pha(self, data, which_data):
"""Convert input data to phase and amplitude
Parameters
----------
data: 2d ndarray (float or complex) or list
The experimental data (see `which_data`)
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "hologram", "phase,amplitude", or
"phase,intensity", where the latter two require an
indexable object with the phase data as first element.
Returns
-------
    amp, pha: tuple of (:class:`Amplitude`, :class:`Phase`)
"""
which_data = QPImage._conv_which_data(which_data)
if which_data not in VALID_INPUT_DATA:
msg = "`which_data` must be one of {}!".format(VALID_INPUT_DATA)
raise ValueError(msg)
if which_data == "field":
amp = np.abs(data)
pha = np.angle(data)
elif which_data == "phase":
pha = data
amp = np.ones_like(data)
elif which_data == ("phase", "amplitude"):
amp = data[1]
pha = data[0]
elif which_data == ("phase", "intensity"):
amp = np.sqrt(data[1])
pha = data[0]
elif which_data == "hologram":
amp, pha = self._get_amp_pha(holo.get_field(data, **self.holo_kw),
which_data="field")
if amp.size == 0 or pha.size == 0:
msg = "`data` with shape {} has zero size!".format(amp.shape)
raise ValueError(msg)
# phase unwrapping (take into account nans)
nanmask = np.isnan(pha)
if np.sum(nanmask):
# create masked array
# skimage.restoration.unwrap_phase cannot handle nan data
# (even if masked)
pham = pha.copy()
pham[nanmask] = 0
pham = np.ma.masked_array(pham, mask=nanmask)
pha = unwrap_phase(pham, seed=47)
pha[nanmask] = np.nan
else:
pha = unwrap_phase(pha, seed=47)
return amp, pha | Convert input data to phase and amplitude
Parameters
----------
data: 2d ndarray (float or complex) or list
The experimental data (see `which_data`)
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "hologram", "phase,amplitude", or
"phase,intensity", where the latter two require an
indexable object with the phase data as first element.
Returns
-------
amp, pha: tuple of (:class:`Amplitude`, :class:`Phase`) | entailment
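A hedged round-trip example for the conversion above (assuming the `QPImage` constructor of this module; the constant plane-wave field is hypothetical):
import numpy as np

field = 0.9 * np.exp(1j * 0.3) * np.ones((64, 64))  # constant complex field
qpi = QPImage(data=field, which_data="field")
assert np.allclose(qpi.amp, 0.9)   # amplitude = |field|
assert np.allclose(qpi.pha, 0.3)   # unwrapped phase = angle(field)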
def info(self):
"""list of tuples with QPImage meta data"""
info = []
# meta data
meta = self.meta
for key in meta:
info.append((key, self.meta[key]))
# background correction
for imdat in [self._amp, self._pha]:
info += imdat.info
return info | list of tuples with QPImage meta data | entailment |
def clear_bg(self, which_data=("amplitude", "phase"), keys="fit"):
"""Clear background correction
Parameters
----------
which_data: str or list of str
From which type of data to remove the background
information. The list contains either "amplitude",
"phase", or both.
keys: str or list of str
Which type of background data to remove. One of:
- "fit": the background data computed with
:func:`qpimage.QPImage.compute_bg`
- "data": the experimentally obtained background image
"""
which_data = QPImage._conv_which_data(which_data)
if isinstance(keys, str):
# make sure keys is a list of strings
keys = [keys]
# Get image data for clearing
imdats = []
if "amplitude" in which_data:
imdats.append(self._amp)
if "phase" in which_data:
imdats.append(self._pha)
if not imdats:
msg = "`which_data` must contain 'phase' or 'amplitude'!"
raise ValueError(msg)
# Perform clearing of backgrounds
for imdat in imdats:
for key in keys:
imdat.del_bg(key) | Clear background correction
Parameters
----------
which_data: str or list of str
From which type of data to remove the background
information. The list contains either "amplitude",
"phase", or both.
keys: str or list of str
Which type of background data to remove. One of:
- "fit": the background data computed with
:func:`qpimage.QPImage.compute_bg`
- "data": the experimentally obtained background image | entailment |
def compute_bg(self, which_data="phase",
fit_offset="mean", fit_profile="tilt",
border_m=0, border_perc=0, border_px=0,
from_mask=None, ret_mask=False):
"""Compute background correction
Parameters
----------
which_data: str or list of str
From which type of data to remove the background
information. The list contains either "amplitude",
"phase", or both.
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_m: float
Assume that a frame of `border_m` meters around the
image is background. The value is converted to
pixels and rounded.
border_perc: float
Assume that a frame of `border_perc` percent around
the image is background. The value is converted to
pixels and rounded. If the aspect ratio of the image
is not one, then the average of the data's shape is
used to compute the percentage in pixels.
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The boolean mask must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the boolean mask used to compute the background.
Notes
-----
The `border_*` values are translated to pixel values and
the largest pixel border is used to generate a mask
image for background computation.
If any of the `border_*` arguments are non-zero and
`from_mask` is given, the intersection of the two
is used, i.e. the positions where both, the frame
mask and `from_mask`, are `True`.
See Also
--------
qpimage.bg_estimate.estimate
"""
which_data = QPImage._conv_which_data(which_data)
# check validity
if not ("amplitude" in which_data or
"phase" in which_data):
msg = "`which_data` must contain 'phase' or 'amplitude'!"
raise ValueError(msg)
# get border in px
border_list = []
if border_m:
if border_m < 0:
raise ValueError("`border_m` must be greater than zero!")
border_list.append(border_m / self.meta["pixel size"])
if border_perc:
if border_perc < 0 or border_perc > 50:
raise ValueError("`border_perc` must be in interval [0, 50]!")
size = np.average(self.shape)
border_list.append(size * border_perc / 100)
if border_px:
border_list.append(border_px)
# get maximum border size
if border_list:
border_px = int(np.round(np.max(border_list)))
elif from_mask is None:
raise ValueError("Neither `from_mask` nor `border_*` given!")
elif np.all(from_mask == 0):
raise ValueError("`from_mask` must not be all-zero!")
# Get affected image data
imdat_list = []
if "amplitude" in which_data:
imdat_list.append(self._amp)
if "phase" in which_data:
imdat_list.append(self._pha)
# Perform correction
for imdat in imdat_list:
mask = imdat.estimate_bg(fit_offset=fit_offset,
fit_profile=fit_profile,
border_px=border_px,
from_mask=from_mask,
ret_mask=ret_mask)
return mask | Compute background correction
Parameters
----------
which_data: str or list of str
From which type of data to remove the background
information. The list contains either "amplitude",
"phase", or both.
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_m: float
Assume that a frame of `border_m` meters around the
image is background. The value is converted to
pixels and rounded.
border_perc: float
Assume that a frame of `border_perc` percent around
the image is background. The value is converted to
pixels and rounded. If the aspect ratio of the image
is not one, then the average of the data's shape is
used to compute the percentage in pixels.
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The boolean mask must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the boolean mask used to compute the background.
Notes
-----
The `border_*` values are translated to pixel values and
the largest pixel border is used to generate a mask
image for background computation.
If any of the `border_*` arguments are non-zero and
`from_mask` is given, the intersection of the two
is used, i.e. the positions where both, the frame
mask and `from_mask`, are `True`.
See Also
--------
qpimage.bg_estimate.estimate | entailment |
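Example invocation with a pixel border and mask retrieval (sketch; `qpi` is a hypothetical QPImage instance):
mask = qpi.compute_bg(which_data="phase",
                      fit_profile="poly2o",
                      fit_offset="mean",
                      border_px=10,
                      ret_mask=True)  # boolean frame mask that was used
assert mask.shape == qpi.shape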
def copy(self, h5file=None):
"""Create a copy of the current instance
This is done by recursively copying the underlying hdf5 data.
Parameters
----------
h5file: str, h5py.File, h5py.Group, or None
see `QPImage.__init__`
"""
h5 = copyh5(self.h5, h5file)
return QPImage(h5file=h5, h5dtype=self.h5dtype) | Create a copy of the current instance
This is done by recursively copying the underlying hdf5 data.
Parameters
----------
h5file: str, h5py.File, h5py.Group, or None
see `QPImage.__init__` | entailment |
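Both copy targets in one sketch (the on-disk file name is hypothetical):
qpi_mem = qpi.copy()                      # backed by a new in-memory hdf5 object
qpi_disk = qpi.copy(h5file="backup.h5")   # recursive copy into a file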
def refocus(self, distance, method="helmholtz", h5file=None, h5mode="a"):
"""Compute a numerically refocused QPImage
Parameters
----------
distance: float
Focusing distance [m]
method: str
Refocusing method, one of ["helmholtz","fresnel"]
h5file: str, h5py.Group, h5py.File, or None
A path to an hdf5 data file where the QPImage is cached.
If set to `None` (default), all data will be handled in
memory using the "core" driver of the :mod:`h5py`'s
:class:`h5py:File` class. If the file does not exist,
it is created. If the file already exists, it is opened
with the file mode defined by `h5mode`. If this is
an instance of h5py.Group or h5py.File, then this will
be used to internally store all data.
h5mode: str
Valid file modes are (only applies if `h5file` is a path)
- "r": Readonly, file must exist
- "r+": Read/write, file must exist
- "w": Create file, truncate if exists
- "w-" or "x": Create file, fail if exists
- "a": Read/write if exists, create otherwise (default)
Returns
-------
qpi: qpimage.QPImage
Refocused phase and amplitude data
See Also
--------
:mod:`nrefocus`: library used for numerical focusing
"""
field2 = nrefocus.refocus(field=self.field,
d=distance/self["pixel size"],
nm=self["medium index"],
res=self["wavelength"]/self["pixel size"],
method=method
)
if "identifier" in self:
ident = self["identifier"]
else:
ident = ""
meta_data = self.meta
meta_data["identifier"] = "{}@{}{:.5e}m".format(ident,
method[0],
distance)
qpi2 = QPImage(data=field2,
which_data="field",
meta_data=meta_data,
h5file=h5file,
h5mode=h5mode)
return qpi2 | Compute a numerically refocused QPImage
Parameters
----------
distance: float
Focusing distance [m]
method: str
Refocusing method, one of ["helmholtz","fresnel"]
h5file: str, h5py.Group, h5py.File, or None
A path to an hdf5 data file where the QPImage is cached.
If set to `None` (default), all data will be handled in
memory using the "core" driver of the :mod:`h5py`'s
:class:`h5py:File` class. If the file does not exist,
it is created. If the file already exists, it is opened
with the file mode defined by `h5mode`. If this is
an instance of h5py.Group or h5py.File, then this will
be used to internally store all data.
h5mode: str
Valid file modes are (only applies if `h5file` is a path)
- "r": Readonly, file must exist
- "r+": Read/write, file must exist
- "w": Create file, truncate if exists
- "w-" or "x": Create file, fail if exists
- "a": Read/write if exists, create otherwise (default)
Returns
-------
qpi: qpimage.QPImage
Refocused phase and amplitude data
See Also
--------
:mod:`nrefocus`: library used for numerical focusing | entailment |
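A short usage sketch (assumes "pixel size", "wavelength" and "medium index" are present in the meta data of the hypothetical `qpi`):
qpi_foc = qpi.refocus(distance=5e-6, method="helmholtz")
print(qpi_foc["identifier"])  # original identifier plus "@h5.00000e-06m" suffix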
def set_bg_data(self, bg_data, which_data=None):
"""Set background amplitude and phase data
Parameters
----------
bg_data: 2d ndarray (float or complex), list, QPImage, or `None`
The background data (must be same type as `data`).
If set to `None`, the background data is reset.
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "phase,amplitude", or "phase,intensity",
where the latter two require an indexable object for
`bg_data` with the phase data as first element.
"""
if isinstance(bg_data, QPImage):
if which_data is not None:
msg = "`which_data` must not be set if `bg_data` is QPImage!"
raise ValueError(msg)
pha, amp = bg_data.pha, bg_data.amp
elif bg_data is None:
# Reset phase and amplitude
amp, pha = None, None
else:
# Compute phase and amplitude from data and which_data
amp, pha = self._get_amp_pha(bg_data, which_data)
# Set background data
self._amp.set_bg(amp, key="data")
self._pha.set_bg(pha, key="data") | Set background amplitude and phase data
Parameters
----------
bg_data: 2d ndarray (float or complex), list, QPImage, or `None`
The background data (must be same type as `data`).
If set to `None`, the background data is reset.
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "phase,amplitude", or "phase,intensity",
where the latter two require an indexable object for
`bg_data` with the phase data as first element. | entailment |
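Setting and resetting an experimental background (sketch; the flat phase background is hypothetical):
import numpy as np

bg = np.zeros(qpi.shape)
qpi.set_bg_data(bg_data=bg, which_data="phase")
qpi.set_bg_data(None)  # reset amplitude and phase backgrounds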
def add_qpimage(self, qpi, identifier=None, bg_from_idx=None):
"""Add a QPImage instance to the QPSeries
Parameters
----------
qpi: qpimage.QPImage
The QPImage that is added to the series
identifier: str
Identifier key for `qpi`
bg_from_idx: int or None
Use the background data from the data stored in this index,
creating hard links within the hdf5 file.
(Saves memory if e.g. all qpimages are corrected with the same data)
"""
if not isinstance(qpi, QPImage):
raise ValueError("`qpi` must be an instance of QPImage!")
if "identifier" in qpi and identifier is None:
identifier = qpi["identifier"]
if identifier and identifier in self:
msg = "The identifier '{}' already ".format(identifier) \
+ "exists! You can either change the identifier of " \
+ " '{}' or remove it.".format(qpi)
raise ValueError(msg)
# determine number of qpimages
num = len(self)
# indices start at zero; do not add 1
name = "qpi_{}".format(num)
group = self.h5.create_group(name)
thisqpi = qpi.copy(h5file=group)
if bg_from_idx is not None:
# Create hard links
refqpi = self[bg_from_idx]
thisqpi._amp.set_bg(bg=refqpi._amp.h5["bg_data"]["data"])
thisqpi._pha.set_bg(bg=refqpi._pha.h5["bg_data"]["data"])
if identifier:
# set identifier
group.attrs["identifier"] = identifier | Add a QPImage instance to the QPSeries
Parameters
----------
qpi: qpimage.QPImage
The QPImage that is added to the series
identifier: str
Identifier key for `qpi`
bg_from_idx: int or None
Use the background data from the data stored in this index,
creating hard links within the hdf5 file.
(Saves memory if e.g. all qpimages are corrected with the same data) | entailment
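Building a series with a shared background (sketch; the QPSeries constructor defaults and a previously set background on `qpi_a` are assumed):
qps = QPSeries()                       # in-memory series
qps.add_qpimage(qpi_a, identifier="bg_ref")
qps.add_qpimage(qpi_b, bg_from_idx=0)  # hard-link the background of qpi_a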
def get_qpimage(self, index):
"""Return a single QPImage of the series
Parameters
----------
index: int or str
Index or identifier of the QPImage
Notes
-----
Instead of ``qps.get_qpimage(index)``, it is possible
to use the short-hand ``qps[index]``.
"""
if isinstance(index, str):
# search for the identifier
for ii in range(len(self)):
qpi = self[ii]
if "identifier" in qpi and qpi["identifier"] == index:
group = self.h5["qpi_{}".format(ii)]
break
else:
msg = "QPImage identifier '{}' not found!".format(index)
raise KeyError(msg)
else:
# integer index
if index < -len(self):
msg = "Index {} out of bounds for QPSeries of size {}!".format(
index, len(self))
raise ValueError(msg)
elif index < 0:
index += len(self)
name = "qpi_{}".format(index)
if name in self.h5:
group = self.h5[name]
else:
msg = "Index {} not found for QPSeries of length {}".format(
index, len(self))
raise KeyError(msg)
return QPImage(h5file=group) | Return a single QPImage of the series
Parameters
----------
index: int or str
Index or identifier of the QPImage
Notes
-----
Instead of ``qps.get_qpimage(index)``, it is possible
to use the short-hand ``qps[index]``. | entailment |
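Index and identifier access side by side (sketch, continuing the hypothetical series from above):
first = qps.get_qpimage(0)
same = qps["bg_ref"]  # identifier lookup; raises KeyError if unknown
last = qps[-1]        # negative indices are supported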
def main() -> int:
""""
Main routine
"""
parser = argparse.ArgumentParser()
parser.add_argument(
"--overwrite",
help="Overwrites the unformatted source files with the well-formatted code in place. "
"If not set, an exception is raised if any of the files do not conform to the style guide.",
action='store_true')
args = parser.parse_args()
overwrite = bool(args.overwrite)
repo_root = pathlib.Path(__file__).parent
print("YAPF'ing...")
if overwrite:
subprocess.check_call(
[
"yapf", "--in-place", "--style=style.yapf", "--recursive", "tests", "icontract", "setup.py",
"precommit.py"
],
cwd=repo_root.as_posix())
else:
subprocess.check_call(
["yapf", "--diff", "--style=style.yapf", "--recursive", "tests", "icontract", "setup.py", "precommit.py"],
cwd=repo_root.as_posix())
print("Mypy'ing...")
subprocess.check_call(["mypy", "icontract", "tests"], cwd=repo_root.as_posix())
print("Pylint'ing...")
subprocess.check_call(["pylint", "--rcfile=pylint.rc", "tests", "icontract"], cwd=repo_root.as_posix())
print("Pydocstyle'ing...")
subprocess.check_call(["pydocstyle", "icontract"], cwd=repo_root.as_posix())
print("Testing...")
env = os.environ.copy()
env['ICONTRACT_SLOW'] = 'true'
# yapf: disable
subprocess.check_call(
["coverage", "run",
"--source", "icontract",
"-m", "unittest", "discover", "tests"],
cwd=repo_root.as_posix(),
env=env)
# yapf: enable
subprocess.check_call(["coverage", "report"])
print("Doctesting...")
subprocess.check_call(["python3", "-m", "doctest", "README.rst"])
for pth in (repo_root / "icontract").glob("**/*.py"):
subprocess.check_call(["python3", "-m", "doctest", pth.as_posix()])
print("Checking the restructured text of the readme...")
subprocess.check_call(['python3', 'setup.py', 'check', '--restructuredtext', '--strict'])
return 0 | Main routine | entailment |
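For reference, the two modes of this pre-commit script correspond to the following hypothetical invocations:
# python3 precommit.py               -> verify only; fail on style violations
# python3 precommit.py --overwrite   -> re-format the source files in place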
def _collapse_invariants(bases: List[type], namespace: MutableMapping[str, Any]) -> None:
"""Collect invariants from the bases and merge them with the invariants in the namespace."""
invariants = [] # type: List[Contract]
# Add invariants of the bases
for base in bases:
if hasattr(base, "__invariants__"):
invariants.extend(getattr(base, "__invariants__"))
# Add invariants in the current namespace
if '__invariants__' in namespace:
invariants.extend(namespace['__invariants__'])
# Change the final invariants in the namespace
if invariants:
namespace["__invariants__"] = invariants | Collect invariants from the bases and merge them with the invariants in the namespace. | entailment |
def _collapse_preconditions(base_preconditions: List[List[Contract]], bases_have_func: bool,
preconditions: List[List[Contract]], func: Callable[..., Any]) -> List[List[Contract]]:
"""
Collapse function preconditions with the preconditions collected from the base classes.
:param base_preconditions: preconditions collected from the base classes (grouped by base class)
:param bases_have_func: True if one of the base classes has the function
:param preconditions: preconditions of the function (before the collapse)
:param func: function whose preconditions we are collapsing
:return: collapsed sequence of precondition groups
"""
if not base_preconditions and bases_have_func and preconditions:
raise TypeError(("The function {} can not weaken the preconditions because the bases specify "
"no preconditions at all. Hence this function must accept all possible input since "
"the preconditions are OR'ed and no precondition implies a dummy precondition which is always "
"fulfilled.").format(func.__qualname__))
return base_preconditions + preconditions | Collapse function preconditions with the preconditions collected from the base classes.
:param base_preconditions: preconditions collected from the base classes (grouped by base class)
:param bases_have_func: True if one of the base classes has the function
:param preconditions: preconditions of the function (before the collapse)
:param func: function whose preconditions we are collapsing
:return: collapsed sequence of precondition groups | entailment |
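The rule corresponds to icontract's inheritance semantics; a minimal sketch using the public icontract API (class and argument names are illustrative):
import icontract

class Base(icontract.DBC):
    @icontract.require(lambda x: x > 0)
    def process(self, x: int) -> None:
        pass

class Derived(Base):
    @icontract.require(lambda x: x < -10)  # weakening: groups are OR'ed
    def process(self, x: int) -> None:
        pass

Derived().process(-20)  # OK: the derived precondition group is fulfilled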
def _collapse_snapshots(base_snapshots: List[Snapshot], snapshots: List[Snapshot]) -> List[Snapshot]:
"""
Collapse snapshots of pre-invocation values with the snapshots collected from the base classes.
:param base_snapshots: snapshots collected from the base classes
:param snapshots: snapshots of the function (before the collapse)
:return: collapsed sequence of snapshots
"""
seen_names = set() # type: Set[str]
collapsed = base_snapshots + snapshots
for snap in collapsed:
if snap.name in seen_names:
raise ValueError("There are conflicting snapshots with the name: {!r}.\n\n"
"Please mind that the snapshots are inherited from the base classes. "
"Does one of the base classes defines a snapshot with the same name?".format(snap.name))
seen_names.add(snap.name)
return collapsed | Collapse snapshots of pre-invocation values with the snapshots collected from the base classes.
:param base_snapshots: snapshots collected from the base classes
:param snapshots: snapshots of the function (before the collapse)
:return: collapsed sequence of snapshots | entailment |
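A sketch of the naming rule with the public icontract API (illustrative names; a derived class re-declaring the same snapshot name would trigger the ValueError above):
import icontract

class Base(icontract.DBC):
    @icontract.snapshot(lambda lst: len(lst), name="len_lst")
    @icontract.ensure(lambda OLD, lst: len(lst) == OLD.len_lst + 1)
    def append_one(self, lst: list) -> None:
        lst.append(1)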
def _collapse_postconditions(base_postconditions: List[Contract], postconditions: List[Contract]) -> List[Contract]:
"""
Collapse function postconditions with the postconditions collected from the base classes.
:param base_postconditions: postconditions collected from the base classes
:param postconditions: postconditions of the function (before the collapse)
:return: collapsed sequence of postconditions
"""
return base_postconditions + postconditions | Collapse function postconditions with the postconditions collected from the base classes.
:param base_postconditions: postconditions collected from the base classes
:param postconditions: postconditions of the function (before the collapse)
:return: collapsed sequence of postconditions | entailment |
def _decorate_namespace_function(bases: List[type], namespace: MutableMapping[str, Any], key: str) -> None:
"""Collect preconditions and postconditions from the bases and decorate the function at the ``key``."""
# pylint: disable=too-many-branches
# pylint: disable=too-many-locals
value = namespace[key]
assert inspect.isfunction(value) or isinstance(value, (staticmethod, classmethod))
# Determine the function to be decorated
if inspect.isfunction(value):
func = value
elif isinstance(value, (staticmethod, classmethod)):
func = value.__func__
else:
raise NotImplementedError("Unexpected value for a function: {}".format(value))
# Collect preconditions and postconditions of the function
preconditions = [] # type: List[List[Contract]]
snapshots = [] # type: List[Snapshot]
postconditions = [] # type: List[Contract]
contract_checker = icontract._checkers.find_checker(func=func)
if contract_checker is not None:
preconditions = contract_checker.__preconditions__ # type: ignore
snapshots = contract_checker.__postcondition_snapshots__ # type: ignore
postconditions = contract_checker.__postconditions__ # type: ignore
# Collect the preconditions and postconditions from bases.
#
# Preconditions and postconditions of __init__ of base classes are deliberately ignored (and not collapsed) since
# initialization is an operation specific to the concrete class and does not relate to the class hierarchy.
if key not in ['__init__']:
base_preconditions = [] # type: List[List[Contract]]
base_snapshots = [] # type: List[Snapshot]
base_postconditions = [] # type: List[Contract]
bases_have_func = False
for base in bases:
if hasattr(base, key):
bases_have_func = True
# Check if there is a checker function in the base class
base_func = getattr(base, key)
base_contract_checker = icontract._checkers.find_checker(func=base_func)
# Ignore functions which don't have preconditions or postconditions
if base_contract_checker is not None:
base_preconditions.extend(base_contract_checker.__preconditions__) # type: ignore
base_snapshots.extend(base_contract_checker.__postcondition_snapshots__) # type: ignore
base_postconditions.extend(base_contract_checker.__postconditions__) # type: ignore
# Collapse preconditions and postconditions from the bases with the function's own ones
preconditions = _collapse_preconditions(
base_preconditions=base_preconditions,
bases_have_func=bases_have_func,
preconditions=preconditions,
func=func)
snapshots = _collapse_snapshots(base_snapshots=base_snapshots, snapshots=snapshots)
postconditions = _collapse_postconditions(
base_postconditions=base_postconditions, postconditions=postconditions)
if preconditions or postconditions:
if contract_checker is None:
contract_checker = icontract._checkers.decorate_with_checker(func=func)
# Replace the function with the function decorated with contract checks
if inspect.isfunction(value):
namespace[key] = contract_checker
elif isinstance(value, staticmethod):
namespace[key] = staticmethod(contract_checker)
elif isinstance(value, classmethod):
namespace[key] = classmethod(contract_checker)
else:
raise NotImplementedError("Unexpected value for a function: {}".format(value))
# Override the preconditions and postconditions
contract_checker.__preconditions__ = preconditions # type: ignore
contract_checker.__postcondition_snapshots__ = snapshots # type: ignore
contract_checker.__postconditions__ = postconditions | Collect preconditions and postconditions from the bases and decorate the function at the ``key``. | entailment |
def _decorate_namespace_property(bases: List[type], namespace: MutableMapping[str, Any], key: str) -> None:
"""Collect contracts for all getters/setters/deleters corresponding to ``key`` and decorate them."""
# pylint: disable=too-many-locals
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
value = namespace[key]
assert isinstance(value, property)
fget = value.fget # type: Optional[Callable[..., Any]]
fset = value.fset # type: Optional[Callable[..., Any]]
fdel = value.fdel # type: Optional[Callable[..., Any]]
for func in [value.fget, value.fset, value.fdel]:
func = cast(Callable[..., Any], func)
if func is None:
continue
# Collect the preconditions and postconditions from bases
base_preconditions = [] # type: List[List[Contract]]
base_snapshots = [] # type: List[Snapshot]
base_postconditions = [] # type: List[Contract]
bases_have_func = False
for base in bases:
if hasattr(base, key):
base_property = getattr(base, key)
assert isinstance(base_property, property), \
"Expected base {} to have {} as property, but got: {}".format(base, key, base_property)
if func == value.fget:
base_func = getattr(base, key).fget
elif func == value.fset:
base_func = getattr(base, key).fset
elif func == value.fdel:
base_func = getattr(base, key).fdel
else:
raise NotImplementedError("Unhandled case: func neither value.fget, value.fset nor value.fdel")
if base_func is None:
continue
bases_have_func = True
# Check if there is a checker function in the base class
base_contract_checker = icontract._checkers.find_checker(func=base_func)
# Ignore functions which don't have preconditions or postconditions
if base_contract_checker is not None:
base_preconditions.extend(base_contract_checker.__preconditions__) # type: ignore
base_snapshots.extend(base_contract_checker.__postcondition_snapshots__) # type: ignore
base_postconditions.extend(base_contract_checker.__postconditions__) # type: ignore
# Add preconditions and postconditions of the function
preconditions = [] # type: List[List[Contract]]
snapshots = [] # type: List[Snapshot]
postconditions = [] # type: List[Contract]
contract_checker = icontract._checkers.find_checker(func=func)
if contract_checker is not None:
preconditions = contract_checker.__preconditions__ # type: ignore
snapshots = contract_checker.__postcondition_snapshots__
postconditions = contract_checker.__postconditions__ # type: ignore
preconditions = _collapse_preconditions(
base_preconditions=base_preconditions,
bases_have_func=bases_have_func,
preconditions=preconditions,
func=func)
snapshots = _collapse_snapshots(base_snapshots=base_snapshots, snapshots=snapshots)
postconditions = _collapse_postconditions(
base_postconditions=base_postconditions, postconditions=postconditions)
if preconditions or postconditions:
if contract_checker is None:
contract_checker = icontract._checkers.decorate_with_checker(func=func)
# Replace the function with the function decorated with contract checks
if func == value.fget:
fget = contract_checker
elif func == value.fset:
fset = contract_checker
elif func == value.fdel:
fdel = contract_checker
else:
raise NotImplementedError("Unhandled case: func neither fget, fset nor fdel")
# Override the preconditions and postconditions
contract_checker.__preconditions__ = preconditions # type: ignore
contract_checker.__postcondition_snapshots__ = snapshots # type: ignore
contract_checker.__postconditions__ = postconditions # type: ignore
if fget != value.fget or fset != value.fset or fdel != value.fdel:
namespace[key] = property(fget=fget, fset=fset, fdel=fdel) | Collect contracts for all getters/setters/deleters corresponding to ``key`` and decorate them. | entailment |
def _dbc_decorate_namespace(bases: List[type], namespace: MutableMapping[str, Any]) -> None:
"""
Collect invariants, preconditions and postconditions from the bases and decorate all the methods.
Instance methods are simply replaced with the decorated function. Properties, class methods and static methods are
overridden with new instances of ``property``, ``classmethod`` and ``staticmethod``, respectively.
"""
_collapse_invariants(bases=bases, namespace=namespace)
for key, value in namespace.items():
if inspect.isfunction(value) or isinstance(value, (staticmethod, classmethod)):
_decorate_namespace_function(bases=bases, namespace=namespace, key=key)
elif isinstance(value, property):
_decorate_namespace_property(bases=bases, namespace=namespace, key=key)
else:
# Ignore the value which is neither a function nor a property
pass | Collect invariants, preconditions and postconditions from the bases and decorate all the methods.
Instance methods are simply replaced with the decorated function. Properties, class methods and static methods are
overridden with new instances of ``property``, ``classmethod`` and ``staticmethod``, respectively. | entailment |
def _representable(value: Any) -> bool:
"""
Check whether we want to represent the value in the error message on contract breach.
We do not want to represent classes, methods, modules and functions.
:param value: value related to an AST node
:return: True if we want to represent it in the violation error
"""
return not inspect.isclass(value) and not inspect.isfunction(value) and not inspect.ismethod(value) and not \
inspect.ismodule(value) and not inspect.isbuiltin(value) | Check whether we want to represent the value in the error message on contract breach.
We do not want to represent classes, methods, modules and functions.
:param value: value related to an AST node
:return: True if we want to represent it in the violation error | entailment |
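A few hedged spot checks of the predicate above (assuming `_representable` is importable):
import math

assert _representable(42) and _representable([1, 2])
assert not _representable(math)        # module
assert not _representable(math.sqrt)   # built-in function
assert not _representable(int)         # class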
def inspect_decorator(lines: List[str], lineno: int, filename: str) -> DecoratorInspection:
"""
Parse the file in which the decorator is called and figure out the corresponding call AST node.
:param lines: lines of the source file corresponding to the decorator call
:param lineno: line index (starting with 0) of one of the lines in the decorator call
:param filename: name of the file where decorator is called
:return: inspected decorator call
"""
if lineno < 0 or lineno >= len(lines):
raise ValueError(("Given line number {} of one of the decorator lines "
"is not within the range [{}, {}) of lines in {}").format(lineno, 0, len(lines), filename))
# Go up till a line starts with a decorator
decorator_lineno = None # type: Optional[int]
for i in range(lineno, -1, -1):
if _DECORATOR_RE.match(lines[i]):
decorator_lineno = i
break
if decorator_lineno is None:
raise SyntaxError("Decorator corresponding to the line {} could not be found in file {}: {!r}".format(
lineno + 1, filename, lines[lineno]))
# Find the decorator end -- it's either a function definition, a class definition or another decorator
decorator_end_lineno = None # type: Optional[int]
for i in range(lineno + 1, len(lines)):
line = lines[i]
if _DECORATOR_RE.match(line) or _DEF_CLASS_RE.match(line):
decorator_end_lineno = i
break
if decorator_end_lineno is None:
raise SyntaxError(("The next statement following the decorator corresponding to the line {} "
"could not be found in file {}: {!r}").format(lineno + 1, filename, lines[lineno]))
decorator_lines = lines[decorator_lineno:decorator_end_lineno]
# We need to dedent the decorator and add a dummy decoratee so that we can parse its text as valid source code.
decorator_text = textwrap.dedent("".join(decorator_lines)) + "def dummy_{}(): pass".format(uuid.uuid4().hex)
atok = asttokens.ASTTokens(decorator_text, parse=True)
assert isinstance(atok.tree, ast.Module), "Expected the parsed decorator text to live in an AST module."
module_node = atok.tree
assert len(module_node.body) == 1, "Expected the module AST of the decorator text to have a single statement."
assert isinstance(module_node.body[0], ast.FunctionDef), \
"Expected the only statement in the AST module corresponding to the decorator text to be a function definition."
func_def_node = module_node.body[0]
assert len(func_def_node.decorator_list) == 1, \
"Expected the function AST node corresponding to the decorator text to have a single decorator."
assert isinstance(func_def_node.decorator_list[0], ast.Call), \
"Expected the only decorator in the function definition AST node corresponding to the decorator text " \
"to be a call node."
call_node = func_def_node.decorator_list[0]
return DecoratorInspection(atok=atok, node=call_node) | Parse the file in which the decorator is called and figure out the corresponding call AST node.
:param lines: lines of the source file corresponding to the decorator call
:param lineno: line index (starting with 0) of one of the lines in the decorator call
:param filename: name of the file where decorator is called
:return: inspected decorator call | entailment |
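A rough sketch of a call (assumes the module-level regexes match ordinary `@decorator` and `def`/`class` lines and that asttokens is installed; the source snippet is hypothetical):
src = "@icontract.require(\n    lambda x: x > 0)\ndef f(x):\n    pass\n"
insp = inspect_decorator(lines=src.splitlines(keepends=True),
                         lineno=1, filename="<string>")
print(insp.atok.get_text(insp.node))  # the reconstructed decorator call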
def find_lambda_condition(decorator_inspection: DecoratorInspection) -> Optional[ConditionLambdaInspection]:
"""
Inspect the decorator and extract the condition as lambda.
If the condition is not given as a lambda function, return None.
"""
call_node = decorator_inspection.node
lambda_node = None # type: Optional[ast.Lambda]
if len(call_node.args) > 0:
assert isinstance(call_node.args[0], ast.Lambda), \
("Expected the first argument to the decorator to be a condition as lambda AST node, "
"but got: {}").format(type(call_node.args[0]))
lambda_node = call_node.args[0]
elif len(call_node.keywords) > 0:
for keyword in call_node.keywords:
if keyword.arg == "condition":
assert isinstance(keyword.value, ast.Lambda), \
"Expected lambda node as value of the 'condition' argument to the decorator."
lambda_node = keyword.value
break
assert lambda_node is not None, "Expected to find a keyword AST node with 'condition' arg, but found none"
else:
raise AssertionError(
"Expected a call AST node of a decorator to have either args or keywords, but got: {}".format(
ast.dump(call_node)))
return ConditionLambdaInspection(atok=decorator_inspection.atok, node=lambda_node) | Inspect the decorator and extract the condition as lambda.
If the condition is not given as a lambda function, return None. | entailment |
def repr_values(condition: Callable[..., bool], lambda_inspection: Optional[ConditionLambdaInspection],
condition_kwargs: Mapping[str, Any], a_repr: reprlib.Repr) -> List[str]:
# pylint: disable=too-many-locals
"""
Represent function arguments and frame values in the error message on contract breach.
:param condition: condition function of the contract
:param lambda_inspection:
inspected lambda AST node corresponding to the condition function (None if the condition was not given as a
lambda function)
:param condition_kwargs: condition arguments
:param a_repr: representation instance that defines how the values are represented.
:return: list of value representations
"""
if _is_lambda(a_function=condition):
assert lambda_inspection is not None, "Expected a lambda inspection when given a condition as a lambda function"
else:
assert lambda_inspection is None, "Expected no lambda inspection in a condition given as a non-lambda function"
reprs = dict() # type: MutableMapping[str, Any]
if lambda_inspection is not None:
# Collect the variable lookup of the condition function:
variable_lookup = [] # type: List[Mapping[str, Any]]
# Add condition arguments to the lookup
variable_lookup.append(condition_kwargs)
# Add closure to the lookup
closure_dict = dict() # type: Dict[str, Any]
if condition.__closure__ is not None: # type: ignore
closure_cells = condition.__closure__ # type: ignore
freevars = condition.__code__.co_freevars
assert len(closure_cells) == len(freevars), \
"Number of closure cells of a condition function ({}) == number of free vars ({})".format(
len(closure_cells), len(freevars))
for cell, freevar in zip(closure_cells, freevars):
closure_dict[freevar] = cell.cell_contents
variable_lookup.append(closure_dict)
# Add globals to the lookup
if condition.__globals__ is not None: # type: ignore
variable_lookup.append(condition.__globals__) # type: ignore
# pylint: disable=protected-access
recompute_visitor = icontract._recompute.Visitor(variable_lookup=variable_lookup)
recompute_visitor.visit(node=lambda_inspection.node.body)
recomputed_values = recompute_visitor.recomputed_values
repr_visitor = Visitor(
recomputed_values=recomputed_values, variable_lookup=variable_lookup, atok=lambda_inspection.atok)
repr_visitor.visit(node=lambda_inspection.node.body)
reprs = repr_visitor.reprs
else:
for key, val in condition_kwargs.items():
if _representable(value=val):
reprs[key] = val
parts = [] # type: List[str]
for key in sorted(reprs.keys()):
parts.append('{} was {}'.format(key, a_repr.repr(reprs[key])))
return parts | Represent function arguments and frame values in the error message on contract breach.
:param condition: condition function of the contract
:param lambda_inspection:
inspected lambda AST node corresponding to the condition function (None if the condition was not given as a
lambda function)
:param condition_kwargs: condition arguments
:param a_repr: representation instance that defines how the values are represented.
:return: list of value representations | entailment |
def generate_message(contract: Contract, condition_kwargs: Mapping[str, Any]) -> str:
"""Generate the message upon contract violation."""
# pylint: disable=protected-access
parts = [] # type: List[str]
if contract.location is not None:
parts.append("{}:\n".format(contract.location))
if contract.description is not None:
parts.append("{}: ".format(contract.description))
lambda_inspection = None # type: Optional[ConditionLambdaInspection]
if not _is_lambda(a_function=contract.condition):
condition_text = contract.condition.__name__
else:
# We need to extract the source code corresponding to the decorator since inspect.getsource() is broken with
# lambdas.
# Find the line corresponding to the condition lambda
lines, condition_lineno = inspect.findsource(contract.condition)
filename = inspect.getsourcefile(contract.condition)
decorator_inspection = inspect_decorator(lines=lines, lineno=condition_lineno, filename=filename)
lambda_inspection = find_lambda_condition(decorator_inspection=decorator_inspection)
assert lambda_inspection is not None, \
"Expected lambda_inspection to be non-None if _is_lambda is True on: {}".format(contract.condition)
condition_text = lambda_inspection.text
parts.append(condition_text)
repr_vals = repr_values(
condition=contract.condition,
lambda_inspection=lambda_inspection,
condition_kwargs=condition_kwargs,
a_repr=contract._a_repr)
if len(repr_vals) == 0:
# Do not append anything since no value could be represented as a string.
# This could appear in case we have, for example, a generator expression as the return value of a lambda.
pass
elif len(repr_vals) == 1:
parts.append(': ')
parts.append(repr_vals[0])
else:
parts.append(':\n')
parts.append('\n'.join(repr_vals))
msg = "".join(parts)
return msg | Generate the message upon contract violation. | entailment |
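An end-to-end sketch of the message generation via a violated precondition (public icontract API; the printed format is approximate):
import icontract

@icontract.require(lambda x: x > 0)
def do(x: int) -> None:
    pass

try:
    do(-1)
except icontract.ViolationError as err:
    print(err)  # roughly: "<file>:<line>:\nx > 0: x was -1"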
def visit_Name(self, node: ast.Name) -> None:
"""
Resolve the name from the variable look-up and the built-ins.
Due to possible branching (e.g., If-expressions), some nodes might lack the recomputed values. These nodes
are ignored.
"""
if node in self._recomputed_values:
value = self._recomputed_values[node]
# Check if it is a non-built-in
is_builtin = True
for lookup in self._variable_lookup:
if node.id in lookup:
is_builtin = False
break
if not is_builtin and _representable(value=value):
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) | Resolve the name from the variable look-up and the built-ins.
Due to possible branching (e.g., If-expressions), some nodes might lack the recomputed values. These nodes
are ignored. | entailment |
def visit_Attribute(self, node: ast.Attribute) -> None:
"""Represent the attribute by dumping its source code."""
if node in self._recomputed_values:
value = self._recomputed_values[node]
if _representable(value=value):
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) | Represent the attribute by dumping its source code. | entailment |
def visit_Call(self, node: ast.Call) -> None:
"""Represent the call by dumping its source code."""
if node in self._recomputed_values:
value = self._recomputed_values[node]
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) | Represent the call by dumping its source code. | entailment |
def visit_ListComp(self, node: ast.ListComp) -> None:
"""Represent the list comprehension by dumping its source code."""
if node in self._recomputed_values:
value = self._recomputed_values[node]
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) | Represent the list comprehension by dumping its source code. | entailment |
def visit_DictComp(self, node: ast.DictComp) -> None:
"""Represent the dictionary comprehension by dumping its source code."""
if node in self._recomputed_values:
value = self._recomputed_values[node]
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) | Represent the dictionary comprehension by dumping its source code. | entailment |
def _walk_decorator_stack(func: CallableT) -> Iterable['CallableT']:
"""
Iterate through the stack of decorated functions until the original function.
Assume that all decorators used functools.update_wrapper.
"""
while hasattr(func, "__wrapped__"):
yield func
func = getattr(func, "__wrapped__")
yield func | Iterate through the stack of decorated functions until the original function.
Assume that all decorators used functools.update_wrapper. | entailment |
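A small self-contained check of the walk (functools.wraps sets `__wrapped__`):
import functools

def deco(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@deco
@deco
def f() -> None:
    pass

assert len(list(_walk_decorator_stack(f))) == 3  # two wrappers + the original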
def find_checker(func: CallableT) -> Optional[CallableT]:
"""Iterate through the decorator stack till we find the contract checker."""
contract_checker = None # type: Optional[CallableT]
for a_wrapper in _walk_decorator_stack(func):
if hasattr(a_wrapper, "__preconditions__") or hasattr(a_wrapper, "__postconditions__"):
contract_checker = a_wrapper
return contract_checker | Iterate through the decorator stack till we find the contract checker. | entailment |
def _kwargs_from_call(param_names: List[str], kwdefaults: Dict[str, Any], args: Tuple[Any, ...],
kwargs: Dict[str, Any]) -> MutableMapping[str, Any]:
"""
Inspect the input values received at the wrapper for the actual function call.
:param param_names: parameter (*i.e.* argument) names of the original (decorated) function
:param kwdefaults: default argument values of the original function
:param args: arguments supplied to the call
:param kwargs: keyword arguments supplied to the call
:return: resolved arguments as they would be passed to the function
"""
# pylint: disable=too-many-arguments
mapping = dict() # type: MutableMapping[str, Any]
# Set the default argument values as condition parameters.
for param_name, param_value in kwdefaults.items():
mapping[param_name] = param_value
# Override the defaults with the values actually supplied to the function.
for i, func_arg in enumerate(args):
mapping[param_names[i]] = func_arg
for key, val in kwargs.items():
mapping[key] = val
return mapping | Inspect the input values received at the wrapper for the actual function call.
:param param_names: parameter (*i.e.* argument) names of the original (decorated) function
:param kwdefaults: default argument values of the original function
:param args: arguments supplied to the call
:param kwargs: keyword arguments supplied to the call
:return: resolved arguments as they would be passed to the function | entailment |
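A direct check of the resolution order (defaults first, then positional, then keyword arguments):
resolved = _kwargs_from_call(param_names=["a", "b", "c"],
                             kwdefaults={"c": 3},
                             args=(1,),
                             kwargs={"b": 2})
assert resolved == {"a": 1, "b": 2, "c": 3}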
def _assert_precondition(contract: Contract, resolved_kwargs: Mapping[str, Any]) -> None:
"""
Assert that the contract holds as a precondition.
:param contract: contract to be verified
:param resolved_kwargs: resolved keyword arguments (including the default values)
:return:
"""
# Check that all arguments to the condition function have been set.
missing_args = [arg_name for arg_name in contract.condition_args if arg_name not in resolved_kwargs]
if missing_args:
raise TypeError(
("The argument(s) of the precondition have not been set: {}. "
"Does the original function define them? Did you supply them in the call?").format(missing_args))
condition_kwargs = {
arg_name: value
for arg_name, value in resolved_kwargs.items() if arg_name in contract.condition_arg_set
}
check = contract.condition(**condition_kwargs)
if not check:
if contract.error is not None and (inspect.ismethod(contract.error) or inspect.isfunction(contract.error)):
assert contract.error_arg_set is not None, "Expected error_arg_set non-None if contract.error a function."
assert contract.error_args is not None, "Expected error_args non-None if contract.error a function."
error_kwargs = {
arg_name: value
for arg_name, value in resolved_kwargs.items() if arg_name in contract.error_arg_set
}
missing_args = [arg_name for arg_name in contract.error_args if arg_name not in resolved_kwargs]
if missing_args:
msg_parts = [] # type: List[str]
if contract.location is not None:
msg_parts.append("{}:\n".format(contract.location))
msg_parts.append(
("The argument(s) of the precondition error have not been set: {}. "
"Does the original function define them? Did you supply them in the call?").format(missing_args))
raise TypeError(''.join(msg_parts))
raise contract.error(**error_kwargs)
else:
msg = icontract._represent.generate_message(contract=contract, condition_kwargs=condition_kwargs)
if contract.error is None:
raise ViolationError(msg)
elif isinstance(contract.error, type):
raise contract.error(msg) | Assert that the contract holds as a precondition.
:param contract: contract to be verified
:param resolved_kwargs: resolved keyword arguments (including the default values)
:return: | entailment |
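The custom-error branch above corresponds to the `error` argument of the public icontract API; a sketch (names are illustrative):
import icontract

@icontract.require(
    lambda x: x > 0,
    error=lambda x: ValueError("x must be positive, got {}".format(x)))
def g(x: int) -> None:
    pass

g(-1)  # raises the custom ValueError instead of a ViolationError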
def _assert_invariant(contract: Contract, instance: Any) -> None:
"""Assert that the contract holds as a class invariant given the instance of the class."""
if 'self' in contract.condition_arg_set:
check = contract.condition(self=instance)
else:
check = contract.condition()
if not check:
if contract.error is not None and (inspect.ismethod(contract.error) or inspect.isfunction(contract.error)):
assert contract.error_arg_set is not None, "Expected error_arg_set non-None if contract.error a function."
assert contract.error_args is not None, "Expected error_args non-None if contract.error a function."
if 'self' in contract.error_arg_set:
raise contract.error(self=instance)
else:
raise contract.error()
else:
if 'self' in contract.condition_arg_set:
msg = icontract._represent.generate_message(contract=contract, condition_kwargs={"self": instance})
else:
msg = icontract._represent.generate_message(contract=contract, condition_kwargs=dict())
if contract.error is None:
raise ViolationError(msg)
elif isinstance(contract.error, type):
raise contract.error(msg)
else:
raise NotImplementedError("Unhandled contract.error: {}".format(contract.error)) | Assert that the contract holds as a class invariant given the instance of the class. | entailment |
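An invariant violation that exercises this code path (public icontract API; the class is illustrative):
import icontract

@icontract.invariant(lambda self: self.balance >= 0)
class Account:
    def __init__(self) -> None:
        self.balance = 0

    def withdraw(self, amount: int) -> None:
        self.balance -= amount

Account().withdraw(10)  # ViolationError: self.balance >= 0 is broken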