text_prompt | code_prompt
---|---|
<SYSTEM_TASK:>
Execute each passed endpoint and collect the results.
<END_TASK>
<USER_TASK:>
Description:
def exec_all_endpoints(self, *args, **kwargs):
"""Execute each passed endpoint and collect the results. If a result
is another `MultipleResults`, it will extend the results with those
contained therein. If the result is `NoResult`, skip the addition.""" |
results = []
for handler in self.endpoints:
if isinstance(handler, weakref.ref):
handler = handler()
if self.adapt_params:
bind = self._adapt_call_params(handler, args, kwargs)
res = handler(*bind.args, **bind.kwargs)
else:
res = handler(*args, **kwargs)
if isinstance(res, MultipleResults):
if res.done:
results += res.results
else:
results += res._results
elif res is not NoResult:
results.append(res)
return MultipleResults(results, concurrent=self.concurrent, owner=self) |
<SYSTEM_TASK:>
Call all the registered handlers with the arguments passed.
<END_TASK>
<USER_TASK:>
Description:
def run(self, *args, **kwargs):
"""Call all the registered handlers with the arguments passed.
If this signal is a class member, call also the handlers registered
at class-definition time. If an external publish function is
supplied, call it with the provided arguments.
:returns: an instance of `~.utils.MultipleResults`
""" |
if self.fvalidation is not None:
try:
if self.fvalidation(*args, **kwargs) is False:
raise ExecutionError("Validation returned ``False``")
except Exception as e:
if __debug__:
logger.exception("Validation failed")
else:
logger.error("Validation failed")
raise ExecutionError(
"The validation of the arguments specified to ``run()`` "
"has failed") from e
try:
if self.exec_wrapper is None:
return self.exec_all_endpoints(*args, **kwargs)
else:
# if an exec wrapper is defined, defer notification to it,
# passing a callback that executes the default notification process
result = self.exec_wrapper(self.endpoints,
self.exec_all_endpoints,
*args, **kwargs)
if inspect.isawaitable(result):
result = pull_result(result)
return result
except Exception as e:
if __debug__:
logger.exception("Error while executing handlers")
else:
logger.error("Error while executing handlers")
raise ExecutionError("Error while executing handlers") from e |
<SYSTEM_TASK:>
Discover the minimum and maximum UID number.
<END_TASK>
<USER_TASK:>
Description:
def login_defs():
"""Discover the minimum and maximum UID number.""" |
uid_min = None
uid_max = None
login_defs_path = '/etc/login.defs'
if os.path.exists(login_defs_path):
with io.open(text_type(login_defs_path), encoding=text_type('utf-8')) as log_defs_file:
login_data = log_defs_file.readlines()
for line in login_data:
if PY3: # pragma: no cover
line = str(line)
if PY2: # pragma: no cover
line = line.encode(text_type('utf8'))
if line[:7] == text_type('UID_MIN'):
uid_min = int(line.split()[1].strip())
if line[:7] == text_type('UID_MAX'):
uid_max = int(line.split()[1].strip())
if not uid_min: # pragma: no cover
uid_min = DEFAULT_UID_MIN
if not uid_max: # pragma: no cover
uid_max = DEFAULT_UID_MAX
return uid_min, uid_max |
<SYSTEM_TASK:>
Runs Django ``collectstatic`` command in silent mode.
<END_TASK>
<USER_TASK:>
Description:
def collect_static() -> bool:
"""
Runs Django ``collectstatic`` command in silent mode.
:return: always ``True``
""" |
from django.core.management import execute_from_command_line
# from django.conf import settings
# if not os.listdir(settings.STATIC_ROOT):
wf('Collecting static files... ', False)
execute_from_command_line(['./manage.py', 'collectstatic', '-c', '--noinput', '-v0'])
wf('[+]\n')
return True |
<SYSTEM_TASK:>
Generate command to add a user.
<END_TASK>
<USER_TASK:>
Description:
def generate_add_user_command(proposed_user=None, manage_home=None):
"""Generate command to add a user.
args:
proposed_user (User): the proposed user instance
manage_home (bool): whether to manage the home directory
returns:
list: The command string split into shell-like syntax
""" |
command = None
if get_platform() in ('Linux', 'OpenBSD'):
command = '{0} {1}'.format(sudo_check(), LINUX_CMD_USERADD)
if proposed_user.uid:
command = '{0} -u {1}'.format(command, proposed_user.uid)
if proposed_user.gid:
command = '{0} -g {1}'.format(command, proposed_user.gid)
if proposed_user.gecos:
command = '{0} -c \'{1}\''.format(command, proposed_user.gecos)
if manage_home:
if proposed_user.home_dir:
if os.path.exists(proposed_user.home_dir):
command = '{0} -d {1}'.format(command, proposed_user.home_dir)
elif not os.path.exists('/home/{0}'.format(proposed_user.name)):
command = '{0} -m'.format(command)
if proposed_user.shell:
command = '{0} -s {1}'.format(command, proposed_user.shell)
command = '{0} {1}'.format(command, proposed_user.name)
elif get_platform() == 'FreeBSD': # pragma: FreeBSD
command = '{0} {1} useradd'.format(sudo_check(), FREEBSD_CMD_PW)
if proposed_user.uid:
command = '{0} -u {1}'.format(command, proposed_user.uid)
if proposed_user.gid:
command = '{0} -g {1}'.format(command, proposed_user.gid)
if proposed_user.gecos:
command = '{0} -c \'{1}\''.format(command, proposed_user.gecos)
if manage_home:
if proposed_user.home_dir:
command = '{0} -d {1}'.format(command, proposed_user.home_dir)
else:
command = '{0} -m'.format(command)
if proposed_user.shell:
command = '{0} -s {1}'.format(command, proposed_user.shell)
command = '{0} -n {1}'.format(command, proposed_user.name)
if command:
return shlex.split(str(command)) |
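As an illustration of how the returned token list might be consumed, here is a minimal sketch; the `User` keyword arguments mirror those used by `construct_user_list` further below, the sample values are made up, and the `subprocess` execution step is an assumption rather than part of the original module.

import subprocess

# Hypothetical user; keyword names follow construct_user_list() below.
proposed = User(name='alice', uid=1500, gid=1500, gecos='Alice Example',
                home_dir='/home/alice', shell='/bin/bash')
cmd = generate_add_user_command(proposed_user=proposed, manage_home=True)
if cmd:
    # cmd is already tokenized via shlex.split, so shell=True is not needed
    subprocess.check_call(cmd)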
<SYSTEM_TASK:>
Generate command to modify existing user to become the proposed user.
<END_TASK>
<USER_TASK:>
Description:
def generate_modify_user_command(task=None, manage_home=None):
"""Generate command to modify existing user to become the proposed user.
args:
task (dict): A proposed user and the differences between it and the existing user
manage_home (bool): whether to manage the home directory
returns:
list: The command string split into shell-like syntax
""" |
name = task['proposed_user'].name
comparison_result = task['user_comparison']['result']
command = None
if get_platform() in ('Linux', 'OpenBSD'):
command = '{0} {1}'.format(sudo_check(), LINUX_CMD_USERMOD)
if comparison_result.get('replacement_uid_value'):
command = '{0} -u {1}'.format(command, comparison_result.get('replacement_uid_value'))
if comparison_result.get('replacement_gid_value'):
command = '{0} -g {1}'.format(command, comparison_result.get('replacement_gid_value'))
if comparison_result.get('replacement_gecos_value'):
command = '{0} -c {1}'.format(command, comparison_result.get('replacement_gecos_value'))
if comparison_result.get('replacement_shell_value'):
command = '{0} -s {1}'.format(command, comparison_result.get('replacement_shell_value'))
if manage_home and comparison_result.get('replacement_home_dir_value'):
command = '{0} -d {1}'.format(command, comparison_result.get('replacement_home_dir_value'))
command = '{0} {1}'.format(command, name)
if get_platform() == 'FreeBSD': # pragma: FreeBSD
command = '{0} {1} usermod'.format(sudo_check(), FREEBSD_CMD_PW)
if comparison_result.get('replacement_uid_value'):
command = '{0} -u {1}'.format(command, comparison_result.get('replacement_uid_value'))
if comparison_result.get('replacement_gid_value'):
command = '{0} -g {1}'.format(command, comparison_result.get('replacement_gid_value'))
if comparison_result.get('replacement_gecos_value'):
command = '{0} -c {1}'.format(command, comparison_result.get('replacement_gecos_value'))
if comparison_result.get('replacement_shell_value'):
command = '{0} -s {1}'.format(command, comparison_result.get('replacement_shell_value'))
if manage_home and comparison_result.get('replacement_home_dir_value'):
command = '{0} -d {1}'.format(command, comparison_result.get('replacement_home_dir_value'))
command = '{0} -n {1}'.format(command, name)
if command:
return shlex.split(str(command)) |
<SYSTEM_TASK:>
Generate command to delete a user.
<END_TASK>
<USER_TASK:>
Description:
def generate_delete_user_command(username=None, manage_home=None):
"""Generate command to delete a user.
args:
username (str): user name
manage_home (bool): manage home directory
returns:
list: The user delete command string split into shell-like syntax
""" |
command = None
remove_home = '-r' if manage_home else ''
if get_platform() in ('Linux', 'OpenBSD'):
command = '{0} {1} {2} {3}'.format(sudo_check(), LINUX_CMD_USERDEL, remove_home, username)
elif get_platform() == 'FreeBSD': # pragma: FreeBSD
command = '{0} {1} userdel {2} -n {3}'.format(sudo_check(), FREEBSD_CMD_PW, remove_home, username)
if command:
return shlex.split(str(command)) |
<SYSTEM_TASK:>
Check if supplied User instance exists in supplied Users list and, if so, return the differences.
<END_TASK>
<USER_TASK:>
Description:
def compare_user(passed_user=None, user_list=None):
"""Check if supplied User instance exists in supplied Users list and, if so, return the differences.
args:
passed_user (User): the user instance to check for differences
user_list (Users): the Users instance containing a list of Users instances
returns:
dict: Details of the matching user and a list of differences
""" |
# Check if user exists
returned = user_list.describe_users(users_filter=dict(name=passed_user.name))
replace_keys = False
# User exists, so compare attributes
comparison_result = dict()
if passed_user.uid and (not returned[0].uid == passed_user.uid):
comparison_result['uid_action'] = 'modify'
comparison_result['current_uid_value'] = returned[0].uid
comparison_result['replacement_uid_value'] = passed_user.uid
if passed_user.gid and (not returned[0].gid == passed_user.gid):
comparison_result['gid_action'] = 'modify'
comparison_result['current_gid_value'] = returned[0].gid
comparison_result['replacement_gid_value'] = passed_user.gid
if passed_user.gecos and (not returned[0].gecos == passed_user.gecos):
comparison_result['gecos_action'] = 'modify'
comparison_result['current_gecos_value'] = returned[0].gecos
comparison_result['replacement_gecos_value'] = passed_user.gecos
if passed_user.home_dir and (not returned[0].home_dir == passed_user.home_dir):
comparison_result['home_dir_action'] = 'modify'
comparison_result['current_home_dir_value'] = returned[0].home_dir
comparison_result['replacement_home_dir_value'] = passed_user.home_dir
# (Re)set keys if home dir changed
replace_keys = True
if passed_user.shell and (not returned[0].shell == passed_user.shell):
comparison_result['shell_action'] = 'modify'
comparison_result['current_shell_value'] = returned[0].shell
comparison_result['replacement_shell_value'] = passed_user.shell
if passed_user.sudoers_entry and (not returned[0].sudoers_entry == passed_user.sudoers_entry):
comparison_result['sudoers_entry_action'] = 'modify'
comparison_result['current_sudoers_entry'] = returned[0].sudoers_entry
comparison_result['replacement_sudoers_entry'] = passed_user.sudoers_entry
# if passed_user.public_keys and (not returned[0].public_keys == passed_user.public_keys):
existing_keys = returned[0].public_keys
passed_keys = passed_user.public_keys
# Check if existing and passed keys exist, and if so, compare
if all((existing_keys, passed_keys)) and len(existing_keys) == len(passed_user.public_keys):
# Compare each key, and if any differences, replace
existing = set(key.raw for key in existing_keys)
replacement = set(key.raw for key in passed_keys)
if set.difference(existing, replacement):
replace_keys = True
# If there are no existing keys but keys were passed, replace them
elif passed_keys and not existing_keys:
replace_keys = True
if replace_keys:
comparison_result['public_keys_action'] = 'modify'
comparison_result['current_public_keys_value'] = existing_keys
comparison_result['replacement_public_keys_value'] = passed_keys
return dict(state='existing', result=comparison_result, existing_user=returned) |
<SYSTEM_TASK:>
Force double quoted gecos.
<END_TASK>
<USER_TASK:>
Description:
def gecos(self):
"""Force double quoted gecos.
returns:
str: The double quoted gecos.
""" |
if not self._gecos:
return None
if self._gecos.startswith(text_type('\'')) and self._gecos.endswith(text_type('\'')):
self._gecos = '\"{0}\"'.format(self._gecos[1:-1])
return self._gecos
elif self._gecos.startswith(text_type('\"')) and self._gecos.endswith(text_type('\"')):
return self._gecos
else:
return '\"{0}\"'.format(self._gecos) |
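A small sketch of the expected quoting behavior, assuming `User(gecos=...)` simply stores the raw string that this property wraps (hypothetical values):

u1 = User(name='bob', gecos="'Bob Builder'")   # single-quoted input
u2 = User(name='carol', gecos='Carol C.')      # unquoted input
print(u1.gecos)   # -> "Bob Builder" (single quotes replaced by double quotes)
print(u2.gecos)   # -> "Carol C."    (value wrapped in double quotes)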
<SYSTEM_TASK:>
Return the user as a dict.
<END_TASK>
<USER_TASK:>
Description:
def to_dict(self):
""" Return the user as a dict. """ |
public_keys = [public_key.b64encoded for public_key in self.public_keys]
return dict(name=self.name, passwd=self.passwd, uid=self.uid, gid=self.gid, gecos=self.gecos,
home_dir=self.home_dir, shell=self.shell, public_keys=public_keys) |
<SYSTEM_TASK:>
Insert an instance of User into the collection.
<END_TASK>
<USER_TASK:>
Description:
def insert(self, index, value):
"""Insert an instance of User into the collection.""" |
self.check(value)
self._user_list.insert(index, value) |
<SYSTEM_TASK:>
Remove User instance based on supplied user name.
<END_TASK>
<USER_TASK:>
Description:
def remove(self, username=None):
"""Remove User instance based on supplied user name.""" |
self._user_list = [user for user in self._user_list if user.name != username] |
<SYSTEM_TASK:>
Construct a list of User objects from a list of dicts.
<END_TASK>
<USER_TASK:>
Description:
def construct_user_list(raw_users=None):
"""Construct a list of User objects from a list of dicts.""" |
users = Users(oktypes=User)
for user_dict in raw_users:
public_keys = None
if user_dict.get('public_keys'):
public_keys = [PublicKey(b64encoded=x, raw=None)
for x in user_dict.get('public_keys')]
users.append(User(name=user_dict.get('name'),
passwd=user_dict.get('passwd'),
uid=user_dict.get('uid'),
gid=user_dict.get('gid'),
home_dir=user_dict.get('home_dir'),
gecos=user_dict.get('gecos'),
shell=user_dict.get('shell'),
public_keys=public_keys,
sudoers_entry=user_dict.get('sudoers_entry')))
return users |
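A brief usage sketch with made-up input; the dict keys mirror the `get` calls above and the base64 key value is only a placeholder:

raw = [{'name': 'alice', 'uid': 1500, 'gid': 1500, 'shell': '/bin/bash',
        'public_keys': ['c3NoLXJzYSBBQUFBIGZha2VAZXhhbXBsZQ==']},
       {'name': 'bob', 'uid': 1501}]
users = construct_user_list(raw_users=raw)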
<SYSTEM_TASK:>
Write the users to a file.
<END_TASK>
<USER_TASK:>
Description:
def export(self, file_path=None, export_format=None):
""" Write the users to a file. """ |
with io.open(file_path, mode='w', encoding="utf-8") as export_file:
if export_format == 'yaml':
import yaml
yaml.safe_dump(self.to_dict(), export_file, default_flow_style=False)
elif export_format == 'json':
export_file.write(text_type(json.dumps(self.to_dict(), ensure_ascii=False)))
return True |
<SYSTEM_TASK:>
Write an image to an hdf5 group as a dataset
<END_TASK>
<USER_TASK:>
Description:
def write_image_dataset(group, key, data, h5dtype=None):
"""Write an image to an hdf5 group as a dataset
This convenience function sets all attributes such that the image
can be visualized with HDFView, sets the compression and fletcher32
filters, and sets the chunk size to the image shape.
Parameters
----------
group: h5py.Group
HDF5 group to store data to
key: str
Dataset identifier
data: np.ndarray of shape (M,N)
Image data to store
h5dtype: str
The datatype in which to store the image data. The default
is the datatype of `data`.
Returns
-------
dataset: h5py.Dataset
The created HDF5 dataset object
""" |
if h5dtype is None:
h5dtype = data.dtype
if key in group:
del group[key]
if group.file.driver == "core":
kwargs = {}
else:
kwargs = {"fletcher32": True,
"chunks": data.shape}
kwargs.update(COMPRESSION)
dset = group.create_dataset(key,
data=data.astype(h5dtype),
**kwargs)
# Create and Set image attributes
# HDFView recognizes this as a series of images
dset.attrs.create('CLASS', b'IMAGE')
dset.attrs.create('IMAGE_VERSION', b'1.2')
dset.attrs.create('IMAGE_SUBCLASS', b'IMAGE_GRAYSCALE')
return dset |
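A short usage sketch that stores a random image in an in-memory HDF5 file (file name and data are arbitrary):

import h5py
import numpy as np

image = np.random.rand(64, 64)
with h5py.File("example.h5", mode="w", driver="core", backing_store=False) as h5:
    grp = h5.create_group("data")
    dset = write_image_dataset(group=grp, key="raw", data=image)
    # HDFView-style image attributes have been set on the dataset
    assert dset.attrs["CLASS"] == b"IMAGE"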
<SYSTEM_TASK:>
list of background correction parameters
<END_TASK>
<USER_TASK:>
Description:
def info(self):
"""list of background correction parameters""" |
info = []
name = self.__class__.__name__.lower()
# get bg information
for key in VALID_BG_KEYS:
if key in self.h5["bg_data"]:
attrs = self.h5["bg_data"][key].attrs
for akey in attrs:
atr = attrs[akey]
var = "{} background {}".format(name, akey)
info.append((var, atr))
if "fit" in self.h5["bg_data"]:
# mask background
var_mask = "{} background from mask".format(name)
if ("estimate_bg_from_mask" in self.h5
and self.h5["estimate_bg_from_mask"] is not None):
# bg was computed from mask image
info.append((var_mask, True))
elif ("estimate_bg_from_binary" in self.h5
and self.h5["estimate_bg_from_binary"] is not None):
# bg was computed from mask image (old notation)
warnings.warn("Old file format detected!", DeprecationWarning)
info.append((var_mask, True))
else:
info.append((var_mask, False))
return info |
<SYSTEM_TASK:>
Remove the background image data
<END_TASK>
<USER_TASK:>
Description:
def del_bg(self, key):
"""Remove the background image data
Parameters
----------
key: str
One of :const:`VALID_BG_KEYS`
""" |
if key not in VALID_BG_KEYS:
raise ValueError("Invalid bg key: {}".format(key))
if key in self.h5["bg_data"]:
del self.h5["bg_data"][key]
else:
msg = "No bg data to clear for '{}' in {}.".format(key, self)
warnings.warn(msg) |
<SYSTEM_TASK:>
Estimate image background
<END_TASK>
<USER_TASK:>
Description:
def estimate_bg(self, fit_offset="mean", fit_profile="tilt",
border_px=0, from_mask=None, ret_mask=False):
"""Estimate image background
Parameters
----------
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The mask image must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the mask image used to compute the background.
Notes
-----
If both `border_px` and `from_mask` are given, the
intersection of the two resulting mask images is used.
The arguments passed to this method are stored in the
hdf5 file `self.h5` and are used for optional integrity
checking using `qpimage.integrity_check.check`.
See Also
--------
qpimage.bg_estimate.estimate
""" |
# remove existing bg before accessing imdat.image
self.set_bg(bg=None, key="fit")
# compute bg
bgimage, mask = bg_estimate.estimate(data=self.image,
fit_offset=fit_offset,
fit_profile=fit_profile,
border_px=border_px,
from_mask=from_mask,
ret_mask=True)
attrs = {"fit_offset": fit_offset,
"fit_profile": fit_profile,
"border_px": border_px}
self.set_bg(bg=bgimage, key="fit", attrs=attrs)
# save `from_mask` separately (arrays vs. h5 attributes)
# (if `from_mask` is `None`, this will remove the array)
self["estimate_bg_from_mask"] = from_mask
# return mask image
if ret_mask:
return mask |
<SYSTEM_TASK:>
Get the background data
<END_TASK>
<USER_TASK:>
Description:
def get_bg(self, key=None, ret_attrs=False):
"""Get the background data
Parameters
----------
key: None or str
A user-defined key that identifies the background data.
Examples are "data" for experimental data, or "fit"
for an estimated background correction
(see :const:`VALID_BG_KEYS`). If set to `None`,
returns the combined background image (:const:`ImageData.bg`).
ret_attrs: bool
Also returns the attributes of the background data.
""" |
if key is None:
if ret_attrs:
raise ValueError("No attributes for combined background!")
return self.bg
else:
if key not in VALID_BG_KEYS:
raise ValueError("Invalid bg key: {}".format(key))
if key in self.h5["bg_data"]:
data = self.h5["bg_data"][key][:]
if ret_attrs:
attrs = dict(self.h5["bg_data"][key].attrs)
# remove keys for image visualization in hdf5 files
for h5k in ["CLASS", "IMAGE_VERSION", "IMAGE_SUBCLASS"]:
if h5k in attrs:
attrs.pop(h5k)
ret = (data, attrs)
else:
ret = data
else:
raise KeyError("No background data for {}!".format(key))
return ret |
<SYSTEM_TASK:>
Set the background data
<END_TASK>
<USER_TASK:>
Description:
def set_bg(self, bg, key="data", attrs={}):
"""Set the background data
Parameters
----------
bg: numbers.Real, 2d ndarray, ImageData, or h5py.Dataset
The background data. If `bg` is an `h5py.Dataset` object,
it must exist in the same hdf5 file (a hard link is created).
If set to `None`, the data will be removed.
key: str
One of :const:`VALID_BG_KEYS`
attrs: dict
Dictionary of background attributes
See Also
--------
del_bg: removing background data
""" |
if key not in VALID_BG_KEYS:
raise ValueError("Invalid bg key: {}".format(key))
# remove previous background key
if key in self.h5["bg_data"]:
del self.h5["bg_data"][key]
# set background
if isinstance(bg, (numbers.Real, np.ndarray)):
dset = write_image_dataset(group=self.h5["bg_data"],
key=key,
data=bg,
h5dtype=self.h5dtype)
for kw in attrs:
dset.attrs[kw] = attrs[kw]
elif isinstance(bg, h5py.Dataset):
# Create a hard link
# (This functionality was intended for saving memory when storing
# large QPSeries with universal background data, i.e. when using
# `QPSeries.add_qpimage` with the `bg_from_idx` keyword.)
self.h5["bg_data"][key] = bg
elif bg is not None:
msg = "Unknown background data type: {}".format(bg)
raise ValueError(msg) |
<SYSTEM_TASK:>
Combine several background amplitude images
<END_TASK>
<USER_TASK:>
Description:
def _bg_combine(self, bgs):
"""Combine several background amplitude images""" |
out = np.ones(self.h5["raw"].shape, dtype=float)
# bg is an h5py.DataSet
for bg in bgs:
out *= bg[:]
return out |
<SYSTEM_TASK:>
Alternate constructor that accepts a string SemVer.
<END_TASK>
<USER_TASK:>
Description:
def from_str(cls, version_str: str):
"""
Alternate constructor that accepts a string SemVer.
""" |
o = cls()
o.version = version_str
return o |
<SYSTEM_TASK:>
Estimate the background value of an image
<END_TASK>
<USER_TASK:>
Description:
def estimate(data, fit_offset="mean", fit_profile="tilt",
border_px=0, from_mask=None, ret_mask=False):
"""Estimate the background value of an image
Parameters
----------
data: np.ndarray
Data from which to compute the background value
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The boolean mask must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the boolean mask used to compute the background.
Notes
-----
If both `border_px` and `from_mask` are given, the
intersection of the two is used, i.e. the positions
where both the frame mask and `from_mask` are
`True`.
""" |
if fit_profile not in VALID_FIT_PROFILES:
msg = "`fit_profile` must be one of {}, got '{}'".format(
VALID_FIT_PROFILES,
fit_profile)
raise ValueError(msg)
if fit_offset not in VALID_FIT_OFFSETS:
msg = "`fit_offset` must be one of {}, got '{}'".format(
VALID_FIT_OFFSETS,
fit_offset)
raise ValueError(msg)
# initial mask image
if from_mask is not None:
assert isinstance(from_mask, np.ndarray)
mask = from_mask.copy()
else:
mask = np.ones_like(data, dtype=bool)
# multiply with border mask image (intersection)
if border_px > 0:
border_px = int(np.round(border_px))
mask_px = np.zeros_like(mask)
mask_px[:border_px, :] = True
mask_px[-border_px:, :] = True
mask_px[:, :border_px] = True
mask_px[:, -border_px:] = True
# intersection
np.logical_and(mask, mask_px, out=mask)
# compute background image
if fit_profile == "tilt":
bgimg = profile_tilt(data, mask)
elif fit_profile == "poly2o":
bgimg = profile_poly2o(data, mask)
else:
bgimg = np.zeros_like(data, dtype=float)
# add offsets
if fit_offset == "fit":
if fit_profile == "offset":
msg = "`fit_offset=='fit'` is only valid when `fit_profile!='offset'`"
raise ValueError(msg)
# nothing else to do here, using offset from fit
elif fit_offset == "gauss":
bgimg += offset_gaussian((data - bgimg)[mask])
elif fit_offset == "mean":
bgimg += np.mean((data - bgimg)[mask])
elif fit_offset == "mode":
bgimg += offset_mode((data - bgimg)[mask])
if ret_mask:
ret = (bgimg, mask)
else:
ret = bgimg
return ret |
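A quick sanity-check sketch on synthetic data (values are arbitrary): an image that is a pure offset plus tilt should be reproduced almost exactly when a tilt is fitted to its 5 px border.

import numpy as np

x, y = np.meshgrid(np.arange(100), np.arange(100), indexing="ij")
data = 1.5 + 0.01 * x + 0.02 * y                # offset plus linear tilt
bg = estimate(data, fit_offset="mean", fit_profile="tilt", border_px=5)
deviation = np.max(np.abs(bg - data))           # expected to be tiny (fit tolerance)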
<SYSTEM_TASK:>
Fit a gaussian model to `data` and return its center
<END_TASK>
<USER_TASK:>
Description:
def offset_gaussian(data):
"""Fit a gaussian model to `data` and return its center""" |
nbins = 2 * int(np.ceil(np.sqrt(data.size)))
mind, maxd = data.min(), data.max()
drange = (mind - (maxd - mind) / 2, maxd + (maxd - mind) / 2)
histo = np.histogram(data, nbins, density=True, range=drange)
dx = abs(histo[1][1] - histo[1][2]) / 2
hx = histo[1][1:] - dx
hy = histo[0]
# fit gaussian
gauss = lmfit.models.GaussianModel()
pars = gauss.guess(hy, x=hx)
out = gauss.fit(hy, pars, x=hx)
return out.params["center"] |
<SYSTEM_TASK:>
lmfit 2nd order polynomial residuals
<END_TASK>
<USER_TASK:>
Description:
def poly2o_residual(params, data, mask):
"""lmfit 2nd order polynomial residuals""" |
bg = poly2o_model(params, shape=data.shape)
res = (data - bg)[mask]
return res.flatten() |
<SYSTEM_TASK:>
lmfit tilt residuals
<END_TASK>
<USER_TASK:>
Description:
def tilt_residual(params, data, mask):
"""lmfit tilt residuals""" |
bg = tilt_model(params, shape=data.shape)
res = (data - bg)[mask]
return res.flatten() |
<SYSTEM_TASK:>
Find the side band position of a hologram
<END_TASK>
<USER_TASK:>
Description:
def find_sideband(ft_data, which=+1, copy=True):
"""Find the side band position of a hologram
The hologram is Fourier-transformed and the side band
is determined by finding the maximum amplitude in
Fourier space.
Parameters
----------
ft_data: 2d ndarray
Fourier transform of the hologram image
which: +1 or -1
which sideband to search for:
- +1: upper half
- -1: lower half
copy: bool
copy `ft_data` before modification
Returns
-------
fsx, fsy : tuple of floats
coordinates of the side band in Fourier space frequencies
""" |
if copy:
ft_data = ft_data.copy()
if which not in [+1, -1]:
raise ValueError("`which` must be +1 or -1!")
ox, oy = ft_data.shape
cx = ox // 2
cy = oy // 2
minlo = max(int(np.ceil(ox / 42)), 5)
if which == +1:
# remove lower part
ft_data[cx - minlo:] = 0
else:
ft_data[:cx + minlo] = 0
# remove values around axes
ft_data[cx - 3:cx + 3, :] = 0
ft_data[:, cy - 3:cy + 3] = 0
# find maximum
am = np.argmax(np.abs(ft_data))
iy = am % oy
ix = int((am - iy) / oy)
fx = np.fft.fftshift(np.fft.fftfreq(ft_data.shape[0]))[ix]
fy = np.fft.fftshift(np.fft.fftfreq(ft_data.shape[1]))[iy]
return (fx, fy) |
<SYSTEM_TASK:>
Compute the 2D Fourier transform with zero padding
<END_TASK>
<USER_TASK:>
Description:
def fourier2dpad(data, zero_pad=True):
"""Compute the 2D Fourier transform with zero padding
Parameters
----------
data: 2d float ndarray
real-valued image data
zero_pad: bool
perform zero-padding to next order of 2
""" |
if zero_pad:
# zero padding size is next order of 2
(N, M) = data.shape
order = int(max(64., 2**np.ceil(np.log(2 * max(N, M)) / np.log(2))))
# this is faster than np.pad
datapad = np.zeros((order, order), dtype=float)
datapad[:data.shape[0], :data.shape[1]] = data
else:
datapad = data
# Fourier transform
fft = np.fft.fftshift(np.fft.fft2(datapad))
return fft |
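A small sketch that combines `fourier2dpad` with `find_sideband` above on a synthetic fringe pattern; the carrier frequencies (0.1, 0.05) are made up, and the detected coordinates should land near one of the two conjugate side bands, i.e. roughly ±(0.1, 0.05).

import numpy as np

x, y = np.meshgrid(np.arange(128), np.arange(128), indexing="ij")
holo = 1 + 0.5 * np.cos(2 * np.pi * (0.1 * x + 0.05 * y))   # off-axis fringes
ft = fourier2dpad(holo, zero_pad=True)
fsx, fsy = find_sideband(ft, which=+1)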
<SYSTEM_TASK:>
Recursively copy all hdf5 data from one group to another
<END_TASK>
<USER_TASK:>
Description:
def copyh5(inh5, outh5):
"""Recursively copy all hdf5 data from one group to another
Data from links is copied.
Parameters
----------
inh5: str, h5py.File, or h5py.Group
The input hdf5 data. This can be either a file name or
an hdf5 object.
outh5: str, h5py.File, h5py.Group, or None
The output hdf5 data. This can be either a file name or
an hdf5 object. If set to `None`, a new hdf5 object is
created in memory.
Notes
-----
All data in outh5 are overridden by the inh5 data.
""" |
if not isinstance(inh5, h5py.Group):
inh5 = h5py.File(inh5, mode="r")
if outh5 is None:
# create file in memory
h5kwargs = {"name": "qpimage{}.h5".format(QPImage._instances),
"driver": "core",
"backing_store": False,
"mode": "a"}
outh5 = h5py.File(**h5kwargs)
return_h5obj = True
QPImage._instances += 1
elif not isinstance(outh5, h5py.Group):
# create new file
outh5 = h5py.File(outh5, mode="w")
return_h5obj = False
else:
return_h5obj = True
# begin iteration
for key in inh5:
if key in outh5:
del outh5[key]
if isinstance(inh5[key], h5py.Group):
outh5.create_group(key)
copyh5(inh5[key], outh5[key])
else:
dset = write_image_dataset(group=outh5,
key=key,
data=inh5[key][:],
h5dtype=inh5[key].dtype)
dset.attrs.update(inh5[key].attrs)
outh5.attrs.update(inh5.attrs)
if return_h5obj:
# in-memory or previously created instance of h5py.File
return outh5
else:
# properly close the file and return its name
fn = outh5.filename
outh5.flush()
outh5.close()
return fn |
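A minimal sketch: write a small file to disk and copy it into memory (file name and contents are placeholders):

import h5py
import numpy as np

with h5py.File("source.h5", mode="w") as src:
    src.create_dataset("raw", data=np.zeros((8, 8)))
    src.attrs["wavelength"] = 550e-9

with h5py.File("source.h5", mode="r") as src:
    mem = copyh5(src, None)   # returns an in-memory h5py.File ("core" driver)
    assert "raw" in mem and mem.attrs["wavelength"] == 550e-9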
<SYSTEM_TASK:>
Convert which data to string or tuple
<END_TASK>
<USER_TASK:>
Description:
def _conv_which_data(which_data):
"""Convert which data to string or tuple
This function improves user convenience,
as `which_data` may come in several forms
(a plain string, a comma-separated string, a list, or a tuple),
all of which are handled internally by this method.
""" |
if isinstance(which_data, str):
which_data = which_data.lower().strip()
if which_data.count(","):
# convert comma string to list
which_data = [w.strip() for w in which_data.split(",")]
# remove empty strings
which_data = [w for w in which_data if w]
if len(which_data) == 1:
return which_data[0]
else:
# convert to tuple
return tuple(which_data)
else:
return which_data
elif isinstance(which_data, (list, tuple)):
which_data = [w.lower().strip() for w in which_data]
return tuple(which_data)
elif which_data is None:
return None
else:
msg = "unknown type for `which_data`: {}".format(which_data)
raise ValueError(msg) |
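For illustration, the conversions this helper performs (assuming it is exposed as a static method, as its use via `QPImage._conv_which_data` elsewhere in the module suggests):

assert QPImage._conv_which_data("Phase") == "phase"
assert QPImage._conv_which_data("phase, amplitude") == ("phase", "amplitude")
assert QPImage._conv_which_data(["PHASE", "intensity"]) == ("phase", "intensity")
assert QPImage._conv_which_data(None) is None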
<SYSTEM_TASK:>
Convert input data to phase and amplitude
<END_TASK>
<USER_TASK:>
Description:
def _get_amp_pha(self, data, which_data):
"""Convert input data to phase and amplitude
Parameters
----------
data: 2d ndarray (float or complex) or list
The experimental data (see `which_data`)
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "hologram", "phase,amplitude", or
"phase,intensity", where the latter two require an
indexable object with the phase data as first element.
Returns
-------
amp, pha: tuple of (:class:`Amplitude`, :class:`Phase`)
""" |
which_data = QPImage._conv_which_data(which_data)
if which_data not in VALID_INPUT_DATA:
msg = "`which_data` must be one of {}!".format(VALID_INPUT_DATA)
raise ValueError(msg)
if which_data == "field":
amp = np.abs(data)
pha = np.angle(data)
elif which_data == "phase":
pha = data
amp = np.ones_like(data)
elif which_data == ("phase", "amplitude"):
amp = data[1]
pha = data[0]
elif which_data == ("phase", "intensity"):
amp = np.sqrt(data[1])
pha = data[0]
elif which_data == "hologram":
amp, pha = self._get_amp_pha(holo.get_field(data, **self.holo_kw),
which_data="field")
if amp.size == 0 or pha.size == 0:
msg = "`data` with shape {} has zero size!".format(amp.shape)
raise ValueError(msg)
# phase unwrapping (take into account nans)
nanmask = np.isnan(pha)
if np.sum(nanmask):
# create masked array
# skimage.restoration.unwrap_phase cannot handle nan data
# (even if masked)
pham = pha.copy()
pham[nanmask] = 0
pham = np.ma.masked_array(pham, mask=nanmask)
pha = unwrap_phase(pham, seed=47)
pha[nanmask] = np.nan
else:
pha = unwrap_phase(pha, seed=47)
return amp, pha |
<SYSTEM_TASK:>
list of tuples with QPImage meta data
<END_TASK>
<USER_TASK:>
Description:
def info(self):
"""list of tuples with QPImage meta data""" |
info = []
# meta data
meta = self.meta
for key in meta:
info.append((key, self.meta[key]))
# background correction
for imdat in [self._amp, self._pha]:
info += imdat.info
return info |
<SYSTEM_TASK:>
Clear background correction
<END_TASK>
<USER_TASK:>
Description:
def clear_bg(self, which_data=("amplitude", "phase"), keys="fit"):
"""Clear background correction
Parameters
----------
which_data: str or list of str
From which type of data to remove the background
information. The list contains either "amplitude",
"phase", or both.
keys: str or list of str
Which type of background data to remove. One of:
- "fit": the background data computed with
:func:`qpimage.QPImage.compute_bg`
- "data": the experimentally obtained background image
""" |
which_data = QPImage._conv_which_data(which_data)
if isinstance(keys, str):
# make sure keys is a list of strings
keys = [keys]
# Get image data for clearing
imdats = []
if "amplitude" in which_data:
imdats.append(self._amp)
if "phase" in which_data:
imdats.append(self._pha)
if not imdats:
msg = "`which_data` must contain 'phase' or 'amplitude'!"
raise ValueError(msg)
# Perform clearing of backgrounds
for imdat in imdats:
for key in keys:
imdat.del_bg(key) |
<SYSTEM_TASK:>
Compute background correction
<END_TASK>
<USER_TASK:>
Description:
def compute_bg(self, which_data="phase",
fit_offset="mean", fit_profile="tilt",
border_m=0, border_perc=0, border_px=0,
from_mask=None, ret_mask=False):
"""Compute background correction
Parameters
----------
which_data: str or list of str
From which type of data to remove the background
information. The list contains either "amplitude",
"phase", or both.
fit_profile: str
The type of background profile to fit:
- "offset": offset only
- "poly2o": 2D 2nd order polynomial with mixed terms
- "tilt": 2D linear tilt with offset (default)
fit_offset: str
The method for computing the profile offset
- "fit": offset as fitting parameter
- "gauss": center of a gaussian fit
- "mean": simple average
- "mode": mode (see `qpimage.bg_estimate.mode`)
border_m: float
Assume that a frame of `border_m` meters around the
image is background. The value is converted to
pixels and rounded.
border_perc: float
Assume that a frame of `border_perc` percent around
the image is background. The value is converted to
pixels and rounded. If the aspect ratio of the image
is not one, then the average of the data's shape is
used to compute the percentage in pixels.
border_px: float
Assume that a frame of `border_px` pixels around
the image is background.
from_mask: boolean np.ndarray or None
Use a boolean array to define the background area.
The boolean mask must have the same shape as the
input data. `True` elements are used for background
estimation.
ret_mask: bool
Return the boolean mask used to compute the background.
Notes
-----
The `border_*` values are translated to pixel values and
the largest pixel border is used to generate a mask
image for background computation.
If any of the `border_*` arguments are non-zero and
`from_mask` is given, the intersection of the two
is used, i.e. the positions where both the frame
mask and `from_mask` are `True`.
See Also
--------
qpimage.bg_estimate.estimate
""" |
which_data = QPImage._conv_which_data(which_data)
# check validity
if not ("amplitude" in which_data or
"phase" in which_data):
msg = "`which_data` must contain 'phase' or 'amplitude'!"
raise ValueError(msg)
# get border in px
border_list = []
if border_m:
if border_m < 0:
raise ValueError("`border_m` must be greater than zero!")
border_list.append(border_m / self.meta["pixel size"])
if border_perc:
if border_perc < 0 or border_perc > 50:
raise ValueError("`border_perc` must be in interval [0, 50]!")
size = np.average(self.shape)
border_list.append(size * border_perc / 100)
if border_px:
border_list.append(border_px)
# get maximum border size
if border_list:
border_px = int(np.round(np.max(border_list)))
elif from_mask is None:
raise ValueError("Neither `from_mask` nor `border_*` given!")
elif np.all(from_mask == 0):
raise ValueError("`from_mask` must not be all-zero!")
# Get affected image data
imdat_list = []
if "amplitude" in which_data:
imdat_list.append(self._amp)
if "phase" in which_data:
imdat_list.append(self._pha)
# Perform correction
for imdat in imdat_list:
mask = imdat.estimate_bg(fit_offset=fit_offset,
fit_profile=fit_profile,
border_px=border_px,
from_mask=from_mask,
ret_mask=ret_mask)
return mask |
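A usage sketch, assuming the public `qpimage` package API: build a `QPImage` from synthetic phase data and tilt-correct it using a 10 px border.

import numpy as np
import qpimage

pha = 0.05 * np.arange(64)[None, :] + 0.01 * np.random.rand(64, 64)
qpi = qpimage.QPImage(data=pha, which_data="phase")
qpi.compute_bg(which_data="phase", fit_offset="mean",
               fit_profile="tilt", border_px=10)
corrected = qpi.pha   # background-corrected phase image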
<SYSTEM_TASK:>
Create a copy of the current instance
<END_TASK>
<USER_TASK:>
Description:
def copy(self, h5file=None):
"""Create a copy of the current instance
This is done by recursively copying the underlying hdf5 data.
Parameters
----------
h5file: str, h5py.File, h5py.Group, or None
see `QPImage.__init__`
""" |
h5 = copyh5(self.h5, h5file)
return QPImage(h5file=h5, h5dtype=self.h5dtype) |
<SYSTEM_TASK:>
Compute a numerically refocused QPImage
<END_TASK>
<USER_TASK:>
Description:
def refocus(self, distance, method="helmholtz", h5file=None, h5mode="a"):
"""Compute a numerically refocused QPImage
Parameters
----------
distance: float
Focusing distance [m]
method: str
Refocusing method, one of ["helmholtz","fresnel"]
h5file: str, h5py.Group, h5py.File, or None
A path to an hdf5 data file where the QPImage is cached.
If set to `None` (default), all data will be handled in
memory using the "core" driver of the :mod:`h5py`'s
:class:`h5py:File` class. If the file does not exist,
it is created. If the file already exists, it is opened
with the file mode defined by `h5mode`. If this is
an instance of h5py.Group or h5py.File, then this will
be used to internally store all data.
h5mode: str
Valid file modes are (only applies if `h5file` is a path)
- "r": Readonly, file must exist
- "r+": Read/write, file must exist
- "w": Create file, truncate if exists
- "w-" or "x": Create file, fail if exists
- "a": Read/write if exists, create otherwise (default)
Returns
-------
qpi: qpimage.QPImage
Refocused phase and amplitude data
See Also
--------
:mod:`nrefocus`: library used for numerical focusing
""" |
field2 = nrefocus.refocus(field=self.field,
d=distance/self["pixel size"],
nm=self["medium index"],
res=self["wavelength"]/self["pixel size"],
method=method
)
if "identifier" in self:
ident = self["identifier"]
else:
ident = ""
meta_data = self.meta
meta_data["identifier"] = "{}@{}{:.5e}m".format(ident,
method[0],
distance)
qpi2 = QPImage(data=field2,
which_data="field",
meta_data=meta_data,
h5file=h5file,
h5mode=h5mode)
return qpi2 |
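A short sketch of numerical refocusing, assuming the public `qpimage` package API and an installed `nrefocus`; the meta data keys are the ones referenced in the code above and the numbers are placeholders.

import numpy as np
import qpimage

meta = {"pixel size": 0.107e-6, "wavelength": 550e-9, "medium index": 1.335}
qpi = qpimage.QPImage(data=np.zeros((64, 64)), which_data="phase", meta_data=meta)
qpi_foc = qpi.refocus(distance=5e-6, method="helmholtz")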
<SYSTEM_TASK:>
Set background amplitude and phase data
<END_TASK>
<USER_TASK:>
Description:
def set_bg_data(self, bg_data, which_data=None):
"""Set background amplitude and phase data
Parameters
----------
bg_data: 2d ndarray (float or complex), list, QPImage, or `None`
The background data (must be same type as `data`).
If set to `None`, the background data is reset.
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "phase,amplitude", or "phase,intensity",
where the latter two require an indexable object for
`bg_data` with the phase data as first element.
""" |
if isinstance(bg_data, QPImage):
if which_data is not None:
msg = "`which_data` must not be set if `bg_data` is QPImage!"
raise ValueError(msg)
pha, amp = bg_data.pha, bg_data.amp
elif bg_data is None:
# Reset phase and amplitude
amp, pha = None, None
else:
# Compute phase and amplitude from data and which_data
amp, pha = self._get_amp_pha(bg_data, which_data)
# Set background data
self._amp.set_bg(amp, key="data")
self._pha.set_bg(pha, key="data") |
<SYSTEM_TASK:>
Add a QPImage instance to the QPSeries
<END_TASK>
<USER_TASK:>
Description:
def add_qpimage(self, qpi, identifier=None, bg_from_idx=None):
"""Add a QPImage instance to the QPSeries
Parameters
----------
qpi: qpimage.QPImage
The QPImage that is added to the series
identifier: str
Identifier key for `qpi`
bg_from_idx: int or None
Use the background data from the data stored in this index,
creating hard links within the hdf5 file.
(Saves memory if, e.g., all qpimages are corrected with the same data.)
""" |
if not isinstance(qpi, QPImage):
raise ValueError("`qpi` must be an instance of QPImage!")
if "identifier" in qpi and identifier is None:
identifier = qpi["identifier"]
if identifier and identifier in self:
msg = "The identifier '{}' already ".format(identifier) \
+ "exists! You can either change the identifier of " \
+ "'{}' or remove it.".format(qpi)
raise ValueError(msg)
# determine number of qpimages
num = len(self)
# indices start at zero; do not add 1
name = "qpi_{}".format(num)
group = self.h5.create_group(name)
thisqpi = qpi.copy(h5file=group)
if bg_from_idx is not None:
# Create hard links
refqpi = self[bg_from_idx]
thisqpi._amp.set_bg(bg=refqpi._amp.h5["bg_data"]["data"])
thisqpi._pha.set_bg(bg=refqpi._pha.h5["bg_data"]["data"])
if identifier:
# set identifier
group.attrs["identifier"] = identifier |
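A sketch of building a series in which the second image reuses the background of the first via hard links (`bg_from_idx=0`); this assumes the public `qpimage.QPSeries` constructor with its in-memory defaults:

import numpy as np
import qpimage

qpi0 = qpimage.QPImage(data=np.zeros((32, 32)), which_data="phase",
                       bg_data=np.zeros((32, 32)))
qpi1 = qpimage.QPImage(data=np.ones((32, 32)), which_data="phase")

qps = qpimage.QPSeries()
qps.add_qpimage(qpi0, identifier="reference")
qps.add_qpimage(qpi1, identifier="sample", bg_from_idx=0)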
<SYSTEM_TASK:>
Return a single QPImage of the series
<END_TASK>
<USER_TASK:>
Description:
def get_qpimage(self, index):
"""Return a single QPImage of the series
Parameters
----------
index: int or str
Index or identifier of the QPImage
Notes
-----
Instead of ``qps.get_qpimage(index)``, it is possible
to use the short-hand ``qps[index]``.
""" |
if isinstance(index, str):
# search for the identifier
for ii in range(len(self)):
qpi = self[ii]
if "identifier" in qpi and qpi["identifier"] == index:
group = self.h5["qpi_{}".format(ii)]
break
else:
msg = "QPImage identifier '{}' not found!".format(index)
raise KeyError(msg)
else:
# integer index
if index < -len(self):
msg = "Index {} out of bounds for QPSeries of size {}!".format(
index, len(self))
raise ValueError(msg)
elif index < 0:
index += len(self)
name = "qpi_{}".format(index)
if name in self.h5:
group = self.h5[name]
else:
msg = "Index {} not found for QPSeries of length {}".format(
index, len(self))
raise KeyError(msg)
return QPImage(h5file=group) |
<SYSTEM_TASK:>
Collect invariants from the bases and merge them with the invariants in the namespace.
<END_TASK>
<USER_TASK:>
Description:
def _collapse_invariants(bases: List[type], namespace: MutableMapping[str, Any]) -> None:
"""Collect invariants from the bases and merge them with the invariants in the namespace.""" |
invariants = [] # type: List[Contract]
# Add invariants of the bases
for base in bases:
if hasattr(base, "__invariants__"):
invariants.extend(getattr(base, "__invariants__"))
# Add invariants in the current namespace
if '__invariants__' in namespace:
invariants.extend(namespace['__invariants__'])
# Change the final invariants in the namespace
if invariants:
namespace["__invariants__"] = invariants |
<SYSTEM_TASK:>
Collapse function preconditions with the preconditions collected from the base classes.
<END_TASK>
<USER_TASK:>
Description:
def _collapse_preconditions(base_preconditions: List[List[Contract]], bases_have_func: bool,
preconditions: List[List[Contract]], func: Callable[..., Any]) -> List[List[Contract]]:
"""
Collapse function preconditions with the preconditions collected from the base classes.
:param base_preconditions: preconditions collected from the base classes (grouped by base class)
:param bases_have_func: True if one of the base classes has the function
:param preconditions: preconditions of the function (before the collapse)
:param func: function whose preconditions we are collapsing
:return: collapsed sequence of precondition groups
""" |
if not base_preconditions and bases_have_func and preconditions:
raise TypeError(("The function {} can not weaken the preconditions because the bases specify "
"no preconditions at all. Hence this function must accept all possible input since "
"the preconditions are OR'ed and no precondition implies a dummy precondition which is always "
"fulfilled.").format(func.__qualname__))
return base_preconditions + preconditions |
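For context, a small sketch of the rule this enforces, using icontract's public API (`icontract.DBC` and `icontract.require`); defining the subclass triggers the `TypeError` above at class-definition time.

import icontract

class Base(icontract.DBC):
    def do(self, x: int) -> int:
        return x                     # the base method specifies no preconditions

try:
    class Derived(Base):
        @icontract.require(lambda x: x > 0)   # attempt to add a precondition
        def do(self, x: int) -> int:
            return x
except TypeError as err:
    print(err)                       # "... can not weaken the preconditions ..."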
<SYSTEM_TASK:>
Collapse snapshots of pre-invocation values with the snapshots collected from the base classes.
<END_TASK>
<USER_TASK:>
Description:
def _collapse_snapshots(base_snapshots: List[Snapshot], snapshots: List[Snapshot]) -> List[Snapshot]:
"""
Collapse snapshots of pre-invocation values with the snapshots collected from the base classes.
:param base_snapshots: snapshots collected from the base classes
:param snapshots: snapshots of the function (before the collapse)
:return: collapsed sequence of snapshots
""" |
seen_names = set() # type: Set[str]
collapsed = base_snapshots + snapshots
for snap in collapsed:
if snap.name in seen_names:
raise ValueError("There are conflicting snapshots with the name: {!r}.\n\n"
"Please mind that the snapshots are inherited from the base classes. "
"Does one of the base classes define a snapshot with the same name?".format(snap.name))
seen_names.add(snap.name)
return collapsed |
<SYSTEM_TASK:>
Collapse function postconditions with the postconditions collected from the base classes.
<END_TASK>
<USER_TASK:>
Description:
def _collapse_postconditions(base_postconditions: List[Contract], postconditions: List[Contract]) -> List[Contract]:
"""
Collapse function postconditions with the postconditions collected from the base classes.
:param base_postconditions: postconditions collected from the base classes
:param postconditions: postconditions of the function (before the collapse)
:return: collapsed sequence of postconditions
""" |
return base_postconditions + postconditions |
<SYSTEM_TASK:>
Collect preconditions and postconditions from the bases and decorate the function at the ``key``.
<END_TASK>
<USER_TASK:>
Description:
def _decorate_namespace_function(bases: List[type], namespace: MutableMapping[str, Any], key: str) -> None:
"""Collect preconditions and postconditions from the bases and decorate the function at the ``key``.""" |
# pylint: disable=too-many-branches
# pylint: disable=too-many-locals
value = namespace[key]
assert inspect.isfunction(value) or isinstance(value, (staticmethod, classmethod))
# Determine the function to be decorated
if inspect.isfunction(value):
func = value
elif isinstance(value, (staticmethod, classmethod)):
func = value.__func__
else:
raise NotImplementedError("Unexpected value for a function: {}".format(value))
# Collect preconditions and postconditions of the function
preconditions = [] # type: List[List[Contract]]
snapshots = [] # type: List[Snapshot]
postconditions = [] # type: List[Contract]
contract_checker = icontract._checkers.find_checker(func=func)
if contract_checker is not None:
preconditions = contract_checker.__preconditions__ # type: ignore
snapshots = contract_checker.__postcondition_snapshots__ # type: ignore
postconditions = contract_checker.__postconditions__ # type: ignore
# Collect the preconditions and postconditions from bases.
#
# Preconditions and postconditions of __init__ of base classes are deliberately ignored (and not collapsed) since
# initialization is an operation specific to the concrete class and does not relate to the class hierarchy.
if key not in ['__init__']:
base_preconditions = [] # type: List[List[Contract]]
base_snapshots = [] # type: List[Snapshot]
base_postconditions = [] # type: List[Contract]
bases_have_func = False
for base in bases:
if hasattr(base, key):
bases_have_func = True
# Check if there is a checker function in the base class
base_func = getattr(base, key)
base_contract_checker = icontract._checkers.find_checker(func=base_func)
# Ignore functions which don't have preconditions or postconditions
if base_contract_checker is not None:
base_preconditions.extend(base_contract_checker.__preconditions__) # type: ignore
base_snapshots.extend(base_contract_checker.__postcondition_snapshots__) # type: ignore
base_postconditions.extend(base_contract_checker.__postconditions__) # type: ignore
# Collapse preconditions and postconditions from the bases with the function's own ones
preconditions = _collapse_preconditions(
base_preconditions=base_preconditions,
bases_have_func=bases_have_func,
preconditions=preconditions,
func=func)
snapshots = _collapse_snapshots(base_snapshots=base_snapshots, snapshots=snapshots)
postconditions = _collapse_postconditions(
base_postconditions=base_postconditions, postconditions=postconditions)
if preconditions or postconditions:
if contract_checker is None:
contract_checker = icontract._checkers.decorate_with_checker(func=func)
# Replace the function with the function decorated with contract checks
if inspect.isfunction(value):
namespace[key] = contract_checker
elif isinstance(value, staticmethod):
namespace[key] = staticmethod(contract_checker)
elif isinstance(value, classmethod):
namespace[key] = classmethod(contract_checker)
else:
raise NotImplementedError("Unexpected value for a function: {}".format(value))
# Override the preconditions and postconditions
contract_checker.__preconditions__ = preconditions # type: ignore
contract_checker.__postcondition_snapshots__ = snapshots # type: ignore
contract_checker.__postconditions__ = postconditions |
<SYSTEM_TASK:>
Collect invariants, preconditions and postconditions from the bases and decorate all the methods.
<END_TASK>
<USER_TASK:>
Description:
def _dbc_decorate_namespace(bases: List[type], namespace: MutableMapping[str, Any]) -> None:
"""
Collect invariants, preconditions and postconditions from the bases and decorate all the methods.
Instance methods are simply replaced with the decorated function. Properties, class methods and static methods are
overridden with new instances of ``property``, ``classmethod`` and ``staticmethod``, respectively.
""" |
_collapse_invariants(bases=bases, namespace=namespace)
for key, value in namespace.items():
if inspect.isfunction(value) or isinstance(value, (staticmethod, classmethod)):
_decorate_namespace_function(bases=bases, namespace=namespace, key=key)
elif isinstance(value, property):
_decorate_namespace_property(bases=bases, namespace=namespace, key=key)
else:
# Ignore the value which is neither a function nor a property
pass |
<SYSTEM_TASK:>
Check whether we want to represent the value in the error message on contract breach.
<END_TASK>
<USER_TASK:>
Description:
def _representable(value: Any) -> bool:
"""
Check whether we want to represent the value in the error message on contract breach.
We do not want to represent classes, methods, modules and functions.
:param value: value related to an AST node
:return: True if we want to represent it in the violation error
""" |
return not inspect.isclass(value) and not inspect.isfunction(value) and not inspect.ismethod(value) and not \
inspect.ismodule(value) and not inspect.isbuiltin(value) |
<SYSTEM_TASK:>
Parse the file in which the decorator is called and figure out the corresponding call AST node.
<END_TASK>
<USER_TASK:>
Description:
def inspect_decorator(lines: List[str], lineno: int, filename: str) -> DecoratorInspection:
"""
Parse the file in which the decorator is called and figure out the corresponding call AST node.
:param lines: lines of the source file corresponding to the decorator call
:param lineno: line index (starting with 0) of one of the lines in the decorator call
:param filename: name of the file where decorator is called
:return: inspected decorator call
""" |
if lineno < 0 or lineno >= len(lines):
raise ValueError(("Given line number {} of one of the decorator lines "
"is not within the range [{}, {}) of lines in {}").format(lineno, 0, len(lines), filename))
# Go up till a line starts with a decorator
decorator_lineno = None # type: Optional[int]
for i in range(lineno, -1, -1):
if _DECORATOR_RE.match(lines[i]):
decorator_lineno = i
break
if decorator_lineno is None:
raise SyntaxError("Decorator corresponding to the line {} could not be found in file {}: {!r}".format(
lineno + 1, filename, lines[lineno]))
# Find the decorator end -- it's either a function definition, a class definition or another decorator
decorator_end_lineno = None # type: Optional[int]
for i in range(lineno + 1, len(lines)):
line = lines[i]
if _DECORATOR_RE.match(line) or _DEF_CLASS_RE.match(line):
decorator_end_lineno = i
break
if decorator_end_lineno is None:
raise SyntaxError(("The next statement following the decorator corresponding to the line {} "
"could not be found in file {}: {!r}").format(lineno + 1, filename, lines[lineno]))
decorator_lines = lines[decorator_lineno:decorator_end_lineno]
# We need to dedent the decorator and add a dummy decoratee so that we can parse its text as valid source code.
decorator_text = textwrap.dedent("".join(decorator_lines)) + "def dummy_{}(): pass".format(uuid.uuid4().hex)
atok = asttokens.ASTTokens(decorator_text, parse=True)
assert isinstance(atok.tree, ast.Module), "Expected the parsed decorator text to live in an AST module."
module_node = atok.tree
assert len(module_node.body) == 1, "Expected the module AST of the decorator text to have a single statement."
assert isinstance(module_node.body[0], ast.FunctionDef), \
"Expected the only statement in the AST module corresponding to the decorator text to be a function definition."
func_def_node = module_node.body[0]
assert len(func_def_node.decorator_list) == 1, \
"Expected the function AST node corresponding to the decorator text to have a single decorator."
assert isinstance(func_def_node.decorator_list[0], ast.Call), \
"Expected the only decorator in the function definition AST node corresponding to the decorator text " \
"to be a call node."
call_node = func_def_node.decorator_list[0]
return DecoratorInspection(atok=atok, node=call_node) |
<SYSTEM_TASK:>
Inspect the decorator and extract the condition as lambda.
<END_TASK>
<USER_TASK:>
Description:
def find_lambda_condition(decorator_inspection: DecoratorInspection) -> Optional[ConditionLambdaInspection]:
"""
Inspect the decorator and extract the condition as lambda.
If the condition is not given as a lambda function, return None.
""" |
call_node = decorator_inspection.node
lambda_node = None # type: Optional[ast.Lambda]
if len(call_node.args) > 0:
assert isinstance(call_node.args[0], ast.Lambda), \
("Expected the first argument to the decorator to be a condition as lambda AST node, "
"but got: {}").format(type(call_node.args[0]))
lambda_node = call_node.args[0]
elif len(call_node.keywords) > 0:
for keyword in call_node.keywords:
if keyword.arg == "condition":
assert isinstance(keyword.value, ast.Lambda), \
"Expected lambda node as value of the 'condition' argument to the decorator."
lambda_node = keyword.value
break
assert lambda_node is not None, "Expected to find a keyword AST node with 'condition' arg, but found none"
else:
raise AssertionError(
"Expected a call AST node of a decorator to have either args or keywords, but got: {}".format(
ast.dump(call_node)))
return ConditionLambdaInspection(atok=decorator_inspection.atok, node=lambda_node) |
<SYSTEM_TASK:>
Represent function arguments and frame values in the error message on contract breach.
<END_TASK>
<USER_TASK:>
Description:
def repr_values(condition: Callable[..., bool], lambda_inspection: Optional[ConditionLambdaInspection],
condition_kwargs: Mapping[str, Any], a_repr: reprlib.Repr) -> List[str]:
# pylint: disable=too-many-locals
"""
Represent function arguments and frame values in the error message on contract breach.
:param condition: condition function of the contract
:param lambda_inspection:
inspected lambda AST node corresponding to the condition function (None if the condition was not given as a
lambda function)
:param condition_kwargs: condition arguments
:param a_repr: representation instance that defines how the values are represented.
:return: list of value representations
""" |
if _is_lambda(a_function=condition):
assert lambda_inspection is not None, "Expected a lambda inspection when given a condition as a lambda function"
else:
assert lambda_inspection is None, "Expected no lambda inspection in a condition given as a non-lambda function"
reprs = dict() # type: MutableMapping[str, Any]
if lambda_inspection is not None:
# Collect the variable lookup of the condition function:
variable_lookup = [] # type: List[Mapping[str, Any]]
# Add condition arguments to the lookup
variable_lookup.append(condition_kwargs)
# Add closure to the lookup
closure_dict = dict() # type: Dict[str, Any]
if condition.__closure__ is not None: # type: ignore
closure_cells = condition.__closure__ # type: ignore
freevars = condition.__code__.co_freevars
assert len(closure_cells) == len(freevars), \
"Number of closure cells of a condition function ({}) == number of free vars ({})".format(
len(closure_cells), len(freevars))
for cell, freevar in zip(closure_cells, freevars):
closure_dict[freevar] = cell.cell_contents
variable_lookup.append(closure_dict)
# Add globals to the lookup
if condition.__globals__ is not None: # type: ignore
variable_lookup.append(condition.__globals__) # type: ignore
# pylint: disable=protected-access
recompute_visitor = icontract._recompute.Visitor(variable_lookup=variable_lookup)
recompute_visitor.visit(node=lambda_inspection.node.body)
recomputed_values = recompute_visitor.recomputed_values
repr_visitor = Visitor(
recomputed_values=recomputed_values, variable_lookup=variable_lookup, atok=lambda_inspection.atok)
repr_visitor.visit(node=lambda_inspection.node.body)
reprs = repr_visitor.reprs
else:
for key, val in condition_kwargs.items():
if _representable(value=val):
reprs[key] = val
parts = [] # type: List[str]
for key in sorted(reprs.keys()):
parts.append('{} was {}'.format(key, a_repr.repr(reprs[key])))
return parts |
<SYSTEM_TASK:>
Generate the message upon contract violation.
<END_TASK>
<USER_TASK:>
Description:
def generate_message(contract: Contract, condition_kwargs: Mapping[str, Any]) -> str:
"""Generate the message upon contract violation.""" |
# pylint: disable=protected-access
parts = [] # type: List[str]
if contract.location is not None:
parts.append("{}:\n".format(contract.location))
if contract.description is not None:
parts.append("{}: ".format(contract.description))
lambda_inspection = None # type: Optional[ConditionLambdaInspection]
if not _is_lambda(a_function=contract.condition):
condition_text = contract.condition.__name__
else:
# We need to extract the source code corresponding to the decorator since inspect.getsource() is broken with
# lambdas.
# Find the line corresponding to the condition lambda
lines, condition_lineno = inspect.findsource(contract.condition)
filename = inspect.getsourcefile(contract.condition)
decorator_inspection = inspect_decorator(lines=lines, lineno=condition_lineno, filename=filename)
lambda_inspection = find_lambda_condition(decorator_inspection=decorator_inspection)
assert lambda_inspection is not None, \
"Expected lambda_inspection to be non-None if _is_lambda is True on: {}".format(contract.condition)
condition_text = lambda_inspection.text
parts.append(condition_text)
repr_vals = repr_values(
condition=contract.condition,
lambda_inspection=lambda_inspection,
condition_kwargs=condition_kwargs,
a_repr=contract._a_repr)
if len(repr_vals) == 0:
# Do not append anything since no value could be represented as a string.
# This could appear in case we have, for example, a generator expression as the return value of a lambda.
pass
elif len(repr_vals) == 1:
parts.append(': ')
parts.append(repr_vals[0])
else:
parts.append(':\n')
parts.append('\n'.join(repr_vals))
msg = "".join(parts)
return msg |
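
In practice the message shows the condition text followed by the representations of the involved values. A hedged end-to-end sketch, assuming the public icontract API:

import icontract

@icontract.require(lambda x: x > 0)
def inc(x: int) -> int:
    return x + 1

try:
    inc(-1)
except icontract.ViolationError as err:
    print(err)  # roughly: "<file>:<line>:\nx > 0: x was -1"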
<SYSTEM_TASK:>
Resolve the name from the variable look-up and the built-ins.
<END_TASK>
<USER_TASK:>
Description:
def visit_Name(self, node: ast.Name) -> None:
"""
Resolve the name from the variable look-up and the built-ins.
Due to possible branching (e.g., If-expressions), some nodes might lack the recomputed values. These nodes
are ignored.
""" |
if node in self._recomputed_values:
value = self._recomputed_values[node]
# Check if it is a non-built-in
is_builtin = True
for lookup in self._variable_lookup:
if node.id in lookup:
is_builtin = False
break
if not is_builtin and _representable(value=value):
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) |
<SYSTEM_TASK:>
Represent the attribute by dumping its source code.
<END_TASK>
<USER_TASK:>
Description:
def visit_Attribute(self, node: ast.Attribute) -> None:
"""Represent the attribute by dumping its source code.""" |
if node in self._recomputed_values:
value = self._recomputed_values[node]
if _representable(value=value):
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) |
<SYSTEM_TASK:>
Represent the call by dumping its source code.
<END_TASK>
<USER_TASK:>
Description:
def visit_Call(self, node: ast.Call) -> None:
"""Represent the call by dumping its source code.""" |
if node in self._recomputed_values:
value = self._recomputed_values[node]
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) |
<SYSTEM_TASK:>
Represent the list comprehension by dumping its source code.
<END_TASK>
<USER_TASK:>
Description:
def visit_ListComp(self, node: ast.ListComp) -> None:
"""Represent the list comprehension by dumping its source code.""" |
if node in self._recomputed_values:
value = self._recomputed_values[node]
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) |
<SYSTEM_TASK:>
Represent the dictionary comprehension by dumping its source code.
<END_TASK>
<USER_TASK:>
Description:
def visit_DictComp(self, node: ast.DictComp) -> None:
"""Represent the dictionary comprehension by dumping its source code.""" |
if node in self._recomputed_values:
value = self._recomputed_values[node]
text = self._atok.get_text(node)
self.reprs[text] = value
self.generic_visit(node=node) |
<SYSTEM_TASK:>
Iterate through the stack of decorated functions until the original function.
<END_TASK>
<USER_TASK:>
Description:
def _walk_decorator_stack(func: CallableT) -> Iterable['CallableT']:
"""
Iterate through the stack of decorated functions until the original function.
Assume that all decorators used functools.update_wrapper.
""" |
while hasattr(func, "__wrapped__"):
yield func
func = getattr(func, "__wrapped__")
yield func |
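
The walk relies on functools.update_wrapper (or functools.wraps) setting the __wrapped__ attribute. A minimal standalone sketch of the same idea:

import functools

def noop_decorator(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@noop_decorator
def greet() -> str:
    return "hello"

func = greet
while hasattr(func, "__wrapped__"):
    func = func.__wrapped__  # follow the chain down to the original function
assert func.__name__ == "greet" and func() == "hello"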
<SYSTEM_TASK:>
Iterate through the decorator stack till we find the contract checker.
<END_TASK>
<USER_TASK:>
Description:
def find_checker(func: CallableT) -> Optional[CallableT]:
"""Iterate through the decorator stack till we find the contract checker.""" |
contract_checker = None # type: Optional[CallableT]
for a_wrapper in _walk_decorator_stack(func):
if hasattr(a_wrapper, "__preconditions__") or hasattr(a_wrapper, "__postconditions__"):
contract_checker = a_wrapper
return contract_checker |
<SYSTEM_TASK:>
Inspect the input values received at the wrapper for the actual function call.
<END_TASK>
<USER_TASK:>
Description:
def _kwargs_from_call(param_names: List[str], kwdefaults: Dict[str, Any], args: Tuple[Any, ...],
kwargs: Dict[str, Any]) -> MutableMapping[str, Any]:
"""
Inspect the input values received at the wrapper for the actual function call.
:param param_names: parameter (*i.e.* argument) names of the original (decorated) function
:param kwdefaults: default argument values of the original function
:param args: arguments supplied to the call
:param kwargs: keyword arguments supplied to the call
:return: resolved arguments as they would be passed to the function
""" |
# pylint: disable=too-many-arguments
mapping = dict() # type: MutableMapping[str, Any]
# Set the default argument values as condition parameters.
for param_name, param_value in kwdefaults.items():
mapping[param_name] = param_value
# Override the defaults with the values actually supplied to the function.
for i, func_arg in enumerate(args):
mapping[param_names[i]] = func_arg
for key, val in kwargs.items():
mapping[key] = val
return mapping |
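
For example, given a function def f(a, b, c=3), a call f(1, b=2) resolves as follows (illustrative use of the function defined above):

resolved = _kwargs_from_call(
    param_names=['a', 'b', 'c'], kwdefaults={'c': 3}, args=(1,), kwargs={'b': 2})
assert resolved == {'a': 1, 'b': 2, 'c': 3}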
<SYSTEM_TASK:>
Assert that the contract holds as a precondition.
<END_TASK>
<USER_TASK:>
Description:
def _assert_precondition(contract: Contract, resolved_kwargs: Mapping[str, Any]) -> None:
"""
Assert that the contract holds as a precondition.
:param contract: contract to be verified
:param resolved_kwargs: resolved keyword arguments (including the default values)
:return:
""" |
# Check that all arguments to the condition function have been set.
missing_args = [arg_name for arg_name in contract.condition_args if arg_name not in resolved_kwargs]
if missing_args:
raise TypeError(
("The argument(s) of the precondition have not been set: {}. "
"Does the original function define them? Did you supply them in the call?").format(missing_args))
condition_kwargs = {
arg_name: value
for arg_name, value in resolved_kwargs.items() if arg_name in contract.condition_arg_set
}
check = contract.condition(**condition_kwargs)
if not check:
if contract.error is not None and (inspect.ismethod(contract.error) or inspect.isfunction(contract.error)):
assert contract.error_arg_set is not None, "Expected error_arg_set to be non-None if contract.error is a function."
assert contract.error_args is not None, "Expected error_args to be non-None if contract.error is a function."
error_kwargs = {
arg_name: value
for arg_name, value in resolved_kwargs.items() if arg_name in contract.error_arg_set
}
missing_args = [arg_name for arg_name in contract.error_args if arg_name not in resolved_kwargs]
if missing_args:
msg_parts = [] # type: List[str]
if contract.location is not None:
msg_parts.append("{}:\n".format(contract.location))
msg_parts.append(
("The argument(s) of the precondition error have not been set: {}. "
"Does the original function define them? Did you supply them in the call?").format(missing_args))
raise TypeError(''.join(msg_parts))
raise contract.error(**error_kwargs)
else:
msg = icontract._represent.generate_message(contract=contract, condition_kwargs=condition_kwargs)
if contract.error is None:
raise ViolationError(msg)
elif isinstance(contract.error, type):
raise contract.error(msg) |
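
A hedged sketch of the error branch: if the contract was created with an exception class (e.g. error=ValueError), that class is raised with the generated message instead of a ViolationError (assuming the public icontract API):

import icontract

@icontract.require(lambda x: x >= 0, error=ValueError)
def sqrt_like(x: float) -> float:
    return x ** 0.5

try:
    sqrt_like(-4.0)
except ValueError as err:
    print(type(err).__name__)  # ValueError, carrying the generated message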
<SYSTEM_TASK:>
Assert that the contract holds as a class invariant given the instance of the class.
<END_TASK>
<USER_TASK:>
Description:
def _assert_invariant(contract: Contract, instance: Any) -> None:
"""Assert that the contract holds as a class invariant given the instance of the class.""" |
if 'self' in contract.condition_arg_set:
check = contract.condition(self=instance)
else:
check = contract.condition()
if not check:
if contract.error is not None and (inspect.ismethod(contract.error) or inspect.isfunction(contract.error)):
assert contract.error_arg_set is not None, "Expected error_arg_set to be non-None if contract.error is a function."
assert contract.error_args is not None, "Expected error_args to be non-None if contract.error is a function."
if 'self' in contract.error_arg_set:
raise contract.error(self=instance)
else:
raise contract.error()
else:
if 'self' in contract.condition_arg_set:
msg = icontract._represent.generate_message(contract=contract, condition_kwargs={"self": instance})
else:
msg = icontract._represent.generate_message(contract=contract, condition_kwargs=dict())
if contract.error is None:
raise ViolationError(msg)
elif isinstance(contract.error, type):
raise contract.error(msg)
else:
raise NotImplementedError("Unhandled contract.error: {}".format(contract.error)) |
<SYSTEM_TASK:>
Decorate the function with a checker that verifies the preconditions and postconditions.
<END_TASK>
<USER_TASK:>
Description:
def decorate_with_checker(func: CallableT) -> CallableT:
"""Decorate the function with a checker that verifies the preconditions and postconditions.""" |
assert not hasattr(func, "__preconditions__"), \
"Expected func to have no list of preconditions (there should be only a single contract checker per function)."
assert not hasattr(func, "__postconditions__"), \
"Expected func to have no list of postconditions (there should be only a single contract checker per function)."
assert not hasattr(func, "__postcondition_snapshots__"), \
"Expected func to have no list of postcondition snapshots (there should be only a single contract checker " \
"per function)."
sign = inspect.signature(func)
param_names = list(sign.parameters.keys())
# Determine the default argument values.
kwdefaults = dict() # type: Dict[str, Any]
# Add to the defaults all the values that are needed by the contracts.
for param in sign.parameters.values():
if param.default != inspect.Parameter.empty:
kwdefaults[param.name] = param.default
def wrapper(*args, **kwargs):
"""Wrap func by checking the preconditions and postconditions."""
preconditions = getattr(wrapper, "__preconditions__") # type: List[List[Contract]]
snapshots = getattr(wrapper, "__postcondition_snapshots__") # type: List[Snapshot]
postconditions = getattr(wrapper, "__postconditions__") # type: List[Contract]
resolved_kwargs = _kwargs_from_call(param_names=param_names, kwdefaults=kwdefaults, args=args, kwargs=kwargs)
if postconditions:
if 'result' in resolved_kwargs:
raise TypeError("Unexpected argument 'result' in a function decorated with postconditions.")
if 'OLD' in resolved_kwargs:
raise TypeError("Unexpected argument 'OLD' in a function decorated with postconditions.")
# Assert the preconditions in groups. This is necessary to implement "require else" logic when a class
# weakens the preconditions of its base class.
violation_err = None # type: Optional[ViolationError]
for group in preconditions:
violation_err = None
try:
for contract in group:
_assert_precondition(contract=contract, resolved_kwargs=resolved_kwargs)
break
except ViolationError as err:
violation_err = err
if violation_err is not None:
raise violation_err # pylint: disable=raising-bad-type
# Capture the snapshots
if postconditions:
old_as_mapping = dict() # type: MutableMapping[str, Any]
for snap in snapshots:
# This assert is just a last defense.
# Conflicting snapshot names should have been caught before, either during the decoration or
# in the meta-class.
assert snap.name not in old_as_mapping, "Snapshot with a conflicting name: {}".format(snap.name)
old_as_mapping[snap.name] = _capture_snapshot(a_snapshot=snap, resolved_kwargs=resolved_kwargs)
resolved_kwargs['OLD'] = _Old(mapping=old_as_mapping)
# Execute the wrapped function
result = func(*args, **kwargs)
if postconditions:
resolved_kwargs['result'] = result
# Assert the postconditions as a conjunction
for contract in postconditions:
_assert_postcondition(contract=contract, resolved_kwargs=resolved_kwargs)
return result # type: ignore
# Copy __doc__ and other properties so that doctests can run
functools.update_wrapper(wrapper=wrapper, wrapped=func)
assert not hasattr(wrapper, "__preconditions__"), "Expected no preconditions set on a pristine contract checker."
assert not hasattr(wrapper, "__postcondition_snapshots__"), \
"Expected no postcondition snapshots set on a pristine contract checker."
assert not hasattr(wrapper, "__postconditions__"), "Expected no postconditions set on a pristine contract checker."
# Precondition is a list of condition groups (i.e. disjunctive normal form):
# each group consists of AND'ed preconditions, while the groups are OR'ed.
#
# This is necessary in order to implement "require else" logic when a class weakens the preconditions of
# its base class.
setattr(wrapper, "__preconditions__", [])
setattr(wrapper, "__postcondition_snapshots__", [])
setattr(wrapper, "__postconditions__", [])
return wrapper |
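
A hedged sketch of why preconditions are stored as OR'ed groups ("require else"): a subclass may weaken the preconditions of its base class, and a call passes if any group holds (assuming icontract's DBC base class):

import icontract

class Base(icontract.DBC):
    @icontract.require(lambda x: x % 2 == 0)
    def put(self, x: int) -> None:
        pass

class Derived(Base):
    @icontract.require(lambda x: x % 3 == 0)
    def put(self, x: int) -> None:
        pass

Derived().put(3)  # passes: the base group fails, but the derived group (x % 3 == 0) holds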
<SYSTEM_TASK:>
Find the instance of ``self`` in the arguments.
<END_TASK>
<USER_TASK:>
Description:
def _find_self(param_names: List[str], args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Any:
"""Find the instance of ``self`` in the arguments.""" |
instance_i = param_names.index("self")
if instance_i < len(args):
instance = args[instance_i]
else:
instance = kwargs["self"]
return instance |
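
Illustrative calls to the function defined above: the same instance is found whether it was passed positionally or as a keyword argument:

class Dummy:
    pass

obj = Dummy()
assert _find_self(param_names=["self", "x"], args=(obj, 3), kwargs={}) is obj
assert _find_self(param_names=["self", "x"], args=(), kwargs={"self": obj, "x": 3}) is obj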
<SYSTEM_TASK:>
Decorate the function ``func`` of the class ``cls`` with invariant checks.
<END_TASK>
<USER_TASK:>
Description:
def _decorate_with_invariants(func: CallableT, is_init: bool) -> CallableT:
"""
Decorate the function ``func`` of the class ``cls`` with invariant checks.
If the function has been already decorated with invariant checks, the function returns immediately.
:param func: function to be wrapped
:param is_init: True if the ``func`` is __init__
:return: function wrapped with invariant checks
""" |
if _already_decorated_with_invariants(func=func):
return func
sign = inspect.signature(func)
param_names = list(sign.parameters.keys())
if is_init:
def wrapper(*args, **kwargs):
"""Wrap __init__ method of a class by checking the invariants *after* the invocation."""
result = func(*args, **kwargs)
instance = _find_self(param_names=param_names, args=args, kwargs=kwargs)
for contract in instance.__class__.__invariants__:
_assert_invariant(contract=contract, instance=instance)
return result
else:
def wrapper(*args, **kwargs):
"""Wrap a function of a class by checking the invariants *before* and *after* the invocation."""
instance = _find_self(param_names=param_names, args=args, kwargs=kwargs)
for contract in instance.__class__.__invariants__:
_assert_invariant(contract=contract, instance=instance)
result = func(*args, **kwargs)
for contract in instance.__class__.__invariants__:
_assert_invariant(contract=contract, instance=instance)
return result
functools.update_wrapper(wrapper=wrapper, wrapped=func)
setattr(wrapper, "__is_invariant_check__", True)
return wrapper |
<SYSTEM_TASK:>
Check if the function has been already decorated with an invariant check by going through its decorator stack.
<END_TASK>
<USER_TASK:>
Description:
def _already_decorated_with_invariants(func: CallableT) -> bool:
"""Check if the function has been already decorated with an invariant check by going through its decorator stack.""" |
already_decorated = False
for a_decorator in _walk_decorator_stack(func=func):
if getattr(a_decorator, "__is_invariant_check__", False):
already_decorated = True
break
return already_decorated |
<SYSTEM_TASK:>
Decorate each of the class functions with invariant checks if not already decorated.
<END_TASK>
<USER_TASK:>
Description:
def add_invariant_checks(cls: type) -> None:
"""Decorate each of the class functions with invariant checks if not already decorated.""" |
# Candidates for the decoration as list of (name, dir() value)
init_name_func = None # type: Optional[Tuple[str, Callable[..., None]]]
names_funcs = [] # type: List[Tuple[str, Callable[..., None]]]
names_properties = [] # type: List[Tuple[str, property]]
# Filter out entries in the directory which are certainly not candidates for decoration.
for name, value in [(name, getattr(cls, name)) for name in dir(cls)]:
# We need to ignore __repr__ to prevent endless loops when generating error messages.
# __getattribute__, __setattr__ and __delattr__ are too invasive and alter the state of the instance.
# Hence we don't consider them "public".
if name in ["__repr__", "__getattribute__", "__setattr__", "__delattr__"]:
continue
if name == "__init__":
assert inspect.isfunction(value) or isinstance(value, _SLOT_WRAPPER_TYPE), \
"Expected __init__ to be either a function or a slot wrapper, but got: {}".format(type(value))
init_name_func = (name, value)
continue
if not inspect.isfunction(value) and not isinstance(value, _SLOT_WRAPPER_TYPE) and \
not isinstance(value, property):
continue
# Ignore class methods
if getattr(value, "__self__", None) is cls:
continue
# Ignore "protected"/"private" methods
if name.startswith("_") and not (name.startswith("__") and name.endswith("__")):
continue
if inspect.isfunction(value) or isinstance(value, _SLOT_WRAPPER_TYPE):
names_funcs.append((name, value))
elif isinstance(value, property):
names_properties.append((name, value))
else:
raise NotImplementedError("Unhandled directory entry of class {} for {}: {}".format(cls, name, value))
if init_name_func:
name, func = init_name_func
wrapper = _decorate_with_invariants(func=func, is_init=True)
setattr(cls, name, wrapper)
for name, func in names_funcs:
wrapper = _decorate_with_invariants(func=func, is_init=False)
setattr(cls, name, wrapper)
for name, prop in names_properties:
new_prop = property( # type: ignore
fget=_decorate_with_invariants(func=prop.fget, is_init=False) if prop.fget else None,
fset=_decorate_with_invariants(func=prop.fset, is_init=False) if prop.fset else None,
fdel=_decorate_with_invariants(func=prop.fdel, is_init=False) if prop.fdel else None,
doc=prop.__doc__)
setattr(cls, name, new_prop) |
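
A hedged usage sketch: icontract.invariant applies these checks so that the condition is verified after __init__ and before/after every public method:

import icontract

@icontract.invariant(lambda self: self.balance >= 0)
class Account:
    def __init__(self) -> None:
        self.balance = 0

    def withdraw(self, amount: int) -> None:
        self.balance -= amount

try:
    Account().withdraw(10)
except icontract.ViolationError as err:
    print(err)  # the invariant self.balance >= 0 is violated after the call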
<SYSTEM_TASK:>
Recompute the value as the number at the node.
<END_TASK>
<USER_TASK:>
Description:
def visit_Num(self, node: ast.Num) -> Union[int, float]:
"""Recompute the value as the number at the node.""" |
result = node.n
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Recompute the value as the string at the node.
<END_TASK>
<USER_TASK:>
Description:
def visit_Str(self, node: ast.Str) -> str:
"""Recompute the value as the string at the node.""" |
result = node.s
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Recompute the value as the bytes at the node.
<END_TASK>
<USER_TASK:>
Description:
def visit_Bytes(self, node: ast.Bytes) -> bytes:
"""Recompute the value as the bytes at the node.""" |
result = node.s
self.recomputed_values[node] = result
return result
<SYSTEM_TASK:>
Visit the elements and assemble the results into a list.
<END_TASK>
<USER_TASK:>
Description:
def visit_List(self, node: ast.List) -> List[Any]:
"""Visit the elements and assemble the results into a list.""" |
if isinstance(node.ctx, ast.Store):
raise NotImplementedError("Can not compute the value of a Store on a list")
result = [self.visit(node=elt) for elt in node.elts]
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit the elements and assemble the results into a tuple.
<END_TASK>
<USER_TASK:>
Description:
def visit_Tuple(self, node: ast.Tuple) -> Tuple[Any, ...]:
"""Visit the elements and assemble the results into a tuple.""" |
if isinstance(node.ctx, ast.Store):
raise NotImplementedError("Can not compute the value of a Store on a tuple")
result = tuple(self.visit(node=elt) for elt in node.elts)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit the elements and assemble the results into a set.
<END_TASK>
<USER_TASK:>
Description:
def visit_Set(self, node: ast.Set) -> Set[Any]:
"""Visit the elements and assemble the results into a set.""" |
result = set(self.visit(node=elt) for elt in node.elts)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit keys and values and assemble a dictionary with the results.
<END_TASK>
<USER_TASK:>
Description:
def visit_Dict(self, node: ast.Dict) -> Dict[Any, Any]:
"""Visit keys and values and assemble a dictionary with the results.""" |
recomputed_dict = dict() # type: Dict[Any, Any]
for key, val in zip(node.keys, node.values):
recomputed_dict[self.visit(node=key)] = self.visit(node=val)
self.recomputed_values[node] = recomputed_dict
return recomputed_dict |
<SYSTEM_TASK:>
Forward the node value as a result.
<END_TASK>
<USER_TASK:>
Description:
def visit_NameConstant(self, node: ast.NameConstant) -> Any:
"""Forward the node value as a result.""" |
self.recomputed_values[node] = node.value
return node.value |
<SYSTEM_TASK:>
Load the variable by looking it up in the variable look-up and in the built-ins.
<END_TASK>
<USER_TASK:>
Description:
def visit_Name(self, node: ast.Name) -> Any:
"""Load the variable by looking it up in the variable look-up and in the built-ins.""" |
if not isinstance(node.ctx, ast.Load):
raise NotImplementedError("Can only compute a value of Load on a name {}, but got context: {}".format(
node.id, node.ctx))
result = None # type: Optional[Any]
if node.id in self._name_to_value:
result = self._name_to_value[node.id]
if result is None and hasattr(builtins, node.id):
result = getattr(builtins, node.id)
if result is None and node.id != "None":
# The variable refers to a name local to the lambda (e.g., a target in a generator expression).
# Since we evaluate generator expressions with runtime compilation, a placeholder value is returned here.
return PLACEHOLDER
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit the node operand and apply the operation on the result.
<END_TASK>
<USER_TASK:>
Description:
def visit_UnaryOp(self, node: ast.UnaryOp) -> Any:
"""Visit the node operand and apply the operation on the result.""" |
if isinstance(node.op, ast.UAdd):
result = +self.visit(node=node.operand)
elif isinstance(node.op, ast.USub):
result = -self.visit(node=node.operand)
elif isinstance(node.op, ast.Not):
result = not self.visit(node=node.operand)
elif isinstance(node.op, ast.Invert):
result = ~self.visit(node=node.operand)
else:
raise NotImplementedError("Unhandled op of {}: {}".format(node, node.op))
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Recursively visit the left and right operand, respectively, and apply the operation on the results.
<END_TASK>
<USER_TASK:>
Description:
def visit_BinOp(self, node: ast.BinOp) -> Any:
"""Recursively visit the left and right operand, respectively, and apply the operation on the results.""" |
# pylint: disable=too-many-branches
left = self.visit(node=node.left)
right = self.visit(node=node.right)
if isinstance(node.op, ast.Add):
result = left + right
elif isinstance(node.op, ast.Sub):
result = left - right
elif isinstance(node.op, ast.Mult):
result = left * right
elif isinstance(node.op, ast.Div):
result = left / right
elif isinstance(node.op, ast.FloorDiv):
result = left // right
elif isinstance(node.op, ast.Mod):
result = left % right
elif isinstance(node.op, ast.Pow):
result = left**right
elif isinstance(node.op, ast.LShift):
result = left << right
elif isinstance(node.op, ast.RShift):
result = left >> right
elif isinstance(node.op, ast.BitOr):
result = left | right
elif isinstance(node.op, ast.BitXor):
result = left ^ right
elif isinstance(node.op, ast.BitAnd):
result = left & right
elif isinstance(node.op, ast.MatMult):
result = left @ right
else:
raise NotImplementedError("Unhandled op of {}: {}".format(node, node.op))
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Recursively visit the operands and apply the operation on them.
<END_TASK>
<USER_TASK:>
Description:
def visit_BoolOp(self, node: ast.BoolOp) -> Any:
"""Recursively visit the operands and apply the operation on them.""" |
values = [self.visit(value_node) for value_node in node.values]
if isinstance(node.op, ast.And):
result = functools.reduce(lambda left, right: left and right, values, True)
elif isinstance(node.op, ast.Or):
result = functools.reduce(lambda left, right: left or right, values, False)
else:
raise NotImplementedError("Unhandled op of {}: {}".format(node, node.op))
self.recomputed_values[node] = result
return result |
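
Note that each reduction above is seeded with the neutral element of its operation (True for and, False for or); seeding or with True would make every disjunction evaluate to True. A quick standalone check:

import functools

values = [False, False]
assert functools.reduce(lambda left, right: left and right, values, True) is False
assert functools.reduce(lambda left, right: left or right, values, False) is False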
<SYSTEM_TASK:>
Recursively visit the comparators and apply the operations on them.
<END_TASK>
<USER_TASK:>
Description:
def visit_Compare(self, node: ast.Compare) -> Any:
"""Recursively visit the comparators and apply the operations on them.""" |
# pylint: disable=too-many-branches
left = self.visit(node=node.left)
comparators = [self.visit(node=comparator) for comparator in node.comparators]
result = None # type: Optional[Any]
for comparator, op in zip(comparators, node.ops):
if isinstance(op, ast.Eq):
comparison = left == comparator
elif isinstance(op, ast.NotEq):
comparison = left != comparator
elif isinstance(op, ast.Lt):
comparison = left < comparator
elif isinstance(op, ast.LtE):
comparison = left <= comparator
elif isinstance(op, ast.Gt):
comparison = left > comparator
elif isinstance(op, ast.GtE):
comparison = left >= comparator
elif isinstance(op, ast.Is):
comparison = left is comparator
elif isinstance(op, ast.IsNot):
comparison = left is not comparator
elif isinstance(op, ast.In):
comparison = left in comparator
elif isinstance(op, ast.NotIn):
comparison = left not in comparator
else:
raise NotImplementedError("Unhandled op of {}: {}".format(node, op))
if result is None:
result = comparison
else:
result = result and comparison
left = comparator
self.recomputed_values[node] = result
return result |
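
A worked example of the chained-comparison loop above: for "1 < x < 10" with x == 5, the partial comparisons 1 < 5 and 5 < 10 are AND-ed together:

left = 1
result = None
for comparator in [5, 10]:
    comparison = left < comparator
    result = comparison if result is None else (result and comparison)
    left = comparator
assert result is True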
<SYSTEM_TASK:>
Visit the function and the arguments and finally make the function call with them.
<END_TASK>
<USER_TASK:>
Description:
def visit_Call(self, node: ast.Call) -> Any:
"""Visit the function and the arguments and finally make the function call with them.""" |
func = self.visit(node=node.func)
args = [] # type: List[Any]
for arg_node in node.args:
if isinstance(arg_node, ast.Starred):
args.extend(self.visit(node=arg_node))
else:
args.append(self.visit(node=arg_node))
kwargs = dict() # type: Dict[str, Any]
for keyword in node.keywords:
if keyword.arg is None:
kw = self.visit(node=keyword.value)
for key, val in kw.items():
kwargs[key] = val
else:
kwargs[keyword.arg] = self.visit(node=keyword.value)
result = func(*args, **kwargs)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit the ``test``, and depending on its outcome, the ``body`` or ``orelse``.
<END_TASK>
<USER_TASK:>
Description:
def visit_IfExp(self, node: ast.IfExp) -> Any:
"""Visit the ``test``, and depending on its outcome, the ``body`` or ``orelse``.""" |
test = self.visit(node=node.test)
if test:
result = self.visit(node=node.body)
else:
result = self.visit(node=node.orelse)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit the node's ``value`` and get the attribute from the result.
<END_TASK>
<USER_TASK:>
Description:
def visit_Attribute(self, node: ast.Attribute) -> Any:
"""Visit the node's ``value`` and get the attribute from the result.""" |
value = self.visit(node=node.value)
if not isinstance(node.ctx, ast.Load):
raise NotImplementedError(
"Can only compute a value of Load on the attribute {}, but got context: {}".format(node.attr, node.ctx))
result = getattr(value, node.attr)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit ``lower``, ``upper`` and ``step`` and recompute the node as a ``slice``.
<END_TASK>
<USER_TASK:>
Description:
def visit_Slice(self, node: ast.Slice) -> slice:
"""Visit ``lower``, ``upper`` and ``step`` and recompute the node as a ``slice``.""" |
lower = None # type: Optional[int]
if node.lower is not None:
lower = self.visit(node=node.lower)
upper = None # type: Optional[int]
if node.upper is not None:
upper = self.visit(node=node.upper)
step = None # type: Optional[int]
if node.step is not None:
step = self.visit(node=node.step)
result = slice(lower, upper, step)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit each dimension of the advanced slicing and assemble the dimensions in a tuple.
<END_TASK>
<USER_TASK:>
Description:
def visit_ExtSlice(self, node: ast.ExtSlice) -> Tuple[Any, ...]:
"""Visit each dimension of the advanced slicing and assemble the dimensions in a tuple.""" |
result = tuple(self.visit(node=dim) for dim in node.dims)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Visit the ``slice`` and a ``value`` and get the element.
<END_TASK>
<USER_TASK:>
Description:
def visit_Subscript(self, node: ast.Subscript) -> Any:
"""Visit the ``slice`` and a ``value`` and get the element.""" |
value = self.visit(node=node.value)
a_slice = self.visit(node=node.slice)
result = value[a_slice]
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Compile the generator or comprehension from the node and execute the compiled code.
<END_TASK>
<USER_TASK:>
Description:
def _execute_comprehension(self, node: Union[ast.ListComp, ast.SetComp, ast.GeneratorExp, ast.DictComp]) -> Any:
"""Compile the generator or comprehension from the node and execute the compiled code.""" |
args = [ast.arg(arg=name) for name in sorted(self._name_to_value.keys())]
func_def_node = ast.FunctionDef(
name="generator_expr",
args=ast.arguments(args=args, kwonlyargs=[], kw_defaults=[], defaults=[]),
decorator_list=[],
body=[ast.Return(node)])
module_node = ast.Module(body=[func_def_node])
ast.fix_missing_locations(module_node)
code = compile(source=module_node, filename='<ast>', mode='exec')
module_locals = {} # type: Dict[str, Any]
module_globals = {} # type: Dict[str, Any]
exec(code, module_globals, module_locals) # pylint: disable=exec-used
generator_expr_func = module_locals["generator_expr"]
return generator_expr_func(**self._name_to_value) |
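
A standalone sketch of the same trick using source text instead of hand-built AST nodes (which sidesteps the version-specific fields of ast.arguments): define a function whose parameters are the free variables, compile it, and call it with the captured values:

name_to_value = {"xs": [1, 2, 3]}
source = "def generator_expr({}):\n    return [x * x for x in xs]".format(
    ", ".join(sorted(name_to_value)))
namespace = {}
exec(compile(source, "<sketch>", "exec"), namespace)  # illustration only
assert namespace["generator_expr"](**name_to_value) == [1, 4, 9]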
<SYSTEM_TASK:>
Compile the generator expression as a function and call it.
<END_TASK>
<USER_TASK:>
Description:
def visit_GeneratorExp(self, node: ast.GeneratorExp) -> Any:
"""Compile the generator expression as a function and call it.""" |
result = self._execute_comprehension(node=node)
for generator in node.generators:
self.visit(generator.iter)
# Do not set the computed value of the node since its representation would be non-informative.
return result |
<SYSTEM_TASK:>
Compile the list comprehension as a function and call it.
<END_TASK>
<USER_TASK:>
Description:
def visit_ListComp(self, node: ast.ListComp) -> Any:
"""Compile the list comprehension as a function and call it.""" |
result = self._execute_comprehension(node=node)
for generator in node.generators:
self.visit(generator.iter)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Compile the set comprehension as a function and call it.
<END_TASK>
<USER_TASK:>
Description:
def visit_SetComp(self, node: ast.SetComp) -> Any:
"""Compile the set comprehension as a function and call it.""" |
result = self._execute_comprehension(node=node)
for generator in node.generators:
self.visit(generator.iter)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Compile the dictionary comprehension as a function and call it.
<END_TASK>
<USER_TASK:>
Description:
def visit_DictComp(self, node: ast.DictComp) -> Any:
"""Compile the dictionary comprehension as a function and call it.""" |
result = self._execute_comprehension(node=node)
for generator in node.generators:
self.visit(generator.iter)
self.recomputed_values[node] = result
return result |
<SYSTEM_TASK:>
Raise an exception that this node is unexpected.
<END_TASK>
<USER_TASK:>
Description:
def visit_Return(self, node: ast.Return) -> Any: # pylint: disable=no-self-use
"""Raise an exception that this node is unexpected.""" |
raise AssertionError("Unexpected return node during the re-computation: {}".format(ast.dump(node))) |
<SYSTEM_TASK:>
Raise an exception that this node has not been handled.
<END_TASK>
<USER_TASK:>
Description:
def generic_visit(self, node: ast.AST) -> None:
"""Raise an exception that this node has not been handled.""" |
raise NotImplementedError("Unhandled recomputation of the node: {} {}".format(type(node), node)) |
<SYSTEM_TASK:>
Tokenize input text to words.
<END_TASK>
<USER_TASK:>
Description:
def tokenize_words(string):
"""
Tokenize input text to words.
:param string: Text to tokenize
:type string: str or unicode
:return: words
:rtype: list of strings
""" |
string = six.text_type(string)
return re.findall(WORD_TOKENIZATION_RULES, string) |
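
A hypothetical usage sketch (assuming the module-level WORD_TOKENIZATION_RULES regex and the six dependency, as in the entry above); words and standalone punctuation typically come back as separate tokens:

tokens = tokenize_words(u"Мама мила раму, чи не так?")
print(tokens)  # e.g. ['Мама', 'мила', 'раму', ',', 'чи', 'не', 'так', '?']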
<SYSTEM_TASK:>
Tokenize input text to paragraphs, sentences and words.
<END_TASK>
<USER_TASK:>
Description:
def tokenize_text(string):
"""
Tokenize input text to paragraphs, sentences and words.
Tokenization into paragraphs is done using a simple newline-splitting algorithm.
For sentences and words, the tokenizers defined above are used.
:param string: Text to tokenize
:type string: str or unicode
:return: text, tokenized into paragraphs, sentences and words
:rtype: list of list of list of words
""" |
string = six.text_type(string)
rez = []
for part in string.split('\n'):
par = []
for sent in tokenize_sents(part):
par.append(tokenize_words(sent))
if par:
rez.append(par)
return rez |
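
A hypothetical usage sketch: paragraphs are split on newlines, then each paragraph into sentences (via tokenize_sents) and each sentence into words:

text = u"Перше речення. Друге речення.\nНовий абзац."
paragraphs = tokenize_text(text)
# paragraphs[0] is expected to hold the two sentences of the first line,
# each as a list of word tokens; paragraphs[1] holds the second line.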
<SYSTEM_TASK:>
Class method that returns a new RangeSet with ranges from provided
<END_TASK>
<USER_TASK:>
Description:
def fromlist(cls, rnglist, autostep=None):
"""Class method that returns a new RangeSet with ranges from provided
list.""" |
inst = RangeSet(autostep=autostep)
inst.updaten(rnglist)
return inst |
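
A hedged usage sketch (ClusterShell-style ranges given as strings):

rs = RangeSet.fromlist(["3", "5-8", "1"])
print(rs)  # "1,3,5-8"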
<SYSTEM_TASK:>
Object-based iterator over contiguous range sets.
<END_TASK>
<USER_TASK:>
Description:
def contiguous(self):
"""Object-based iterator over contiguous range sets.""" |
pad = self.padding or 0
for sli in self._contiguous_slices():
yield RangeSet.fromone(slice(sli.start, sli.stop, sli.step), pad) |
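
A hedged usage sketch: a set made of two gap-free blocks yields one RangeSet per block:

for block in RangeSet("1-3,7-9").contiguous():
    print(block)  # "1-3", then "7-9"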
<SYSTEM_TASK:>
Return the intersection of two RangeSets as a new RangeSet.
<END_TASK>
<USER_TASK:>
Description:
def intersection(self, other):
"""Return the intersection of two RangeSets as a new RangeSet.
(I.e. all elements that are in both sets.)
""" |
# NOTE: This is a workaround.
# Under Python 3, set.intersection returns a plain set instance as the result,
# whereas under Python 2 it returns a ClusterShell.RangeSet.RangeSet instance.
# ORIGINAL CODE: return self._wrap_set_op(set.intersection, other)
copy = self.copy()
copy.intersection_update(other)
return copy |
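
A hedged usage sketch (difference below behaves analogously):

print(RangeSet("1-10").intersection(RangeSet("5-15")))  # "5-10"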
<SYSTEM_TASK:>
Return the difference of two RangeSets as a new RangeSet.
<END_TASK>
<USER_TASK:>
Description:
def difference(self, other):
"""Return the difference of two RangeSets as a new RangeSet.
(I.e. all elements that are in this set and not in the other.)
""" |
# NOTE: This is a workaround.
# Under Python 3, set.difference returns a plain set instance as the result,
# whereas under Python 2 it returns a ClusterShell.RangeSet.RangeSet instance.
# ORIGINAL CODE: return self._wrap_set_op(set.difference, other)
copy = self.copy()
copy.difference_update(other)
return copy |
<SYSTEM_TASK:>
Report whether another set contains this RangeSet.
<END_TASK>
<USER_TASK:>
Description:
def issubset(self, other):
"""Report whether another set contains this RangeSet.""" |
self._binary_sanity_check(other)
return set.issubset(self, other) |
<SYSTEM_TASK:>
Report whether this RangeSet contains another set.
<END_TASK>
<USER_TASK:>
Description:
def issuperset(self, other):
"""Report whether this RangeSet contains another set.""" |
self._binary_sanity_check(other)
return set.issuperset(self, other) |
<SYSTEM_TASK:>
Update a rangeset with the union of itself and several others.
<END_TASK>
<USER_TASK:>
Description:
def updaten(self, rangesets):
"""
Update a rangeset with the union of itself and several others.
""" |
for rng in rangesets:
if isinstance(rng, set):
self.update(rng)
else:
self.update(RangeSet(rng)) |
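
A hedged usage sketch: both plain range strings and existing RangeSet instances are accepted:

rs = RangeSet("1-3")
rs.updaten(["10-12", RangeSet("5")])
print(rs)  # "1-3,5,10-12"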