signature | body | docstring | id
---|---|---|---|
def __init__(self, extension=None): | if extension is None:<EOL><INDENT>extension = defaults.TEMPLATE_EXTENSION<EOL><DEDENT>self.template_extension = extension<EOL> | Construct a template locator.
Arguments:
extension: the template file extension, without the leading dot.
Pass False for no extension (e.g. to use extensionless template
files). Defaults to the package default. | f815:c0:m0 |
def get_object_directory(self, obj): | if not hasattr(obj, '<STR_LIT>'):<EOL><INDENT>return None<EOL><DEDENT>module = sys.modules[obj.__module__]<EOL>if not hasattr(module, '<STR_LIT>'):<EOL><INDENT>return None<EOL><DEDENT>path = module.__file__<EOL>return os.path.dirname(path)<EOL> | Return the directory containing an object's defining class.
Returns None if there is no such directory, for example if the
class was defined in an interactive Python session, or in a
doctest that appears in a text file (rather than a Python file). | f815:c0:m1 |
def make_template_name(self, obj): | template_name = obj.__class__.__name__<EOL>def repl(match):<EOL><INDENT>return '<STR_LIT:_>' + match.group(<NUM_LIT:0>).lower()<EOL><DEDENT>return re.sub('<STR_LIT>', repl, template_name)[<NUM_LIT:1>:]<EOL> | Return the canonical template name for an object instance.
This method converts Python-style class names (PEP 8's recommended
CamelCase, aka CapWords) to lower_case_with_underscores. Here
is an example with code:
>>> class HelloWorld(object):
... pass
>>> hi = HelloWorld()
>>>
>>> locator = Locator()
>>> locator.make_template_name(hi)
'hello_world' | f815:c0:m2 |
def make_file_name(self, template_name, template_extension=None): | file_name = template_name<EOL>if template_extension is None:<EOL><INDENT>template_extension = self.template_extension<EOL><DEDENT>if template_extension is not False:<EOL><INDENT>file_name += os.path.extsep + template_extension<EOL><DEDENT>return file_name<EOL> | Generate and return the file name for the given template name.
Arguments:
template_extension: defaults to the instance's extension. | f815:c0:m3 |
def _find_path(self, search_dirs, file_name): | for dir_path in search_dirs:<EOL><INDENT>file_path = os.path.join(dir_path, file_name)<EOL>if os.path.exists(file_path):<EOL><INDENT>return file_path<EOL><DEDENT><DEDENT>return None<EOL> | Search for the given file, and return the path.
Returns None if the file is not found. | f815:c0:m4 |
def _find_path_required(self, search_dirs, file_name): | path = self._find_path(search_dirs, file_name)<EOL>if path is None:<EOL><INDENT>raise TemplateNotFoundError('<STR_LIT>' %<EOL>(repr(file_name), repr(search_dirs)))<EOL><DEDENT>return path<EOL> | Return the path to a template with the given file name. | f815:c0:m5 |
def find_file(self, file_name, search_dirs): | return self._find_path_required(search_dirs, file_name)<EOL> | Return the path to a template with the given file name.
Arguments:
file_name: the file name of the template.
search_dirs: the list of directories in which to search. | f815:c0:m6 |
def find_name(self, template_name, search_dirs): | file_name = self.make_file_name(template_name)<EOL>return self._find_path_required(search_dirs, file_name)<EOL> | Return the path to a template with the given name.
Arguments:
template_name: the name of the template.
search_dirs: the list of directories in which to search. | f815:c0:m7 |
def find_object(self, obj, search_dirs, file_name=None): | if file_name is None:<EOL><INDENT>template_name = self.make_template_name(obj)<EOL>file_name = self.make_file_name(template_name)<EOL><DEDENT>dir_path = self.get_object_directory(obj)<EOL>if dir_path is not None:<EOL><INDENT>search_dirs = [dir_path] + search_dirs<EOL><DEDENT>path = self._find_path_required(search_dirs, file_name)<EOL>return path<EOL> | Return the path to a template associated with the given object. | f815:c0:m8 |
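Taken together, the `Locator` rows above compose into a small lookup pipeline. A minimal sketch follows; the import path and the `./templates` search directory are assumptions, and `find_object` raises `TemplateNotFoundError` when no matching file exists:

```python
# Hypothetical usage of the Locator API documented above.
from pystache.locator import Locator  # assumed import path

class HelloWorld(object):
    pass

locator = Locator(extension='mustache')
name = locator.make_template_name(HelloWorld())   # 'hello_world'
file_name = locator.make_file_name(name)          # 'hello_world.mustache'
# Searches the object's defining directory first, then search_dirs:
path = locator.find_object(HelloWorld(), search_dirs=['./templates'])
```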
def read(path): | <EOL>f = open(path, '<STR_LIT:rb>')<EOL>try:<EOL><INDENT>b = f.read()<EOL><DEDENT>finally:<EOL><INDENT>f.close()<EOL><DEDENT>return b.decode(FILE_ENCODING)<EOL> | Read and return the contents of a text file as a unicode string. | f817:m0 |
def write(u, path): | print("<STR_LIT>" % path)<EOL>f = open(path, "<STR_LIT:wb>")<EOL>try:<EOL><INDENT>b = u.encode(FILE_ENCODING)<EOL>f.write(b)<EOL><DEDENT>finally:<EOL><INDENT>f.close()<EOL><DEDENT> | Write a unicode string to a file (as utf-8). | f817:m1 |
def make_temp_path(path, new_ext=None): | root, ext = os.path.splitext(path)<EOL>if new_ext is None:<EOL><INDENT>new_ext = ext<EOL><DEDENT>temp_path = root + TEMP_EXTENSION + new_ext<EOL>return temp_path<EOL> | Arguments:
new_ext: the new file extension, including the leading dot.
Defaults to preserving the existing file extension. | f817:m2 |
def strip_html_comments(text): | lines = text.splitlines(True) <EOL>new_lines = filter(lambda line: not line.startswith("<STR_LIT>"), lines)<EOL>return "<STR_LIT>".join(new_lines)<EOL> | Strip HTML comments from a unicode string. | f817:m3 |
def convert_md_to_rst(md_path, rst_temp_path): | <EOL>command = "<STR_LIT>" % (rst_temp_path, md_path)<EOL>print("<STR_LIT>" % (md_path, rst_temp_path,<EOL>command))<EOL>if os.path.exists(rst_temp_path):<EOL><INDENT>os.remove(rst_temp_path)<EOL><DEDENT>os.system(command)<EOL>if not os.path.exists(rst_temp_path):<EOL><INDENT>s = ("<STR_LIT>"<EOL>"<STR_LIT>" % (command,<EOL>__file__))<EOL>sys.exit(s)<EOL><DEDENT>return read(rst_temp_path)<EOL> | Convert the contents of a file from Markdown to reStructuredText.
Returns the converted text as a Unicode string.
Arguments:
md_path: a path to a UTF-8 encoded Markdown file to convert.
rst_temp_path: a temporary path to which to write the converted contents. | f817:m4 |
def make_long_description(): | readme_path = README_PATH<EOL>readme_md = strip_html_comments(read(readme_path))<EOL>history_md = strip_html_comments(read(HISTORY_PATH))<EOL>license_md = | Generate the reST long_description for setup() from source files.
Returns the generated long_description as a unicode string. | f817:m5 |
def prep(): | long_description = make_long_description()<EOL>write(long_description, RST_DESCRIPTION_PATH)<EOL> | Update the reST long_description file. | f817:m6 |
def publish(): | long_description = make_long_description()<EOL>if long_description != read(RST_DESCRIPTION_PATH):<EOL><INDENT>print( | Publish this package to PyPI (aka "the Cheeseshop"). | f817:m7 |
def get_extra_args(): | extra = {}<EOL>if py_version >= (<NUM_LIT:3>, ):<EOL><INDENT>extra['<STR_LIT>'] = True<EOL><DEDENT>return extra<EOL> | Return a dictionary of extra args to pass to setup(). | f817:m8 |
def wait(animation='<STR_LIT>', text='<STR_LIT>', speed=<NUM_LIT>): | def decorator(func):<EOL><INDENT>func.animation = animation<EOL>func.speed = speed<EOL>func.text = text<EOL>@wraps(func)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>animation = func.animation<EOL>text = func.text<EOL>if not isinstance(animation, (list, tuple)) and not hasattr(animations, animation):<EOL><INDENT>text = animation if text == '<STR_LIT>' else text<EOL>animation = '<STR_LIT>'<EOL><DEDENT>wait = Wait(animation=animation, text=text, speed=func.speed)<EOL>wait.start()<EOL>try:<EOL><INDENT>ret = func(*args, **kwargs)<EOL><DEDENT>finally:<EOL><INDENT>wait.stop()<EOL><DEDENT>sys.stdout.write('<STR_LIT:\n>')<EOL>return ret<EOL><DEDENT>return wrapper<EOL><DEDENT>return decorator<EOL> | Decorator for adding wait animation to long running
functions.
Args:
animation (str, tuple): String reference to animation or tuple
with custom animation.
speed (float): Number of seconds each cycle of animation.
Examples:
>>> @animation.wait('bar')
>>> def long_running_function():
>>> ... 5 seconds later ...
>>> return | f822:m1 |
def simple_wait(func): | @wraps(func)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>wait = Wait()<EOL>wait.start()<EOL>try:<EOL><INDENT>ret = func(*args, **kwargs)<EOL><DEDENT>finally:<EOL><INDENT>wait.stop()<EOL><DEDENT>sys.stdout.write('<STR_LIT:\n>')<EOL>return ret<EOL><DEDENT>return wrapper<EOL> | Decorator for adding simple text wait animation to
long running functions.
Examples:
>>> @animation.simple_wait
>>> def long_running_function():
>>> ... 5 seconds later ...
>>> return | f822:m2 |
def start(self): | self.thread = threading.Thread(target=self._animate)<EOL>self.thread.start()<EOL>return<EOL> | Start animation thread. | f822:c0:m2 |
def stop(self): | time.sleep(self.speed)<EOL>self._count = -<NUM_LIT><EOL>sys.stdout.write(self.reverser + '<STR_LIT>')<EOL>sys.stdout.flush()<EOL>return<EOL> | Stop animation thread. | f822:c0:m3 |
def expand_collection_in_dict(d, key, new_items, no_duplicates=True): | if key in d:<EOL><INDENT>if no_duplicates:<EOL><INDENT>new_items = [x for x in new_items if x not in d[key]]<EOL><DEDENT>if isinstance(d[key], set):<EOL><INDENT>d[key].update(new_items)<EOL><DEDENT>elif isinstance(d[key], list):<EOL><INDENT>d[key].extend(new_items)<EOL><DEDENT>else:<EOL><INDENT>d[key] = d[key] + new_items<EOL><DEDENT><DEDENT>else:<EOL><INDENT>d[key] = new_items<EOL><DEDENT> | Parameters
d: dict
dict in which a key will be inserted/expanded
key: hashable
key in d
new_items: iterable
d[key] will be extended with items in new_items
no_duplicates: bool
avoid inserting duplicates in d[key] (default: True) | f825:m0 |
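A short usage sketch of the semantics documented above (note the body, as corrected here, updates sets and lists eagerly; the original lazy `map`/`filter` calls would have been no-ops under Python 3); all keys and values are illustrative:

```python
d = {'flags': ['-O2'], 'defines': {'NDEBUG'}}
expand_collection_in_dict(d, 'flags', ['-fPIC', '-O2'])
assert d['flags'] == ['-O2', '-fPIC']        # duplicate '-O2' skipped
expand_collection_in_dict(d, 'defines', ['FAST'])
assert d['defines'] == {'NDEBUG', 'FAST'}    # sets are updated in place
expand_collection_in_dict(d, 'libs', ['m'])  # missing key: plain insert
assert d['libs'] == ['m']
```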
def copy(src, dst, only_update=False, copystat=True, cwd=None,<EOL>dest_is_dir=False, create_dest_dirs=False, logger=None): | <EOL>if cwd:<EOL><INDENT>if not os.path.isabs(src):<EOL><INDENT>src = os.path.join(cwd, src)<EOL><DEDENT>if not os.path.isabs(dst):<EOL><INDENT>dst = os.path.join(cwd, dst)<EOL><DEDENT><DEDENT>if not os.path.exists(src):<EOL><INDENT>msg = "<STR_LIT>".format(src)<EOL>raise FileNotFoundError(msg)<EOL><DEDENT>if dest_is_dir:<EOL><INDENT>if not dst[-<NUM_LIT:1>] == '<STR_LIT:/>':<EOL><INDENT>dst = dst+'<STR_LIT:/>'<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if os.path.exists(dst) and os.path.isdir(dst):<EOL><INDENT>dest_is_dir = True<EOL><DEDENT><DEDENT>if dest_is_dir:<EOL><INDENT>dest_dir = dst<EOL>dest_fname = os.path.basename(src)<EOL>dst = os.path.join(dest_dir, dest_fname)<EOL><DEDENT>else:<EOL><INDENT>dest_dir = os.path.dirname(dst)<EOL>dest_fname = os.path.basename(dst)<EOL><DEDENT>if not os.path.exists(dest_dir):<EOL><INDENT>if create_dest_dirs:<EOL><INDENT>make_dirs(dest_dir, logger=logger)<EOL><DEDENT>else:<EOL><INDENT>msg = "<STR_LIT>"<EOL>raise FileNotFoundError(msg)<EOL><DEDENT><DEDENT>if only_update:<EOL><INDENT>if not missing_or_other_newer(dst, src):<EOL><INDENT>if logger:<EOL><INDENT>logger.debug(<EOL>"<STR_LIT>".format(<EOL>src, dst))<EOL><DEDENT>return<EOL><DEDENT><DEDENT>if os.path.islink(dst):<EOL><INDENT>if os.path.abspath(os.path.realpath(dst)) == os.path.abspath(dst):<EOL><INDENT>pass <EOL><DEDENT><DEDENT>else:<EOL><INDENT>if logger:<EOL><INDENT>logger.debug("<STR_LIT>".format(src, dst))<EOL><DEDENT>shutil.copy(src, dst)<EOL>if copystat:<EOL><INDENT>shutil.copystat(src, dst)<EOL><DEDENT><DEDENT>return dst<EOL> | Augmented shutil.copy with extra options and slightly
modified behaviour
Parameters
==========
src: string
path to source file
dst: string
path to destination
only_update: bool
only copy if source is newer than destination
(returns None if the destination was already up to date). default: False
copystat: bool
See shutil.copystat. default: True
cwd: string
Path to working directory (root of relative paths)
dest_is_dir: bool
ensures that dst is treated as a directory. default: False
create_dest_dirs: bool
creates directories if needed.
logger: logging.Logger
debug level info emitted. Passed onto make_dirs.
Returns
=======
Path to the copied file. | f825:m4 |
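A minimal sketch of the documented call, with placeholder paths:

```python
# Copy src.c into build/ (created on demand), relative to a project
# root, skipping the copy when the destination is already up to date.
dst = copy('src.c', 'build/', only_update=True, dest_is_dir=True,
           create_dest_dirs=True, cwd='/tmp/project')
# dst is the destination path, or None when only_update skipped the copy.
```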
def md5_of_file(path, nblocks=<NUM_LIT>): | md = md5()<EOL>with open(path, '<STR_LIT:rb>') as f:<EOL><INDENT>for chunk in iter(lambda: f.read(nblocks*md.block_size), b'<STR_LIT>'):<EOL><INDENT>md.update(chunk)<EOL><DEDENT><DEDENT>return md<EOL> | Computes the md5 hash of a file.
Parameters
==========
path: string
path to file to compute hash of
Returns
=======
hashlib md5 hash object. Use .digest() or .hexdigest()
on returned object to get binary or hex encoded string. | f825:m5 |
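A usage sketch; the path is a placeholder:

```python
# Stream a file through MD5 without loading it into memory at once;
# the file is read in chunks of nblocks*block_size bytes.
md = md5_of_file('archive.tar.gz')
print(md.hexdigest())  # hex-encoded digest string
```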
def missing_or_other_newer(path, other_path, cwd=None): | cwd = cwd or '<STR_LIT:.>'<EOL>path = get_abspath(path, cwd=cwd)<EOL>other_path = get_abspath(other_path, cwd=cwd)<EOL>if not os.path.exists(path):<EOL><INDENT>return True<EOL><DEDENT>if os.path.getmtime(other_path) - <NUM_LIT> >= os.path.getmtime(path):<EOL><INDENT>return True<EOL><DEDENT>return False<EOL> | Investigate if path is non-existent or older than provided reference
path.
Parameters
==========
path: string
path to path which might be missing or too old
other_path: string
reference path
cwd: string
working directory (root of relative paths)
Returns
=======
True if path is older or missing. | f825:m7 |
def import_module_from_file(filename, only_if_newer_than=None): | import imp<EOL>path, name = os.path.split(filename)<EOL>name, ext = os.path.splitext(name)<EOL>name = name.split('<STR_LIT:.>')[<NUM_LIT:0>]<EOL>fobj, filename, data = imp.find_module(name, [path])<EOL>if only_if_newer_than:<EOL><INDENT>for dep in only_if_newer_than:<EOL><INDENT>if os.path.getmtime(filename) < os.path.getmtime(dep):<EOL><INDENT>raise ImportError("<STR_LIT>".format(dep, filename))<EOL><DEDENT><DEDENT><DEDENT>mod = imp.load_module(name, fobj, filename, data)<EOL>return mod<EOL> | Imports (cython generated) shared object file (.so)
Provide a list of paths in `only_if_newer_than` to check
timestamps of dependencies; the import raises an ImportError
if any dependency is newer than the shared object.
Word of warning: caching by Python or by the OS (unclear to the author)
makes reimporting the same path of an .so file unreliable: neither a
new timestamp nor a new checksum is detected, and the old module is
used instead.
Use unique names for this reason.
Parameters
==========
filename: string
path to shared object
only_if_newer_than: iterable of strings
paths to dependencies of the shared object
Raises
======
ImportError if any of the files specified in only_if_newer_than are newer
than the file given by filename. | f825:m9 |
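A hedged sketch of the documented dependency check; the paths are placeholders:

```python
# Import a built extension, but treat a stale build as an ImportError
# so the caller can rebuild before importing again.
try:
    mod = import_module_from_file('build/mymod.so',
                                  only_if_newer_than=['mymod.pyx'])
except ImportError:
    mod = None  # a dependency was newer: rebuild, then retry the import
```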
def find_binary_of_command(candidates): | from distutils.spawn import find_executable<EOL>for c in candidates:<EOL><INDENT>binary_path = find_executable(c)<EOL>if c and binary_path:<EOL><INDENT>return c, binary_path<EOL><DEDENT><DEDENT>raise RuntimeError('<STR_LIT>'.format(<EOL>candidates))<EOL> | Calls `find_executable` from distutils for
provided candidates and returns the first hit.
If no candidate matches, a RuntimeError is raised | f825:m10 |
def pyx_is_cplus(path): | for line in open(path, '<STR_LIT>'):<EOL><INDENT>if line.startswith('<STR_LIT:#>') and '<STR_LIT:=>' in line:<EOL><INDENT>splitted = line.split('<STR_LIT:=>')<EOL>if len(splitted) != <NUM_LIT:2>:<EOL><INDENT>continue<EOL><DEDENT>lhs, rhs = splitted<EOL>if lhs.strip().split()[-<NUM_LIT:1>].lower() == '<STR_LIT>' and rhs.strip().split()[<NUM_LIT:0>].lower() == '<STR_LIT>':<EOL><INDENT>return True<EOL><DEDENT><DEDENT><DEDENT>return False<EOL> | Inspect a Cython source file (.pyx) and look for a comment line like:
# distutils: language = c++
Returns True if such a line is present in the file, else False. | f825:m11 |
def uniquify(l): | result = []<EOL>for x in l:<EOL><INDENT>if x not in result:<EOL><INDENT>result.append(x)<EOL><DEDENT><DEDENT>return result<EOL> | Uniquify a list (skip duplicate items). | f825:m12 |
@classmethod<EOL><INDENT>def _get_metadata_key(cls, kw):<DEDENT> | return cls.__name__+'<STR_LIT:_>'+kw<EOL> | kw could be e.g. 'compiler' | f825:c2:m0 |
@classmethod<EOL><INDENT>def get_from_metadata_file(cls, dirpath, key):<DEDENT> | fullpath = os.path.join(dirpath, cls.metadata_filename)<EOL>if os.path.exists(fullpath):<EOL><INDENT>d = pickle.load(open(fullpath, '<STR_LIT:rb>'))<EOL>return d[key]<EOL><DEDENT>else:<EOL><INDENT>raise FileNotFoundError(<EOL>"<STR_LIT>".format(fullpath))<EOL><DEDENT> | Get value of key in metadata file dict. | f825:c2:m1 |
@classmethod<EOL><INDENT>def save_to_metadata_file(cls, dirpath, key, value):<DEDENT> | fullpath = os.path.join(dirpath, cls.metadata_filename)<EOL>if os.path.exists(fullpath):<EOL><INDENT>d = pickle.load(open(fullpath, '<STR_LIT:rb>'))<EOL>d.update({key: value})<EOL>pickle.dump(d, open(fullpath, '<STR_LIT:wb>'))<EOL><DEDENT>else:<EOL><INDENT>pickle.dump({key: value}, open(fullpath, '<STR_LIT:wb>'))<EOL><DEDENT> | Store `key: value` in metadata file dict. | f825:c2:m2 |
def compile_sources(files, CompilerRunner_=None,<EOL>destdir=None, cwd=None,<EOL>keep_dir_struct=False,<EOL>per_file_kwargs=None,<EOL>**kwargs): | _per_file_kwargs = {}<EOL>if per_file_kwargs is not None:<EOL><INDENT>for k, v in per_file_kwargs.items():<EOL><INDENT>if isinstance(k, Glob):<EOL><INDENT>for path in glob.glob(k.pathname):<EOL><INDENT>_per_file_kwargs[path] = v<EOL><DEDENT><DEDENT>elif isinstance(k, ArbitraryDepthGlob):<EOL><INDENT>for path in glob_at_depth(k.filename, cwd):<EOL><INDENT>_per_file_kwargs[path] = v<EOL><DEDENT><DEDENT>else:<EOL><INDENT>_per_file_kwargs[k] = v<EOL><DEDENT><DEDENT><DEDENT>destdir = destdir or '<STR_LIT:.>'<EOL>if not os.path.isdir(destdir):<EOL><INDENT>if os.path.exists(destdir):<EOL><INDENT>raise IOError("<STR_LIT>".format(destdir))<EOL><DEDENT>else:<EOL><INDENT>make_dirs(destdir)<EOL><DEDENT><DEDENT>if cwd is None:<EOL><INDENT>cwd = '<STR_LIT:.>'<EOL>for f in files:<EOL><INDENT>copy(f, destdir, only_update=True, dest_is_dir=True)<EOL><DEDENT><DEDENT>dstpaths = []<EOL>for f in files:<EOL><INDENT>if keep_dir_struct:<EOL><INDENT>name, ext = os.path.splitext(f)<EOL><DEDENT>else:<EOL><INDENT>name, ext = os.path.splitext(os.path.basename(f))<EOL><DEDENT>file_kwargs = kwargs.copy()<EOL>file_kwargs.update(_per_file_kwargs.get(f, {}))<EOL>dstpaths.append(src2obj(<EOL>f, CompilerRunner_, cwd=cwd,<EOL>**file_kwargs<EOL>))<EOL><DEDENT>return dstpaths<EOL> | Compile source code files to object files.
Parameters
----------
files: iterable of path strings
source files; if cwd is given, the paths are taken as relative.
CompilerRunner_: CompilerRunner instance (optional)
could be e.g. pycompilation.FortranCompilerRunner
Will be inferred from filename extensions if missing.
destdir: path string
output directory; if cwd is given, the path is taken as relative
cwd: path string
working directory. Specify to have compiler run in other directory.
also used as root of relative paths.
keep_dir_struct: bool
Reproduce directory structure in `destdir`. default: False
per_file_kwargs: dict
dict mapping instances in `files` to keyword arguments
**kwargs: dict
default keyword arguments to pass to CompilerRunner_ | f827:m1 |
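A sketch assuming the `Glob` helper referenced in the body above; file names and flags are illustrative:

```python
# Compile C and Fortran sources into ./build, giving every .c file its
# own language standard via per_file_kwargs.
objs = compile_sources(
    ['a.c', 'b.c', 'solver.f90'],
    destdir='build',
    per_file_kwargs={Glob('*.c'): {'std': 'c99'}},
)
# objs is a list of object-file paths, ready to be passed to link().
```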
def link(obj_files, out_file=None, shared=False, CompilerRunner_=None,<EOL>cwd=None, cplus=False, fort=False, **kwargs): | if out_file is None:<EOL><INDENT>out_file, ext = os.path.splitext(os.path.basename(obj_files[-<NUM_LIT:1>]))<EOL>if shared:<EOL><INDENT>out_file += sharedext<EOL><DEDENT><DEDENT>if not CompilerRunner_:<EOL><INDENT>if fort:<EOL><INDENT>CompilerRunner_, extra_kwargs, vendor = get_mixed_fort_c_linker(<EOL>vendor=kwargs.get('<STR_LIT>', None),<EOL>metadir=kwargs.get('<STR_LIT>', None),<EOL>cplus=cplus,<EOL>cwd=cwd,<EOL>)<EOL>for k, v in extra_kwargs.items():<EOL><INDENT>expand_collection_in_dict(kwargs, k, v)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if cplus:<EOL><INDENT>CompilerRunner_ = CppCompilerRunner<EOL><DEDENT>else:<EOL><INDENT>CompilerRunner_ = CCompilerRunner<EOL><DEDENT><DEDENT><DEDENT>flags = kwargs.pop('<STR_LIT>', [])<EOL>if shared:<EOL><INDENT>if '<STR_LIT>' not in flags:<EOL><INDENT>flags.append('<STR_LIT>')<EOL><DEDENT>dl_flag = '<STR_LIT>'<EOL>if sys.platform == '<STR_LIT>' and dl_flag not in flags:<EOL><INDENT>flags.append(dl_flag)<EOL><DEDENT><DEDENT>run_linker = kwargs.pop('<STR_LIT>', True)<EOL>if not run_linker:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>out_file = get_abspath(out_file, cwd=cwd)<EOL>runner = CompilerRunner_(<EOL>obj_files, out_file, flags,<EOL>cwd=cwd,<EOL>**kwargs)<EOL>runner.run()<EOL>return out_file<EOL> | Link object files.
Parameters
----------
obj_files: iterable of path strings
out_file: path string (optional)
path to executable/shared library, if missing
it will be deduced from the last item in obj_files.
shared: bool
Generate a shared library? default: False
CompilerRunner_: pycompilation.CompilerRunner subclass (optional)
If not given the `cplus` and `fort` flags will be inspected
(fallback is the C compiler)
cwd: path string
root of relative paths and working directory for compiler
cplus: bool
C++ objects? default: False
fort: bool
Fortran objects? default: False
**kwargs: dict
keyword arguments passed onto CompilerRunner_
Returns
-------
The absolute path to the generated shared object / executable | f827:m2 |
def link_py_so(obj_files, so_file=None, cwd=None, libraries=None,<EOL>cplus=False, fort=False, **kwargs): | libraries = libraries or []<EOL>include_dirs = kwargs.pop('<STR_LIT>', [])<EOL>library_dirs = kwargs.pop('<STR_LIT>', [])<EOL>if sys.platform == "<STR_LIT:win32>":<EOL><INDENT>warnings.warn("<STR_LIT>")<EOL><DEDENT>elif sys.platform == '<STR_LIT>':<EOL><INDENT>pass<EOL><DEDENT>elif sys.platform[:<NUM_LIT:3>] == '<STR_LIT>':<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>pylib = os.path.join(get_config_var('<STR_LIT>'), get_config_var('<STR_LIT>'))<EOL>if os.path.exists(pylib):<EOL><INDENT>libraries.append(pylib)<EOL><DEDENT>else:<EOL><INDENT>if get_config_var('<STR_LIT>'):<EOL><INDENT>ABIFLAGS = get_config_var('<STR_LIT>')<EOL>pythonlib = '<STR_LIT>'.format(<EOL>sys.hexversion >> <NUM_LIT>, (sys.hexversion >> <NUM_LIT:16>) & <NUM_LIT>,<EOL>ABIFLAGS or '<STR_LIT>')<EOL>libraries += [pythonlib]<EOL><DEDENT>else:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>flags = kwargs.pop('<STR_LIT>', [])<EOL>needed_flags = ('<STR_LIT>',)<EOL>for flag in needed_flags:<EOL><INDENT>if flag not in flags:<EOL><INDENT>flags.append(flag)<EOL><DEDENT><DEDENT>return link(obj_files, shared=True, flags=flags, cwd=cwd,<EOL>cplus=cplus, fort=fort, include_dirs=include_dirs,<EOL>libraries=libraries, library_dirs=library_dirs, **kwargs)<EOL> | Link python extension module (shared object) for importing
Parameters
----------
obj_files: iterable of path strings
object files to be linked
so_file: path string
Name (path) of shared object file to create. If
not specified it will have the basname of the last object
file in `obj_files` but with the extension '.so' (Unix) or
'.dll' (Windows).
cwd: path string
root of relative paths and working directory of linker.
libraries: iterable of strings
libraries to link against, e.g. ['m']
cplus: bool
Any C++ objects? default: False
fort: bool
Any Fortran objects? default: False
**kwargs: dict
keyword arguments passed onto `link(...)`
Returns
-------
Absolute path to the generated shared object | f827:m3 |
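A minimal sketch combining this function with `src2obj` (documented below); the source file name is a placeholder:

```python
# Build one C source into an importable Python extension module.
obj = src2obj('spam.c', inc_py=True)   # add Python headers to includes
so_path = link_py_so([obj])            # -> absolute path to spam.so
```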
def simple_cythonize(src, destdir=None, cwd=None, logger=None,<EOL>full_module_name=None, only_update=False,<EOL>**cy_kwargs): | from Cython.Compiler.Main import (<EOL>default_options, CompilationOptions<EOL>)<EOL>from Cython.Compiler.Main import compile as cy_compile<EOL>assert src.lower().endswith('<STR_LIT>') or src.lower().endswith('<STR_LIT>')<EOL>cwd = cwd or '<STR_LIT:.>'<EOL>destdir = destdir or '<STR_LIT:.>'<EOL>ext = '<STR_LIT>' if cy_kwargs.get('<STR_LIT>', False) else '<STR_LIT>'<EOL>c_name = os.path.splitext(os.path.basename(src))[<NUM_LIT:0>] + ext<EOL>dstfile = os.path.join(destdir, c_name)<EOL>if only_update:<EOL><INDENT>if not missing_or_other_newer(dstfile, src, cwd=cwd):<EOL><INDENT>msg = '<STR_LIT>'.format(<EOL>dstfile, src)<EOL>if logger:<EOL><INDENT>logger.info(msg)<EOL><DEDENT>else:<EOL><INDENT>print(msg)<EOL><DEDENT>return dstfile<EOL><DEDENT><DEDENT>if cwd:<EOL><INDENT>ori_dir = os.getcwd()<EOL><DEDENT>else:<EOL><INDENT>ori_dir = '<STR_LIT:.>'<EOL><DEDENT>os.chdir(cwd)<EOL>try:<EOL><INDENT>cy_options = CompilationOptions(default_options)<EOL>cy_options.__dict__.update(cy_kwargs)<EOL>if logger:<EOL><INDENT>logger.info("<STR_LIT>".format(<EOL>src, dstfile))<EOL><DEDENT>cy_result = cy_compile([src], cy_options, full_module_name=full_module_name)<EOL>if cy_result.num_errors > <NUM_LIT:0>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if os.path.abspath(os.path.dirname(<EOL>src)) != os.path.abspath(destdir):<EOL><INDENT>if os.path.exists(dstfile):<EOL><INDENT>os.unlink(dstfile)<EOL><DEDENT>shutil.move(os.path.join(os.path.dirname(src), c_name),<EOL>destdir)<EOL><DEDENT><DEDENT>finally:<EOL><INDENT>os.chdir(ori_dir)<EOL><DEDENT>return dstfile<EOL> | Generates a C file from a Cython source file.
Parameters
----------
src: path string
path to Cython source
destdir: path string (optional)
Path to output directory (default: '.')
cwd: path string (optional)
Root of relative paths (default: '.')
logger: logging.Logger
info level used.
full_module_name: string
passed to cy_compile (default: None)
only_update: bool
Only cythonize if source is newer. default: False
**cy_kwargs:
second argument passed to cy_compile.
Generates a .cpp file if cplus=True in cy_kwargs, else a .c file. | f827:m4 |
def src2obj(srcpath, CompilerRunner_=None, objpath=None,<EOL>only_update=False, cwd=None, out_ext=None, inc_py=False,<EOL>**kwargs): | name, ext = os.path.splitext(os.path.basename(srcpath))<EOL>if objpath is None:<EOL><INDENT>if os.path.isabs(srcpath):<EOL><INDENT>objpath = '<STR_LIT:.>'<EOL><DEDENT>else:<EOL><INDENT>objpath = os.path.dirname(srcpath)<EOL>objpath = objpath or '<STR_LIT:.>' <EOL><DEDENT><DEDENT>out_ext = out_ext or objext<EOL>if os.path.isdir(objpath):<EOL><INDENT>objpath = os.path.join(objpath, name+out_ext)<EOL><DEDENT>include_dirs = kwargs.pop('<STR_LIT>', [])<EOL>if inc_py:<EOL><INDENT>from distutils.sysconfig import get_python_inc<EOL>py_inc_dir = get_python_inc()<EOL>if py_inc_dir not in include_dirs:<EOL><INDENT>include_dirs.append(py_inc_dir)<EOL><DEDENT><DEDENT>if ext.lower() == '<STR_LIT>':<EOL><INDENT>return pyx2obj(srcpath, objpath=objpath,<EOL>include_dirs=include_dirs, cwd=cwd,<EOL>only_update=only_update, **kwargs)<EOL><DEDENT>if CompilerRunner_ is None:<EOL><INDENT>CompilerRunner_, std = extension_mapping[ext.lower()]<EOL>if '<STR_LIT>' not in kwargs:<EOL><INDENT>kwargs['<STR_LIT>'] = std<EOL><DEDENT><DEDENT>run_linker = kwargs.pop('<STR_LIT>', False)<EOL>if run_linker:<EOL><INDENT>raise CompilationError("<STR_LIT>")<EOL><DEDENT>if only_update:<EOL><INDENT>if not missing_or_other_newer(objpath, srcpath, cwd=cwd):<EOL><INDENT>msg = "<STR_LIT>".format(objpath)<EOL>if kwargs.get('<STR_LIT>', None):<EOL><INDENT>kwargs['<STR_LIT>'].info(msg)<EOL><DEDENT>else:<EOL><INDENT>print(msg)<EOL><DEDENT>return objpath<EOL><DEDENT><DEDENT>runner = CompilerRunner_(<EOL>[srcpath], objpath, include_dirs=include_dirs,<EOL>run_linker=run_linker, cwd=cwd, **kwargs)<EOL>runner.run()<EOL>return objpath<EOL> | Compiles a source code file to an object file.
Files ending with '.pyx' are assumed to be Cython files and
are dispatched to pyx2obj.
Parameters
----------
srcpath: path string
path to source file
CompilerRunner_: pycompilation.CompilerRunner subclass (optional)
Default: deduced from extension of srcpath
objpath: path string (optional)
path to generated object. default: deduced from srcpath
only_update: bool
only compile if source is newer than objpath. default: False
cwd: path string (optional)
working directory and root of relative paths. default: current dir.
out_ext: string
set when objpath is a dir and you want to override defaults
('.o'/'.obj' for Unix/Windows).
inc_py: bool
add Python include path to include_dirs. default: False
**kwargs: dict
keyword arguments passed onto CompilerRunner_ or pyx2obj | f827:m5 |
def pyx2obj(pyxpath, objpath=None, interm_c_dir=None, cwd=None,<EOL>logger=None, full_module_name=None, only_update=False,<EOL>metadir=None, include_numpy=False, include_dirs=None,<EOL>cy_kwargs=None, gdb=False, cplus=None, **kwargs): | assert pyxpath.endswith('<STR_LIT>')<EOL>cwd = cwd or '<STR_LIT:.>'<EOL>objpath = objpath or '<STR_LIT:.>'<EOL>interm_c_dir = interm_c_dir or os.path.dirname(objpath)<EOL>abs_objpath = get_abspath(objpath, cwd=cwd)<EOL>if os.path.isdir(abs_objpath):<EOL><INDENT>pyx_fname = os.path.basename(pyxpath)<EOL>name, ext = os.path.splitext(pyx_fname)<EOL>objpath = os.path.join(objpath, name+objext)<EOL><DEDENT>cy_kwargs = cy_kwargs or {}<EOL>cy_kwargs['<STR_LIT>'] = cwd<EOL>if cplus is None:<EOL><INDENT>cplus = pyx_is_cplus(pyxpath)<EOL><DEDENT>cy_kwargs['<STR_LIT>'] = cplus<EOL>if gdb:<EOL><INDENT>cy_kwargs['<STR_LIT>'] = True<EOL><DEDENT>if include_dirs:<EOL><INDENT>cy_kwargs['<STR_LIT>'] = include_dirs<EOL><DEDENT>interm_c_file = simple_cythonize(<EOL>pyxpath, destdir=interm_c_dir,<EOL>cwd=cwd, logger=logger,<EOL>full_module_name=full_module_name,<EOL>only_update=only_update, **cy_kwargs)<EOL>include_dirs = include_dirs or []<EOL>if include_numpy:<EOL><INDENT>import numpy<EOL>numpy_inc_dir = numpy.get_include()<EOL>if numpy_inc_dir not in include_dirs:<EOL><INDENT>include_dirs.append(numpy_inc_dir)<EOL><DEDENT><DEDENT>flags = kwargs.pop('<STR_LIT>', [])<EOL>needed_flags = ('<STR_LIT>', '<STR_LIT>')<EOL>if not cplus:<EOL><INDENT>needed_flags += ('<STR_LIT>',) <EOL><DEDENT>for flag in needed_flags:<EOL><INDENT>if flag not in flags:<EOL><INDENT>flags.append(flag)<EOL><DEDENT><DEDENT>options = kwargs.pop('<STR_LIT>', [])<EOL>if kwargs.pop('<STR_LIT>', False):<EOL><INDENT>raise CompilationError("<STR_LIT>")<EOL><DEDENT>if '<STR_LIT>' not in options:<EOL><INDENT>options.append('<STR_LIT>')<EOL><DEDENT>if '<STR_LIT>' not in options:<EOL><INDENT>options.append('<STR_LIT>')<EOL><DEDENT>if cplus:<EOL><INDENT>std = kwargs.pop('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>std = kwargs.pop('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>return src2obj(<EOL>interm_c_file,<EOL>objpath=objpath,<EOL>cwd=cwd,<EOL>only_update=only_update,<EOL>metadir=metadir,<EOL>include_dirs=include_dirs,<EOL>flags=flags,<EOL>std=std,<EOL>options=options,<EOL>logger=logger,<EOL>inc_py=True,<EOL>strict_aliasing=False,<EOL>**kwargs)<EOL> | Convenience function
If cwd is specified, pyxpath and dst are taken to be relative.
If only_update is set to `True`, the modification time is checked
and compilation is only run if the source is newer than the
destination
Parameters
----------
pyxpath: path string
path to Cython source file
objpath: path string (optional)
path to object file to generate
interm_c_dir: path string (optional)
directory to put generated C file.
cwd: path string (optional)
working directory and root of relative paths
logger: logging.Logger (optional)
passed onto `simple_cythonize` and `src2obj`
full_module_name: string (optional)
passed onto `simple_cythonize`
only_update: bool (optional)
passed onto `simple_cythonize` and `src2obj`
metadir: path string (optional)
passed onto src2obj
include_numpy: bool (optional)
Add numpy include directory to include_dirs. default: False
include_dirs: iterable of path strings (optional)
Passed onto src2obj and via cy_kwargs['include_path']
to simple_cythonize.
cy_kwargs: dict (optional)
keyword arguments passed onto `simple_cythonize`
gdb: bool (optional)
convenience: cy_kwargs['gdb_debug'] is set True if gdb=True,
default: False
cplus: bool (optional)
Indicate whether C++ is used. default: auto-detect using `pyx_is_cplus`
**kwargs: dict
keyword arguments passed onto src2obj
Returns
-------
Absolute path of generated object file. | f827:m6 |
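A sketch chaining `pyx2obj` with `link_py_so` and the `pyx_is_cplus` helper from earlier rows; the file name is a placeholder:

```python
# Cythonize wrapper.pyx to C (or C++, auto-detected), compile it, and
# link the resulting object into an importable extension.
obj = pyx2obj('wrapper.pyx', include_numpy=True, only_update=True)
so_path = link_py_so([obj], cplus=pyx_is_cplus('wrapper.pyx'))
```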
def compile_link_import_py_ext(<EOL>srcs, extname=None, build_dir=None, compile_kwargs=None,<EOL>link_kwargs=None, **kwargs): | build_dir = build_dir or '<STR_LIT:.>'<EOL>if extname is None:<EOL><INDENT>extname = os.path.splitext(os.path.basename(srcs[-<NUM_LIT:1>]))[<NUM_LIT:0>]<EOL><DEDENT>compile_kwargs = compile_kwargs or {}<EOL>compile_kwargs.update(kwargs)<EOL>link_kwargs = link_kwargs or {}<EOL>link_kwargs.update(kwargs)<EOL>try:<EOL><INDENT>mod = import_module_from_file(os.path.join(build_dir, extname), srcs)<EOL><DEDENT>except ImportError:<EOL><INDENT>objs = compile_sources(list(map(get_abspath, srcs)), destdir=build_dir,<EOL>cwd=build_dir, **compile_kwargs)<EOL>so = link_py_so(<EOL>objs, cwd=build_dir, fort=any_fort(srcs), cplus=any_cplus(srcs),<EOL>**link_kwargs)<EOL>mod = import_module_from_file(so)<EOL><DEDENT>return mod<EOL> | Compiles sources in `srcs` to a shared object (python extension)
which is then imported. If the shared object is newer than the sources,
they are not recompiled; the existing extension is imported instead.
Parameters
----------
srcs: iterable of path strings
list of paths to sources
extname: string
name of extension (default: None)
(taken from the last file in `srcs` - without extension)
build_dir: string
path to directory in which objects files etc. are generated
compile_kwargs: dict
keyword arguments passed to compile_sources
link_kwargs: dict
keyword arguments passed to link_py_so
**kwargs:
additional keyword arguments merged into both compile_kwargs
and link_kwargs; convenient e.g. when passing a logger
Returns
-------
the imported module
Examples
--------
>>> mod = compile_link_import_py_ext(['fft.f90', 'convolution.cpp',\
'fft_wrapper.pyx'], only_update=True) # doctest: +SKIP
>>> Aprim = mod.fft(A) # doctest: +SKIP | f827:m10 |
def compile_link_import_strings(codes, build_dir=None, **kwargs): | build_dir = build_dir or tempfile.mkdtemp()<EOL>if not os.path.isdir(build_dir):<EOL><INDENT>raise OSError("<STR_LIT>", build_dir)<EOL><DEDENT>source_files = []<EOL>if kwargs.get('<STR_LIT>', False) is True:<EOL><INDENT>import logging<EOL>logging.basicConfig(level=logging.DEBUG)<EOL>kwargs['<STR_LIT>'] = logging.getLogger()<EOL><DEDENT>only_update = kwargs.get('<STR_LIT>', True)<EOL>for name, code_ in codes:<EOL><INDENT>dest = os.path.join(build_dir, name)<EOL>differs = True<EOL>md5_in_mem = md5_of_string(code_.encode('<STR_LIT:utf-8>')).hexdigest()<EOL>if only_update and os.path.exists(dest):<EOL><INDENT>if os.path.exists(dest+'<STR_LIT>'):<EOL><INDENT>md5_on_disk = open(dest+'<STR_LIT>', '<STR_LIT>').read()<EOL><DEDENT>else:<EOL><INDENT>md5_on_disk = md5_of_file(dest).hexdigest()<EOL><DEDENT>differs = md5_on_disk != md5_in_mem<EOL><DEDENT>if not only_update or differs:<EOL><INDENT>with open(dest, '<STR_LIT>') as fh:<EOL><INDENT>fh.write(code_)<EOL>open(dest+'<STR_LIT>', '<STR_LIT>').write(md5_in_mem)<EOL><DEDENT><DEDENT>source_files.append(dest)<EOL><DEDENT>return compile_link_import_py_ext(<EOL>source_files, build_dir=build_dir, **kwargs)<EOL> | Creates a temporary directory and dumps, compiles and links
provided source code.
Parameters
----------
codes: iterable of name/source pair tuples
build_dir: string (default: None)
path to cache_dir. None implies use a temporary directory.
**kwargs:
keyword arguments passed onto `compile_link_import_py_ext` | f827:m11 |
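A hypothetical end-to-end use, assuming Cython is available; the module name and source string are illustrative:

```python
# Write a Cython source string into a temporary build dir, compile,
# link and import the resulting extension.
code = (
    "def double(x):\n"
    "    return 2*x\n"
)
mod = compile_link_import_strings([('_quick.pyx', code)])
assert mod.double(21) == 42
```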
def PCExtension(*args, **kwargs): | vals = {}<EOL>intercept = {<EOL>'<STR_LIT>': (), <EOL>'<STR_LIT>': True,<EOL>'<STR_LIT>': (),<EOL>'<STR_LIT>': (), <EOL>'<STR_LIT>': [],<EOL>'<STR_LIT>': False, <EOL>'<STR_LIT>': {},<EOL>'<STR_LIT>': {},<EOL>}<EOL>for k, v in intercept.items():<EOL><INDENT>vals[k] = kwargs.pop(k, v)<EOL><DEDENT>intercept2 = {<EOL>'<STR_LIT>': None,<EOL>'<STR_LIT>': True,<EOL>}<EOL>for k, v in intercept2.items():<EOL><INDENT>vck = kwargs.pop(k, v)<EOL>vck = vals['<STR_LIT>'].pop(k, vck)<EOL>vck = vck or vals['<STR_LIT>'].pop(k, vck)<EOL>vals[k] = vck<EOL><DEDENT>instance = Extension(*args, **kwargs)<EOL>if vals['<STR_LIT>'] is True:<EOL><INDENT>import logging<EOL>logging.basicConfig(level=logging.DEBUG)<EOL>vals['<STR_LIT>'] = logging.getLogger('<STR_LIT>')<EOL><DEDENT>for k, v in vals.items():<EOL><INDENT>setattr(instance, k, v)<EOL><DEDENT>return instance<EOL> | Parameters
==========
template_regexps: list of 3-tuples
e.g. [(pattern1, target1, subsd1), ...], used to generate
templated code
pass_extra_compile_args: bool
should ext.extra_compile_args be passed along? default: False | f830:m0 |
def _copy_or_render_source(ext, f, output_dir, render_callback,<EOL>skip_copy=False): | <EOL>dirname = os.path.dirname(f)<EOL>filename = os.path.basename(f)<EOL>for pattern, target, subsd in ext.template_regexps:<EOL><INDENT>if re.match(pattern, filename):<EOL><INDENT>tgt = os.path.join(dirname, re.sub(<EOL>pattern, target, filename))<EOL>rw = MetaReaderWriter('<STR_LIT>')<EOL>try:<EOL><INDENT>prev_subsd = rw.get_from_metadata_file(output_dir, f)<EOL><DEDENT>except (FileNotFoundError, KeyError):<EOL><INDENT>prev_subsd = None<EOL><DEDENT>render_callback(<EOL>get_abspath(f),<EOL>os.path.join(output_dir, tgt),<EOL>subsd,<EOL>only_update=ext.only_update,<EOL>prev_subsd=prev_subsd,<EOL>create_dest_dirs=True,<EOL>logger=ext.logger)<EOL>rw.save_to_metadata_file(output_dir, f, subsd)<EOL>return tgt<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if not skip_copy:<EOL><INDENT>copy(f,<EOL>os.path.join(output_dir,<EOL>os.path.dirname(f)),<EOL>only_update=ext.only_update,<EOL>dest_is_dir=True,<EOL>create_dest_dirs=True,<EOL>logger=ext.logger)<EOL><DEDENT>return f<EOL><DEDENT> | Tries to do regex match for each (pattern, target, subsd) tuple
in ext.template_regexps for file f. | f830:m1 |
def render_python_template_to(src, dest, subsd, only_update=False,<EOL>prev_subsd=None, create_dest_dirs=True,<EOL>logger=None): | if only_update:<EOL><INDENT>if subsd == prev_subsd:<EOL><INDENT>if not missing_or_other_newer(dest, src):<EOL><INDENT>if logger:<EOL><INDENT>msg = ("<STR_LIT>"<EOL>"<STR_LIT>")<EOL>logger.info(msg.format(src))<EOL><DEDENT>return<EOL><DEDENT><DEDENT><DEDENT>with open(src, '<STR_LIT>') as ifh:<EOL><INDENT>data = ifh.read() <EOL><DEDENT>if create_dest_dirs:<EOL><INDENT>dest_dir = os.path.dirname(dest)<EOL>if not os.path.exists(dest_dir):<EOL><INDENT>make_dirs(dest_dir)<EOL><DEDENT><DEDENT>with open(dest, '<STR_LIT>') as ofh:<EOL><INDENT>ofh.write(data % subsd)<EOL><DEDENT> | Overload this function if you want to use a template engine such as
mako. | f830:m2 |
@classmethod<EOL><INDENT>def find_compiler(cls, preferred_vendor, metadir, cwd,<EOL>use_meta=True):<DEDENT> | cwd = cwd or '<STR_LIT:.>'<EOL>metadir = metadir or '<STR_LIT:.>'<EOL>metadir = os.path.join(cwd, metadir)<EOL>used_metafile = False<EOL>if not preferred_vendor and use_meta:<EOL><INDENT>try:<EOL><INDENT>preferred_vendor = cls.get_from_metadata_file(<EOL>metadir, '<STR_LIT>')<EOL>used_metafile = True<EOL><DEDENT>except FileNotFoundError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>candidates = list(cls.compiler_dict.keys())<EOL>if preferred_vendor:<EOL><INDENT>if preferred_vendor in candidates:<EOL><INDENT>candidates = [preferred_vendor]+candidates<EOL><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>".format(<EOL>preferred_vendor))<EOL><DEDENT><DEDENT>name, path = find_binary_of_command([<EOL>cls.compiler_dict[x] for x in candidates])<EOL>if use_meta and not used_metafile:<EOL><INDENT>if not os.path.isdir(metadir):<EOL><INDENT>raise FileNotFoundError("<STR_LIT>".format(metadir))<EOL><DEDENT>cls.save_to_metadata_file(metadir, '<STR_LIT>',<EOL>(name, path))<EOL>cls.save_to_metadata_file(<EOL>metadir, '<STR_LIT>',<EOL>cls.compiler_name_vendor_mapping[name])<EOL>if cls.logger:<EOL><INDENT>cls.logger.info(<EOL>'<STR_LIT>')<EOL><DEDENT><DEDENT>return name, path, cls.compiler_name_vendor_mapping[name]<EOL> | Identify a suitable C/fortran/other compiler
When it is possible that the user (un)installs a compiler
in between compilations of object files, we want to catch
that. This method allows the compiler choice to be stored in a
pickled metadata file. Provide a directory path via metadir to
make the class save the choice there, in a file named
cls.metadata_filename. | f831:c0:m1 |
def cmd(self): | cmd = (<EOL>[self.compiler_binary] +<EOL>self.flags +<EOL>['<STR_LIT>'+x for x in self.undef] +<EOL>['<STR_LIT>'+x for x in self.define] +<EOL>['<STR_LIT>'+x for x in self.include_dirs] +<EOL>self.sources<EOL>)<EOL>if self.run_linker:<EOL><INDENT>cmd += (['<STR_LIT>'+x for x in self.library_dirs] +<EOL>[(x if os.path.exists(x) else '<STR_LIT>'+x) for x in self.libraries] +<EOL>self.linkline)<EOL><DEDENT>counted = []<EOL>for envvar in re.findall('<STR_LIT>', '<STR_LIT:U+0020>'.join(cmd)):<EOL><INDENT>if os.getenv(envvar) is None:<EOL><INDENT>if envvar not in counted:<EOL><INDENT>counted.append(envvar)<EOL>msg = "<STR_LIT>".format(<EOL>envvar)<EOL>self.logger.error(msg)<EOL>raise CompilationError(msg)<EOL><DEDENT><DEDENT><DEDENT>return cmd<EOL> | The command below covers most cases, if you need
something more complex, subclass this. | f831:c0:m2 |
def DiskCache(cachedir, methods): | class _DiskCache(object):<EOL><INDENT>cached_methods = methods<EOL>def __init__(self, *args, **kwargs):<EOL><INDENT>from tempfile import mkdtemp<EOL>from joblib import Memory<EOL>self.cachedir = cachedir or mkdtemp()<EOL>self.memory = Memory(cachedir=self.cachedir)<EOL>for method in self.cached_methods:<EOL><INDENT>setattr(self, method, self.memory.cache(getattr(self, method)))<EOL><DEDENT>if not os.path.isdir(self.cachedir):<EOL><INDENT>raise OSError("<STR_LIT>", self.cachedir)<EOL><DEDENT>super(_DiskCache, self).__init__(*args, **kwargs)<EOL><DEDENT><DEDENT>return _DiskCache<EOL> | Class factory for mixin class to help with caching.
The _DiskCache mixin class uses joblib to pickle results. | f835:m0 |
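A sketch of the mixin pattern the factory enables; the class and method names are illustrative:

```python
# Results of the listed methods are memoized on disk via joblib;
# cachedir=None makes the factory fall back on a temporary directory.
Cached = DiskCache(None, ['expensive'])

class Worker(Cached):
    def expensive(self, n):
        return sum(i * i for i in range(n))

w = Worker()
w.expensive(10**6)   # computed and pickled to disk
w.expensive(10**6)   # served from the on-disk cache
```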
def main(coeffs="<STR_LIT>", diff=<NUM_LIT:0>, xmin=<NUM_LIT:0>, xmax=<NUM_LIT:3>, N=<NUM_LIT:4>, clean=False): | coeffs = tuple(map(float, coeffs.split('<STR_LIT:U+002C>')))<EOL>poly = MyPoly(coeffs)<EOL>Dpoly = poly.diff(diff)<EOL>mod = Dpoly.compile_link_import_py_ext()<EOL>x = np.linspace(xmin, xmax, N)<EOL>result = mod.callback(x)<EOL>for _ in range(diff, <NUM_LIT:0>, -<NUM_LIT:1>):<EOL><INDENT>coeffs = tuple((p*c for p, c in enumerate(coeffs[<NUM_LIT:1>:], <NUM_LIT:1>)))<EOL><DEDENT>ref = np.zeros_like(x)<EOL>for p, c in enumerate(coeffs):<EOL><INDENT>ref += c*x**p<EOL><DEDENT>assert np.allclose(result, ref)<EOL>if clean:<EOL><INDENT>poly.memory.clear()<EOL>Dpoly.memory.clear()<EOL><DEDENT>return result<EOL> | Compile a native callback of a polynomial | f837:m0 |
def run_compilation(**kwargs): | for f in files:<EOL><INDENT>shutil.copy(f, kwargs['<STR_LIT>'])<EOL><DEDENT>objs = [<EOL>src2obj('<STR_LIT>',<EOL>options=options_omp,<EOL>**kwargs),<EOL>src2obj('<STR_LIT>',<EOL>std='<STR_LIT>',<EOL>options=options,<EOL>**kwargs),<EOL>src2obj('<STR_LIT>',<EOL>cplus=True, **kwargs)<EOL>]<EOL>return link_py_so(objs, cplus=True, fort=True,<EOL>options=options_omp, **kwargs)<EOL> | Compiles and links Cython wrapped C++ function
(which calls into an OpenMP enabled Fortran 2003 routine) | f838:m0 |
def __init__(self, access_token, **kwargs): | self.api_version = kwargs.get('<STR_LIT>') or DEFAULT_API_VERSION<EOL>self.app_secret = kwargs.get('<STR_LIT>')<EOL>self.graph_url = '<STR_LIT>'.format(self.api_version)<EOL>self.access_token = access_token<EOL> | @required:
access_token
@optional:
api_version
app_secret | f841:c1:m0 |
def send_attachment(self, recipient_id, attachment_type, attachment_path,<EOL>notification_type=NotificationType.regular): | payload = {<EOL>'<STR_LIT>': {<EOL>'<STR_LIT:id>': recipient_id<EOL>},<EOL>'<STR_LIT>': notification_type,<EOL>'<STR_LIT:message>': {<EOL>'<STR_LIT>': {<EOL>'<STR_LIT:type>': attachment_type,<EOL>'<STR_LIT>': {}<EOL>}<EOL>},<EOL>'<STR_LIT>': (os.path.basename(attachment_path), open(attachment_path, '<STR_LIT:rb>'))<EOL>}<EOL>multipart_data = MultipartEncoder(payload)<EOL>multipart_header = {<EOL>'<STR_LIT:Content-Type>': multipart_data.content_type<EOL>}<EOL>return requests.post(self.graph_url, data=multipart_data,<EOL>params=self.auth_args, headers=multipart_header).json()<EOL> | Send an attachment to the specified recipient using local path.
Input:
recipient_id: recipient id to send to
attachment_type: type of attachment (image, video, audio, file)
attachment_path: Path of attachment
Output:
Response from API as <dict> | f841:c1:m4 |
def send_attachment_url(self, recipient_id, attachment_type, attachment_url,<EOL>notification_type=NotificationType.regular): | return self.send_message(recipient_id, {<EOL>'<STR_LIT>': {<EOL>'<STR_LIT:type>': attachment_type,<EOL>'<STR_LIT>': {<EOL>'<STR_LIT:url>': attachment_url<EOL>}<EOL>}<EOL>}, notification_type)<EOL> | Send an attachment to the specified recipient using URL.
Input:
recipient_id: recipient id to send to
attachment_type: type of attachment (image, video, audio, file)
attachment_url: URL of attachment
Output:
Response from API as <dict> | f841:c1:m5 |
def send_text_message(self, recipient_id, message, notification_type=NotificationType.regular): | return self.send_message(recipient_id, {<EOL>'<STR_LIT:text>': message<EOL>}, notification_type)<EOL> | Send text messages to the specified recipient.
https://developers.facebook.com/docs/messenger-platform/send-api-reference/text-message
Input:
recipient_id: recipient id to send to
message: message to send
Output:
Response from API as <dict> | f841:c1:m6 |
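An assumed usage of the wrapper, matching the `__init__` row above; the class name, access token and recipient id are placeholders:

```python
bot = Bot('PAGE_ACCESS_TOKEN')  # access_token is required
response = bot.send_text_message('<RECIPIENT_ID>', 'Hello from the bot!')
print(response)  # dict parsed from the Send API's JSON reply
```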
def send_generic_message(self, recipient_id, elements, notification_type=NotificationType.regular): | return self.send_message(recipient_id, {<EOL>"<STR_LIT>": {<EOL>"<STR_LIT:type>": "<STR_LIT>",<EOL>"<STR_LIT>": {<EOL>"<STR_LIT>": "<STR_LIT>",<EOL>"<STR_LIT>": elements<EOL>}<EOL>}<EOL>}, notification_type)<EOL> | Send generic messages to the specified recipient.
https://developers.facebook.com/docs/messenger-platform/send-api-reference/generic-template
Input:
recipient_id: recipient id to send to
elements: generic message elements to send
Output:
Response from API as <dict> | f841:c1:m7 |
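A sketch of an `elements` payload for the generic template; field names follow the Send API reference linked above, and all values are placeholders:

```python
elements = [{
    'title': 'First card',
    'subtitle': 'A short description',
    'image_url': 'https://example.com/card.png',
    'buttons': [
        {'type': 'web_url', 'url': 'https://example.com', 'title': 'Open'},
    ],
}]
bot = Bot('PAGE_ACCESS_TOKEN')  # placeholder token
bot.send_generic_message('<RECIPIENT_ID>', elements)
```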
def send_button_message(self, recipient_id, text, buttons, notification_type=NotificationType.regular): | return self.send_message(recipient_id, {<EOL>"<STR_LIT>": {<EOL>"<STR_LIT:type>": "<STR_LIT>",<EOL>"<STR_LIT>": {<EOL>"<STR_LIT>": "<STR_LIT>",<EOL>"<STR_LIT:text>": text,<EOL>"<STR_LIT>": buttons<EOL>}<EOL>}<EOL>}, notification_type)<EOL> | Send text messages to the specified recipient.
https://developers.facebook.com/docs/messenger-platform/send-api-reference/button-template
Input:
recipient_id: recipient id to send to
text: text of message to send
buttons: buttons to send
Output:
Response from API as <dict> | f841:c1:m8 |
def send_action(self, recipient_id, action, notification_type=NotificationType.regular): | return self.send_recipient(recipient_id, {<EOL>'<STR_LIT>': action<EOL>}, notification_type)<EOL> | Send typing indicators or send read receipts to the specified recipient.
https://developers.facebook.com/docs/messenger-platform/send-api-reference/sender-actions
Input:
recipient_id: recipient id to send to
action: action type (mark_seen, typing_on, typing_off)
Output:
Response from API as <dict> | f841:c1:m9 |
def send_image(self, recipient_id, image_path, notification_type=NotificationType.regular): | return self.send_attachment(recipient_id, "<STR_LIT:image>", image_path, notification_type)<EOL> | Send an image to the specified recipient.
Image must be PNG or JPEG or GIF (more might be supported).
https://developers.facebook.com/docs/messenger-platform/send-api-reference/image-attachment
Input:
recipient_id: recipient id to send to
image_path: path to image to be sent
Output:
Response from API as <dict> | f841:c1:m10 |
def send_image_url(self, recipient_id, image_url, notification_type=NotificationType.regular): | return self.send_attachment_url(recipient_id, "<STR_LIT:image>", image_url, notification_type)<EOL> | Send an image to specified recipient using URL.
Image must be PNG or JPEG or GIF (more might be supported).
https://developers.facebook.com/docs/messenger-platform/send-api-reference/image-attachment
Input:
recipient_id: recipient id to send to
image_url: url of image to be sent
Output:
Response from API as <dict> | f841:c1:m11 |
def send_audio(self, recipient_id, audio_path, notification_type=NotificationType.regular): | return self.send_attachment(recipient_id, "<STR_LIT>", audio_path, notification_type)<EOL> | Send audio to the specified recipient.
Audio must be MP3 or WAV
https://developers.facebook.com/docs/messenger-platform/send-api-reference/audio-attachment
Input:
recipient_id: recipient id to send to
audio_path: path to audio to be sent
Output:
Response from API as <dict> | f841:c1:m12 |
def send_audio_url(self, recipient_id, audio_url, notification_type=NotificationType.regular): | return self.send_attachment_url(recipient_id, "<STR_LIT>", audio_url, notification_type)<EOL> | Send audio to specified recipient using URL.
Audio must be MP3 or WAV
https://developers.facebook.com/docs/messenger-platform/send-api-reference/audio-attachment
Input:
recipient_id: recipient id to send to
audio_url: url of audio to be sent
Output:
Response from API as <dict> | f841:c1:m13 |
def send_video(self, recipient_id, video_path, notification_type=NotificationType.regular): | return self.send_attachment(recipient_id, "<STR_LIT>", video_path, notification_type)<EOL> | Send video to the specified recipient.
Video should be MP4 or MOV, but supports more (https://www.facebook.com/help/218673814818907).
https://developers.facebook.com/docs/messenger-platform/send-api-reference/video-attachment
Input:
recipient_id: recipient id to send to
video_path: path to video to be sent
Output:
Response from API as <dict> | f841:c1:m14 |
def send_video_url(self, recipient_id, video_url, notification_type=NotificationType.regular): | return self.send_attachment_url(recipient_id, "<STR_LIT>", video_url, notification_type)<EOL> | Send video to specified recipient using URL.
Video should be MP4 or MOV, but supports more (https://www.facebook.com/help/218673814818907).
https://developers.facebook.com/docs/messenger-platform/send-api-reference/video-attachment
Input:
recipient_id: recipient id to send to
video_url: url of video to be sent
Output:
Response from API as <dict> | f841:c1:m15 |
def send_file(self, recipient_id, file_path, notification_type=NotificationType.regular): | return self.send_attachment(recipient_id, "<STR_LIT:file>", file_path, notification_type)<EOL> | Send file to the specified recipient.
https://developers.facebook.com/docs/messenger-platform/send-api-reference/file-attachment
Input:
recipient_id: recipient id to send to
file_path: path to file to be sent
Output:
Response from API as <dict> | f841:c1:m16 |
def send_file_url(self, recipient_id, file_url, notification_type=NotificationType.regular): | return self.send_attachment_url(recipient_id, "<STR_LIT:file>", file_url, notification_type)<EOL> | Send file to the specified recipient.
https://developers.facebook.com/docs/messenger-platform/send-api-reference/file-attachment
Input:
recipient_id: recipient id to send to
file_url: url of file to be sent
Output:
Response from API as <dict> | f841:c1:m17 |
def get_user_info(self, recipient_id, fields=None): | params = {}<EOL>if fields is not None and isinstance(fields, (list, tuple)):<EOL><INDENT>params['<STR_LIT>'] = "<STR_LIT:U+002C>".join(fields)<EOL><DEDENT>params.update(self.auth_args)<EOL>request_endpoint = '<STR_LIT>'.format(self.graph_url, recipient_id)<EOL>response = requests.get(request_endpoint, params=params)<EOL>if response.status_code == <NUM_LIT:200>:<EOL><INDENT>return response.json()<EOL><DEDENT>return None<EOL> | Getting information about the user
https://developers.facebook.com/docs/messenger-platform/user-profile
Input:
recipient_id: recipient id to send to
Output:
Response from API as <dict> | f841:c1:m18 |
def _send_payload(self, payload): | return self.send_raw(payload)<EOL> | Deprecated, use send_raw instead | f841:c1:m20 |
def validate_hub_signature(app_secret, request_payload, hub_signature_header): | try:<EOL><INDENT>hash_method, hub_signature = hub_signature_header.split('<STR_LIT:=>')<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>digest_module = getattr(hashlib, hash_method)<EOL>hmac_object = hmac.new(str(app_secret), unicode(request_payload), digest_module)<EOL>generated_hash = hmac_object.hexdigest()<EOL>if hub_signature == generated_hash:<EOL><INDENT>return True<EOL><DEDENT><DEDENT>return False<EOL> | @inputs:
app_secret: Secret Key for application
request_payload: request body
hub_signature_header: X-Hub-Signature header sent with request
@outputs:
boolean indicating whether the hub signature is valid | f843:m0 |
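A hypothetical Flask webhook guarding on the signature header; the route, app secret and Flask plumbing are illustrative, not part of the library:

```python
from flask import Flask, request

app = Flask(__name__)
APP_SECRET = 'your-app-secret'  # placeholder

@app.route('/webhook', methods=['POST'])
def webhook():
    # Reject any request whose X-Hub-Signature does not match the body.
    signature = request.headers.get('X-Hub-Signature', '')
    if not validate_hub_signature(APP_SECRET,
                                  request.get_data(as_text=True),
                                  signature):
        return 'invalid signature', 403
    return 'ok', 200
```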
def generate_appsecret_proof(access_token, app_secret): | if six.PY2:<EOL><INDENT>hmac_object = hmac.new(str(app_secret), unicode(access_token), hashlib.sha256)<EOL><DEDENT>else:<EOL><INDENT>hmac_object = hmac.new(bytearray(app_secret, '<STR_LIT:utf8>'), str(access_token).encode('<STR_LIT:utf8>'), hashlib.sha256)<EOL><DEDENT>generated_hash = hmac_object.hexdigest()<EOL>return generated_hash<EOL> | @inputs:
access_token: page access token
app_secret_token: app secret key
@outputs:
appsecret_proof: HMAC-SHA256 hash of page access token
using app_secret as the key | f843:m1 |
def get_app_template_dir(app_name): | if app_name in _cache:<EOL><INDENT>return _cache[app_name]<EOL><DEDENT>template_dir = None<EOL>if django.VERSION >= (<NUM_LIT:1>, <NUM_LIT:7>):<EOL><INDENT>from django.apps import apps<EOL>for app in apps.get_app_configs():<EOL><INDENT>if app.label == app_name:<EOL><INDENT>template_dir = join(app.path, '<STR_LIT>')<EOL>break<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>for app in settings.INSTALLED_APPS:<EOL><INDENT>if app.split('<STR_LIT:.>')[-<NUM_LIT:1>] == app_name:<EOL><INDENT>mod = import_module(app)<EOL>template_dir = join(abspath(dirname(mod.__file__)), '<STR_LIT>')<EOL>break<EOL><DEDENT><DEDENT><DEDENT>_cache[app_name] = template_dir<EOL>return template_dir<EOL> | Get the template directory for an application
Uses the apps interface available in Django 1.7+.
Returns a full path, or None if the app was not found. | f849:m0 |
def get_template_sources(self, template_name, template_dirs=None): | if '<STR_LIT::>' not in template_name:<EOL><INDENT>return []<EOL><DEDENT>app_name, template_name = template_name.split("<STR_LIT::>", <NUM_LIT:1>)<EOL>template_dir = get_app_template_dir(app_name)<EOL>if template_dir:<EOL><INDENT>if django.VERSION >= (<NUM_LIT:1>, <NUM_LIT:9>):<EOL><INDENT>from django.template import Origin<EOL>origin = Origin(<EOL>name=join(template_dir, template_name),<EOL>template_name=template_name,<EOL>loader=self,<EOL>)<EOL><DEDENT>else:<EOL><INDENT>origin = join(template_dir, template_name)<EOL><DEDENT>return [origin]<EOL><DEDENT>return []<EOL> | Returns the absolute paths to "template_name" in the specified app.
If the name does not contain an app name (no colon), an empty list
is returned.
The parent FilesystemLoader.load_template_source() will take care
of the actual loading for us. | f849:c0:m0 |
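An assumed Django settings snippet enabling the app-prefixed template names this loader resolves; the dotted loader path is a placeholder:

```python
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'OPTIONS': {
        'loaders': [
            'apptemplates.Loader',  # hypothetical path to this loader
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        ],
    },
}]
# A template may then extend a specific app's template by name:
# {% extends "admin:admin/base.html" %}
```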
@register.tag<EOL>def placeholder(parser, token): | return Placeholder.parse(parser, token)<EOL> | A dummy placeholder template tag. | f851:m0 |
def _is_variable_extends(extend_node): | if django.VERSION < (<NUM_LIT:1>, <NUM_LIT:4>):<EOL><INDENT>return extend_node.parent_name_expr <EOL><DEDENT>else:<EOL><INDENT>return not isinstance(extend_node.parent_name.var, six.string_types)<EOL><DEDENT> | Check whether an ``{% extends variable %}`` is used in the template.
:type extend_node: ExtendsNode | f852:m0 |
def _extend_blocks(extend_node, blocks, context): | try:<EOL><INDENT>parent = extend_node.get_parent(_get_extend_context(context))<EOL><DEDENT>except TemplateSyntaxError:<EOL><INDENT>if _is_variable_extends(extend_node):<EOL><INDENT>return<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT>for parent_block in parent.nodelist.get_nodes_by_type(BlockNode):<EOL><INDENT>if not parent_block.name in blocks:<EOL><INDENT>blocks[parent_block.name] = parent_block<EOL><DEDENT>else:<EOL><INDENT>block = blocks[parent_block.name]<EOL>seen_supers = []<EOL>while hasattr(block.parent, '<STR_LIT>') and block.parent not in seen_supers:<EOL><INDENT>seen_supers.append(block.parent)<EOL>block = block.parent<EOL><DEDENT>block.parent = parent_block<EOL><DEDENT><DEDENT>parent_extends = parent.nodelist.get_nodes_by_type(ExtendsNode)<EOL>if parent_extends:<EOL><INDENT>_extend_blocks(parent_extends[<NUM_LIT:0>], blocks, context)<EOL><DEDENT> | Extends the dictionary `blocks` with *new* blocks in the parent node (recursive)
:param extend_node: The ``{% extends .. %}`` node object.
:type extend_node: ExtendsNode
:param blocks: dict of all block names found in the template.
:type blocks: dict | f852:m1 |
def _extend_nodelist(extends_node, context, instance_types): | results = []<EOL>blocks = extends_node.blocks.copy() <EOL>_extend_blocks(extends_node, blocks, context)<EOL>all_block_names = list(blocks.keys())<EOL>for block in list(blocks.values()):<EOL><INDENT>results += _scan_nodes(block.nodelist, context, instance_types, block, ignore_blocks=all_block_names)<EOL><DEDENT>parent_template = _find_topmost_template(extends_node, context)<EOL>if not parent_template:<EOL><INDENT>return []<EOL><DEDENT>else:<EOL><INDENT>results += _scan_nodes(parent_template.nodelist, context, instance_types, ignore_blocks=all_block_names)<EOL>return results<EOL><DEDENT> | Returns a list of results found in the parent template(s)
:type extends_node: ExtendsNode | f852:m3 |
def _scan_nodes(nodelist, context, instance_types, current_block=None, ignore_blocks=None): | results = []<EOL>for node in nodelist:<EOL><INDENT>if isinstance(node, instance_types):<EOL><INDENT>results.append(node)<EOL><DEDENT>elif isinstance(node, IncludeNode):<EOL><INDENT>if node.template:<EOL><INDENT>if not callable(getattr(node.template, '<STR_LIT>', None)):<EOL><INDENT>template = get_template(node.template.var)<EOL><DEDENT>else:<EOL><INDENT>template = node.template<EOL><DEDENT>if TemplateAdapter is not None and isinstance(template, TemplateAdapter):<EOL><INDENT>template = template.template<EOL><DEDENT>results += _scan_nodes(template.nodelist, context, instance_types, current_block)<EOL><DEDENT><DEDENT>elif isinstance(node, ExtendsNode):<EOL><INDENT>results += _extend_nodelist(node, context, instance_types)<EOL><DEDENT>elif isinstance(node, VariableNode) and current_block:<EOL><INDENT>if node.filter_expression.token == '<STR_LIT>':<EOL><INDENT>if not hasattr(current_block.parent, '<STR_LIT>'):<EOL><INDENT>raise TemplateSyntaxError(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>".format(<EOL>current_block.name<EOL>))<EOL><DEDENT>results += _scan_nodes(current_block.parent.nodelist, context, instance_types, current_block.parent)<EOL><DEDENT><DEDENT>elif isinstance(node, BlockNode) and ignore_blocks and node.name in ignore_blocks:<EOL><INDENT>continue<EOL><DEDENT>elif hasattr(node, '<STR_LIT>'):<EOL><INDENT>for nodelist_name in node.child_nodelists:<EOL><INDENT>if hasattr(node, nodelist_name):<EOL><INDENT>subnodelist = getattr(node, nodelist_name)<EOL>if isinstance(subnodelist, NodeList):<EOL><INDENT>if isinstance(node, BlockNode):<EOL><INDENT>current_block = node<EOL><DEDENT>results += _scan_nodes(subnodelist, context, instance_types, current_block)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>for attr in dir(node):<EOL><INDENT>obj = getattr(node, attr)<EOL>if isinstance(obj, NodeList):<EOL><INDENT>if isinstance(node, BlockNode):<EOL><INDENT>current_block = node<EOL><DEDENT>results += _scan_nodes(obj, context, instance_types, current_block)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>return results<EOL> | Loop through all nodes of a single scope level.
:type nodelist: django.template.base.NodeList
:type current_block: BlockNode
:param instance_types: The node type, or tuple of types, to look for | f852:m4
def get_node_instances(nodelist, instances): | context = _get_main_context(nodelist)<EOL>if TemplateAdapter is not None and isinstance(nodelist, TemplateAdapter):<EOL><INDENT>nodelist = nodelist.template<EOL><DEDENT>return _scan_nodes(nodelist, context, instances)<EOL> | Find the nodes of a given instance.
In contrast to the standard ``template.nodelist.get_nodes_by_type()`` method,
this also looks into ``{% extends %}`` and ``{% include .. %}`` nodes
to find all possible nodes of the given type.
:param instances: A class type, or tuple of types, to find.
:param nodelist: The Template object, or nodelist, to scan.
:returns: A list of Node objects that are instances of the given `instances` types.
:rtype: list | f852:m7 |
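A hedged illustration of why the recursive scan matters, assuming Django settings are configured with the default engine; the stock `get_nodes_by_type()` never leaves the current template's own nodelist:

```python
# Template source is illustrative; requires a configured Django engine.
from django.template import engines
from django.template.loader_tags import BlockNode

tpl = engines['django'].from_string('{% block body %}hi{% endblock %}')
own_blocks = tpl.template.nodelist.get_nodes_by_type(BlockNode)
print([b.name for b in own_blocks])  # ['body'], parent templates unseen

# get_node_instances(tpl.template, BlockNode) would additionally follow
# {% extends %} and {% include %} nodes to reach inherited blocks.
```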
def configure(): | root_logger = logging.getLogger()<EOL>root_logger.addHandler(logging.NullHandler())<EOL>root_logger.setLevel(<NUM_LIT>)<EOL>_display_logger.setLevel(<NUM_LIT>)<EOL>_debug_logger.setLevel(<NUM_LIT:10>)<EOL>display_handlers = [h.get_name() for h in _display_logger.handlers]<EOL>if '<STR_LIT>' not in display_handlers:<EOL><INDENT>stdout_handler = logging.StreamHandler(sys.stdout)<EOL>stdout_handler.set_name('<STR_LIT>')<EOL>formatter = logging.Formatter('<STR_LIT>')<EOL>stdout_handler.setFormatter(formatter)<EOL>_display_logger.addHandler(stdout_handler)<EOL><DEDENT> | Configures the logging facility
This function will set up an initial logging facility for handling display
and debug outputs. The default display facility sends messages to
stdout, and the default debug facility does nothing.
:returns: None | f857:m5 |
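A stand-alone sketch of the same two-logger pattern, with logger names and the format string as placeholder assumptions for the masked literals:

```python
import logging
import sys

display_logger = logging.getLogger('runner.display')  # name assumed
debug_logger = logging.getLogger('runner.debug')      # name assumed

display_logger.setLevel(logging.INFO)
debug_logger.setLevel(logging.DEBUG)
debug_logger.addHandler(logging.NullHandler())  # silent until a real handler is added

stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.set_name('stdout')
stdout_handler.setFormatter(logging.Formatter('%(message)s'))  # format assumed

# the name check guards against duplicate handlers on repeated configure() calls
if 'stdout' not in [h.get_name() for h in display_logger.handlers]:
    display_logger.addHandler(stdout_handler)

display_logger.info('shown on stdout')
debug_logger.debug('dropped unless a debug handler is attached')
```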
def prepare(self): | <EOL>if self.private_data_dir is None:<EOL><INDENT>raise ConfigurationError("<STR_LIT>")<EOL><DEDENT>if self.module and self.playbook:<EOL><INDENT>raise ConfigurationError("<STR_LIT>")<EOL><DEDENT>if not os.path.exists(self.artifact_dir):<EOL><INDENT>os.makedirs(self.artifact_dir, mode=<NUM_LIT>)<EOL><DEDENT>if self.directory_isolation_path is not None:<EOL><INDENT>self.directory_isolation_path = tempfile.mkdtemp(prefix='<STR_LIT>', dir=self.directory_isolation_path)<EOL>if os.path.exists(self.project_dir):<EOL><INDENT>output.debug("<STR_LIT>".format(self.project_dir,<EOL>self.directory_isolation_path))<EOL>copy_tree(self.project_dir, self.directory_isolation_path, preserve_symlinks=True)<EOL><DEDENT><DEDENT>self.prepare_inventory()<EOL>self.prepare_env()<EOL>self.prepare_command()<EOL>if self.execution_mode == ExecutionMode.ANSIBLE_PLAYBOOK and self.playbook is None:<EOL><INDENT>raise ConfigurationError("<STR_LIT>")<EOL><DEDENT>elif self.execution_mode == ExecutionMode.ANSIBLE and self.module is None:<EOL><INDENT>raise ConfigurationError("<STR_LIT>")<EOL><DEDENT>elif self.execution_mode == ExecutionMode.NONE:<EOL><INDENT>raise ConfigurationError("<STR_LIT>")<EOL><DEDENT>if self.ssh_key_data:<EOL><INDENT>self.ssh_key_path = os.path.join(self.artifact_dir, '<STR_LIT>')<EOL>open_fifo_write(self.ssh_key_path, self.ssh_key_data)<EOL>self.command = self.wrap_args_with_ssh_agent(self.command, self.ssh_key_path)<EOL><DEDENT>callback_dir = self.env.get('<STR_LIT>', os.getenv('<STR_LIT>'))<EOL>if callback_dir is None:<EOL><INDENT>callback_dir = os.path.join(os.path.split(os.path.abspath(__file__))[<NUM_LIT:0>],<EOL>"<STR_LIT>")<EOL><DEDENT>python_path = self.env.get('<STR_LIT>', os.getenv('<STR_LIT>', '<STR_LIT>'))<EOL>if python_path and not python_path.endswith('<STR_LIT::>'):<EOL><INDENT>python_path += '<STR_LIT::>'<EOL><DEDENT>self.env['<STR_LIT>'] = callback_dir<EOL>if '<STR_LIT>' in self.env:<EOL><INDENT>self.env['<STR_LIT>'] = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>self.env['<STR_LIT>'] = '<STR_LIT>'<EOL><DEDENT>self.env['<STR_LIT>'] = '<STR_LIT:False>'<EOL>self.env['<STR_LIT>'] = '<STR_LIT:False>'<EOL>self.env['<STR_LIT>'] = self.artifact_dir<EOL>self.env['<STR_LIT>'] = python_path + callback_dir<EOL>if self.roles_path:<EOL><INDENT>self.env['<STR_LIT>'] = '<STR_LIT::>'.join(self.roles_path)<EOL><DEDENT>if self.process_isolation:<EOL><INDENT>self.command = self.wrap_args_with_process_isolation(self.command)<EOL><DEDENT>if self.fact_cache_type == '<STR_LIT>':<EOL><INDENT>self.env['<STR_LIT>'] = '<STR_LIT>'<EOL>self.env['<STR_LIT>'] = self.fact_cache<EOL><DEDENT> | Performs basic checks and then properly invokes
- prepare_inventory
- prepare_env
- prepare_command
It's also responsible for wrapping the command with the proper ssh agent invocation
and setting early ANSIBLE_ environment variables. | f859:c1:m1 |
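The typical preparation flow, as a hedged sketch; the import path follows the public ansible-runner layout, and `/tmp/demo` must already hold the expected directory structure:

```python
# 'test.yml' is expected under <private_data_dir>/project per this
# package's conventions; paths here are illustrative.
from ansible_runner.runner_config import RunnerConfig

rc = RunnerConfig(private_data_dir='/tmp/demo', playbook='test.yml')
rc.prepare()       # runs prepare_inventory, prepare_env, prepare_command
print(rc.command)  # e.g. ['ansible-playbook', '-i', '/tmp/demo/inventory', 'test.yml']
```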
def prepare_inventory(self): | if self.inventory is None:<EOL><INDENT>self.inventory = os.path.join(self.private_data_dir, "<STR_LIT>")<EOL><DEDENT> | Prepares the inventory default under ``private_data_dir`` if it's not overridden by the constructor. | f859:c1:m2 |
def prepare_env(self): | try:<EOL><INDENT>passwords = self.loader.load_file('<STR_LIT>', Mapping)<EOL>self.expect_passwords = {<EOL>re.compile(pattern, re.M): password<EOL>for pattern, password in iteritems(passwords)<EOL>}<EOL><DEDENT>except ConfigurationError:<EOL><INDENT>output.debug('<STR_LIT>')<EOL>self.expect_passwords = dict()<EOL><DEDENT>self.expect_passwords[pexpect.TIMEOUT] = None<EOL>self.expect_passwords[pexpect.EOF] = None<EOL>try:<EOL><INDENT>self.env = os.environ.copy()<EOL>envvars = self.loader.load_file('<STR_LIT>', Mapping)<EOL>if envvars:<EOL><INDENT>self.env.update({k:six.text_type(v) for k, v in envvars.items()})<EOL><DEDENT>if self.envvars and isinstance(self.envvars, dict):<EOL><INDENT>self.env.update({k:six.text_type(v) for k, v in self.envvars.items()})<EOL><DEDENT><DEDENT>except ConfigurationError:<EOL><INDENT>output.debug("<STR_LIT>")<EOL>self.env = os.environ.copy()<EOL><DEDENT>try:<EOL><INDENT>self.settings = self.loader.load_file('<STR_LIT>', Mapping)<EOL><DEDENT>except ConfigurationError:<EOL><INDENT>output.debug("<STR_LIT>")<EOL>self.settings = dict()<EOL><DEDENT>try:<EOL><INDENT>self.ssh_key_data = self.loader.load_file('<STR_LIT>', string_types)<EOL><DEDENT>except ConfigurationError:<EOL><INDENT>output.debug("<STR_LIT>")<EOL>self.ssh_key_data = None<EOL><DEDENT>self.idle_timeout = self.settings.get('<STR_LIT>', None)<EOL>self.job_timeout = self.settings.get('<STR_LIT>', None)<EOL>self.pexpect_timeout = self.settings.get('<STR_LIT>', <NUM_LIT:5>)<EOL>self.process_isolation = self.settings.get('<STR_LIT>', self.process_isolation)<EOL>self.process_isolation_executable = self.settings.get('<STR_LIT>', self.process_isolation_executable)<EOL>self.process_isolation_path = self.settings.get('<STR_LIT>', self.process_isolation_path)<EOL>self.process_isolation_hide_paths = self.settings.get('<STR_LIT>', self.process_isolation_hide_paths)<EOL>self.process_isolation_show_paths = self.settings.get('<STR_LIT>', self.process_isolation_show_paths)<EOL>self.process_isolation_ro_paths = self.settings.get('<STR_LIT>', self.process_isolation_ro_paths)<EOL>self.pexpect_use_poll = self.settings.get('<STR_LIT>', True)<EOL>self.suppress_ansible_output = self.settings.get('<STR_LIT>', self.quiet)<EOL>self.directory_isolation_cleanup = bool(self.settings.get('<STR_LIT>', True))<EOL>if '<STR_LIT>' in self.env or not os.path.exists(self.project_dir):<EOL><INDENT>self.cwd = self.private_data_dir<EOL><DEDENT>else:<EOL><INDENT>if self.directory_isolation_path is not None:<EOL><INDENT>self.cwd = self.directory_isolation_path<EOL><DEDENT>else:<EOL><INDENT>self.cwd = self.project_dir<EOL><DEDENT><DEDENT>if '<STR_LIT>' in self.settings:<EOL><INDENT>if '<STR_LIT>' in self.settings:<EOL><INDENT>if self.settings['<STR_LIT>'] == '<STR_LIT>':<EOL><INDENT>self.fact_cache = os.path.join(self.artifact_dir, self.settings['<STR_LIT>'])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.fact_cache = os.path.join(self.artifact_dir, self.settings['<STR_LIT>'])<EOL><DEDENT><DEDENT> | Manages reading environment metadata files under ``private_data_dir`` and merging/updating
with existing values so the :py:class:`ansible_runner.runner.Runner` object can read and use them easily | f859:c1:m3 |
def prepare_command(self): | try:<EOL><INDENT>cmdline_args = self.loader.load_file('<STR_LIT:args>', string_types)<EOL>self.command = shlex.split(cmdline_args.decode('<STR_LIT:utf-8>'))<EOL>self.execution_mode = ExecutionMode.RAW<EOL><DEDENT>except ConfigurationError:<EOL><INDENT>self.command = self.generate_ansible_command()<EOL><DEDENT> | Determines if the literal ``ansible`` or ``ansible-playbook`` commands are given
and if not calls :py:meth:`ansible_runner.runner_config.RunnerConfig.generate_ansible_command` | f859:c1:m4 |
def generate_ansible_command(self): | if self.binary is not None:<EOL><INDENT>base_command = self.binary<EOL>self.execution_mode = ExecutionMode.RAW<EOL><DEDENT>elif self.module is not None:<EOL><INDENT>base_command = '<STR_LIT>'<EOL>self.execution_mode = ExecutionMode.ANSIBLE<EOL><DEDENT>else:<EOL><INDENT>base_command = '<STR_LIT>'<EOL>self.execution_mode = ExecutionMode.ANSIBLE_PLAYBOOK<EOL><DEDENT>exec_list = [base_command]<EOL>try:<EOL><INDENT>cmdline_args = self.loader.load_file('<STR_LIT>', string_types, encoding=None)<EOL>args = shlex.split(cmdline_args)<EOL>exec_list.extend(args)<EOL><DEDENT>except ConfigurationError:<EOL><INDENT>pass<EOL><DEDENT>if isinstance(self.inventory, list):<EOL><INDENT>for i in self.inventory:<EOL><INDENT>exec_list.append("<STR_LIT>")<EOL>exec_list.append(i)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>exec_list.append("<STR_LIT>")<EOL>exec_list.append(self.inventory)<EOL><DEDENT>if self.limit is not None:<EOL><INDENT>exec_list.append("<STR_LIT>")<EOL>exec_list.append(self.limit)<EOL><DEDENT>if self.loader.isfile('<STR_LIT>'):<EOL><INDENT>exec_list.extend(['<STR_LIT>', '<STR_LIT>'.format(self.loader.abspath('<STR_LIT>'))])<EOL><DEDENT>if isinstance(self.extra_vars, dict) and self.extra_vars:<EOL><INDENT>exec_list.extend(<EOL>[<EOL>'<STR_LIT>',<EOL>'<STR_LIT:%s>' % '<STR_LIT:U+0020>'.join(<EOL>["<STR_LIT>".format(k, self.extra_vars[k]) for k in self.extra_vars]<EOL>)<EOL>]<EOL>)<EOL><DEDENT>if self.verbosity:<EOL><INDENT>v = '<STR_LIT:v>' * self.verbosity<EOL>exec_list.append('<STR_LIT>'.format(v))<EOL><DEDENT>if self.tags:<EOL><INDENT>exec_list.extend(['<STR_LIT>', '<STR_LIT:{}>'.format(self.tags)])<EOL><DEDENT>if self.skip_tags:<EOL><INDENT>exec_list.extend(['<STR_LIT>', '<STR_LIT:{}>'.format(self.skip_tags)])<EOL><DEDENT>if self.forks:<EOL><INDENT>exec_list.extend(['<STR_LIT>', '<STR_LIT:{}>'.format(self.forks)])<EOL><DEDENT>if self.execution_mode == ExecutionMode.ANSIBLE_PLAYBOOK:<EOL><INDENT>exec_list.append(self.playbook)<EOL><DEDENT>elif self.execution_mode == ExecutionMode.ANSIBLE:<EOL><INDENT>exec_list.append("<STR_LIT>")<EOL>exec_list.append(self.module)<EOL>if self.module_args is not None:<EOL><INDENT>exec_list.append("<STR_LIT>")<EOL>exec_list.append(self.module_args)<EOL><DEDENT>if self.host_pattern is not None:<EOL><INDENT>exec_list.append(self.host_pattern)<EOL><DEDENT><DEDENT>return exec_list<EOL> | Given that the ``RunnerConfig`` preparation methods have been run to gather the inputs this method
will generate the ``ansible`` or ``ansible-playbook`` command that will be used by the
:py:class:`ansible_runner.runner.Runner` object to start the process | f859:c1:m5 |
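A hedged reconstruction of the playbook-mode command shape; the corpus masks the literal flag strings, so the spellings below follow the public ansible-playbook CLI as an assumption:

```python
def sketch_playbook_command(inventory, playbook, limit=None, verbosity=0):
    """Assemble the command list the way the method above does (assumed flags)."""
    cmd = ['ansible-playbook', '-i', inventory]
    if limit:
        cmd += ['--limit', limit]
    if verbosity:
        cmd.append('-' + 'v' * verbosity)  # -v, -vv, ...
    cmd.append(playbook)
    return cmd

print(sketch_playbook_command('/tmp/demo/inventory', 'site.yml',
                              limit='web', verbosity=2))
# ['ansible-playbook', '-i', '/tmp/demo/inventory', '--limit', 'web', '-vv', 'site.yml']
```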
def build_process_isolation_temp_dir(self): | path = tempfile.mkdtemp(prefix='<STR_LIT>', dir=self.process_isolation_path)<EOL>os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)<EOL>atexit.register(shutil.rmtree, path)<EOL>return path<EOL> | Create a temporary directory for process isolation to use. | f859:c1:m6 |
def wrap_args_with_process_isolation(self, args): | cwd = os.path.realpath(self.cwd)<EOL>pi_temp_dir = self.build_process_isolation_temp_dir()<EOL>new_args = [self.process_isolation_executable or '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:/>', '<STR_LIT:/>', '<STR_LIT>', '<STR_LIT>']<EOL>for path in sorted(set(self.process_isolation_hide_paths or [])):<EOL><INDENT>if not os.path.exists(path):<EOL><INDENT>logger.debug('<STR_LIT>'.format(path))<EOL>continue<EOL><DEDENT>path = os.path.realpath(path)<EOL>if os.path.isdir(path):<EOL><INDENT>new_path = tempfile.mkdtemp(dir=pi_temp_dir)<EOL>os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)<EOL><DEDENT>else:<EOL><INDENT>handle, new_path = tempfile.mkstemp(dir=pi_temp_dir)<EOL>os.close(handle)<EOL>os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR)<EOL><DEDENT>new_args.extend(['<STR_LIT>', '<STR_LIT>'.format(new_path), '<STR_LIT>'.format(path)])<EOL><DEDENT>if self.private_data_dir:<EOL><INDENT>show_paths = [self.private_data_dir]<EOL><DEDENT>else:<EOL><INDENT>show_paths = [cwd]<EOL><DEDENT>for path in sorted(set(self.process_isolation_ro_paths or [])):<EOL><INDENT>if not os.path.exists(path):<EOL><INDENT>logger.debug('<STR_LIT>'.format(path))<EOL>continue<EOL><DEDENT>path = os.path.realpath(path)<EOL>new_args.extend(['<STR_LIT>', '<STR_LIT>'.format(path), '<STR_LIT>'.format(path)])<EOL><DEDENT>show_paths.extend(self.process_isolation_show_paths or [])<EOL>for path in sorted(set(show_paths)):<EOL><INDENT>if not os.path.exists(path):<EOL><INDENT>logger.debug('<STR_LIT>'.format(path))<EOL>continue<EOL><DEDENT>path = os.path.realpath(path)<EOL>new_args.extend(['<STR_LIT>', '<STR_LIT>'.format(path), '<STR_LIT>'.format(path)])<EOL><DEDENT>if self.execution_mode == ExecutionMode.ANSIBLE_PLAYBOOK:<EOL><INDENT>if self.directory_isolation_path is not None:<EOL><INDENT>new_args.extend(['<STR_LIT>', os.path.realpath(self.directory_isolation_path)])<EOL><DEDENT>else:<EOL><INDENT>new_args.extend(['<STR_LIT>', self.project_dir])<EOL><DEDENT><DEDENT>elif self.execution_mode == ExecutionMode.ANSIBLE:<EOL><INDENT>new_args.extend(['<STR_LIT>', os.path.realpath(self.private_data_dir)])<EOL><DEDENT>new_args.extend(args)<EOL>return new_args<EOL> | Wrap existing command line with bwrap to restrict access to:
- self.process_isolation_path (generally /tmp), except for the run's own temporary files | f859:c1:m7
def wrap_args_with_ssh_agent(self, args, ssh_key_path, ssh_auth_sock=None, silence_ssh_add=False): | if ssh_key_path:<EOL><INDENT>ssh_add_command = args2cmdline('<STR_LIT>', ssh_key_path)<EOL>if silence_ssh_add:<EOL><INDENT>ssh_add_command = '<STR_LIT:U+0020>'.join([ssh_add_command, '<STR_LIT>'])<EOL><DEDENT>cmd = '<STR_LIT>'.join([ssh_add_command,<EOL>args2cmdline('<STR_LIT>', '<STR_LIT>', ssh_key_path),<EOL>args2cmdline(*args)])<EOL>args = ['<STR_LIT>']<EOL>if ssh_auth_sock:<EOL><INDENT>args.extend(['<STR_LIT>', ssh_auth_sock])<EOL><DEDENT>args.extend(['<STR_LIT>', '<STR_LIT:-c>', cmd])<EOL><DEDENT>return args<EOL> | Given an existing command line and parameterization this will return the same command line wrapped with the
necessary calls to ``ssh-agent`` | f859:c1:m8 |
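A stand-alone reconstruction of the wrapping pattern; the literals are masked in the corpus, so the `&&` joiner and the key-cleanup command are assumptions inferred from the visible call shapes:

```python
import shlex

def args2cmdline(*args):
    return ' '.join(shlex.quote(a) for a in args)

def wrap_with_ssh_agent(args, key_path):
    inner = ' && '.join([
        args2cmdline('ssh-add', key_path),
        args2cmdline('rm', '-f', key_path),  # assumed cleanup of the key file
        args2cmdline(*args),
    ])
    return ['ssh-agent', 'sh', '-c', inner]

print(wrap_with_ssh_agent(['ansible-playbook', 'site.yml'], '/tmp/key'))
# ['ssh-agent', 'sh', '-c', 'ssh-add /tmp/key && rm -f /tmp/key && ansible-playbook site.yml']
```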
def _load_json(self, contents): | try:<EOL><INDENT>return json.loads(contents)<EOL><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT> | Attempts to deserialize the contents of a JSON object
Args:
contents (string): The contents to deserialize
Returns:
dict: If the contents are JSON serialized
None: If the contents are not JSON serialized | f860:c0:m1 |
def _load_yaml(self, contents): | try:<EOL><INDENT>return safe_load(contents)<EOL><DEDENT>except YAMLError:<EOL><INDENT>pass<EOL><DEDENT> | Attempts to deserialize the contents of a YAML object
Args:
contents (string): The contents to deserialize
Returns:
dict: If the contents are YAML serialized
None: If the contents are not YAML serialized | f860:c0:m2 |
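The two deserializers are tried in order by `load_file()` below: JSON first, then YAML. A stand-alone sketch of that fallback, showing why a type check is still needed afterwards (any prose parses as a YAML scalar):

```python
import json
from yaml import safe_load, YAMLError

def parse(contents):
    try:
        return json.loads(contents)
    except ValueError:
        pass
    try:
        return safe_load(contents)
    except YAMLError:
        return None

print(parse('{"a": 1}'))   # {'a': 1} via JSON
print(parse('a: 1'))       # {'a': 1} via YAML fallback
print(parse('just text'))  # 'just text', a YAML scalar, hence the objtype check
```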
def get_contents(self, path): | try:<EOL><INDENT>if not os.path.exists(path):<EOL><INDENT>raise ConfigurationError('<STR_LIT>' % path)<EOL><DEDENT>with open(path) as f:<EOL><INDENT>data = f.read()<EOL><DEDENT>return data<EOL><DEDENT>except (IOError, OSError) as exc:<EOL><INDENT>raise ConfigurationError('<STR_LIT>' % exc)<EOL><DEDENT> | Loads the contents of the file specified by path
Args:
path (string): The relative or absolute path to the file to
be loaded. If the path is relative, then it is combined
with the base_path to generate a full path string
Returns:
string: The contents of the file as a string
Raises:
ConfigurationError: If the file cannot be loaded | f860:c0:m3 |
def abspath(self, path): | if not path.startswith(os.path.sep) or path.startswith('<STR_LIT>'):<EOL><INDENT>path = os.path.expanduser(os.path.join(self.base_path, path))<EOL><DEDENT>return path<EOL> | Transform the path to an absolute path
Args:
path (string): The path to transform to an absolute path
Returns:
string: The absolute path to the file | f860:c0:m4 |
def isfile(self, path): | return os.path.isfile(self.abspath(path))<EOL> | Check if the path is a file
:param path: The path to the file to check. If the path is relative
it will be expanded to an absolute path
:returns: boolean | f860:c0:m5 |
def load_file(self, path, objtype=None, encoding='<STR_LIT:utf-8>'): | path = self.abspath(path)<EOL>debug('<STR_LIT>' % path)<EOL>if path in self._cache:<EOL><INDENT>return self._cache[path]<EOL><DEDENT>try:<EOL><INDENT>debug('<STR_LIT>' % path)<EOL>contents = parsed_data = self.get_contents(path)<EOL>if encoding:<EOL><INDENT>parsed_data = contents.encode(encoding)<EOL><DEDENT><DEDENT>except ConfigurationError as exc:<EOL><INDENT>debug(exc)<EOL>raise<EOL><DEDENT>except UnicodeEncodeError:<EOL><INDENT>raise ConfigurationError('<STR_LIT>')<EOL><DEDENT>if objtype is not string_types:<EOL><INDENT>for deserializer in (self._load_json, self._load_yaml):<EOL><INDENT>parsed_data = deserializer(contents)<EOL>if parsed_data:<EOL><INDENT>break<EOL><DEDENT><DEDENT>if objtype and not isinstance(parsed_data, objtype):<EOL><INDENT>debug('<STR_LIT>' % (path, objtype))<EOL>raise ConfigurationError('<STR_LIT>')<EOL><DEDENT><DEDENT>self._cache[path] = parsed_data<EOL>return parsed_data<EOL> | Load the file specified by path
This method will first try to load the file contents from cache and
if there is a cache miss, it will load the contents from disk
Args:
path (string): The full or relative path to the file to be loaded
encoding (string): The file contents text encoding
objtype (object): The object type of the file contents. This
is used to type check the deserialized content against the
contents loaded from disk.
Deserialization is skipped when objtype is string_types
Returns:
object: The deserialized file contents which could be either a
string object or a dict object
Raises:
ConfigurationError: If the file cannot be loaded or the deserialized contents do not match objtype | f860:c0:m6
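A hedged end-to-end usage of the loader; the class name `ArtifactLoader` and its import path are assumptions based on the public ansible-runner API:

```python
import json
import os
import tempfile
from collections.abc import Mapping
from ansible_runner.loader import ArtifactLoader  # import path assumed

base = tempfile.mkdtemp()
os.makedirs(os.path.join(base, 'env'))
with open(os.path.join(base, 'env', 'settings'), 'w') as f:
    json.dump({'job_timeout': 60}, f)

loader = ArtifactLoader(base)
print(loader.load_file('env/settings', Mapping))  # {'job_timeout': 60}, parsed and type-checked
print(loader.load_file('env/settings', Mapping))  # second call is served from the cache
```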
def isplaybook(obj): | return isinstance(obj, Iterable) and (not isinstance(obj, string_types) and not isinstance(obj, Mapping))<EOL> | Inspects the object and returns whether it is a playbook
Args:
obj (object): The object to be inspected by this function
Returns:
boolean: True if the object is a playbook (an iterable that is neither a string nor a mapping), False otherwise | f870:m0
def isinventory(obj): | return isinstance(obj, Mapping) or isinstance(obj, string_types)<EOL> | Inspects the object and returns whether it is an inventory
Args:
obj (object): The object to be inspected by this function
Returns:
boolean: True if the object is an inventory (a mapping or a string), False otherwise | f870:m1
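Behavior spot checks for the two predicates, read directly from the bodies above; the import path is assumed from the public package layout:

```python
from ansible_runner.utils import isplaybook, isinventory  # path assumed

assert isplaybook([{'hosts': 'all', 'tasks': []}])  # a list of plays
assert not isplaybook('site.yml')                   # strings are paths, not playbooks
assert not isplaybook({'hosts': 'all'})             # a lone mapping is not a playbook

assert isinventory({'all': {'hosts': {}}})          # dict-style inventory
assert isinventory('/etc/ansible/hosts')            # path or INI text
assert not isinventory(['h1', 'h2'])                # lists are rejected here
```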
def check_isolation_executable_installed(isolation_executable): | cmd = [isolation_executable, '<STR_LIT>']<EOL>try:<EOL><INDENT>proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,<EOL>stderr=subprocess.PIPE)<EOL>proc.communicate()<EOL>return bool(proc.returncode == <NUM_LIT:0>)<EOL><DEDENT>except (OSError, ValueError) as e:<EOL><INDENT>if isinstance(e, ValueError) or getattr(e, '<STR_LIT>', <NUM_LIT:1>) != <NUM_LIT:2>: <EOL><INDENT>raise RuntimeError('<STR_LIT>')<EOL><DEDENT>return False<EOL><DEDENT> | Check that the given process isolation executable (e.g. bwrap or proot) is installed. | f870:m2
def dump_artifact(obj, path, filename=None): | p_sha1 = None<EOL>if not os.path.exists(path):<EOL><INDENT>os.makedirs(path, mode=<NUM_LIT>)<EOL><DEDENT>else:<EOL><INDENT>p_sha1 = hashlib.sha1()<EOL>p_sha1.update(obj.encode(encoding='<STR_LIT>'))<EOL><DEDENT>if filename is None:<EOL><INDENT>fd, fn = tempfile.mkstemp(dir=path)<EOL><DEDENT>else:<EOL><INDENT>fn = os.path.join(path, filename)<EOL><DEDENT>if os.path.exists(fn):<EOL><INDENT>c_sha1 = hashlib.sha1()<EOL>with open(fn) as f:<EOL><INDENT>contents = f.read()<EOL><DEDENT>c_sha1.update(contents.encode(encoding='<STR_LIT>'))<EOL><DEDENT>if not os.path.exists(fn) or p_sha1.hexdigest() != c_sha1.hexdigest():<EOL><INDENT>lock_fp = os.path.join(path, '<STR_LIT>')<EOL>lock_fd = os.open(lock_fp, os.O_RDWR | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR)<EOL>fcntl.lockf(lock_fd, fcntl.LOCK_EX)<EOL>try:<EOL><INDENT>with open(fn, '<STR_LIT:w>') as f:<EOL><INDENT>os.chmod(fn, stat.S_IRUSR)<EOL>f.write(str(obj))<EOL><DEDENT><DEDENT>finally:<EOL><INDENT>fcntl.lockf(lock_fd, fcntl.LOCK_UN)<EOL>os.close(lock_fd)<EOL>os.remove(lock_fp)<EOL><DEDENT><DEDENT>return fn<EOL> | Write the artifact to disk at the specified path
Args:
obj (string): The string object to be dumped to disk in the specified
path. The artifact filename will be automatically created
path (string): The full path to the artifacts data directory.
filename (string, optional): The name of file to write the artifact to.
If the filename is not provided, then one will be generated.
Returns:
string: The full path filename for the artifact that was generated | f870:m3 |
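A hedged usage sketch: identical content is written only once, because the sha1 comparison above short-circuits the second write. The import path is assumed:

```python
import json
import tempfile
from ansible_runner.utils import dump_artifact  # import path assumed

path = tempfile.mkdtemp()
fn = dump_artifact(json.dumps({'all': {'hosts': {}}}), path, 'hosts.json')
print(fn)  # <path>/hosts.json, written owner-read-only

fn2 = dump_artifact(json.dumps({'all': {'hosts': {}}}), path, 'hosts.json')
assert fn == fn2  # same sha1, so the file is left untouched
```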
def dump_artifacts(kwargs): | private_data_dir = kwargs.get('<STR_LIT>')<EOL>if not private_data_dir:<EOL><INDENT>private_data_dir = tempfile.mkdtemp()<EOL>kwargs['<STR_LIT>'] = private_data_dir<EOL><DEDENT>if not os.path.exists(private_data_dir):<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>if '<STR_LIT>' in kwargs:<EOL><INDENT>role = {'<STR_LIT:name>': kwargs.pop('<STR_LIT>')}<EOL>if '<STR_LIT>' in kwargs:<EOL><INDENT>role['<STR_LIT>'] = kwargs.pop('<STR_LIT>')<EOL><DEDENT>play = [{'<STR_LIT>': kwargs.pop('<STR_LIT>', '<STR_LIT:all>'), '<STR_LIT>': [role]}]<EOL>if kwargs.pop('<STR_LIT>', False):<EOL><INDENT>play[<NUM_LIT:0>]['<STR_LIT>'] = False<EOL><DEDENT>kwargs['<STR_LIT>'] = play<EOL>if '<STR_LIT>' not in kwargs:<EOL><INDENT>kwargs['<STR_LIT>'] = {}<EOL><DEDENT>roles_path = kwargs.pop('<STR_LIT>', None)<EOL>if not roles_path:<EOL><INDENT>roles_path = os.path.join(private_data_dir, '<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>roles_path += '<STR_LIT>'.format(os.path.join(private_data_dir, '<STR_LIT>'))<EOL><DEDENT>kwargs['<STR_LIT>']['<STR_LIT>'] = roles_path<EOL><DEDENT>obj = kwargs.get('<STR_LIT>')<EOL>if obj and isplaybook(obj):<EOL><INDENT>path = os.path.join(private_data_dir, '<STR_LIT>')<EOL>kwargs['<STR_LIT>'] = dump_artifact(json.dumps(obj), path, '<STR_LIT>')<EOL><DEDENT>obj = kwargs.get('<STR_LIT>')<EOL>if obj and isinventory(obj):<EOL><INDENT>path = os.path.join(private_data_dir, '<STR_LIT>')<EOL>if isinstance(obj, Mapping):<EOL><INDENT>kwargs['<STR_LIT>'] = dump_artifact(json.dumps(obj), path, '<STR_LIT>')<EOL><DEDENT>elif isinstance(obj, string_types):<EOL><INDENT>if not os.path.exists(obj):<EOL><INDENT>kwargs['<STR_LIT>'] = dump_artifact(obj, path, '<STR_LIT>')<EOL><DEDENT><DEDENT><DEDENT>for key in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>obj = kwargs.get(key)<EOL>if obj and not os.path.exists(os.path.join(private_data_dir, '<STR_LIT>', key)):<EOL><INDENT>path = os.path.join(private_data_dir, '<STR_LIT>')<EOL>dump_artifact(json.dumps(obj), path, key)<EOL>kwargs.pop(key)<EOL><DEDENT><DEDENT>for key in ('<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>obj = kwargs.get(key)<EOL>if obj and not os.path.exists(os.path.join(private_data_dir, '<STR_LIT>', key)):<EOL><INDENT>path = os.path.join(private_data_dir, '<STR_LIT>')<EOL>dump_artifact(str(kwargs[key]), path, key)<EOL>kwargs.pop(key)<EOL><DEDENT><DEDENT> | Introspect the kwargs and dump objects to disk | f870:m5 |
def open_fifo_write(path, data): | os.mkfifo(path, stat.S_IRUSR | stat.S_IWUSR)<EOL>threading.Thread(target=lambda p, d: open(p, '<STR_LIT:wb>').write(d),<EOL>args=(path, data)).start()<EOL> | open_fifo_write opens the fifo named pipe in a new thread.
This blocks the thread until an external process (such as ssh-agent)
reads data from the pipe. | f870:m6 |
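A hedged round-trip demo: the writer thread blocks in `open()` until a reader attaches, which is how a process like ssh-add ends up consuming the key material. The import path is assumed:

```python
import os
import tempfile
from ansible_runner.utils import open_fifo_write  # import path assumed

fifo = os.path.join(tempfile.mkdtemp(), 'ssh_key_data')
open_fifo_write(fifo, b'secret-key-material')
with open(fifo, 'rb') as f:   # opening the read side unblocks the writer thread
    print(f.read())           # b'secret-key-material'
```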
def ensure_str(s, encoding='<STR_LIT:utf-8>', errors='<STR_LIT:strict>'): | if not isinstance(s, (text_type, binary_type)):<EOL><INDENT>raise TypeError("<STR_LIT>" % type(s))<EOL><DEDENT>if PY2 and isinstance(s, text_type):<EOL><INDENT>s = s.encode(encoding, errors)<EOL><DEDENT>elif PY3 and isinstance(s, binary_type):<EOL><INDENT>s = s.decode(encoding, errors)<EOL><DEDENT>return s<EOL> | Copied from six==1.12
Coerce *s* to `str`.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str` | f870:m8 |
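Quick spot checks of the Python 3 branch; the import path is assumed, and the exact TypeError message is masked in the corpus:

```python
from ansible_runner.utils import ensure_str  # import path assumed

assert ensure_str('abc') == 'abc'   # str passes through
assert ensure_str(b'abc') == 'abc'  # bytes decode to str
try:
    ensure_str(42)                  # anything else raises
except TypeError:
    pass
```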
def init_runner(**kwargs): | dump_artifacts(kwargs)<EOL>debug = kwargs.pop('<STR_LIT>', None)<EOL>logfile = kwargs.pop('<STR_LIT>', None)<EOL>if not kwargs.pop("<STR_LIT>", True):<EOL><INDENT>output.configure()<EOL>if debug in (True, False):<EOL><INDENT>output.set_debug('<STR_LIT>' if debug is True else '<STR_LIT>')<EOL><DEDENT>if logfile:<EOL><INDENT>output.set_logfile(logfile)<EOL><DEDENT><DEDENT>if kwargs.get("<STR_LIT>", False):<EOL><INDENT>check_isolation_executable_installed(kwargs.get("<STR_LIT>", "<STR_LIT>"))<EOL><DEDENT>event_callback_handler = kwargs.pop('<STR_LIT>', None)<EOL>status_callback_handler = kwargs.pop('<STR_LIT>', None)<EOL>cancel_callback = kwargs.pop('<STR_LIT>', None)<EOL>finished_callback = kwargs.pop('<STR_LIT>', None)<EOL>rc = RunnerConfig(**kwargs)<EOL>rc.prepare()<EOL>return Runner(rc,<EOL>event_handler=event_callback_handler,<EOL>status_handler=status_callback_handler,<EOL>cancel_callback=cancel_callback,<EOL>finished_callback=finished_callback)<EOL> | Initialize the Runner() instance
This function will properly initialize both run() and run_async()
functions in the same way and return an instance of Runner.
See parameters given to :py:func:`ansible_runner.interface.run` | f871:m0 |
def run(**kwargs): | r = init_runner(**kwargs)<EOL>r.run()<EOL>return r<EOL> | Run an Ansible Runner task in the foreground and return a Runner object when complete.
:param private_data_dir: The directory containing all runner metadata needed to invoke the runner
module. Output artifacts will also be stored here for later consumption.
:param ident: The run identifier for this invocation of Runner. Will be used to create and name
the artifact directory holding the results of the invocation.
:param json_mode: Store event data in place of stdout on the console and in the stdout file
:param playbook: The playbook (either supplied here as a list or string... or as a path relative to
``private_data_dir/project``) that will be invoked by runner when executing Ansible.
:param module: The module that will be invoked in ad-hoc mode by runner when executing Ansible.
:param module_args: The module arguments that will be supplied to ad-hoc mode.
:param host_pattern: The host pattern to match when running in ad-hoc mode.
:param inventory: Overrides the inventory directory/file (supplied at ``private_data_dir/inventory``) with
a specific host or list of hosts. This can take the form of
- Path to the inventory file in the ``private_data_dir``
- Native python dict supporting the YAML/json inventory structure
- A text INI formatted string
- A list of inventory sources, or an empty list to disable passing inventory
:param roles_path: Directory or list of directories to assign to ANSIBLE_ROLES_PATH
:param envvars: Environment variables to be used when running Ansible. Environment variables will also be
read from ``env/envvars`` in ``private_data_dir``
:param extravars: Extra variables to be passed to Ansible at runtime using ``-e``. Extra vars will also be
read from ``env/extravars`` in ``private_data_dir``.
:param passwords: A dictionary containing password prompt patterns and response values used when processing output from
Ansible. Passwords will also be read from ``env/passwords`` in ``private_data_dir``.
:param settings: A dictionary containing settings values for the ``ansible-runner`` runtime environment. These will also
be read from ``env/settings`` in ``private_data_dir``.
:param ssh_key: The ssh private key passed to ``ssh-agent`` as part of the ansible-playbook run.
:param cmdline: Command line options passed to Ansible, read from ``env/cmdline`` in ``private_data_dir``
:param limit: Matches ansible's ``--limit`` parameter to further constrain the inventory to be used
:param forks: Control Ansible parallel concurrency
:param verbosity: Control how verbose the output of ansible-playbook is
:param quiet: Disable all output
:param artifact_dir: The path to the directory where artifacts should live, this defaults to 'artifacts' under the private data dir
:param project_dir: The path to the playbook content, this defaults to 'project' within the private data dir
:param rotate_artifacts: Keep at most n artifact directories; disable with a value of 0, which is the default
:param event_handler: An optional callback that will be invoked any time an event is received by Runner itself
:param cancel_callback: An optional callback that can inform runner to cancel (returning True) or not (returning False)
:param finished_callback: An optional callback that will be invoked at shutdown after process cleanup.
:param status_handler: An optional callback that will be invoked any time the status changes (e.g. started, running, failed, successful, timeout)
:param process_isolation: Enable limiting what directories on the filesystem the playbook run has access to.
:param process_isolation_executable: Path to the executable that will be used to provide filesystem isolation (default: bwrap)
:param process_isolation_path: Path that an isolated playbook run will use for staging. (default: /tmp)
:param process_isolation_hide_paths: A path or list of paths on the system that should be hidden from the playbook run.
:param process_isolation_show_paths: A path or list of paths on the system that should be exposed to the playbook run.
:param process_isolation_ro_paths: A path or list of paths on the system that should be exposed to the playbook run as read-only.
:param directory_isolation_base_path: An optional path that will be used as the base path to create a temp directory; the project contents will be
copied to this location, which will then be used as the working directory during playbook execution.
:param fact_cache: A string that will be used as the name for the subdirectory of the fact cache in artifacts directory.
This is only used for 'jsonfile' type fact caches.
:param fact_cache_type: A string of the type of fact cache to use. Defaults to 'jsonfile'.
:type private_data_dir: str
:type ident: str
:type json_mode: bool
:type playbook: str or filename or list
:type inventory: str or dict or list
:type envvars: dict
:type extravars: dict
:type passwords: dict
:type settings: dict
:type ssh_key: str
:type artifact_dir: str
:type project_dir: str
:type rotate_artifacts: int
:type cmdline: str
:type limit: str
:type forks: int
:type quiet: bool
:type verbosity: int
:type event_handler: function
:type cancel_callback: function
:type finished_callback: function
:type status_handler: function
:type process_isolation: bool
:type process_isolation_executable: str
:type process_isolation_path: str
:type process_isolation_hide_paths: str or list
:type process_isolation_show_paths: str or list
:type process_isolation_ro_paths: str or list
:type directory_isolation_base_path: str
:type fact_cache: str
:type fact_cache_type: str
:returns: A :py:class:`ansible_runner.runner.Runner` object | f871:m1 |
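A foreground invocation mirroring the project's documented quick-start; `/tmp/demo` and `test.yml` are illustrative:

```python
import ansible_runner

r = ansible_runner.run(private_data_dir='/tmp/demo', playbook='test.yml')
print('{}: {}'.format(r.status, r.rc))  # e.g. successful: 0
for event in r.events:                  # dicts parsed from the event stream
    print(event.get('event'))
print(r.stats)                          # per-host play recap
```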