signature (stringlengths 8-3.44k) | body (stringlengths 0-1.41M) | docstring (stringlengths 1-122k) | id (stringlengths 5-17) |
---|---|---|---|
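A note on reading the body column: each cell is flattened onto a single line, with `<EOL>` marking a line break, `<INDENT>`/`<DEDENT>` marking a change of indentation level, and literals masked as `<STR_LIT...>`/`<NUM_LIT...>`; id values such as `f1095:c0:m5` appear to encode a file, class, and method index. The helper below is not part of the dataset, only a minimal sketch, assuming the tokens mean what they appear to mean in the rows that follow, of how a body cell can be re-flowed into readable source text.

```python
import re

def unflatten(body, indent='    '):
    """Re-flow a flattened body cell into indented source text.

    <EOL> starts a new line; <INDENT>/<DEDENT> raise/lower the indent level.
    Masked literals such as <STR_LIT> and <NUM_LIT:1> are left untouched.
    """
    lines, level, current = [], 0, ''
    for token in re.split(r'(<EOL>|<INDENT>|<DEDENT>)', body):
        if token == '<EOL>':
            lines.append(indent * level + current.strip())
            current = ''
        elif token == '<INDENT>':
            level += 1
        elif token == '<DEDENT>':
            level = max(level - 1, 0)
        else:
            current += token
    if current.strip():
        lines.append(indent * level + current.strip())
    return '\n'.join(lines)

# Example: the opening of the first body cell below (send_packet).
print(unflatten("try:<EOL><INDENT>if (pyusb1 is False):<EOL><INDENT>"
                "self.handle.bulkWrite(<NUM_LIT:1>, dataOut, <NUM_LIT:20>)<EOL><DEDENT>"))
```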
def send_packet(self, dataOut): | try:<EOL><INDENT>if (pyusb1 is False):<EOL><INDENT>self.handle.bulkWrite(<NUM_LIT:1>, dataOut, <NUM_LIT:20>)<EOL><DEDENT>else:<EOL><INDENT>self.handle.write(endpoint=<NUM_LIT:1>, data=dataOut, timeout=<NUM_LIT:20>)<EOL><DEDENT><DEDENT>except usb.USBError:<EOL><INDENT>pass<EOL><DEDENT> | Send a packet and receive the ack from the radio dongle
The ack contains information about the packet transmission
and a data payload if the ack packet contained any | f1095:c0:m5 |
def _find_devices(serial=None): | ret = []<EOL>if pyusb1:<EOL><INDENT>for d in usb.core.find(idVendor=<NUM_LIT>, idProduct=<NUM_LIT>, find_all=<NUM_LIT:1>,<EOL>backend=pyusb_backend):<EOL><INDENT>if serial is not None and serial == d.serial_number:<EOL><INDENT>return d<EOL><DEDENT>ret.append(d)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>busses = usb.busses()<EOL>for bus in busses:<EOL><INDENT>for device in bus.devices:<EOL><INDENT>if device.idVendor == CRADIO_VID:<EOL><INDENT>if device.idProduct == CRADIO_PID:<EOL><INDENT>if serial == device.serial_number:<EOL><INDENT>return device<EOL><DEDENT>ret += [device, ]<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>return ret<EOL> | Returns a list of CrazyRadio devices currently connected to the computer | f1096:m0 |
def __init__(self, device=None, devid=<NUM_LIT:0>, serial=None): | self.current_channel = None<EOL>self.current_address = None<EOL>self.current_datarate = None<EOL>if device is None:<EOL><INDENT>try:<EOL><INDENT>if serial is None:<EOL><INDENT>device = _find_devices()[devid]<EOL><DEDENT>else:<EOL><INDENT>device = _find_devices(serial)<EOL><DEDENT><DEDENT>except Exception:<EOL><INDENT>if serial is None:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>raise Exception('<STR_LIT>'.format(serial))<EOL><DEDENT><DEDENT><DEDENT>self.dev = device<EOL>if (pyusb1 is True):<EOL><INDENT>self.dev.set_configuration(<NUM_LIT:1>)<EOL>self.handle = self.dev<EOL>self.version = float('<STR_LIT>'.format(<EOL>self.dev.bcdDevice >> <NUM_LIT:8>, self.dev.bcdDevice & <NUM_LIT>))<EOL><DEDENT>else:<EOL><INDENT>self.handle = self.dev.open()<EOL>self.handle.setConfiguration(<NUM_LIT:1>)<EOL>self.handle.claimInterface(<NUM_LIT:0>)<EOL>self.version = float(self.dev.deviceVersion)<EOL><DEDENT>if self.version < <NUM_LIT>:<EOL><INDENT>raise '<STR_LIT>'<EOL><DEDENT>if self.version < <NUM_LIT>:<EOL><INDENT>logger.warning('<STR_LIT>')<EOL><DEDENT>self.set_data_rate(self.DR_2MPS)<EOL>self.set_channel(<NUM_LIT:2>)<EOL>self.arc = -<NUM_LIT:1><EOL>if self.version >= <NUM_LIT>:<EOL><INDENT>self.set_cont_carrier(False)<EOL>self.set_address((<NUM_LIT>,) * <NUM_LIT:5>)<EOL>self.set_power(self.P_0DBM)<EOL>self.set_arc(<NUM_LIT:3>)<EOL>self.set_ard_bytes(<NUM_LIT:32>)<EOL>self.set_ack_enable(True)<EOL><DEDENT> | Create object and scan for USB dongle if no device is supplied | f1096:c1:m0 |
def set_channel(self, channel): | if channel != self.current_channel:<EOL><INDENT>_send_vendor_setup(self.handle, SET_RADIO_CHANNEL, channel, <NUM_LIT:0>, ())<EOL>self.current_channel = channel<EOL><DEDENT> | Set the radio channel to be used | f1096:c1:m2 |
def set_address(self, address): | if len(address) != <NUM_LIT:5>:<EOL><INDENT>raise Exception('<STR_LIT>'<EOL>'<STR_LIT>')<EOL><DEDENT>if address != self.current_address:<EOL><INDENT>_send_vendor_setup(self.handle, SET_RADIO_ADDRESS, <NUM_LIT:0>, <NUM_LIT:0>, address)<EOL>self.current_address = address<EOL><DEDENT> | Set the radio address to be used | f1096:c1:m3 |
def set_data_rate(self, datarate): | if datarate != self.current_datarate:<EOL><INDENT>_send_vendor_setup(self.handle, SET_DATA_RATE, datarate, <NUM_LIT:0>, ())<EOL>self.current_datarate = datarate<EOL><DEDENT> | Set the radio datarate to be used | f1096:c1:m4 |
def set_power(self, power): | _send_vendor_setup(self.handle, SET_RADIO_POWER, power, <NUM_LIT:0>, ())<EOL> | Set the radio power to be used | f1096:c1:m5 |
def set_arc(self, arc): | _send_vendor_setup(self.handle, SET_RADIO_ARC, arc, <NUM_LIT:0>, ())<EOL>self.arc = arc<EOL> | Set the ACK retry count for radio communication | f1096:c1:m6 |
def set_ard_time(self, us): | <EOL>t = int((us / <NUM_LIT>) - <NUM_LIT:1>)<EOL>if (t < <NUM_LIT:0>):<EOL><INDENT>t = <NUM_LIT:0><EOL><DEDENT>if (t > <NUM_LIT>):<EOL><INDENT>t = <NUM_LIT><EOL><DEDENT>_send_vendor_setup(self.handle, SET_RADIO_ARD, t, <NUM_LIT:0>, ())<EOL> | Set the ACK retry delay for radio communication | f1096:c1:m7 |
def send_packet(self, dataOut): | ackIn = None<EOL>data = None<EOL>try:<EOL><INDENT>if (pyusb1 is False):<EOL><INDENT>self.handle.bulkWrite(<NUM_LIT:1>, dataOut, <NUM_LIT:1000>)<EOL>data = self.handle.bulkRead(<NUM_LIT>, <NUM_LIT:64>, <NUM_LIT:1000>)<EOL><DEDENT>else:<EOL><INDENT>self.handle.write(endpoint=<NUM_LIT:1>, data=dataOut, timeout=<NUM_LIT:1000>)<EOL>data = self.handle.read(<NUM_LIT>, <NUM_LIT:64>, timeout=<NUM_LIT:1000>)<EOL><DEDENT><DEDENT>except usb.USBError:<EOL><INDENT>pass<EOL><DEDENT>if data is not None:<EOL><INDENT>ackIn = _radio_ack()<EOL>if data[<NUM_LIT:0>] != <NUM_LIT:0>:<EOL><INDENT>ackIn.ack = (data[<NUM_LIT:0>] & <NUM_LIT>) != <NUM_LIT:0><EOL>ackIn.powerDet = (data[<NUM_LIT:0>] & <NUM_LIT>) != <NUM_LIT:0><EOL>ackIn.retry = data[<NUM_LIT:0>] >> <NUM_LIT:4><EOL>ackIn.data = data[<NUM_LIT:1>:]<EOL><DEDENT>else:<EOL><INDENT>ackIn.retry = self.arc<EOL><DEDENT><DEDENT>return ackIn<EOL> | Send a packet and receive the ack from the radio dongle
The ack contains information about the packet transmission
and a data payload if the ack packet contained any | f1096:c1:m14 |
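For orientation, a hedged usage sketch of the `send_packet` variant above: it returns an ack object whose fields (`ack`, `powerDet`, `retry`, `data`) are visible in the body cell. The `Crazyradio` class name is an assumption based on the docstrings (a CrazyRadio USB dongle driver); the payload byte is purely illustrative.

```python
# Sketch only: 'Crazyradio' is an assumed name for the dongle driver class in
# this file, and the one-byte payload is arbitrary.
radio = Crazyradio()
ack = radio.send_packet((0xFF,))
if ack is not None and ack.ack:
    # ack.retry counts retransmissions; ack.data holds the ack payload, if any.
    print('acked after %d retries, payload: %r' % (ack.retry, list(ack.data)))
else:
    print('no ack received (packet may have been lost)')
```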
def visit_grouping(self, grouping, asfrom=False, **kwargs): | return {<EOL>'<STR_LIT:type>': '<STR_LIT>',<EOL>'<STR_LIT>': grouping.element._compiler_dispatch(self, **kwargs)<EOL>}<EOL> | TODO: | f1103:c1:m3 |
def _label_select_column(self, select, column,<EOL>populate_result_map,<EOL>asfrom, column_clause_args,<EOL>name=None,<EOL>within_columns_clause=True): | if column.type._has_column_expression and populate_result_map:<EOL><INDENT>col_expr = column.type.column_expression(column)<EOL>add_to_result_map = lambda keyname, name, objects, type_: self._add_to_result_map(<EOL>keyname, name,<EOL>(column,) + objects, type_)<EOL><DEDENT>else:<EOL><INDENT>col_expr = column<EOL>if populate_result_map:<EOL><INDENT>add_to_result_map = self._add_to_result_map<EOL><DEDENT>else:<EOL><INDENT>add_to_result_map = None<EOL><DEDENT><DEDENT>if not within_columns_clause:<EOL><INDENT>result_expr = col_expr<EOL><DEDENT>elif isinstance(column, elements.Label):<EOL><INDENT>if col_expr is not column:<EOL><INDENT>result_expr = compiler._CompileLabel(<EOL>col_expr,<EOL>column.name,<EOL>alt_names=(column.element,)<EOL>)<EOL><DEDENT>else:<EOL><INDENT>result_expr = col_expr<EOL><DEDENT><DEDENT>elif select is not None and name:<EOL><INDENT>result_expr = compiler._CompileLabel(<EOL>col_expr,<EOL>name,<EOL>alt_names=(column._key_label,)<EOL>)<EOL><DEDENT>elif asfrom and isinstance(column,<EOL>elements.ColumnClause) and not column.is_literal and column.table is not None and not isinstance(column.table, selectable.Select):<EOL><INDENT>result_expr = compiler._CompileLabel(col_expr,<EOL>elements._as_truncated(<EOL>column.name),<EOL>alt_names=(column.key,))<EOL><DEDENT>elif (<EOL>not isinstance(column, elements.TextClause) and<EOL>(<EOL>not isinstance(column,<EOL>elements.UnaryExpression) or<EOL>column.wraps_column_expression<EOL>) and<EOL>(<EOL>not hasattr(column, '<STR_LIT:name>') or<EOL>isinstance(column, functions.Function)<EOL>)<EOL>):<EOL><INDENT>result_expr = compiler._CompileLabel(col_expr, column.anon_label)<EOL><DEDENT>elif col_expr is not column:<EOL><INDENT>result_expr = compiler._CompileLabel(col_expr,<EOL>elements._as_truncated(<EOL>column.name),<EOL>alt_names=(column.key,))<EOL><DEDENT>else:<EOL><INDENT>result_expr = col_expr<EOL><DEDENT>column_clause_args.update(<EOL>within_columns_clause=within_columns_clause,<EOL>add_to_result_map=add_to_result_map<EOL>)<EOL>return result_expr._compiler_dispatch(<EOL>self,<EOL>**column_clause_args<EOL>)<EOL> | produce labeled columns present in a select(). | f1103:c1:m26 |
def date_handler(obj): | if hasattr(obj, '<STR_LIT>'):<EOL><INDENT>return obj.isoformat()<EOL><DEDENT>else:<EOL><INDENT>return str(obj)<EOL><DEDENT> | Implements a handler to serialize dates in JSON-strings
:param obj: An object
:return: The str method is called (which is the default serializer for JSON) unless the object has an attribute *isoformat* | f1104:m0 |
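The row above is small enough to de-tokenize by hand; below is a minimal runnable sketch of the same handler wired into `json.dumps`, assuming the masked string literal is `'isoformat'`, which is what the docstring describes.

```python
import json
from datetime import datetime

def date_handler(obj):
    # Objects exposing isoformat (date/datetime) become ISO-8601 strings;
    # everything else falls back to str().
    if hasattr(obj, 'isoformat'):
        return obj.isoformat()
    return str(obj)

# json.dumps calls the handler only for objects it cannot serialize itself.
print(json.dumps({'created': datetime(2020, 1, 1)}, default=date_handler))
# {"created": "2020-01-01T00:00:00"}
```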
@classmethod<EOL><INDENT>def _init_compiled(cls, dialect, connection, dbapi_connection,<EOL>compiled, parameters):<DEDENT> | self = cls.__new__(cls)<EOL>self.root_connection = connection<EOL>self._dbapi_connection = dbapi_connection<EOL>self.dialect = connection.dialect<EOL>self.compiled = compiled<EOL>assert compiled.can_execute<EOL>self.execution_options = compiled.execution_options.union(<EOL>connection._execution_options)<EOL>self.result_column_struct = (<EOL>compiled._result_columns, compiled._ordered_columns,<EOL>compiled._textual_ordered_columns)<EOL>self.unicode_statement = util.text_type(compiled)<EOL>if not dialect.supports_unicode_statements:<EOL><INDENT>self.statement = self.unicode_statement.encode(<EOL>self.dialect.encoding)<EOL><DEDENT>else:<EOL><INDENT>self.statement = self.unicode_statement<EOL><DEDENT>self.isinsert = compiled.isinsert<EOL>self.isupdate = compiled.isupdate<EOL>self.isdelete = compiled.isdelete<EOL>self.is_text = compiled.isplaintext<EOL>if not parameters:<EOL><INDENT>self.compiled_parameters = [compiled.construct_params()]<EOL><DEDENT>else:<EOL><INDENT>self.compiled_parameters =[compiled.construct_params(m, _group_number=grp) for<EOL>grp, m in enumerate(parameters)]<EOL>self.executemany = len(parameters) > <NUM_LIT:1><EOL><DEDENT>self.cursor = self.create_cursor()<EOL>if self.isinsert or self.isupdate or self.isdelete:<EOL><INDENT>self.is_crud = True<EOL>self._is_explicit_returning = bool(compiled.statement._returning)<EOL>self._is_implicit_returning = bool(<EOL>compiled.returning and not compiled.statement._returning)<EOL><DEDENT>if self.compiled.insert_prefetch or self.compiled.update_prefetch:<EOL><INDENT>if self.executemany:<EOL><INDENT>self._process_executemany_defaults()<EOL><DEDENT>else:<EOL><INDENT>self._process_executesingle_defaults()<EOL><DEDENT><DEDENT>processors = compiled._bind_processors<EOL>parameters = []<EOL>if dialect.positional:<EOL><INDENT>for compiled_params in self.compiled_parameters:<EOL><INDENT>param = []<EOL>for key in self.compiled.positiontup:<EOL><INDENT>if key in processors:<EOL><INDENT>param.append(processors[key](compiled_params[key]))<EOL><DEDENT>else:<EOL><INDENT>param.append(compiled_params[key])<EOL><DEDENT><DEDENT>parameters.append(dialect.execute_sequence_format(param))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>encode = not dialect.supports_unicode_statements<EOL>for compiled_params in self.compiled_parameters:<EOL><INDENT>if encode:<EOL><INDENT>param = dict(<EOL>(<EOL>dialect._encoder(key)[<NUM_LIT:0>],<EOL>processors[key](compiled_params[key])<EOL>if key in processors<EOL>else compiled_params[key]<EOL>)<EOL>for key in compiled_params<EOL>)<EOL><DEDENT>else:<EOL><INDENT>param = dict(<EOL>(<EOL>key,<EOL>processors[key](compiled_params[key])<EOL>if key in processors<EOL>else compiled_params[key]<EOL>)<EOL>for key in compiled_params<EOL>)<EOL><DEDENT>parameters.append(param)<EOL><DEDENT><DEDENT>self.parameters = dialect.execute_sequence_format(parameters)<EOL>self.statement = compiled<EOL>return self<EOL> | Initialize execution context for a Compiled construct. | f1105:c0:m1 |
def use_setuptools(<EOL>version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,<EOL>download_delay=<NUM_LIT:15><EOL>): | was_imported = '<STR_LIT>' in sys.modules or '<STR_LIT>' in sys.modules<EOL>def do_download():<EOL><INDENT>egg = download_setuptools(version, download_base, to_dir, download_delay)<EOL>sys.path.insert(<NUM_LIT:0>, egg)<EOL>import setuptools; setuptools.bootstrap_install_from = egg<EOL><DEDENT>try:<EOL><INDENT>import pkg_resources<EOL><DEDENT>except ImportError:<EOL><INDENT>return do_download()<EOL><DEDENT>try:<EOL><INDENT>pkg_resources.require("<STR_LIT>"+version); return<EOL><DEDENT>except pkg_resources.VersionConflict as e:<EOL><INDENT>if was_imported:<EOL><INDENT>print((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>) % (version, e.args[<NUM_LIT:0>]), file=sys.stderr)<EOL>sys.exit(<NUM_LIT:2>)<EOL><DEDENT>else:<EOL><INDENT>del pkg_resources, sys.modules['<STR_LIT>'] <EOL>return do_download()<EOL><DEDENT><DEDENT>except pkg_resources.DistributionNotFound:<EOL><INDENT>return do_download()<EOL><DEDENT> | Automatically find/download setuptools and make it available on sys.path
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end with
a '/'). `to_dir` is the directory where setuptools will be downloaded, if
it is not already available. If `download_delay` is specified, it should
be the number of seconds that will be paused before initiating a download,
should one be required. If an older version of setuptools is installed,
this routine will print a message to ``sys.stderr`` and raise SystemExit in
an attempt to abort the calling script. | f1111:m1 |
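This row and the neighboring f1111 rows come from a setuptools bootstrap script; by convention such a script is vendored next to a project's setup.py and invoked before setuptools is imported. A hedged usage sketch, assuming the script is saved under the conventional name `ez_setup.py` (the row itself does not state the file name):

```python
# setup.py -- bootstrap setuptools before using it; 'ez_setup' is an assumed
# module name for the script that contains use_setuptools() above.
from ez_setup import use_setuptools
use_setuptools()  # downloads the pinned setuptools egg only if it is missing

from setuptools import setup

setup(name='example', version='0.1')
```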
def download_setuptools(<EOL>version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,<EOL>delay = <NUM_LIT:15><EOL>): | import urllib.request, urllib.error, urllib.parse, shutil<EOL>egg_name = "<STR_LIT>" % (version,sys.version[:<NUM_LIT:3>])<EOL>url = download_base + egg_name<EOL>saveto = os.path.join(to_dir, egg_name)<EOL>src = dst = None<EOL>if not os.path.exists(saveto): <EOL><INDENT>try:<EOL><INDENT>from distutils import log<EOL>if delay:<EOL><INDENT>log.warn("""<STR_LIT>""",<EOL>version, download_base, delay, url<EOL>); from time import sleep; sleep(delay)<EOL><DEDENT>log.warn("<STR_LIT>", url)<EOL>src = urllib.request.urlopen(url)<EOL>data = _validate_md5(egg_name, src.read())<EOL>dst = open(saveto,"<STR_LIT:wb>"); dst.write(data)<EOL><DEDENT>finally:<EOL><INDENT>if src: src.close()<EOL>if dst: dst.close()<EOL><DEDENT><DEDENT>return os.path.realpath(saveto)<EOL> | Download setuptools from a specified location and return its filename
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download attempt. | f1111:m2 |
def main(argv, version=DEFAULT_VERSION): | try:<EOL><INDENT>import setuptools<EOL><DEDENT>except ImportError:<EOL><INDENT>egg = None<EOL>try:<EOL><INDENT>egg = download_setuptools(version, delay=<NUM_LIT:0>)<EOL>sys.path.insert(<NUM_LIT:0>,egg)<EOL>from setuptools.command.easy_install import main<EOL>return main(list(argv)+[egg]) <EOL><DEDENT>finally:<EOL><INDENT>if egg and os.path.exists(egg):<EOL><INDENT>os.unlink(egg)<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>if setuptools.__version__ == '<STR_LIT>':<EOL><INDENT>print((<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>), file=sys.stderr)<EOL>sys.exit(<NUM_LIT:2>)<EOL><DEDENT><DEDENT>req = "<STR_LIT>"+version<EOL>import pkg_resources<EOL>try:<EOL><INDENT>pkg_resources.require(req)<EOL><DEDENT>except pkg_resources.VersionConflict:<EOL><INDENT>try:<EOL><INDENT>from setuptools.command.easy_install import main<EOL><DEDENT>except ImportError:<EOL><INDENT>from easy_install import main<EOL><DEDENT>main(list(argv)+[download_setuptools(delay=<NUM_LIT:0>)])<EOL>sys.exit(<NUM_LIT:0>) <EOL><DEDENT>else:<EOL><INDENT>if argv:<EOL><INDENT>from setuptools.command.easy_install import main<EOL>main(argv)<EOL><DEDENT>else:<EOL><INDENT>print("<STR_LIT>",version,"<STR_LIT>")<EOL>print('<STR_LIT>')<EOL><DEDENT><DEDENT> | Install or upgrade setuptools and EasyInstall | f1111:m3 |
def update_md5(filenames): | import re<EOL>for name in filenames:<EOL><INDENT>base = os.path.basename(name)<EOL>f = open(name,'<STR_LIT:rb>')<EOL>md5_data[base] = md5(f.read()).hexdigest()<EOL>f.close()<EOL><DEDENT>data = ["<STR_LIT>" % it for it in list(md5_data.items())]<EOL>data.sort()<EOL>repl = "<STR_LIT>".join(data)<EOL>import inspect<EOL>srcfile = inspect.getsourcefile(sys.modules[__name__])<EOL>f = open(srcfile, '<STR_LIT:rb>'); src = f.read(); f.close()<EOL>match = re.search("<STR_LIT>", src)<EOL>if not match:<EOL><INDENT>print("<STR_LIT>", file=sys.stderr)<EOL>sys.exit(<NUM_LIT:2>)<EOL><DEDENT>src = src[:match.start(<NUM_LIT:1>)] + repl + src[match.end(<NUM_LIT:1>):]<EOL>f = open(srcfile,'<STR_LIT:w>')<EOL>f.write(src)<EOL>f.close()<EOL> | Update our built-in md5 registry | f1111:m4 |
def __init__(self, namespace=None): | self._namespace = namespace<EOL> | Initializes an instance of the ElementTreeFactory.
Most users should have no need to instantiate an ElementTreeFactory
instance directly. Most will wish to use the "tag" instance created in
the module, while those that wish to customize the output will subclass
ElementTreeFactory and create an instance of their subclass.
The optional namespace parameter can be used to specify the namespace
used to qualify all elements generated by an instance of the class.
Rather than specifying this explicitly when constructing the class it
is recommended that developers sub-class this class, and specify the
namespace as part of an overridden __init__ method. In other words,
make dialect specific sub-classes of this generic class (an
HTMLElementFactory class for instance). | f1112:c0:m0 |
def _find(self, root, tagname, id=None): | if id is None:<EOL><INDENT>result = root.find('<STR_LIT>' % tagname)<EOL>if result is None:<EOL><INDENT>raise LookupError('<STR_LIT>' % tagname)<EOL><DEDENT>else:<EOL><INDENT>return result<EOL><DEDENT><DEDENT>else:<EOL><INDENT>result = [<EOL>elem for elem in root.findall('<STR_LIT>' % tagname)<EOL>if elem.attrib.get('<STR_LIT:id>', '<STR_LIT>') == id<EOL>]<EOL>if len(result) == <NUM_LIT:0>:<EOL><INDENT>raise LookupError('<STR_LIT>' % (tagname, id))<EOL><DEDENT>elif len(result) > <NUM_LIT:1>:<EOL><INDENT>raise LookupError('<STR_LIT>' % (tagname, id))<EOL><DEDENT>else:<EOL><INDENT>return result[<NUM_LIT:0>]<EOL><DEDENT><DEDENT> | Returns the first element with the specified tagname and id | f1112:c0:m1 |
def _format(self, content): | if isinstance(content, basestring):<EOL><INDENT>return content<EOL><DEDENT>else:<EOL><INDENT>return unicode(content)<EOL><DEDENT> | Reformats content into a human-readable string | f1112:c0:m2 |
def _append(self, node, contents): | if isinstance(contents, basestring):<EOL><INDENT>if contents != '<STR_LIT>':<EOL><INDENT>if len(node) == <NUM_LIT:0>:<EOL><INDENT>if node.text is None:<EOL><INDENT>node.text = contents<EOL><DEDENT>else:<EOL><INDENT>node.text += contents<EOL><DEDENT><DEDENT>else:<EOL><INDENT>last = node[-<NUM_LIT:1>]<EOL>if last.tail is None:<EOL><INDENT>last.tail = contents<EOL><DEDENT>else:<EOL><INDENT>last.tail += contents<EOL><DEDENT><DEDENT><DEDENT><DEDENT>elif et.iselement(contents):<EOL><INDENT>contents.tail = '<STR_LIT>'<EOL>node.append(contents)<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>for content in contents:<EOL><INDENT>self._append(node, content)<EOL><DEDENT><DEDENT>except TypeError:<EOL><INDENT>self._append(node, self._format(contents))<EOL><DEDENT><DEDENT> | Adds content (string, node, node-list, etc.) to a node | f1112:c0:m3 |
def has_container(report, *matchers): | return HasContainer(report, *matchers)<EOL> | >>> from hamcrest import assert_that
>>> from allure_commons_test.report import has_test_case
>>> class Report(object):
... test_cases = [
... {
... 'fullName': 'test_case',
... 'uuid': 'test_case_uuid'
... },
... {
... 'fullName': 'test_case_without_container',
... 'uuid': 'test_case_without_container_uuid'
... }
... ]
... test_containers = [
... {
... 'children' : ['test_case_uuid'],
... 'befores': [ {'name': 'before_fixture'} ]
... }
... ]
>>> assert_that(Report,
... has_test_case('test_case',
... has_container(Report,
... has_before('before_fixture')
... )
... )
... )
>>> assert_that(Report,
... has_test_case('test_case_without_container',
... has_container(Report,
... has_before('before_fixture')
... )
... )
... ) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
AssertionError: ...
Expected: ...
but: ...
<BLANKLINE> | f1181:m0 |
def has_same_container(*args): | return HasSameContainer(*args)<EOL> | >>> from hamcrest import assert_that
>>> class Report(object):
... test_cases = [
... {
... 'fullName': 'first_test_case',
... 'uuid': 'first_test_case_uuid'
... },
... {
... 'fullName': 'second_test_case',
... 'uuid': 'second_test_case_uuid'
... },
... {
... 'fullName': 'third_test_case',
... 'uuid': 'third_test_case_uuid'
... }
... ]
... test_containers = [
... {
... 'children' : ['first_test_case_uuid', 'second_test_case_uuid'],
... },
... {
... 'children' : ['first_test_case_uuid', 'third_test_case_uuid'],
... }
... ]
>>> assert_that(Report,
... has_same_container('first_test_case', 'second_test_case')
... )
>>> assert_that(Report,
... has_same_container('second_test_case', 'third_test_case')
... ) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
AssertionError: ...
Expected: ...
but: ...
<BLANKLINE> | f1181:m1 |
@hookspec<EOL><INDENT>def decorate_as_label(self, label_type, labels):<DEDENT> | label | f1189:c0:m6 |
|
@hookspec<EOL><INDENT>def add_label(self, label_type, labels):<DEDENT> | label | f1189:c0:m7 |
|
@hookspec<EOL><INDENT>def decorate_as_link(self, url, link_type, name):<DEDENT> | url | f1189:c0:m8 |
|
@hookspec<EOL><INDENT>def add_link(self, url, link_type, name):<DEDENT> | url | f1189:c0:m9 |
|
@hookspec<EOL><INDENT>def start_step(self, uuid, title, params):<DEDENT> | step | f1189:c0:m10 |
|
@hookspec<EOL><INDENT>def stop_step(self, uuid, exc_type, exc_val, exc_tb):<DEDENT> | step | f1189:c0:m11 |
|
@hookspec<EOL><INDENT>def attach_data(self, body, name, attachment_type, extension):<DEDENT> | attach data | f1189:c0:m12 |
|
@hookspec<EOL><INDENT>def attach_file(self, source, name, attachment_type, extension):<DEDENT> | attach file | f1189:c0:m13 |
|
@hookspec<EOL><INDENT>def start_fixture(self, parent_uuid, uuid, name, parameters):<DEDENT> | start fixture | f1189:c1:m0 |
|
@hookspec<EOL><INDENT>def stop_fixture(self, parent_uuid, uuid, name, exc_type, exc_val, exc_tb):<DEDENT> | stop fixture | f1189:c1:m1 |
|
@hookspec<EOL><INDENT>def report_result(self, result):<DEDENT> | reporting | f1189:c1:m4 |
|
@hookspec<EOL><INDENT>def report_container(self, container):<DEDENT> | reporting | f1189:c1:m5 |
|
@hookspec<EOL><INDENT>def report_attached_file(self, source, file_name):<DEDENT> | reporting | f1189:c1:m6 |
|
@hookspec<EOL><INDENT>def report_attached_data(self, body, file_name):<DEDENT> | reporting | f1189:c1:m7 |
|
def format_exception_only(etype, value): | <EOL>if (isinstance(etype, BaseException) or<EOL>isinstance(etype, types.InstanceType) or<EOL>etype is None or type(etype) is str): <EOL><INDENT>return [_format_final_exc_line(etype, value)]<EOL><DEDENT>stype = etype.__name__<EOL>if not issubclass(etype, SyntaxError):<EOL><INDENT>return [_format_final_exc_line(stype, value)]<EOL><DEDENT>lines = []<EOL>try:<EOL><INDENT>msg, (filename, lineno, offset, badline) = value.args<EOL><DEDENT>except Exception:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>filename = filename or "<STR_LIT>"<EOL>lines.append('<STR_LIT>' % (filename, lineno))<EOL>if badline is not None:<EOL><INDENT>lines.append('<STR_LIT>' % badline.strip())<EOL>if offset is not None:<EOL><INDENT>caretspace = badline.rstrip('<STR_LIT:\n>')[:offset].lstrip()<EOL>caretspace = ((c.isspace() and c or '<STR_LIT:U+0020>') for c in caretspace)<EOL>lines.append('<STR_LIT>' % '<STR_LIT>'.join(caretspace))<EOL><DEDENT><DEDENT>value = msg<EOL><DEDENT>lines.append(_format_final_exc_line(stype, value))<EOL>return lines<EOL> | Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.last_type and sys.last_value. The return value is a list of
strings, each ending in a newline.
Normally, the list contains a single string; however, for
SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax
error occurred.
The message indicating which exception occurred is always the last
string in the list. | f1190:m0 |
def _format_final_exc_line(etype, value): | valuestr = _some_str(value)<EOL>if value is None or not valuestr:<EOL><INDENT>line = "<STR_LIT>" % etype<EOL><DEDENT>else:<EOL><INDENT>line = "<STR_LIT>" % (etype, valuestr)<EOL><DEDENT>return line<EOL> | Return a list of a single line -- normal case for format_exception_only | f1190:m1 |
def getargspec(func): | <EOL>if inspect.ismethod(func):<EOL><INDENT>func = func.__func__<EOL><DEDENT>parts = <NUM_LIT:0>, () <EOL>if type(func) is partial:<EOL><INDENT>keywords = func.keywords<EOL>if keywords is None:<EOL><INDENT>keywords = {}<EOL><DEDENT>parts = len(func.args), keywords.keys()<EOL>func = func.func<EOL><DEDENT>if not inspect.isfunction(func):<EOL><INDENT>raise TypeError('<STR_LIT>' % func)<EOL><DEDENT>args, varargs, varkw = inspect.getargs(func.__code__)<EOL>func_defaults = func.__defaults__<EOL>if func_defaults is None:<EOL><INDENT>func_defaults = []<EOL><DEDENT>else:<EOL><INDENT>func_defaults = list(func_defaults)<EOL><DEDENT>if parts[<NUM_LIT:0>]:<EOL><INDENT>args = args[parts[<NUM_LIT:0>]:]<EOL><DEDENT>if parts[<NUM_LIT:1>]:<EOL><INDENT>for arg in parts[<NUM_LIT:1>]:<EOL><INDENT>i = args.index(arg) - len(args) <EOL>del args[i]<EOL>try:<EOL><INDENT>del func_defaults[i]<EOL><DEDENT>except IndexError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>return inspect.ArgSpec(args, varargs, varkw, func_defaults)<EOL> | Used because getargspec for python 2.7 does not accept functools.partial
which is the type for pytest fixtures.
getargspec excerpted from:
sphinx.util.inspect
~~~~~~~~~~~~~~~~~~~
Helpers for inspecting Python modules.
:copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
Like inspect.getargspec but supports functools.partial as well. | f1193:m0 |
def represent(item): | if six.PY2 and isinstance(item, str):<EOL><INDENT>try:<EOL><INDENT>item = item.decode(encoding='<STR_LIT>')<EOL><DEDENT>except UnicodeDecodeError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>if isinstance(item, six.text_type):<EOL><INDENT>return u'<STR_LIT>' % item<EOL><DEDENT>elif isinstance(item, (bytes, bytearray)):<EOL><INDENT>return repr(type(item))<EOL><DEDENT>else:<EOL><INDENT>return repr(item)<EOL><DEDENT> | >>> represent(None)
'None'
>>> represent(123)
'123'
>>> import six
>>> expected = u"'hi'" if six.PY2 else "'hi'"
>>> represent('hi') == expected
True
>>> expected = u"'привет'" if six.PY2 else "'привет'"
>>> represent(u'привет') == expected
True
>>> represent(bytearray([0xd0, 0xbf])) # doctest: +ELLIPSIS
"<... 'bytearray'>"
>>> from struct import pack
>>> result = "<type 'str'>" if six.PY2 else "<class 'bytes'>"
>>> represent(pack('h', 0x89)) == result
True
>>> result = "<type 'int'>" if six.PY2 else "<class 'int'>"
>>> represent(int) == result
True
>>> represent(represent) # doctest: +ELLIPSIS
'<function represent at ...>'
>>> represent([represent]) # doctest: +ELLIPSIS
'[<function represent at ...>]'
>>> class ClassWithName(object):
... pass
>>> represent(ClassWithName)
"<class 'utils.ClassWithName'>" | f1193:m8 |
def func_parameters(func, *args, **kwargs): | parameters = {}<EOL>arg_spec = getargspec(func) if six.PY2 else inspect.getfullargspec(func)<EOL>arg_order = list(arg_spec.args)<EOL>args_dict = dict(zip(arg_spec.args, args))<EOL>if arg_spec.defaults:<EOL><INDENT>kwargs_defaults_dict = dict(zip(arg_spec.args[len(args):], arg_spec.defaults))<EOL>parameters.update(kwargs_defaults_dict)<EOL><DEDENT>if arg_spec.varargs:<EOL><INDENT>arg_order.append(arg_spec.varargs)<EOL>varargs = args[len(arg_spec.args):]<EOL>parameters.update({arg_spec.varargs: varargs} if varargs else {})<EOL><DEDENT>if arg_spec.args and arg_spec.args[<NUM_LIT:0>] in ['<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>args_dict.pop(arg_spec.args[<NUM_LIT:0>], None)<EOL><DEDENT>if kwargs:<EOL><INDENT>if sys.version_info < (<NUM_LIT:3>, <NUM_LIT:6>):<EOL><INDENT>arg_order.extend(sorted(list(kwargs.keys())))<EOL><DEDENT>else:<EOL><INDENT>arg_order.extend(list(kwargs.keys()))<EOL><DEDENT>parameters.update(kwargs)<EOL><DEDENT>parameters.update(args_dict)<EOL>items = parameters.iteritems() if six.PY2 else parameters.items()<EOL>sorted_items = sorted(map(lambda kv: (kv[<NUM_LIT:0>], represent(kv[<NUM_LIT:1>])), items), key=lambda x: arg_order.index(x[<NUM_LIT:0>]))<EOL>return collections.OrderedDict(sorted_items)<EOL> | >>> def helper(func):
... def wrapper(*args, **kwargs):
... params = func_parameters(func, *args, **kwargs)
... print(list(params.items()))
... return func(*args, **kwargs)
... return wrapper
>>> @helper
... def args(a, b):
... pass
>>> args(1, 2)
[('a', '1'), ('b', '2')]
>>> args(*(1,2))
[('a', '1'), ('b', '2')]
>>> args(1, b=2)
[('a', '1'), ('b', '2')]
>>> @helper
... def kwargs(a=1, b=2):
... pass
>>> kwargs()
[('a', '1'), ('b', '2')]
>>> kwargs(a=3, b=4)
[('a', '3'), ('b', '4')]
>>> kwargs(b=4, a=3)
[('a', '3'), ('b', '4')]
>>> kwargs(a=3)
[('a', '3'), ('b', '2')]
>>> kwargs(b=4)
[('a', '1'), ('b', '4')]
>>> @helper
... def args_kwargs(a, b, c=3, d=4):
... pass
>>> args_kwargs(1, 2)
[('a', '1'), ('b', '2'), ('c', '3'), ('d', '4')]
>>> args_kwargs(1, 2, d=5)
[('a', '1'), ('b', '2'), ('c', '3'), ('d', '5')]
>>> args_kwargs(1, 2, 5, 6)
[('a', '1'), ('b', '2'), ('c', '5'), ('d', '6')]
>>> @helper
... def varargs(*a):
... pass
>>> varargs()
[]
>>> varargs(1, 2)
[('a', '(1, 2)')]
>>> @helper
... def keywords(**a):
... pass
>>> keywords()
[]
>>> keywords(a=1, b=2)
[('a', '1'), ('b', '2')]
>>> @helper
... def args_varargs(a, b, *c):
... pass
>>> args_varargs(1, 2)
[('a', '1'), ('b', '2')]
>>> args_varargs(1, 2, 2)
[('a', '1'), ('b', '2'), ('c', '(2,)')]
>>> @helper
... def args_kwargs_varargs(a, b, c=3, **d):
... pass
>>> args_kwargs_varargs(1, 2)
[('a', '1'), ('b', '2'), ('c', '3')]
>>> args_kwargs_varargs(1, 2, 4, d=5, e=6)
[('a', '1'), ('b', '2'), ('c', '4'), ('d', '5'), ('e', '6')]
>>> @helper
... def args_kwargs_varargs_keywords(a, b=2, *c, **d):
... pass
>>> args_kwargs_varargs_keywords(1)
[('a', '1'), ('b', '2')]
>>> args_kwargs_varargs_keywords(1, 2, 4, d=5, e=6)
[('a', '1'), ('b', '2'), ('c', '(4,)'), ('d', '5'), ('e', '6')]
>>> class Class(object):
... @staticmethod
... @helper
... def static_args(a, b):
... pass
...
... @classmethod
... @helper
... def method_args(cls, a, b):
... pass
...
... @helper
... def args(self, a, b):
... pass
>>> cls = Class()
>>> cls.args(1, 2)
[('a', '1'), ('b', '2')]
>>> cls.method_args(1, 2)
[('a', '1'), ('b', '2')]
>>> cls.static_args(1, 2)
[('a', '1'), ('b', '2')] | f1193:m9 |
def format_exception(etype, value): | return '<STR_LIT:\n>'.join(format_exception_only(etype, value)) if etype or value else None<EOL> | >>> import sys
>>> try:
... assert False, u'Привет'
... except AssertionError:
... etype, e, _ = sys.exc_info()
... format_exception(etype, e) # doctest: +ELLIPSIS
'AssertionError: ...\\n'
>>> try:
... assert False, 'Привет'
... except AssertionError:
... etype, e, _ = sys.exc_info()
... format_exception(etype, e) # doctest: +ELLIPSIS
'AssertionError: ...\\n'
>>> try:
... compile("bla u'Привет'", "fake.py", "exec")
... except SyntaxError:
... etype, e, _ = sys.exc_info()
... format_exception(etype, e) # doctest: +ELLIPSIS
' File "fake.py", line 1...SyntaxError: invalid syntax\\n'
>>> try:
... compile("bla 'Привет'", "fake.py", "exec")
... except SyntaxError:
... etype, e, _ = sys.exc_info()
... format_exception(etype, e) # doctest: +ELLIPSIS
' File "fake.py", line 1...SyntaxError: invalid syntax\\n'
>>> from hamcrest import assert_that, equal_to
>>> try:
... assert_that('left', equal_to('right'))
... except AssertionError:
... etype, e, _ = sys.exc_info()
... format_exception(etype, e) # doctest: +ELLIPSIS
"AssertionError: \\nExpected:...but:..."
>>> try:
... assert_that(u'left', equal_to(u'right'))
... except AssertionError:
... etype, e, _ = sys.exc_info()
... format_exception(etype, e) # doctest: +ELLIPSIS
"AssertionError: \\nExpected:...but:..." | f1193:m11 |
def get_allure_suites(longname): | labels = []<EOL>suites = longname.split('<STR_LIT:.>')<EOL>if len(suites) > <NUM_LIT:3>:<EOL><INDENT>labels.append(Label(LabelType.PARENT_SUITE, suites.pop(<NUM_LIT:0>)))<EOL><DEDENT>labels.append(Label(LabelType.SUITE, suites.pop(<NUM_LIT:0>)))<EOL>if len(suites) > <NUM_LIT:1>:<EOL><INDENT>labels.append(Label(LabelType.SUB_SUITE, '<STR_LIT:.>'.join(suites[:-<NUM_LIT:1>])))<EOL><DEDENT>return labels<EOL> | >>> get_allure_suites('Suite1.Test')
[Label(name='suite', value='Suite1')]
>>> get_allure_suites('Suite1.Suite2.Test') # doctest: +NORMALIZE_WHITESPACE
[Label(name='suite', value='Suite1'), Label(name='subSuite', value='Suite2')]
>>> get_allure_suites('Suite1.Suite2.Suite3.Test') # doctest: +NORMALIZE_WHITESPACE
[Label(name='parentSuite', value='Suite1'),
Label(name='suite', value='Suite2'),
Label(name='subSuite', value='Suite3')] | f1209:m2 |
def refresh(self): | response = requests.get('<STR_LIT>' % (API_BASE_URL, self.id))<EOL>attributes = response.json()<EOL>self.category = Category(attributes['<STR_LIT>'])<EOL>self.url = attributes['<STR_LIT:url>']<EOL>self.title = attributes['<STR_LIT:title>']<EOL>if attributes['<STR_LIT:image>']:<EOL><INDENT>self.image = Image(attributes['<STR_LIT:image>']['<STR_LIT:id>'])<EOL><DEDENT>else:<EOL><INDENT>self.image = None<EOL><DEDENT>self.locale = attributes['<STR_LIT>']<EOL>self.introduction = WikiText(attributes['<STR_LIT>'],<EOL>attributes['<STR_LIT>'])<EOL>self.conclusion = WikiText(attributes['<STR_LIT>'],<EOL>attributes['<STR_LIT>'])<EOL>self.subject = attributes['<STR_LIT>']<EOL>self.modifiedDate = datetime.utcfromtimestamp(attributes['<STR_LIT>'])<EOL>self.createdDate = datetime.utcfromtimestamp(attributes['<STR_LIT>'])<EOL>self.publishedDate = datetime.utcfromtimestamp(attributes['<STR_LIT>'])<EOL>author = attributes['<STR_LIT>']<EOL>self.steps = [Step(step['<STR_LIT>'], step['<STR_LIT>'], data=step) for step in attributes['<STR_LIT>']]<EOL>self.type = attributes['<STR_LIT:type>']<EOL>self.public = attributes['<STR_LIT>']<EOL>self.revision = attributes['<STR_LIT>']<EOL>self.difficulty = attributes['<STR_LIT>']<EOL>self.prerequisites = [Guide(guide['<STR_LIT>']) for guide in attributes['<STR_LIT>']]<EOL>self.flags = [Flag.from_id(flag['<STR_LIT>']) for flag in attributes['<STR_LIT>']]<EOL> | Refetch instance data from the API. | f1211:c0:m1 |
@staticmethod<EOL><INDENT>def all(guideids=None, filter=None, order=None):<DEDENT> | parameters = []<EOL>if guideids:<EOL><INDENT>parameters.append('<STR_LIT>' % '<STR_LIT:U+002C>'.join(map(str, guideids)))<EOL><DEDENT>if filter:<EOL><INDENT>parameters.append('<STR_LIT>' % filter)<EOL><DEDENT>if order:<EOL><INDENT>parameters.append('<STR_LIT>' % order)<EOL><DEDENT>parameters = '<STR_LIT:&>'.join(parameters)<EOL>offset = <NUM_LIT:0><EOL>limit = <NUM_LIT:5> <EOL>guideJSONs = []<EOL>while True:<EOL><INDENT>if not guideJSONs:<EOL><INDENT>url = '<STR_LIT>'% (API_BASE_URL, offset, limit, parameters)<EOL>response = requests.get(url)<EOL>guideJSONs = response.json()<EOL>if not guideJSONs:<EOL><INDENT>return<EOL><DEDENT>offset += limit<EOL><DEDENT>yield Guide(guideJSONs.pop(<NUM_LIT:0>)['<STR_LIT>'])<EOL><DEDENT> | Fetch all guides.
:param iterable guideids: Only return Guides corresponding to these ids.
:param string filter: Only return guides of this type. Choices:
installation, repair, disassembly, teardown,
technique, maintenance.
:param string order: Instead of ordering by guideid, order alphabetically.
Choices: ASC, DESC.
:rtype: generator of :class:`pyfixit.guide.Guide` objects. | f1211:c0:m2 |
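A hedged usage sketch of the generator documented above; the import path is an assumption (the docstring only says the items are `pyfixit.guide.Guide` objects), and the filter and order values are taken from the choices listed in the docstring.

```python
# Assumed import path, based on the ':rtype:' note in the docstring above.
from pyfixit.guide import Guide

# Guide.all pages through the API lazily (a few guides per request),
# so iteration can be stopped early without fetching everything.
for guide in Guide.all(filter='repair', order='ASC'):
    print(guide)
    break
```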
def refresh(self): | response = requests.get('<STR_LIT>' % (API_BASE_URL, self.id))<EOL>attributes = response.json()<EOL>self.height = attributes['<STR_LIT>']<EOL>self.width = attributes['<STR_LIT:width>']<EOL>image = attributes['<STR_LIT:image>']<EOL>del(image['<STR_LIT:id>'])<EOL>for size in image:<EOL><INDENT>vars(self)[size] = image[size]<EOL><DEDENT> | Refetch instance data from the API. | f1212:c0:m4 |
@staticmethod<EOL><INDENT>def from_id(id):<DEDENT> | return globals()[id]<EOL> | Fetch a pre-created singleton flag.
:var string id: The flag's id, as seen on
http://www.ifixit.com/Info/Flags. | f1213:c0:m1 |
def refresh(self): | response = requests.get('<STR_LIT>' % (API_BASE_URL, self.name))<EOL>attributes = response.json()<EOL>self.ancestors = [Category(name) for name in attributes['<STR_LIT>']]<EOL>self.contents = WikiText(attributes['<STR_LIT>'],<EOL>attributes['<STR_LIT>'])<EOL>self.description = attributes['<STR_LIT:description>']<EOL>self.guides = []<EOL>for guide in attributes['<STR_LIT>']:<EOL><INDENT>self.guides.append(Guide(guide['<STR_LIT>']))<EOL><DEDENT>flags = dict(attributes['<STR_LIT>']).values()<EOL>self.flags = [Flag.from_id(flag['<STR_LIT>']) for flag in flags]<EOL>self.image = Image(attributes['<STR_LIT:image>']['<STR_LIT:id>']) if attributes['<STR_LIT:image>'] else None<EOL>self.locale = attributes['<STR_LIT>']<EOL>self.title = attributes['<STR_LIT>']<EOL> | Refetch instance data from the API. | f1215:c0:m3 |
def refresh(self): | <EOL>response = requests.get('<STR_LIT>' % (API_BASE_URL, self.guideid))<EOL>attributes = response.json()<EOL>for step in attributes['<STR_LIT>']:<EOL><INDENT>if step['<STR_LIT>'] == self.stepid:<EOL><INDENT>self._update(step)<EOL>return<EOL><DEDENT><DEDENT>raise Exception('<STR_LIT>'% (self.stepid, self.guideid))<EOL> | Refetch instance data from the API. | f1224:c0:m1 |
def _update(self, data): | self.orderby = data['<STR_LIT>']<EOL>self.revision = data['<STR_LIT>']<EOL>self.title = data['<STR_LIT:title>']<EOL>self.lines = [Line(self.guideid, self.stepid, line['<STR_LIT>'], data=line)<EOL>for line in data['<STR_LIT>']]<EOL>if data['<STR_LIT>']['<STR_LIT:type>'] == '<STR_LIT:image>':<EOL><INDENT>self.media = []<EOL>for image in data['<STR_LIT>']['<STR_LIT:data>']:<EOL><INDENT>self.media.append(Image(image['<STR_LIT:id>']))<EOL><DEDENT><DEDENT> | Update the step using the blob of json-parsed data directly from the
API. | f1224:c0:m2 |
def refresh(self): | <EOL>response = requests.get('<STR_LIT>' % (API_BASE_URL, self.guideid))<EOL>attributes = response.json()<EOL>for step in attributes['<STR_LIT>']:<EOL><INDENT>if step['<STR_LIT>'] == self.stepid:<EOL><INDENT>for line in step['<STR_LIT>']:<EOL><INDENT>if line['<STR_LIT>'] == self.lineid:<EOL><INDENT>self._update(line)<EOL>return<EOL><DEDENT><DEDENT><DEDENT><DEDENT>raise Exception('<STR_LIT>'% (self.lineid, self.stepid, self.guideid))<EOL> | Refetch instance data from the API. | f1225:c0:m1 |
def _update(self, data): | self.bullet = data['<STR_LIT>']<EOL>self.level = data['<STR_LIT>']<EOL>self.text = WikiText(data['<STR_LIT>'],<EOL>data['<STR_LIT>'])<EOL> | Update the line using the blob of json-parsed data directly from the
API. | f1225:c0:m2 |
def main(): | Log.info('<STR_LIT>')<EOL>app = Application()<EOL>app.run()<EOL>Log.info("<STR_LIT>")<EOL> | Run main logic.
It is the first function called when install.py is executed. | f1234:m0 |
def __init__(self): | self._load_options_from_env()<EOL> | Initialize this class. | f1234:c0:m0 |
def run(self): | try:<EOL><INDENT>self.linux.verify_system_status()<EOL><DEDENT>except InstallSkipError:<EOL><INDENT>Log.info('<STR_LIT>')<EOL>return<EOL><DEDENT>work_dir = tempfile.mkdtemp(suffix='<STR_LIT>')<EOL>Log.info("<STR_LIT>".format(work_dir))<EOL>with Cmd.pushd(work_dir):<EOL><INDENT>self.rpm_py.download_and_install()<EOL>if not self.python.is_python_binding_installed():<EOL><INDENT>message = (<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>)<EOL>raise InstallError(message)<EOL><DEDENT><DEDENT>if self.is_work_dir_removed:<EOL><INDENT>shutil.rmtree(work_dir)<EOL>Log.info("<STR_LIT>".format(work_dir))<EOL><DEDENT>else:<EOL><INDENT>Log.info("<STR_LIT>".format(work_dir))<EOL><DEDENT> | Run install process. | f1234:c0:m1 |
def __init__(self, version, python, linux, **kwargs): | if not version:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>if not python:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>if not linux:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(version, str):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(python, Python):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(linux, Linux):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>is_installed_from_bin = kwargs.get('<STR_LIT>', False)<EOL>git_branch = kwargs.get('<STR_LIT>')<EOL>optimized = kwargs.get('<STR_LIT>', True)<EOL>verbose = kwargs.get('<STR_LIT>', False)<EOL>rpm_py_version = RpmPyVersion(version)<EOL>self.version = rpm_py_version<EOL>self.is_installed_from_bin = is_installed_from_bin<EOL>self.downloader = Downloader(rpm_py_version, git_branch=git_branch)<EOL>self.installer = linux.create_installer(rpm_py_version,<EOL>optimized=optimized,<EOL>verbose=verbose)<EOL> | Initialize this class. | f1234:c1:m0 |
def download_and_install(self): | if self.is_installed_from_bin:<EOL><INDENT>try:<EOL><INDENT>self.installer.install_from_rpm_py_package()<EOL>return<EOL><DEDENT>except RpmPyPackageNotFoundError as e:<EOL><INDENT>Log.warn('<STR_LIT>'.format(e))<EOL>pass<EOL><DEDENT><DEDENT>top_dir_name = self.downloader.download_and_expand()<EOL>rpm_py_dir = os.path.join(top_dir_name, '<STR_LIT>')<EOL>setup_py_in_found = False<EOL>with Cmd.pushd(rpm_py_dir):<EOL><INDENT>if self.installer.setup_py.exists_in_path():<EOL><INDENT>setup_py_in_found = True<EOL>self.installer.run()<EOL><DEDENT><DEDENT>if not setup_py_in_found:<EOL><INDENT>self.installer.install_from_rpm_py_package()<EOL><DEDENT> | Download and install RPM Python binding. | f1234:c1:m1 |
def __init__(self, version, **kwargs): | if not version:<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(version, str):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>self.version = version<EOL> | Initialize this class. | f1234:c2:m0 |
def __str__(self): | return self.version<EOL> | Return the string expression of this class. | f1234:c2:m1 |
@property<EOL><INDENT>def info(self):<DEDENT> | version_str = self.version<EOL>return Utils.version_str2tuple(version_str)<EOL> | RPM Python binding's version info. | f1234:c2:m2 |
@property<EOL><INDENT>def is_release(self):<DEDENT> | <EOL>return True if re.match(r'<STR_LIT>', self.version) else False<EOL> | Release version or not. | f1234:c2:m3 |
@property<EOL><INDENT>def git_branch(self):<DEDENT> | info = self.info<EOL>return '<STR_LIT>'.format(<EOL>major=info[<NUM_LIT:0>], minor=info[<NUM_LIT:1>])<EOL> | Git branch name. | f1234:c2:m4 |
def __init__(self, version, **kwargs): | if not version:<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(version, RpmPyVersion):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>self.version = version<EOL>self.replaced_word_dict = {<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': version.version,<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>}<EOL>optimized = kwargs.get('<STR_LIT>', True)<EOL>patches = []<EOL>if optimized:<EOL><INDENT>patches = self.PATCHES_DEFAULT<EOL>if version.info < (<NUM_LIT:4>, <NUM_LIT:12>):<EOL><INDENT>patches.extend(self.PATCHS_ADD_EXTRA_LINK_ARGS)<EOL><DEDENT><DEDENT>self.patches = patches<EOL> | Initialize this class. | f1234:c3:m0 |
def exists_in_path(self): | return os.path.isfile(self.IN_PATH)<EOL> | Return if setup.py.in exists.
If the RPM version is >= 4.10.0-beta1, setup.py.in exists;
otherwise (RPM version <= 4.9.x), setup.py.in does not exist. | f1234:c3:m1 |
def add_patchs_to_build_without_pkg_config(self, lib_dir, include_dir): | additional_patches = [<EOL>{<EOL>'<STR_LIT:src>': r"<STR_LIT>",<EOL>'<STR_LIT>': "<STR_LIT>".format(lib_dir),<EOL>},<EOL>{<EOL>'<STR_LIT:src>': r"<STR_LIT>",<EOL>'<STR_LIT>': "<STR_LIT>",<EOL>'<STR_LIT>': True,<EOL>},<EOL>{<EOL>'<STR_LIT:src>': r"<STR_LIT>",<EOL>'<STR_LIT>': "<STR_LIT>".format(include_dir),<EOL>'<STR_LIT>': True,<EOL>},<EOL>]<EOL>self.patches.extend(additional_patches)<EOL> | Add patches to remove pkg-config command and rpm.pc part.
Replace them with the given library path (lib_dir) and include path (include_dir),
without using an rpm.pc file. | f1234:c3:m2 |
def apply_and_save(self): | patches = self.patches<EOL>content = None<EOL>with open(self.IN_PATH) as f_in:<EOL><INDENT>content = f_in.read()<EOL><DEDENT>for key in self.replaced_word_dict:<EOL><INDENT>content = content.replace(key, self.replaced_word_dict[key])<EOL><DEDENT>out_patches = []<EOL>for patch in patches:<EOL><INDENT>pattern = re.compile(patch['<STR_LIT:src>'], re.MULTILINE)<EOL>(content, subs_num) = re.subn(pattern, patch['<STR_LIT>'],<EOL>content)<EOL>if subs_num > <NUM_LIT:0>:<EOL><INDENT>patch['<STR_LIT>'] = True<EOL><DEDENT>out_patches.append(patch)<EOL><DEDENT>for patch in out_patches:<EOL><INDENT>if patch.get('<STR_LIT>') and not patch.get('<STR_LIT>'):<EOL><INDENT>Log.warn('<STR_LIT>'.format(patch['<STR_LIT:src>']))<EOL><DEDENT><DEDENT>with open(self.OUT_PATH, '<STR_LIT:w>') as f_out:<EOL><INDENT>f_out.write(content)<EOL><DEDENT>self.pathces = out_patches<EOL>content = None<EOL> | Apply replaced words and patches, and save setup.py file. | f1234:c3:m3 |
def __init__(self, rpm_py_version, **kwargs): | if not rpm_py_version:<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(rpm_py_version, RpmPyVersion):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>self.rpm_py_version = rpm_py_version<EOL>self.git_branch = kwargs.get('<STR_LIT>')<EOL> | Initialize this class. | f1234:c4:m0 |
def download_and_expand(self): | top_dir_name = None<EOL>if self.git_branch:<EOL><INDENT>top_dir_name = self._download_and_expand_by_git()<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>top_dir_name = self._download_and_expand_from_archive_url()<EOL><DEDENT>except RemoteFileNotFoundError:<EOL><INDENT>Log.info('<STR_LIT>')<EOL>top_dir_name = self._download_and_expand_by_git()<EOL><DEDENT><DEDENT>return top_dir_name<EOL> | Download and expand RPM Python binding. | f1234:c4:m1 |
def __init__(self, rpm_py_version, python, rpm, **kwargs): | if not rpm_py_version:<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not python:<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not rpm:<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(rpm_py_version, RpmPyVersion):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(python, Python):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>if not isinstance(rpm, Rpm):<EOL><INDENT>ValueError('<STR_LIT>')<EOL><DEDENT>optimized = kwargs.get('<STR_LIT>', True)<EOL>verbose = kwargs.get('<STR_LIT>', False)<EOL>self.rpm_py_version = rpm_py_version<EOL>self.python = python<EOL>self.rpm = rpm<EOL>self.setup_py = SetupPy(rpm_py_version, optimized=optimized)<EOL>self.setup_py_opts = '<STR_LIT>' if verbose else '<STR_LIT>'<EOL>self.optimized = optimized<EOL>self.package_sys_name = None<EOL>self.pacakge_popt_name = None<EOL>self.pacakge_popt_devel_name = None<EOL> | Initialize this class. | f1234:c5:m0 |
def run(self): | self._make_lib_file_symbolic_links()<EOL>self._copy_each_include_files_to_include_dir()<EOL>self._make_dep_lib_file_sym_links_and_copy_include_files()<EOL>self.setup_py.add_patchs_to_build_without_pkg_config(<EOL>self.rpm.lib_dir, self.rpm.include_dir<EOL>)<EOL>self.setup_py.apply_and_save()<EOL>self._build_and_install()<EOL> | Run install main logic. | f1234:c5:m1 |
def install_from_rpm_py_package(self): | raise NotImplementedError('<STR_LIT>')<EOL> | Run install from RPM Python binding system package.
It is run when the RPM source does not contain setup.py.in,
for example when the RPM source is old. | f1234:c5:m2 |
def _make_lib_file_symbolic_links(self): | so_file_dict = {<EOL>'<STR_LIT>': {<EOL>'<STR_LIT>': self.rpm.lib_dir,<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': True,<EOL>},<EOL>'<STR_LIT>': {<EOL>'<STR_LIT>': self.rpm.lib_dir,<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': True,<EOL>},<EOL>'<STR_LIT>': {<EOL>'<STR_LIT>': self.rpm.lib_dir,<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': True,<EOL>},<EOL>'<STR_LIT>': {<EOL>'<STR_LIT>': self.rpm.lib_dir,<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>},<EOL>}<EOL>self._update_sym_src_dirs_conditionally(so_file_dict)<EOL>for name in so_file_dict:<EOL><INDENT>so_dict = so_file_dict[name]<EOL>pattern = '<STR_LIT>'.format(name)<EOL>so_files = Cmd.find(so_dict['<STR_LIT>'], pattern)<EOL>if not so_files:<EOL><INDENT>is_required = so_dict.get('<STR_LIT>', False)<EOL>if not is_required:<EOL><INDENT>message_format = (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>)<EOL>Log.debug(message_format.format(name))<EOL>continue<EOL><DEDENT>message = '<STR_LIT>'.format(<EOL>pattern, so_dict['<STR_LIT>']<EOL>)<EOL>raise InstallError(message)<EOL><DEDENT>sym_dst_dir = os.path.abspath('<STR_LIT>'.format(<EOL>so_dict['<STR_LIT>']))<EOL>if not os.path.isdir(sym_dst_dir):<EOL><INDENT>Cmd.mkdir_p(sym_dst_dir)<EOL><DEDENT>cmd = '<STR_LIT>'.format(so_files[<NUM_LIT:0>],<EOL>sym_dst_dir,<EOL>name)<EOL>Cmd.sh_e(cmd)<EOL><DEDENT> | Make symbolic links for lib files.
Make symbolic links from system library files or downloaded lib files
to downloaded source library files.
For example, case: Fedora x86_64
Make symbolic links
from
a. /usr/lib64/librpmio.so* (one of them)
b. /usr/lib64/librpm.so* (one of them)
c. If rpm-build-libs package is installed,
/usr/lib64/librpmbuild.so* (one of them)
otherwise, downloaded and extracted rpm-build-libs.
./usr/lib64/librpmbuild.so* (one of them)
d. If rpm-build-libs package is installed,
/usr/lib64/librpmsign.so* (one of them)
otherwise, downloaded and extracted rpm-build-libs.
./usr/lib64/librpmsign.so* (one of them)
to
a. rpm/rpmio/.libs/librpmio.so
b. rpm/lib/.libs/librpm.so
c. rpm/build/.libs/librpmbuild.so
d. rpm/sign/.libs/librpmsign.so
.
This is a status after running "make" on actual rpm build process. | f1234:c5:m3 |
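A minimal illustrative sketch of the link-creation step this docstring describes, using hard-coded example paths; the real method discovers the library files at run time (via `Cmd.find`) and shells out to create the links.

```python
import os

# Illustrative paths only: link one system library into the rpm source tree
# layout listed in the docstring above.
src = '/usr/lib64/librpmio.so.8'          # assumed system library file name
dst_dir = 'rpm/rpmio/.libs'
os.makedirs(dst_dir, exist_ok=True)
link = os.path.join(dst_dir, 'librpmio.so')
if os.path.lexists(link):
    os.remove(link)
os.symlink(src, link)
```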
def _copy_each_include_files_to_include_dir(self): | src_header_dirs = [<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>]<EOL>with Cmd.pushd('<STR_LIT:..>'):<EOL><INDENT>src_include_dir = os.path.abspath('<STR_LIT>')<EOL>for header_dir in src_header_dirs:<EOL><INDENT>if not os.path.isdir(header_dir):<EOL><INDENT>message_format = "<STR_LIT>"<EOL>Log.debug(message_format.format(header_dir))<EOL>continue<EOL><DEDENT>header_files = Cmd.find(header_dir, '<STR_LIT>')<EOL>for header_file in header_files:<EOL><INDENT>pattern = '<STR_LIT>'.format(header_dir)<EOL>(dst_header_file, subs_num) = re.subn(pattern,<EOL>'<STR_LIT>', header_file)<EOL>if subs_num == <NUM_LIT:0>:<EOL><INDENT>message = '<STR_LIT>'.format(<EOL>header_file)<EOL>raise ValueError(message)<EOL><DEDENT>dst_header_file = os.path.abspath(<EOL>os.path.join(src_include_dir, '<STR_LIT>', dst_header_file)<EOL>)<EOL>dst_dir = os.path.dirname(dst_header_file)<EOL>if not os.path.isdir(dst_dir):<EOL><INDENT>Cmd.mkdir_p(dst_dir)<EOL><DEDENT>shutil.copyfile(header_file, dst_header_file)<EOL><DEDENT><DEDENT><DEDENT> | Copy include header files for each directory to include directory.
Copy include header files
from
rpm/
rpmio/*.h
lib/*.h
build/*.h
sign/*.h
to
rpm/
include/
rpm/*.h
.
This is a status after running "make" on actual rpm build process. | f1234:c5:m5 |
def _make_dep_lib_file_sym_links_and_copy_include_files(self): | if not self._rpm_py_has_popt_devel_dep():<EOL><INDENT>message = (<EOL>'<STR_LIT>'<EOL>)<EOL>Log.debug(message)<EOL>return<EOL><DEDENT>if self._is_popt_devel_installed():<EOL><INDENT>message = '<STR_LIT>'.format(<EOL>self.pacakge_popt_devel_name)<EOL>Log.debug(message)<EOL>return<EOL><DEDENT>if not self._is_package_downloadable():<EOL><INDENT>message = '''<STR_LIT>'''<EOL> | Make symbolic links for lib files and copy include files.
Do the steps below for the dependency packages.
Dependency packages
- popt-devel
Steps
1. Make symbolic links from system library files or downloaded lib
files to downloaded source library files.
2. Copy include header files to include directory. | f1234:c5:m6 |
def _rpm_py_has_popt_devel_dep(self): | found = False<EOL>with open('<STR_LIT>') as f_in:<EOL><INDENT>for line in f_in:<EOL><INDENT>if re.match(r'<STR_LIT>', line):<EOL><INDENT>found = True<EOL>break<EOL><DEDENT><DEDENT><DEDENT>return found<EOL> | Check if the RPM Python binding has a dependency on popt-devel.
Search include header files in the source code to check it. | f1234:c5:m8 |
def _is_package_downloadable(self): | raise NotImplementedError('<STR_LIT>')<EOL> | Check if the package system is downloadable. | f1234:c5:m9 |
def _is_popt_installed(self): | raise NotImplementedError('<STR_LIT>')<EOL> | Check if the popt package is installed. | f1234:c5:m10 |
def _is_popt_devel_installed(self): | raise NotImplementedError('<STR_LIT>')<EOL> | Check if the popt devel package is installed. | f1234:c5:m11 |
def _download_and_extract_popt_devel(self): | raise NotImplementedError('<STR_LIT>')<EOL> | Download and extract popt devel package. | f1234:c5:m12 |
def __init__(self, rpm_py_version, python, rpm, **kwargs): | Installer.__init__(self, rpm_py_version, python, rpm, **kwargs)<EOL>self.package_sys_name = '<STR_LIT>'<EOL>self.pacakge_popt_name = '<STR_LIT>'<EOL>self.pacakge_popt_devel_name = '<STR_LIT>'<EOL> | Initialize this class. | f1234:c6:m0 |
def run(self): | try:<EOL><INDENT>if not self._is_rpm_all_lib_include_files_installed():<EOL><INDENT>self._make_lib_file_symbolic_links()<EOL>self._copy_each_include_files_to_include_dir()<EOL>self._make_dep_lib_file_sym_links_and_copy_include_files()<EOL>self.setup_py.add_patchs_to_build_without_pkg_config(<EOL>self.rpm.lib_dir, self.rpm.include_dir<EOL>)<EOL><DEDENT>self.setup_py.apply_and_save()<EOL>self._build_and_install()<EOL><DEDENT>except InstallError as e:<EOL><INDENT>if not self._is_rpm_all_lib_include_files_installed():<EOL><INDENT>org_message = str(e)<EOL>message = | Run install main logic. | f1234:c6:m1 |
def install_from_rpm_py_package(self): | self._download_and_extract_rpm_py_package()<EOL>if self.rpm.has_set_up_py_in():<EOL><INDENT>py_dir_name = '<STR_LIT>'.format(<EOL>sys.version_info[<NUM_LIT:0>], sys.version_info[<NUM_LIT:1>])<EOL><DEDENT>else:<EOL><INDENT>py_dir_name = '<STR_LIT:*>'<EOL><DEDENT>python_lib_dir_pattern = os.path.join(<EOL>'<STR_LIT>', '<STR_LIT:*>', py_dir_name, '<STR_LIT>')<EOL>rpm_dir_pattern = os.path.join(python_lib_dir_pattern, '<STR_LIT>')<EOL>downloaded_rpm_dirs = glob.glob(rpm_dir_pattern)<EOL>if not downloaded_rpm_dirs:<EOL><INDENT>message = '<STR_LIT>'.format(<EOL>rpm_dir_pattern)<EOL>raise RpmPyPackageNotFoundError(message)<EOL><DEDENT>src_rpm_dir = downloaded_rpm_dirs[<NUM_LIT:0>]<EOL>for rpm_dir in self.python.python_lib_rpm_dirs:<EOL><INDENT>if os.path.isdir(rpm_dir):<EOL><INDENT>Log.debug("<STR_LIT>".format(rpm_dir))<EOL>shutil.rmtree(rpm_dir)<EOL><DEDENT><DEDENT>dst_rpm_dir = self.python.python_lib_rpm_dir<EOL>Log.debug("<STR_LIT>".format(<EOL>src_rpm_dir, dst_rpm_dir))<EOL>shutil.copytree(src_rpm_dir, dst_rpm_dir)<EOL>file_name_pattern = '<STR_LIT>'<EOL>rpm_egg_info_pattern = os.path.join(<EOL>python_lib_dir_pattern, file_name_pattern)<EOL>downloaded_rpm_egg_infos = glob.glob(rpm_egg_info_pattern)<EOL>if downloaded_rpm_egg_infos:<EOL><INDENT>existing_rpm_egg_info_pattern = os.path.join(<EOL>self.python.python_lib_dir, file_name_pattern)<EOL>existing_rpm_egg_infos = glob.glob(existing_rpm_egg_info_pattern)<EOL>for existing_rpm_egg_info in existing_rpm_egg_infos:<EOL><INDENT>Log.debug("<STR_LIT>".format(<EOL>existing_rpm_egg_info))<EOL>os.remove(existing_rpm_egg_info)<EOL><DEDENT>Log.debug("<STR_LIT>".format(<EOL>downloaded_rpm_egg_infos[<NUM_LIT:0>], self.python.python_lib_dir))<EOL>shutil.copy2(downloaded_rpm_egg_infos[<NUM_LIT:0>],<EOL>self.python.python_lib_dir)<EOL><DEDENT> | Run install from RPM Python binding RPM package. | f1234:c6:m2 |
def _is_rpm_all_lib_include_files_installed(self): | return self.rpm.is_package_installed('<STR_LIT>')<EOL> | Check if all rpm lib and include files are installed.
If the RPM rpm-devel package is installed, the files are installed. | f1234:c6:m3 |
def __init__(self, rpm_py_version, python, rpm, **kwargs): | Installer.__init__(self, rpm_py_version, python, rpm, **kwargs)<EOL>self.package_sys_name = '<STR_LIT>'<EOL>self.pacakge_popt_name = '<STR_LIT>'<EOL>self.pacakge_popt_devel_name = '<STR_LIT>'<EOL> | Initialize this class. | f1234:c7:m0 |
def install_from_rpm_py_package(self): | message = | Run install from RPM Python binding RPM package. | f1234:c7:m1 |
def _is_rpm_all_lib_include_files_installed(self): | return False<EOL> | Check if all rpm lib and include files are installed.
Always return False, because the rpm-dev deb package does not
exist in Debian-based OSes. | f1234:c7:m2 |
def __init__(self, python, rpm_path, **kwargs): | if not python:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>if not rpm_path:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>self.python = python<EOL>self.rpm = self.create_rpm(rpm_path)<EOL>self.sys_installed = kwargs.get('<STR_LIT>', False)<EOL> | Initialize this class. | f1234:c8:m0 |
@classmethod<EOL><INDENT>def get_instance(cls, python, rpm_path, **kwargs):<DEDENT> | linux = None<EOL>if Cmd.which('<STR_LIT>'):<EOL><INDENT>linux = DebianLinux(python, rpm_path, **kwargs)<EOL><DEDENT>else:<EOL><INDENT>linux = FedoraLinux(python, rpm_path, **kwargs)<EOL><DEDENT>return linux<EOL> | Get OS object. | f1234:c8:m1 |
def create_rpm(self, rpm_path): | raise NotImplementedError('<STR_LIT>')<EOL> | Create Rpm object. | f1234:c8:m2 |
def create_installer(self, rpm_py_version, **kwargs): | raise NotImplementedError('<STR_LIT>')<EOL> | Create Installer object. | f1234:c8:m3 |
def verify_system_status(self): | if not sys.platform.startswith('<STR_LIT>'):<EOL><INDENT>raise InstallError('<STR_LIT>')<EOL><DEDENT>if self.python.is_system_python():<EOL><INDENT>if self.python.is_python_binding_installed():<EOL><INDENT>message = '''<STR_LIT>'''<EOL><DEDENT><DEDENT>on binding on system Python should be installed manually.<EOL>the proper RPM package of python{,<NUM_LIT:2>,<NUM_LIT:3>}-rpm,<EOL><INDENT>environment variable RPM_PY_SYS=true<EOL><INDENT>raise InstallError(message)<EOL><DEDENT><DEDENT>if self.rpm.is_system_rpm():<EOL><INDENT>self.verify_package_status()<EOL><DEDENT> | Verify system status. | f1234:c8:m4 |
def verify_package_status(self): | raise NotImplementedError('<STR_LIT>')<EOL> | Verify package status. | f1234:c8:m5 |
def __init__(self, python, rpm_path, **kwargs): | Linux.__init__(self, python, rpm_path, **kwargs)<EOL> | Initialize this class. | f1234:c9:m0 |
def verify_package_status(self): | <EOL>self.rpm.verify_packages_installed(['<STR_LIT>'])<EOL>message_format = | Verify dependency RPM package status. | f1234:c9:m1 |
def create_rpm(self, rpm_path): | return FedoraRpm(rpm_path)<EOL> | Create Rpm object. | f1234:c9:m2 |
def create_installer(self, rpm_py_version, **kwargs): | return FedoraInstaller(rpm_py_version, self.python, self.rpm, **kwargs)<EOL> | Create Installer object. | f1234:c9:m3 |
def __init__(self, python, rpm_path, **kwargs): | Linux.__init__(self, python, rpm_path, **kwargs)<EOL> | Initialize this class. | f1234:c10:m0 |
def verify_package_status(self): | pass<EOL> | Verify dependency Debian package status.
Right now it passes everything,
because if the rpm command (Package: rpm) is installed, all the necessary
libraries should be installed.
See https://packages.ubuntu.com/search?keywords=rpm | f1234:c10:m1 |