Columns: function (string, lengths 11 to 56k), repo_name (string, lengths 5 to 60), features (sequence).
def strip_comment_marker(text):
    """ Strip # markers at the front of a block of comment text. """
    lines = []
    for line in text.splitlines():
        lines.append(line.lstrip('#'))
    text = textwrap.dedent('\n'.join(lines))
    return text
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
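A minimal usage sketch for strip_comment_marker above, assuming textwrap is imported where the function is defined:

    print(strip_comment_marker("#  foo\n#  bar"))
    # prints "foo" then "bar": the leading '#' markers are stripped and the
    # shared two-space indentation is removed by textwrap.dedent()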
def _setup(self):
    from admin_sso import default_settings
    self._wrapped = Settings(default_settings)
frog32/django-admin-sso
[ 33, 20, 33, 2, 1329420183 ]
def __init__(self, settings_module):
    for setting in dir(settings_module):
        if setting == setting.upper():
            setattr(self, setting, getattr(settings_module, setting))
frog32/django-admin-sso
[ 33, 20, 33, 2, 1329420183 ]
def format_template(template, **kw): return jinja2.Template(template).render(**kw)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def format_template(template, **kw): return jinja.from_string(template, **kw)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def relpath(path, start=os.path.curdir):
    """Return a relative version of a path"""
    from os.path import sep, curdir, join, abspath, commonprefix, \
        pardir

    if not path:
        raise ValueError("no path specified")

    start_list = abspath(start).split(sep)
    path_list = abspath(path).split(sep)

    # Work out how much of the filepath is shared by start and path.
    i = len(commonprefix([start_list, path_list]))

    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return curdir
    return join(*rel_list)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
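A brief usage sketch for the relpath above, assuming os is imported alongside it and absolute POSIX paths:

    relpath('/home/user/project/src', start='/home/user')    # -> 'project/src'
    relpath('/home/user', start='/home/user/project/src')    # -> '../..'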
def relpath(path, start=os.path.curdir):
    """Return a relative version of a path"""
    from os.path import sep, curdir, join, abspath, commonprefix, \
        pardir, splitunc

    if not path:
        raise ValueError("no path specified")

    start_list = abspath(start).split(sep)
    path_list = abspath(path).split(sep)

    if start_list[0].lower() != path_list[0].lower():
        unc_path, rest = splitunc(path)
        unc_start, rest = splitunc(start)
        if bool(unc_path) ^ bool(unc_start):
            raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
                             % (path, start))
        else:
            raise ValueError("path is on drive %s, start on drive %s"
                             % (path_list[0], start_list[0]))

    # Work out how much of the filepath is shared by start and path.
    for i in range(min(len(start_list), len(path_list))):
        if start_list[i].lower() != path_list[i].lower():
            break
    else:
        i += 1

    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return curdir
    return join(*rel_list)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def plot_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): return run(arguments, content, options, state_machine, state, lineno)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def _option_boolean(arg):
    if not arg or not arg.strip():
        # no argument given, assume used as a flag
        return True
    elif arg.strip().lower() in ('no', '0', 'false'):
        return False
    elif arg.strip().lower() in ('yes', '1', 'true'):
        return True
    else:
        raise ValueError('"%s" unknown boolean' % arg)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
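A few illustrative calls for _option_boolean above:

    _option_boolean(None)      # True: no argument, treated as a bare flag
    _option_boolean('no')      # False
    _option_boolean('True')    # True (case-insensitive)
    _option_boolean('maybe')   # raises ValueError('"maybe" unknown boolean')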
def _option_align(arg): return directives.choice(arg, ("top", "middle", "bottom", "left", "center", "right"))
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def setup(app): setup.app = app setup.config = app.config setup.confdir = app.confdir options = {'alt': directives.unchanged, 'height': directives.length_or_unitless, 'width': directives.length_or_percentage_or_unitless, 'scale': directives.nonnegative_int, 'align': _option_align, 'class': directives.class_option, 'include-source': _option_boolean, 'format': _option_format, 'context': directives.flag, 'nofigs': directives.flag, 'encoding': directives.encoding } app.add_directive('plot', plot_directive, True, (0, 2, False), **options) app.add_config_value('plot_pre_code', None, True) app.add_config_value('plot_include_source', False, True) app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True) app.add_config_value('plot_basedir', None, True) app.add_config_value('plot_html_show_formats', True, True) app.add_config_value('plot_rcparams', {}, True) app.add_config_value('plot_apply_rcparams', False, True) app.add_config_value('plot_working_directory', None, True) app.add_config_value('plot_template', None, True) app.connect('doctree-read', mark_plot_labels)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def contains_doctest(text):
    try:
        # check if it's valid Python as-is
        compile(text, '<string>', 'exec')
        return False
    except SyntaxError:
        pass
    r = re.compile(r'^\s*>>>', re.M)
    m = r.search(text)
    return bool(m)
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
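A short sketch of contains_doctest above, assuming re is imported next to it:

    contains_doctest("x = 1\nprint(x)")       # False: compiles as plain Python
    contains_doctest(">>> x = 1\n>>> x\n1")   # True: does not compile, doctest prompts found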
def split_code_at_show(text):
    """ Split code at plt.show() """
    parts = []
    is_doctest = contains_doctest(text)

    part = []
    for line in text.split("\n"):
        if (not is_doctest and line.strip() == 'plt.show()') or \
           (is_doctest and line.strip() == '>>> plt.show()'):
            part.append(line)
            parts.append("\n".join(part))
            part = []
        else:
            part.append(line)
    if "\n".join(part).strip():
        parts.append("\n".join(part))
    return parts
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
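A sketch of how split_code_at_show above splits a two-figure script:

    script = "plt.plot([1, 2])\nplt.show()\nplt.plot([3, 4])\nplt.show()"
    split_code_at_show(script)
    # -> ['plt.plot([1, 2])\nplt.show()', 'plt.plot([3, 4])\nplt.show()']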
def __init__(self, basename, dirname):
    self.basename = basename
    self.dirname = dirname
    self.formats = []
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def filenames(self): return [self.filename(fmt) for fmt in self.formats]
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def run_code(code, code_path, ns=None, function_name=None): """ Import a Python module from a path, and run the function given by name, if function_name is not None. """ # Change the working directory to the directory of the example, so # it can get at its data files, if any. Add its path to sys.path # so it can import any helper modules sitting beside it. pwd = os.getcwd() old_sys_path = list(sys.path) if setup.config.plot_working_directory is not None: try: os.chdir(setup.config.plot_working_directory) except OSError as err: raise OSError(str(err) + '\n`plot_working_directory` option in' 'Sphinx configuration file must be a valid ' 'directory path') except TypeError as err: raise TypeError(str(err) + '\n`plot_working_directory` option in ' 'Sphinx configuration file must be a string or ' 'None') sys.path.insert(0, setup.config.plot_working_directory) elif code_path is not None: dirname = os.path.abspath(os.path.dirname(code_path)) os.chdir(dirname) sys.path.insert(0, dirname) # Redirect stdout stdout = sys.stdout sys.stdout = cStringIO.StringIO() # Reset sys.argv old_sys_argv = sys.argv sys.argv = [code_path] try: try: code = unescape_doctest(code) if ns is None: ns = {} if not ns: if setup.config.plot_pre_code is None: exec "import numpy as np\nfrom matplotlib import pyplot as plt\n" in ns else: exec setup.config.plot_pre_code in ns if "__main__" in code: exec "__name__ = '__main__'" in ns exec code in ns if function_name is not None: exec function_name + "()" in ns except (Exception, SystemExit), err: raise PlotError(traceback.format_exc()) finally: os.chdir(pwd) sys.argv = old_sys_argv sys.path[:] = old_sys_path sys.stdout = stdout return ns
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def render_figures(code, code_path, output_dir, output_base, context, function_name, config): """ Run a pyplot script and save the low and high res PNGs and a PDF in outdir. Save the images under *output_dir* with file names derived from *output_base* """ # -- Parse format list default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200} formats = [] plot_formats = config.plot_formats if isinstance(plot_formats, (str, unicode)): plot_formats = eval(plot_formats) for fmt in plot_formats: if isinstance(fmt, str): formats.append((fmt, default_dpi.get(fmt, 80))) elif type(fmt) in (tuple, list) and len(fmt)==2: formats.append((str(fmt[0]), int(fmt[1]))) else: raise PlotError('invalid image format "%r" in plot_formats' % fmt) # -- Try to determine if all images already exist code_pieces = split_code_at_show(code) # Look for single-figure output files first # Look for single-figure output files first all_exists = True img = ImageFile(output_base, output_dir) for format, dpi in formats: if out_of_date(code_path, img.filename(format)): all_exists = False break img.formats.append(format) if all_exists: return [(code, [img])] # Then look for multi-figure output files results = [] all_exists = True for i, code_piece in enumerate(code_pieces): images = [] for j in xrange(1000): if len(code_pieces) > 1: img = ImageFile('%s_%02d_%02d' % (output_base, i, j), output_dir) else: img = ImageFile('%s_%02d' % (output_base, j), output_dir) for format, dpi in formats: if out_of_date(code_path, img.filename(format)): all_exists = False break img.formats.append(format) # assume that if we have one, we have them all if not all_exists: all_exists = (j > 0) break images.append(img) if not all_exists: break results.append((code_piece, images)) if all_exists: return results # We didn't find the files, so build them results = [] if context: ns = plot_context else: ns = {} for i, code_piece in enumerate(code_pieces): if not context or config.plot_apply_rcparams: clear_state(config.plot_rcparams) run_code(code_piece, code_path, ns, function_name) images = [] fig_managers = _pylab_helpers.Gcf.get_all_fig_managers() for j, figman in enumerate(fig_managers): if len(fig_managers) == 1 and len(code_pieces) == 1: img = ImageFile(output_base, output_dir) elif len(code_pieces) == 1: img = ImageFile("%s_%02d" % (output_base, j), output_dir) else: img = ImageFile("%s_%02d_%02d" % (output_base, i, j), output_dir) images.append(img) for format, dpi in formats: try: figman.canvas.figure.savefig(img.filename(format), dpi=dpi) except Exception,err: raise PlotError(traceback.format_exc()) img.formats.append(format) results.append((code_piece, images)) if not context or config.plot_apply_rcparams: clear_state(config.plot_rcparams) return results
Solid-Mechanics/matplotlib-4-abaqus
[ 34, 25, 34, 3, 1426720624 ]
def getCraftedText( fileName, text, repository = None ):
    "Home a gcode linear move file or text."
    return getCraftedTextFromText(archive.getTextIfEmpty(fileName, text), repository)
makerbot/ReplicatorG
[ 396, 234, 396, 37, 1255371319 ]
def getNewRepository():
    'Get new repository.'
    return HomeRepository()
makerbot/ReplicatorG
[ 396, 234, 396, 37, 1255371319 ]
def __init__(self):
    "Set the default settings, execute title & settings fileName."
    skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.home.html', self)
    self.fileNameInput = settings.FileNameInput().getFromFileName( fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Home', self, '')
    self.openWikiManualHelpPage = settings.HelpPage().getOpenFromAbsolute('http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Home')
    self.activateHome = settings.BooleanSetting().getFromValue('Activate Home', self, True )
    self.nameOfHomeFile = settings.StringSetting().getFromValue('Name of Home File:', self, 'home.gcode')
    self.executeTitle = 'Home'
makerbot/ReplicatorG
[ 396, 234, 396, 37, 1255371319 ]
def __init__(self):
    self.distanceFeedRate = gcodec.DistanceFeedRate()
    self.extruderActive = False
    self.highestZ = None
    self.homeLines = []
    self.layerCount = settings.LayerCount()
    self.lineIndex = 0
    self.lines = None
    self.oldLocation = None
    self.shouldHome = False
    self.travelFeedRateMinute = 957.0
makerbot/ReplicatorG
[ 396, 234, 396, 37, 1255371319 ]
def addHomeTravel( self, splitLine ):
    "Add the home travel gcode."
    location = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
    self.highestZ = max( self.highestZ, location.z )
    if not self.shouldHome:
        return
    self.shouldHome = False
    if self.oldLocation == None:
        return
    if self.extruderActive:
        self.distanceFeedRate.addLine('M103')
    self.addHopUp( self.oldLocation )
    self.distanceFeedRate.addLinesSetAbsoluteDistanceMode(self.homeLines)
    self.addHopUp( self.oldLocation )
    self.addFloat( self.oldLocation, location )
    if self.extruderActive:
        self.distanceFeedRate.addLine('M101')
makerbot/ReplicatorG
[ 396, 234, 396, 37, 1255371319 ]
def getCraftedGcode( self, gcodeText, repository ):
    "Parse gcode text and store the home gcode."
    self.repository = repository
    self.homeLines = settings.getAlterationFileLines(repository.nameOfHomeFile.value)
    if len(self.homeLines) < 1:
        return gcodeText
    self.lines = archive.getTextLines(gcodeText)
    self.parseInitialization( repository )
    for self.lineIndex in xrange(self.lineIndex, len(self.lines)):
        line = self.lines[self.lineIndex]
        self.parseLine(line)
    return self.distanceFeedRate.output.getvalue()
makerbot/ReplicatorG
[ 396, 234, 396, 37, 1255371319 ]
def parseLine(self, line):
    "Parse a gcode line and add it to the bevel gcode."
    splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
    if len(splitLine) < 1:
        return
    firstWord = splitLine[0]
    if firstWord == 'G1':
        self.addHomeTravel(splitLine)
        self.oldLocation = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
    elif firstWord == '(<layer>':
        self.layerCount.printProgressIncrement('home')
        if len(self.homeLines) > 0:
            self.shouldHome = True
    elif firstWord == 'M101':
        self.extruderActive = True
    elif firstWord == 'M103':
        self.extruderActive = False
    self.distanceFeedRate.addLine(line)
makerbot/ReplicatorG
[ 396, 234, 396, 37, 1255371319 ]
def build_args(self, spec, prefix): args = { 'PREFIX': prefix, 'APR': spec['apr'].prefix, 'APU': spec['apr-util'].prefix, 'OPENSSL': spec['openssl'].prefix, 'ZLIB': spec['zlib'].prefix, 'DEBUG': 'yes' if '+debug' in spec else 'no', } # SCons doesn't pass Spack environment variables to the # execution environment. Therefore, we can't use Spack's compiler # wrappers. Use the actual compilers. SCons seems to RPATH things # on its own anyway. args['CC'] = self.compiler.cc # Old versions of serf ignore the ZLIB variable on non-Windows platforms. # Also, there is no UUID variable to specify its installation location. # Pass explicit link flags for both. library_dirs = [] include_dirs = [] for dep in spec.dependencies(deptype='link'): query = self.spec[dep.name] library_dirs.extend(query.libs.directories) include_dirs.extend(query.headers.directories) rpath = self.compiler.cc_rpath_arg args['LINKFLAGS'] = '-L' + ' -L'.join(library_dirs) args['LINKFLAGS'] += ' ' + rpath + (' ' + rpath).join(library_dirs) args['CPPFLAGS'] = '-I' + ' -I'.join(include_dirs) return [key + '=' + value for key, value in args.items()]
LLNL/spack
[ 3244, 1839, 3244, 2847, 1389172932 ]
def test_stmt_simplify():
    ib = tvm.tir.ir_builder.create()
    A = ib.pointer("float32", name="A")
    C = ib.pointer("float32", name="C")
    n = te.size_var("n")
    with ib.for_range(0, n, name="i") as i:
        with ib.if_scope(i < 12):
            A[i] = C[i]

    body = tvm.tir.LetStmt(n, 10, ib.get())
    mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([A, C, n], body))
    body = tvm.tir.transform.Simplify()(mod)["main"].body
    assert isinstance(body.body, tvm.tir.Store)
dmlc/tvm
[ 9142, 2938, 9142, 595, 1476310828 ]
def test_if_likely():
    ib = tvm.tir.ir_builder.create()
    A = ib.pointer("float32", name="A")
    C = ib.pointer("float32", name="C")
    n = te.size_var("n")
    tx = te.thread_axis("threadIdx.x")
    ty = te.thread_axis("threadIdx.y")
    ib.scope_attr(tx, "thread_extent", 32)
    ib.scope_attr(ty, "thread_extent", 32)
    with ib.if_scope(ib.likely(tx * 32 + ty < n)):
        with ib.if_scope(ib.likely(tx * 32 + ty < n)):
            A[tx] = C[tx * 32 + ty]

    body = ib.get()
    mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([A, C, n], body))
    body = tvm.tir.transform.Simplify()(mod)["main"].body
    assert isinstance(body.body.body, tvm.tir.IfThenElse)
    assert not isinstance(body.body.body.then_case, tvm.tir.IfThenElse)
dmlc/tvm
[ 9142, 2938, 9142, 595, 1476310828 ]
def f(i):
    start = W[i]
    extent = W[i + 1] - W[i]
    rv = te.reduce_axis((0, extent))
    return te.sum(X[rv + start], axis=rv)
dmlc/tvm
[ 9142, 2938, 9142, 595, 1476310828 ]
def test_complex_likely_elimination(): def cumsum(X): """ Y[i] = sum(X[:i]) """ (m,) = X.shape s_state = te.placeholder((m + 1,), dtype="int32", name="state") s_init = te.compute((1,), lambda _: tvm.tir.const(0, "int32")) s_update = te.compute((m + 1,), lambda l: s_state[l - 1] + X[l - 1]) return tvm.te.scan(s_init, s_update, s_state, inputs=[X], name="cumsum") def sparse_lengths_sum(data, indices, lengths): oshape = list(data.shape) oshape[0] = lengths.shape[0] length_offsets = cumsum(lengths) def sls(n, d): gg = te.reduce_axis((0, lengths[n])) indices_idx = length_offsets[n] + gg data_idx = indices[indices_idx] data_val = data[data_idx, d] return te.sum(data_val, axis=gg) return te.compute(oshape, sls) m, n, d, i, l = ( te.size_var("m"), te.size_var("n"), te.size_var("d"), te.size_var("i"), te.size_var("l"), ) data_ph = te.placeholder((m, d * 32), name="data") indices_ph = te.placeholder((i,), name="indices", dtype="int32") lengths_ph = te.placeholder((n,), name="lengths", dtype="int32") Y = sparse_lengths_sum(data_ph, indices_ph, lengths_ph) s = te.create_schedule([Y.op]) (n, d) = s[Y].op.axis (do, di) = s[Y].split(d, factor=32) (gg,) = s[Y].op.reduce_axis s[Y].reorder(n, do, gg, di) s[Y].vectorize(di) stmt = tvm.lower(s, [data_ph, indices_ph, lengths_ph, Y], simple_mode=True) assert "if" not in str(stmt)
dmlc/tvm
[ 9142, 2938, 9142, 595, 1476310828 ]
def to_return(self):
    result = {}
    for returnable in self.returnables:
        result[returnable] = getattr(self, returnable)
    result = self._filter_params(result)
    return result
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def name(self):
    if self._values['name'] is None:
        return None
    name = str(self._values['name']).strip()
    if name == '':
        raise F5ModuleError(
            "You must specify a name for this module"
        )
    return name
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def __init__(self, client):
    self.client = client
    self.have = None
    self.want = Parameters(self.client.module.params)
    self.changes = Parameters()
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def _update_changed_options(self):
    changed = {}
    for key in Parameters.updatables:
        if getattr(self.want, key) is not None:
            attr1 = getattr(self.want, key)
            attr2 = getattr(self.have, key)
            if attr1 != attr2:
                changed[key] = attr1
    if changed:
        self.changes = Parameters(changed)
        return True
    return False
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def _pool_is_unlicensed_eula_unaccepted(self, current):
    if current.state != 'LICENSED' and not self.want.accept_eula:
        return True
    return False
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def exists(self):
    collection = self.client.api.cm.shared.licensing.pools_s.get_collection(
        requests_params=dict(
            params="$filter=name+eq+'{0}'".format(self.want.name)
        )
    )
    if len(collection) == 1:
        return True
    elif len(collection) == 0:
        return False
    else:
        raise F5ModuleError(
            "Multiple license pools with the provided name were found!"
        )
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def should_update(self):
    if self._pool_is_licensed():
        return False
    if self._pool_is_unlicensed_eula_unaccepted():
        return False
    return True
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def update_on_device(self):
    collection = self.client.api.cm.shared.licensing.pools_s.get_collection(
        requests_params=dict(
            params="$filter=name+eq+'{0}'".format(self.want.name)
        )
    )
    resource = collection.pop()
    resource.modify(
        state='RELICENSE', method='AUTOMATIC'
    )
    return self._wait_for_license_pool_state_to_activate(resource)
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def read_current_from_device(self):
    collection = self.client.api.cm.shared.licensing.pools_s.get_collection(
        requests_params=dict(
            params="$filter=name+eq+'{0}'".format(self.want.name)
        )
    )
    resource = collection.pop()
    result = resource.attrs
    return Parameters(result)
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def _wait_for_license_pool_state_to_activate(self, pool):
    error_values = ['EXPIRED', 'FAILED']
    # Wait no more than 5 minutes
    for x in range(1, 30):
        pool.refresh()
        if pool.state == 'LICENSED':
            return True
        elif pool.state == 'WAITING_FOR_EULA_ACCEPTANCE':
            pool.modify(
                eulaText=pool.eulaText,
                state='ACCEPTED_EULA'
            )
        elif pool.state in error_values:
            raise F5ModuleError(pool.errorText)
        time.sleep(10)
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def remove(self):
    if self.client.check_mode:
        return True
    self.remove_from_device()
    if self.exists():
        raise F5ModuleError("Failed to delete the license pool")
    return True
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def __init__(self):
    self.supports_check_mode = True
    self.argument_spec = dict(
        accept_eula=dict(
            type='bool',
            default='no',
            choices=BOOLEANS
        ),
        base_key=dict(
            required=False,
            no_log=True
        ),
        name=dict(
            required=True
        ),
        state=dict(
            required=False,
            default='present',
            choices=['absent', 'present']
        )
    )
    self.f5_product_name = 'iworkflow'
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def wrap_matcher(x):
    """Wraps argument in a matcher, if necessary.

    :returns: the argument as-is if it is already a matcher, otherwise wrapped
        in an :py:func:`~hamcrest.core.core.isequal.equal_to` matcher.

    """
    if isinstance(x, Matcher):
        return x
    else:
        return equal_to(x)
axbaretto/beam
[ 9, 2, 9, 74, 1474583398 ]
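A usage sketch for wrap_matcher above, assuming the PyHamcrest Matcher and equal_to it references are in scope:

    wrap_matcher(5)           # wrapped: behaves like equal_to(5)
    m = equal_to("x")
    wrap_matcher(m) is m      # True: existing matchers pass through unchanged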
def __init__(self): TestBase.__init__(self, 'abc', """
namhyung/uftrace
[ 2338, 351, 2338, 337, 1392115490 ]
def __init__( self, from_address, to_address, host, username, password, subject="catchy title", port=465, use_ssl=1, kwargs=None
klahnakoski/Bugzilla-ETL
[ 5, 8, 5, 8, 1375457640 ]
def __enter__(self):
    if self.server is not None:
        Log.error("Got a problem")

    if self.settings.use_ssl:
        self.server = smtplib.SMTP_SSL(self.settings.host, self.settings.port)
    else:
        self.server = smtplib.SMTP(self.settings.host, self.settings.port)

    if self.settings.username and self.settings.password:
        self.server.login(self.settings.username, self.settings.password)

    return self
klahnakoski/Bugzilla-ETL
[ 5, 8, 5, 8, 1375457640 ]
def send_email(self, from_address=None, to_address=None, subject=None, text_data=None, html_data=None
klahnakoski/Bugzilla-ETL
[ 5, 8, 5, 8, 1375457640 ]
def _get_socket_fixed(self, host, port, timeout):
    if self.debuglevel > 0:
        print>> sys.stderr, 'connect:', (host, port)
    new_socket = socket.create_connection((host, port), timeout)
    new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
    self.file = smtplib.SSLFakeFile(new_socket)
    return new_socket
klahnakoski/Bugzilla-ETL
[ 5, 8, 5, 8, 1375457640 ]
def _maybe_name(obj):
    """Returns object name if it has one, or a message otherwise.

    This is useful for names that appear in error messages.

    Args:
      obj: Object to get the name of.

    Returns:
      name, "None", or a "no name" message.
    """
    if obj is None:
        return "None"
    elif hasattr(obj, "name"):
        return obj.name
    else:
        return "<no name for %s>" % type(obj)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
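An illustrative sketch of _maybe_name above, using a hypothetical object that carries a name attribute:

    class Named(object):
        name = "my_op"    # hypothetical stand-in for a named graph object

    _maybe_name(None)     # 'None'
    _maybe_name(Named())  # 'my_op'
    _maybe_name(42)       # a "<no name for ...>" message built from type(42)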
def __init__(self, local_init_op=None, ready_op=None, ready_for_local_init_op=None, graph=None, recovery_wait_secs=30, local_init_run_options=None, local_init_feed_dict=None): """Creates a SessionManager. The `local_init_op` is an `Operation` that is run always after a new session was created. If `None`, this step is skipped. The `ready_op` is an `Operation` used to check if the model is ready. The model is considered ready if that operation returns an empty 1D string tensor. If the operation returns a non empty 1D string tensor, the elements are concatenated and used to indicate to the user why the model is not ready. The `ready_for_local_init_op` is an `Operation` used to check if the model is ready to run local_init_op. The model is considered ready if that operation returns an empty 1D string tensor. If the operation returns a non empty 1D string tensor, the elements are concatenated and used to indicate to the user why the model is not ready. If `ready_op` is `None`, the model is not checked for readiness. `recovery_wait_secs` is the number of seconds between checks that the model is ready. It is used by processes to wait for a model to be initialized or restored. Defaults to 30 seconds. Args: local_init_op: An `Operation` run immediately after session creation. Usually used to initialize tables and local variables. ready_op: An `Operation` to check if the model is initialized. ready_for_local_init_op: An `Operation` to check if the model is ready to run local_init_op. graph: The `Graph` that the model will use. recovery_wait_secs: Seconds between checks for the model to be ready. local_init_run_options: RunOptions to be passed to session.run when executing the local_init_op. local_init_feed_dict: Optional session feed dictionary to use when running the local_init_op. Raises: ValueError: If ready_for_local_init_op is not None but local_init_op is None """ # Sets default values of arguments. if graph is None: graph = ops.get_default_graph() self._local_init_op = local_init_op self._ready_op = ready_op self._ready_for_local_init_op = ready_for_local_init_op self._graph = graph self._recovery_wait_secs = recovery_wait_secs self._target = None self._local_init_run_options = local_init_run_options self._local_init_feed_dict = local_init_feed_dict if ready_for_local_init_op is not None and local_init_op is None: raise ValueError("If you pass a ready_for_local_init_op " "you must also pass a local_init_op " ", ready_for_local_init_op [%s]" % ready_for_local_init_op)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def prepare_session(self, master, init_op=None, saver=None, checkpoint_dir=None, checkpoint_filename_with_path=None, wait_for_checkpoint=False, max_wait_secs=7200, config=None, init_feed_dict=None, init_fn=None): """Creates a `Session`. Makes sure the model is ready to be used. Creates a `Session` on 'master'. If a `saver` object is passed in, and `checkpoint_dir` points to a directory containing valid checkpoint files, then it will try to recover the model from checkpoint. If no checkpoint files are available, and `wait_for_checkpoint` is `True`, then the process would check every `recovery_wait_secs`, up to `max_wait_secs`, for recovery to succeed. If the model cannot be recovered successfully then it is initialized by running the `init_op` and calling `init_fn` if they are provided. The `local_init_op` is also run after init_op and init_fn, regardless of whether the model was recovered successfully, but only if `ready_for_local_init_op` passes. If the model is recovered from a checkpoint it is assumed that all global variables have been initialized, in particular neither `init_op` nor `init_fn` will be executed. It is an error if the model cannot be recovered and no `init_op` or `init_fn` or `local_init_op` are passed. Args: master: `String` representation of the TensorFlow master to use. init_op: Optional `Operation` used to initialize the model. saver: A `Saver` object used to restore a model. checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the dir will be used to restore. checkpoint_filename_with_path: Full file name path to the checkpoint file. wait_for_checkpoint: Whether to wait for checkpoint to become available. max_wait_secs: Maximum time to wait for checkpoints to become available. config: Optional `ConfigProto` proto used to configure the session. init_feed_dict: Optional dictionary that maps `Tensor` objects to feed values. This feed dictionary is passed to the session `run()` call when running the init op. init_fn: Optional callable used to initialize the model. Called after the optional `init_op` is called. The callable must accept one argument, the session being initialized. Returns: A `Session` object that can be used to drive the model. Raises: RuntimeError: If the model cannot be initialized or recovered. ValueError: If both checkpoint_dir and checkpoint_filename_with_path are set. """ sess, is_loaded_from_checkpoint = self._restore_checkpoint( master, saver, checkpoint_dir=checkpoint_dir, checkpoint_filename_with_path=checkpoint_filename_with_path, wait_for_checkpoint=wait_for_checkpoint, max_wait_secs=max_wait_secs, config=config) if not is_loaded_from_checkpoint: if init_op is None and not init_fn and self._local_init_op is None: raise RuntimeError("Model is not initialized and no init_op or " "init_fn or local_init_op was given") if init_op is not None: sess.run(init_op, feed_dict=init_feed_dict) if init_fn: init_fn(sess) local_init_success, msg = self._try_run_local_init_op(sess) if not local_init_success: raise RuntimeError( "Init operations did not make model ready for local_init. " "Init op: %s, init fn: %s, error: %s" % (_maybe_name(init_op), init_fn, msg)) is_ready, msg = self._model_ready(sess) if not is_ready: raise RuntimeError( "Init operations did not make model ready. " "Init op: %s, init fn: %s, local_init_op: %s, error: %s" % (_maybe_name(init_op), init_fn, self._local_init_op, msg)) return sess
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")): """Creates a new `Session` and waits for model to be ready. Creates a new `Session` on 'master'. Waits for the model to be initialized or recovered from a checkpoint. It's expected that another thread or process will make the model ready, and that this is intended to be used by threads/processes that participate in a distributed training configuration where a different thread/process is responsible for initializing or recovering the model being trained. NB: The amount of time this method waits for the session is bounded by max_wait_secs. By default, this function will wait indefinitely. Args: master: `String` representation of the TensorFlow master to use. config: Optional ConfigProto proto used to configure the session. max_wait_secs: Maximum time to wait for the session to become available. Returns: A `Session`. May be None if the operation exceeds the timeout specified by config.operation_timeout_in_ms. Raises: tf.DeadlineExceededError: if the session is not available after max_wait_secs. """ self._target = master if max_wait_secs is None: max_wait_secs = float("Inf") timer = _CountDownTimer(max_wait_secs) while True: sess = session.Session(self._target, graph=self._graph, config=config) not_ready_msg = None not_ready_local_msg = None local_init_success, not_ready_local_msg = self._try_run_local_init_op( sess) if local_init_success: # Successful if local_init_op is None, or ready_for_local_init_op passes is_ready, not_ready_msg = self._model_ready(sess) if is_ready: return sess self._safe_close(sess) # Do we have enough time left to try again? remaining_ms_after_wait = ( timer.secs_remaining() - self._recovery_wait_secs) if remaining_ms_after_wait < 0: raise errors.DeadlineExceededError( None, None, "Session was not ready after waiting %d secs." % (max_wait_secs,)) logging.info("Waiting for model to be ready. " "Ready_for_local_init_op: %s, ready: %s", not_ready_local_msg, not_ready_msg) time.sleep(self._recovery_wait_secs)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _model_ready(self, sess):
    """Checks if the model is ready or not.

    Args:
      sess: A `Session`.

    Returns:
      A tuple (is_ready, msg), where is_ready is True if ready and False
      otherwise, and msg is `None` if the model is ready, a `String` with the
      reason why it is not ready otherwise.
    """
    return _ready(self._ready_op, sess, "Model not ready")
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _try_run_local_init_op(self, sess):
    """Tries to run _local_init_op, if not None, and is ready for local init.

    Args:
      sess: A `Session`.

    Returns:
      A tuple (is_successful, msg), where is_successful is True if
      _local_init_op is None, or we ran _local_init_op, and False otherwise;
      and msg is a `String` with the reason why the model was not ready to run
      local init.
    """
    if self._local_init_op is not None:
        is_ready_for_local_init, msg = self._model_ready_for_local_init(sess)
        if is_ready_for_local_init:
            logging.info("Running local_init_op.")
            sess.run(self._local_init_op, feed_dict=self._local_init_feed_dict,
                     options=self._local_init_run_options)
            logging.info("Done running local_init_op.")
            return True, None
        else:
            return False, msg
    return True, None
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def __init__(self, duration_secs):
    self._start_time_secs = time.time()
    self._duration_secs = duration_secs
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testResidualWrapper(self): wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper x = ops.convert_to_tensor_v2_with_dispatch( np.array([[1., 1., 1.]]), dtype="float32") m = ops.convert_to_tensor_v2_with_dispatch( np.array([[0.1, 0.1, 0.1]]), dtype="float32") base_cell = rnn_cell_impl.GRUCell( 3, kernel_initializer=init_ops.constant_initializer(0.5), bias_initializer=init_ops.constant_initializer(0.5)) g, m_new = base_cell(x, m) wrapper_object = wrapper_type(base_cell) children = wrapper_object._trackable_children() wrapper_object.get_config() # Should not throw an error self.assertIn("cell", children) self.assertIs(children["cell"], base_cell) g_res, m_new_res = wrapper_object(x, m) self.evaluate([variables_lib.global_variables_initializer()]) res = self.evaluate([g, g_res, m_new, m_new_res]) # Residual connections self.assertAllClose(res[1], res[0] + [1., 1., 1.]) # States are left untouched self.assertAllClose(res[2], res[3])
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def residual_with_slice_fn(inp, out):
    inp_sliced = array_ops.slice(inp, [0, 0], [-1, 3])
    return inp_sliced + out
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testDeviceWrapper(self):
    wrapper_type = rnn_cell_wrapper_v2.DeviceWrapper
    x = array_ops.zeros([1, 3])
    m = array_ops.zeros([1, 3])
    cell = rnn_cell_impl.GRUCell(3)
    wrapped_cell = wrapper_type(cell, "/cpu:0")
    children = wrapped_cell._trackable_children()
    wrapped_cell.get_config()  # Should not throw an error
    self.assertIn("cell", children)
    self.assertIs(children["cell"], cell)

    outputs, _ = wrapped_cell(x, m)
    self.assertIn("cpu:0", outputs.device.lower())
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testWrapperKerasStyle(self, wrapper, wrapper_v2):
    """Tests if wrapper cell is instantiated in keras style scope."""
    wrapped_cell_v2 = wrapper_v2(rnn_cell_impl.BasicRNNCell(1))
    self.assertIsNone(getattr(wrapped_cell_v2, "_keras_style", None))

    wrapped_cell = wrapper(rnn_cell_impl.BasicRNNCell(1))
    self.assertFalse(wrapped_cell._keras_style)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testWrapperWeights(self, wrapper): """Tests that wrapper weights contain wrapped cells weights.""" base_cell = layers.SimpleRNNCell(1, name="basic_rnn_cell") rnn_cell = wrapper(base_cell) rnn_layer = layers.RNN(rnn_cell) inputs = ops.convert_to_tensor_v2_with_dispatch([[[1]]], dtype=dtypes.float32) rnn_layer(inputs) wrapper_name = generic_utils.to_snake_case(wrapper.__name__) expected_weights = ["rnn/" + wrapper_name + "/" + var for var in ("kernel:0", "recurrent_kernel:0", "bias:0")] self.assertLen(rnn_cell.weights, 3) self.assertCountEqual([v.name for v in rnn_cell.weights], expected_weights) self.assertCountEqual([v.name for v in rnn_cell.trainable_variables], expected_weights) self.assertCountEqual([v.name for v in rnn_cell.non_trainable_variables], []) self.assertCountEqual([v.name for v in rnn_cell.cell.weights], expected_weights)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testWrapperV2Caller(self, wrapper): """Tests that wrapper V2 is using the LayerRNNCell's caller.""" with legacy_base_layer.keras_style_scope(): base_cell = rnn_cell_impl.MultiRNNCell( [rnn_cell_impl.BasicRNNCell(1) for _ in range(2)]) rnn_cell = wrapper(base_cell) inputs = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32) state = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32) _ = rnn_cell(inputs, [state, state]) weights = base_cell._cells[0].weights self.assertLen(weights, expected_len=2) self.assertTrue(all("_wrapper" in v.name for v in weights))
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testWrapperV2Build(self, wrapper):
    cell = rnn_cell_impl.LSTMCell(10)
    wrapper = wrapper(cell)
    wrapper.build((1,))
    self.assertTrue(cell.built)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testResidualWrapperSerialization(self): wrapper_cls = rnn_cell_wrapper_v2.ResidualWrapper cell = layers.LSTMCell(10) wrapper = wrapper_cls(cell) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) self.assertDictEqual(config, reconstructed_wrapper.get_config()) self.assertIsInstance(reconstructed_wrapper, wrapper_cls) wrapper = wrapper_cls(cell, residual_fn=lambda i, o: i + i + o) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) # Assert the reconstructed function will perform the math correctly. self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 4) def residual_fn(inputs, outputs): return inputs * 3 + outputs wrapper = wrapper_cls(cell, residual_fn=residual_fn) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) # Assert the reconstructed function will perform the math correctly. self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 5)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def dropout_state_filter_visitor(unused_state): return False
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def testDropoutWrapperWithKerasLSTMCell(self):
    wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper
    cell = layers.LSTMCell(10)
    with self.assertRaisesRegex(ValueError, "does not work with "):
        wrapper_cls(cell)

    cell = layers.LSTMCellV2(10)
    with self.assertRaisesRegex(ValueError, "does not work with "):
        wrapper_cls(cell)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def __init__(self, browser_type, target_os, supports_tab_control):
    super(PossibleBrowser, self).__init__(app_type=browser_type, target_os=target_os)
    self._supports_tab_control = supports_tab_control
    self._credentials_path = None
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def browser_type(self): return self.app_type
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def supports_tab_control(self): return self._supports_tab_control
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def Create(self, finder_options): raise NotImplementedError()
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def IsRemote(self): return False
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def UpdateExecutableIfNeeded(self): pass
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def implementor(rpc_code, blocking=False):
    """ RPC implementation function. """
    return partial(_add_implementor, rpc_code, blocking)
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def rpc_bulk(orchestrator, audit_name, rpc_code, *arguments):
    # Get the implementor for the RPC code.
    # Raise NotImplementedError if it's not defined.
    try:
        method, blocking = rpcMap[rpc_code]
    except KeyError:
        raise NotImplementedError("RPC code not implemented: %r" % rpc_code)

    # This can't be done with blocking implementors!
    if blocking:
        raise NotImplementedError(
            "Cannot run blocking RPC calls in bulk. Code: %r" % rpc_code)

    # Prepare a partial function call to the implementor.
    caller = partial(method, orchestrator, audit_name)

    # Use the built-in map() function to issue all the calls.
    # This ensures we support the exact same interface and functionality.
    return map(caller, *arguments)
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def rpc_send_message(orchestrator, audit_name, message):
    # Enqueue the ACK message.
    orchestrator.enqueue_msg(message)
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def __init__(self, orchestrator):
    """
    :param orchestrator: Orchestrator instance.
    :type orchestrator: Orchestrator
    """

    # Keep a reference to the Orchestrator.
    self.__orchestrator = orchestrator

    # Keep a reference to the global RPC map (it's faster this way).
    self.__rpcMap = rpcMap

    # Check all RPC messages have been mapped at this point.
    missing = MSG_RPC_CODES.difference(self.__rpcMap.keys())
    if missing:
        msg = "Missing RPC implementors for codes: %s"
        msg %= ", ".join(str(x) for x in sorted(missing))
        raise SyntaxError(msg)
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def orchestrator(self):
    """
    :returns: Orchestrator instance.
    :rtype: Orchestrator
    """
    return self.__orchestrator
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def execute_rpc(self, audit_name, rpc_code, response_queue, args, kwargs): """ Honor a remote procedure call request from a plugin. :param audit_name: Name of the audit requesting the call. :type audit_name: str :param rpc_code: RPC code. :type rpc_code: int :param response_queue: Response queue identity. :type response_queue: str :param args: Positional arguments to the call. :type args: tuple :param kwargs: Keyword arguments to the call. :type kwargs: dict """ try: # Get the implementor for the RPC code. # Raise NotImplementedError if it's not defined. try: target, blocking = self.__rpcMap[rpc_code] except KeyError: raise NotImplementedError( "RPC code not implemented: %r" % rpc_code) # If it's a blocking call... if blocking: # Run the implementor in a new thread. thread = Thread( target = self._execute_rpc_implementor_background, args = ( Config._context, audit_name, target, response_queue, args, kwargs), ) thread.daemon = True thread.start() # If it's a non-blocking call... else: # Call the implementor directly. self.execute_rpc_implementor( audit_name, target, response_queue, args, kwargs) # Catch exceptions and send them back. except Exception: if response_queue: error = self.prepare_exception(*sys.exc_info()) try: self.orchestrator.messageManager.send( response_queue, (False, error)) except IOError: import warnings warnings.warn("RPC caller died!") pass
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def _execute_rpc_implementor_background(self, context, audit_name, target, response_queue, args, kwargs): """ Honor a remote procedure call request from a plugin, from a background thread. Must only be used as the entry point for said background thread! :param context: Plugin execution context. :type context: PluginContext :param audit_name: Name of the audit requesting the call. :type audit_name: str :param target: RPC implementor function. :type target: callable :param response_queue: Response queue identity. :type response_queue: str :param args: Positional arguments to the call. :type args: tuple :param kwargs: Keyword arguments to the call. :type kwargs: dict """ Config._context = context self.execute_rpc_implementor( audit_name, target, response_queue, args, kwargs)
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def execute_rpc_implementor(self, audit_name, target, response_queue, args, kwargs): """ Honor a remote procedure call request from a plugin. :param audit_name: Name of the audit requesting the call. :type audit_name: str :param target: RPC implementor function. :type target: callable :param response_queue: Response queue identity. :type response_queue: str :param args: Positional arguments to the call. :type args: tuple :param kwargs: Keyword arguments to the call. :type kwargs: dict """ try: # Call the implementor and get the response. response = target(self.orchestrator, audit_name, *args, **kwargs) success = True # Catch exceptions and prepare them for sending. except Exception: if response_queue: response = self.prepare_exception(*sys.exc_info()) success = False # If the call was synchronous, # send the response/error back to the plugin. if response_queue: self.orchestrator.messageManager.send( response_queue, (success, response))
golismero/golismero
[ 814, 275, 814, 29, 1375181550 ]
def __init__(self, **kwargs): self._callback = kwargs.pop('callback')
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name = ET.SubElement(standard, "name") name.text = kwargs.pop('name') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name_key = ET.SubElement(standard, "name") name_key.text = kwargs.pop('name') hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std") seq = ET.SubElement(hide_ip_acl_std, "seq") seq_id = ET.SubElement(seq, "seq-id") seq_id.text = kwargs.pop('seq_id') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name_key = ET.SubElement(standard, "name") name_key.text = kwargs.pop('name') hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std") seq = ET.SubElement(hide_ip_acl_std, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') action = ET.SubElement(seq, "action") action.text = kwargs.pop('action') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name_key = ET.SubElement(standard, "name") name_key.text = kwargs.pop('name') hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std") seq = ET.SubElement(hide_ip_acl_std, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') src_host_any_sip = ET.SubElement(seq, "src-host-any-sip") src_host_any_sip.text = kwargs.pop('src_host_any_sip') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name_key = ET.SubElement(standard, "name") name_key.text = kwargs.pop('name') hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std") seq = ET.SubElement(hide_ip_acl_std, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') src_host_ip = ET.SubElement(seq, "src-host-ip") src_host_ip.text = kwargs.pop('src_host_ip') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name_key = ET.SubElement(standard, "name") name_key.text = kwargs.pop('name') hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std") seq = ET.SubElement(hide_ip_acl_std, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') src_mask = ET.SubElement(seq, "src-mask") src_mask.text = kwargs.pop('src_mask') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name_key = ET.SubElement(standard, "name") name_key.text = kwargs.pop('name') hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std") seq = ET.SubElement(hide_ip_acl_std, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') count = ET.SubElement(seq, "count") callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") standard = ET.SubElement(access_list, "standard") name_key = ET.SubElement(standard, "name") name_key.text = kwargs.pop('name') hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std") seq = ET.SubElement(hide_ip_acl_std, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') log = ET.SubElement(seq, "log") callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name = ET.SubElement(extended, "name") name.text = kwargs.pop('name') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id = ET.SubElement(seq, "seq-id") seq_id.text = kwargs.pop('seq_id') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') action = ET.SubElement(seq, "action") action.text = kwargs.pop('action') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') protocol_type = ET.SubElement(seq, "protocol-type") protocol_type.text = kwargs.pop('protocol_type') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') src_host_any_sip = ET.SubElement(seq, "src-host-any-sip") src_host_any_sip.text = kwargs.pop('src_host_any_sip') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') src_host_ip = ET.SubElement(seq, "src-host-ip") src_host_ip.text = kwargs.pop('src_host_ip') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') src_mask = ET.SubElement(seq, "src-mask") src_mask.text = kwargs.pop('src_mask') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') sport = ET.SubElement(seq, "sport") sport.text = kwargs.pop('sport') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_tcp(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') sport_number_eq_neq_tcp = ET.SubElement(seq, "sport-number-eq-neq-tcp") sport_number_eq_neq_tcp.text = kwargs.pop('sport_number_eq_neq_tcp') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_tcp(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') sport_number_lt_tcp = ET.SubElement(seq, "sport-number-lt-tcp") sport_number_lt_tcp.text = kwargs.pop('sport_number_lt_tcp') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_tcp(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') sport_number_gt_tcp = ET.SubElement(seq, "sport-number-gt-tcp") sport_number_gt_tcp.text = kwargs.pop('sport_number_gt_tcp') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_udp(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') sport_number_eq_neq_udp = ET.SubElement(seq, "sport-number-eq-neq-udp") sport_number_eq_neq_udp.text = kwargs.pop('sport_number_eq_neq_udp') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_udp(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list") ip = ET.SubElement(ip_acl, "ip") access_list = ET.SubElement(ip, "access-list") extended = ET.SubElement(access_list, "extended") name_key = ET.SubElement(extended, "name") name_key.text = kwargs.pop('name') hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext") seq = ET.SubElement(hide_ip_acl_ext, "seq") seq_id_key = ET.SubElement(seq, "seq-id") seq_id_key.text = kwargs.pop('seq_id') sport_number_lt_udp = ET.SubElement(seq, "sport-number-lt-udp") sport_number_lt_udp.text = kwargs.pop('sport_number_lt_udp') callback = kwargs.pop('callback', self._callback) return callback(config)
BRCDcomm/pynos
[ 16, 8, 16, 2, 1437520628 ]