function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
sequence
def _getPythonVersionFromPath(self, pathName, destdir):
    """
    Return the versioned python directory name (e.g. 'python2.4')
    found in pathName, or '' when none is present.
    """
    # Strip the destdir prefix so only the installed path is inspected.
    if destdir and pathName.startswith(destdir):
        pathName = pathName[len(destdir):]
    for component in pathName.split('/'):
        # python2.4 or python2.5 or python3.9 but not python.so
        if component.startswith('python') and self._stringIsPythonVersion(
                component[6:]):
            return component
    return ''
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _isJava(self, m, contents=None):
    """Return a truthy value when magic object m is a jar/java file with contents."""
    if not m:
        return m
    return isinstance(m, (magic.jar, magic.java)) and \
        self._hasContents(m, contents)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _isPerl(self, path, m, f): return self._isPerlModule(path) or ( f.inode.perms() & 0111 and m and m.name == 'script' and 'interpreter' in m.contents and '/bin/perl' in m.contents['interpreter'])
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addDepToMap(self, path, depMap, depType, dep):
    """Add a single dependency to a map, regardless of whether path was listed before."""
    existing = depMap.get(path)
    if existing is None:
        existing = depMap[path] = deps.DependencySet()
    existing.addDep(depType, dep)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _recurseSymlink(path, destdir, fullpath=None): """ Recurse through symlinks in destdir and get the final path and fullpath. If initial fullpath (or destdir+path if fullpath not specified) does not exist, return path. """ if fullpath is None: fullpath = destdir + path while os.path.islink(fullpath): contents = os.readlink(fullpath) if contents.startswith('/'): fullpath = os.path.normpath(contents) else: fullpath = os.path.normpath( os.path.dirname(fullpath)+'/'+contents) return fullpath[len(destdir):], fullpath
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _enforceProvidedPath(self, path, fileType='interpreter',
                         unmanagedError=False):
    """
    Report (error or warn) when path is not managed by conary, otherwise
    return the trove name providing it, preferring :devel over :devellib.
    Results are cached per (path, fileType).
    """
    cacheKey = (path, fileType)
    if cacheKey in self.cachedProviders:
        return self.cachedProviders[cacheKey]
    db = self._getDb()
    troveNames = [x.getName() for x in db.iterTrovesByPath(path)]
    if not troveNames:
        # unmanagedError escalates the report from a warning to an error
        talk = {True: self.error, False: self.warn}[bool(unmanagedError)]
        talk('%s file %s not managed by conary' %(fileType, path))
        return None
    troveName = sorted(troveNames)[0]
    # prefer corresponding :devel to :devellib if it exists
    package, component = troveName.split(':', 1)
    if component in ('devellib', 'lib'):
        for preferredComponent in ('devel', 'devellib'):
            troveSpec = (
                ':'.join((package, preferredComponent)), None, None)
            results = db.findTroves(None, [troveSpec], allowMissing=True)
            if troveSpec in results:
                troveName = results[troveSpec][0][0]
                break
    if troveName not in self.recipe._getTransitiveBuildRequiresNames():
        self.recipe.reportMissingBuildRequires(troveName)
    self.cachedProviders[cacheKey] = troveName
    return troveName
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getRubyLoadPath(self, macros, rubyInvocation, bootstrap):
    """Return a tuple of (invocationString, loadPathList) for ruby."""
    destdir = macros.destdir
    if bootstrap:
        # Run the just-built ruby against its in-destdir libraries.
        rubyLibPath = [destdir + x for x in self.bootstrapRubyLibs]
        rubyInvocation = (('LD_LIBRARY_PATH=%(destdir)s%(libdir)s '
                           'RUBYLIB="' + ':'.join(rubyLibPath) + '" '
                           + rubyInvocation) % macros)
    rubyLoadPath = util.popen(
        "%s -e 'puts $:'" % rubyInvocation).readlines()
    # get gem dir if rubygems is installed
    if os.access('%(bindir)s/gem' %macros, os.X_OK):
        rubyLoadPath.extend(
            util.popen("%s -rubygems -e 'puts Gem.default_dir'"
                       % rubyInvocation).readlines())
    rubyLoadPath = [x.strip() for x in rubyLoadPath if x.startswith('/')]
    loadPathList = rubyLoadPath[:]
    if bootstrap:
        rubyLoadPath = [destdir + x for x in rubyLoadPath]
        rubyInvocation = ('LD_LIBRARY_PATH=%(destdir)s%(libdir)s'
                          ' RUBYLIB="' + ':'.join(rubyLoadPath) + '"'
                          ' %(destdir)s/%(ruby)s') % macros
    return (rubyInvocation, loadPathList)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getRubyFlagsFromPath(self, pathName, rubyVersion): pathList = pathName.split('/') pathList = [ x for x in pathList if x ] foundLib = False foundVer = False flags = set() for dirName in pathList: if not foundLib and dirName.startswith('lib'): foundLib = True flags.add(dirName) elif not foundVer and dirName.split('.')[:1] == rubyVersion.split('.')[:1]: # we only compare major and minor versions due to # ruby api version (dirName) differing from programs # version (rubyVersion) foundVer = True flags.add(dirName) if foundLib and foundVer: break return flags
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getperlincpath(self, perl, destdir):
    """
    Fetch the perl @INC path, falling back to bootstrapPerlIncPath
    only if perl cannot be run. All elements of the search path will
    be resolved against symlinks in destdir if they exist. (CNY-2949)
    """
    if not perl:
        return []
    p = util.popen(r"""%s -e 'print join("\n", @INC)'""" %perl)
    rawIncPath = p.readlines()
    # make sure that the command completed successfully
    try:
        rc = p.close()
        cleaned = [x.strip() for x in rawIncPath
                   if not x.startswith('.')]
        return [self._recurseSymlink(x, destdir)[0] for x in cleaned]
    except RuntimeError:
        # perl could not be run; use the bootstrap path instead
        return [self._recurseSymlink(x, destdir)[0]
                for x in self.bootstrapPerlIncPath]
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _perlDestInc(destdir, perlDestInc): return ' '.join(['-I' + destdir + x for x in perlDestInc])
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getPython(self, macros, path):
    """
    Takes a path.
    Returns, for that path, a tuple of
      - the preferred instance of python to use
      - whether that instance is in the destdir
    Returns (None, None) when no interpreter can be found.
    """
    m = self.recipe.magic[path]
    if m and m.name == 'script' and 'python' in m.contents['interpreter']:
        # Script names its interpreter explicitly; use exactly that.
        candidates = [m.contents['interpreter']]
    else:
        pythonVersion = self._getPythonVersionFromPath(path, None)
        # After PATH, fall back to %(bindir)s.  If %(bindir)s should be
        # preferred, it needs to be earlier in the PATH.  Include
        # unversioned python as a last resort for confusing cases.
        shellPath = os.environ.get('PATH', '').split(':') + ['%(bindir)s']
        candidates = []
        if pythonVersion:
            candidates = [os.path.join(x, pythonVersion)
                          for x in shellPath]
        candidates.extend(os.path.join(x, 'python') for x in shellPath)
    # First preference: an interpreter inside the destdir.
    for candidate in candidates:
        destPath = ('%(destdir)s' + candidate) %macros
        if os.access(destPath, os.X_OK):
            return (destPath, True)
    # Second preference: an interpreter on the build system.
    for candidate in candidates:
        sysPath = candidate %macros
        if os.access(sysPath, os.X_OK):
            self._enforceProvidedPath(sysPath)
            return (sysPath, False)
    # Specified python not found on system (usually because of
    # bad interpreter path -- CNY-2050)
    if len(candidates) == 1:
        missingPythonPath = '%s ' % candidates[0]
    else:
        missingPythonPath = ''
    self.warn('Python interpreter %snot found for %s',
              missingPythonPath, path)
    return (None, None)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def __init__(self, *args, **keywords):
    """Initialize per-run caches and state for the provides policy."""
    _dependency.__init__(self, *args, **keywords)
    # user-supplied (filterspec, provision) pairs; consumed in preProcess
    self.provisions = []
    self.sonameSubtrees = set()
    # lazily-populated interpreter/search-path state
    self.sysPath = None
    self.monodisPath = None
    self.rubyInterpreter = None
    self.rubyVersion = None
    self.rubyInvocation = None
    self.rubyLoadPath = None
    self.perlIncPath = None
    self.pythonSysPathMap = {}
    self.exceptDeps = []
    policy.Policy.__init__(self, *args, **keywords)
    self.depCache = self.dbDepCacheClass(self._getDb())
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def preProcess(self): macros = self.macros if self.bootstrapPythonFlags is not None: self.bootstrapPythonFlags = set(x % macros for x in self.bootstrapPythonFlags) if self.bootstrapSysPath: self.bootstrapSysPath = [x % macros for x in self.bootstrapSysPath] if self.pythonFlagNamespace is not None: self.pythonFlagNamespace = self.pythonFlagNamespace % macros if self.bootstrapPerlIncPath: self.bootstrapPerlIncPath = [x % macros for x in self.bootstrapPerlIncPath] self.rootdir = self.rootdir % macros self.fileFilters = [] self.binDirs = frozenset( x % macros for x in [ '%(bindir)s', '%(sbindir)s', '%(essentialbindir)s', '%(essentialsbindir)s', '%(libexecdir)s', ]) self.noProvDirs = frozenset( x % macros for x in [ '%(testdir)s', '%(debuglibdir)s', ]).union(self.binDirs) exceptDeps = [] for fE, rE in self.exceptDeps: try: exceptDeps.append((filter.Filter(fE, macros), re.compile(rE % self.macros))) except sre_constants.error, e: self.error('Bad regular expression %s for file spec %s: %s', rE, fE, e) self.exceptDeps= exceptDeps for filespec, provision in self.provisions: self.fileFilters.append( (filter.Filter(filespec, macros), provision % macros)) del self.provisions _dependency.preProcess(self)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def whiteOut(self, path, pkgFiles):
    """Remove intentionally discarded provides for path from each package."""
    for pkg, f in pkgFiles:
        if not (self.exceptDeps and path in pkg.providesMap):
            continue
        kept = deps.DependencySet()
        for depClass, dep in pkg.providesMap[path].iterDeps():
            for filt, exceptRe in self.exceptDeps:
                if filt.match(path):
                    matchName = '%s: %s' %(depClass.tagName, str(dep))
                    if exceptRe.match(matchName):
                        # found one to not copy
                        dep = None
                        break
            if dep is not None:
                kept.addDep(depClass, dep)
        pkg.providesMap[path] = kept
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def addPathDeps(self, path, dirpath, pkgFiles):
    """Add file-path provides for path where appropriate."""
    # Because paths can change, individual files do not provide their
    # paths.  However, within a trove, a file does provide its name.
    # Furthermore, non-regular files can be path dependency targets.
    # Therefore, we have to handle this case a bit differently.
    for pkg, f in pkgFiles:
        if dirpath in self.binDirs and not isinstance(f, files.Directory):
            # CNY-930: automatically export paths in bindirs
            # CNY-1721: but not directories in bindirs
            f.flags.isPathDependencyTarget(True)
        if f.flags.isPathDependencyTarget():
            pkg.provides.addDep(deps.FileDependencies,
                                deps.Dependency(path))
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getELFinfo(self, m, soname): if 'provides' in m.contents and m.contents['provides']: return m.contents['provides'] else: # we need to synthesize some provides information return [('soname', soname, ())]
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getPythonProvidesSysPath(self, path):
    """Generate an ordered list of python paths for the target package.
    This includes the current system path, plus any paths added by the
    new package in the destdir through .pth files or a newly built
    python.

    @return: (sysPath, pythonVersion)
    """
    pythonPath, bootstrapPython = self._getPython(self.macros, path)
    if not pythonPath:
        # Most likely bad interpreter path in a .py file
        return (None, None)
    cached = self.pythonSysPathMap.get(pythonPath)
    if cached is not None:
        return cached
    destdir = self.macros.destdir
    libdir = self.macros.libdir
    pythonVersion = self._getPythonVersion(pythonPath, destdir, libdir)
    # Get default sys.path from python interpreter, either the one just
    # built (in the case of a python bootstrap) or from the system.
    systemPaths = set(self._getPythonSysPath(pythonPath, destdir, libdir,
                                             useDestDir=False))
    # Now add paths from the destdir's site-packages, typically due to
    # newly installed .pth files.
    systemPaths.update(self._getPythonSysPath(pythonPath, destdir, libdir,
                                              useDestDir=True))
    # Sort in descending order so that the longest path matches first.
    sysPath = sorted(self._stripDestDir(systemPaths, destdir),
                     reverse=True)
    self.pythonSysPathMap[pythonPath] = (sysPath, pythonVersion)
    return self.pythonSysPathMap[pythonPath]
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addPythonProvides(self, path, m, pkgFiles, macros):
    """Add python module provides for every sys.path entry that reaches path."""
    if not self._isPythonModuleCandidate(path):
        return
    sysPath, pythonVersion = self._getPythonProvidesSysPath(path)
    if not sysPath:
        return
    # Add provides for every match in sys.path.  For example, PIL.Imaging
    # and Imaging should both be provided since they are both reachable
    # names.
    for sysPathEntry in sysPath:
        if not path.startswith(sysPathEntry):
            continue
        newDepPath = path[len(sysPathEntry)+1:]
        if newDepPath.split('.')[0] == '__init__':
            # we don't allow bare __init__ as a python import
            # hopefully we'll find this init as a deeper import at some
            # other point in the sysPath
            continue
        elif ('site-packages' in newDepPath
              or 'lib-dynload' in newDepPath
              or 'plat-linux' in newDepPath):
            # site-packages should be specifically excluded since both it
            # and its parent are always in sys.path.  However, invalid
            # python package names in general are allowed due to certain
            # cases where relative imports happen inside a hyphenated
            # directory and the requires detector picks up on that.
            continue
        # Note that it's possible to have a false positive here.  For
        # example, in the PIL case if PIL/__init__.py did not exist,
        # PIL.Imaging would still be provided.  The odds of this causing
        # problems are so small that it is not checked for here.
        self._addPythonProvidesSingle(path, m, pkgFiles, macros,
                                      newDepPath)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addOneCILProvide(self, pkgFiles, path, name, ver):
    """Record one CIL assembly provide (name, version) for path in each package."""
    dep = deps.Dependency(name, [(ver, deps.FLAG_SENSE_REQUIRED)])
    for pkg, _ in pkgFiles:
        self._addDepToMap(path, pkg.providesMap,
                          deps.CILDependencies, dep)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addCILProvides(self, path, m, pkgFiles, macros):
    """Extract assembly name/version from a CIL file via monodis and record it."""
    if not m or m.name != 'CIL':
        return
    fullpath = macros.destdir + path
    if not self.monodisPath:
        self.monodisPath = self._getmonodis(macros, path)
        if not self.monodisPath:
            return
    p = util.popen('%s --assembly %s' %(self.monodisPath, fullpath))
    name = None
    ver = None
    for line in (x.strip() for x in p.readlines()):
        if 'Name:' in line:
            name = line.split()[1]
        elif 'Version:' in line:
            ver = line.split()[1]
    p.close()
    # monodis did not give us any info
    if not name or not ver:
        return
    self._addOneCILProvide(pkgFiles, path, name, ver)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addRubyProvides(self, path, m, pkgFiles, macros, prov):
    """Record a ruby provide for prov, flagged by path-derived ruby flags."""
    rawFlags = self._getRubyFlagsFromPath(path, self.rubyVersion)
    flagged = [(x, deps.FLAG_SENSE_REQUIRED)
               for x in sorted(list(rawFlags))]
    dep = deps.Dependency(prov, flagged)
    for pkg, _ in pkgFiles:
        self._addDepToMap(path, pkg.providesMap,
                          deps.RubyDependencies, dep)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addPerlProvides(self, path, m, pkgFiles):
    """Record a perl module provide for path when it lies under @INC."""
    # do not call perl to get @INC unless we have something to do for perl
    self._fetchPerlIncPath()

    # It is possible that we'll want to allow user-specified
    # additions to the perl search path, but if so, we need
    # to path-encode those files, so we can't just prepend
    # those elements to perlIncPath.  We would need to end up
    # with something like "perl: /path/to/foo::bar" because
    # for perl scripts that don't modify @INC, they could not
    # find those scripts.  It is not clear that we need this
    # at all, because most if not all of those cases would be
    # intra-package dependencies that we do not want to export.
    depPath = None
    for pathPrefix in self.perlIncPath:
        if path.startswith(pathPrefix):
            depPath = path[len(pathPrefix)+1:]
            break
    if depPath is None:
        return

    # foo/bar/baz.pm -> foo::bar::baz
    prov = '::'.join(depPath.split('/')).rsplit('.', 1)[0]
    dep = deps.Dependency(prov, [])
    for pkg, _ in pkgFiles:
        self._addDepToMap(path, pkg.providesMap,
                          deps.PerlDependencies, dep)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def __init__(self, *args, **keywords):
    """Initialize caches, interpreter state, and allowed instruction sets."""
    _dependency.__init__(self, *args, **keywords)
    # bootstrap configuration, interpolated with macros in preProcess
    self.bootstrapPythonFlags = set()
    self.bootstrapSysPath = []
    self.bootstrapPerlIncPath = []
    self.bootstrapRubyLibs = []
    self.pythonFlagNamespace = None
    self.sonameSubtrees = set()
    self._privateDepMap = {}
    self.rpathFixup = []
    self.exceptDeps = []
    # lazily-populated interpreter state
    self.sysPath = None
    self.monodisPath = None
    self.rubyInterpreter = None
    self.rubyVersion = None
    self.rubyInvocation = None
    self.rubyLoadPath = None
    self.perlReqs = None
    self.perlPath = None
    self.perlIncArgs = None
    self._CILPolicyProvides = {}
    self.pythonSysPathMap = {}
    self.pythonModuleFinderMap = {}
    self.troveDeps = {}
    policy.Policy.__init__(self, *args, **keywords)
    self.depCache = self.dbDepCacheClass(self._getDb())

    # Requirements are only added for instruction sets present in the
    # build flavor (both native and cross-target).
    ISD = deps.InstructionSetDependency
    TISD = deps.TargetInstructionSetDependency
    instructionDeps = list(
        self.recipe._buildFlavor.iterDepsByClass(ISD))
    instructionDeps += list(
        self.recipe._buildFlavor.iterDepsByClass(TISD))
    self.allowableIsnSets = [x.name for x in instructionDeps]
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def preProcess(self): macros = self.macros self.systemLibPaths = set(os.path.normpath(x % macros) for x in self.sonameSubtrees) self.bootstrapPythonFlags = set(x % macros for x in self.bootstrapPythonFlags) self.bootstrapSysPath = [x % macros for x in self.bootstrapSysPath] if self.pythonFlagNamespace is not None: self.pythonFlagNamespace = self.pythonFlagNamespace % macros self.bootstrapPerlIncPath = [x % macros for x in self.bootstrapPerlIncPath] # anything that any buildreqs have caused to go into ld.so.conf # or ld.so.conf.d/*.conf is a system library by definition, # but only look at paths, not (for example) "include" lines if os.path.exists('/etc/ld.so.conf'): self.systemLibPaths |= set(os.path.normpath(x.strip()) for x in file('/etc/ld.so.conf').readlines() if x.startswith('/')) for fileName in fixedglob.glob('/etc/ld.so.conf.d/*.conf'): self.systemLibPaths |= set(os.path.normpath(x.strip()) for x in file(fileName).readlines() if x.startswith('/')) self.rpathFixup = [(filter.Filter(x, macros), y % macros) for x, y in self.rpathFixup] exceptDeps = [] for fE, rE in self.exceptDeps: try: exceptDeps.append((filter.Filter(fE, macros), re.compile(rE % macros))) except sre_constants.error, e: self.error('Bad regular expression %s for file spec %s: %s', rE, fE, e) self.exceptDeps= exceptDeps _dependency.preProcess(self)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def doFile(self, path): pkgs = self.recipe.autopkg.findComponents(path) if not pkgs: return pkgFiles = [(x, x.getFile(path)) for x in pkgs] # this file object used only for tests, not for doing packaging f = pkgFiles[0][1] macros = self.recipe.macros fullpath = macros.destdir + path m = self.recipe.magic[path] if self._isELF(m, 'requires'): isnset = m.contents['isnset'] if isnset in self.allowableIsnSets: # only add requirements for architectures # that we are actually building for (this may include # major and minor architectures) self._addELFRequirements(path, m, pkgFiles) # now go through explicit requirements for info in self.included: for filt in self.included[info]: if filt.match(path): self._markManualRequirement(info, path, pkgFiles, m) # now check for automatic dependencies besides ELF if f.inode.perms() & 0111 and m and m.name == 'script': interp = m.contents['interpreter'] if interp.strip().startswith('/') and self._checkInclusion(interp, path): # no interpreter string warning is in BadInterpreterPaths if not (os.path.exists(interp) or os.path.exists(macros.destdir+interp)): # this interpreter not on system, warn # cannot be an error to prevent buildReq loops self.warn('interpreter "%s" (referenced in %s) missing', interp, path) # N.B. 
no special handling for /{,usr/}bin/env here; # if there has been an exception to # NormalizeInterpreterPaths, then it is a # real dependency on the env binary self._addRequirement(path, interp, [], pkgFiles, deps.FileDependencies) if (f.inode.perms() & 0111 and m and m.name == 'script' and os.path.basename(m.contents['interpreter']).startswith('python')): self._addPythonRequirements(path, fullpath, pkgFiles) elif self._isPython(path): self._addPythonRequirements(path, fullpath, pkgFiles) if (f.inode.perms() & 0111 and m and m.name == 'script' and os.path.basename(m.contents['interpreter']).startswith('ruby')): self._addRubyRequirements(path, fullpath, pkgFiles, script=True) elif '/ruby/' in path and path.endswith('.rb'): self._addRubyRequirements(path, fullpath, pkgFiles, script=False) if self._isCIL(m): if not self.monodisPath: self.monodisPath = self._getmonodis(macros, path) if not self.monodisPath: return p = util.popen('%s --assemblyref %s' %( self.monodisPath, fullpath)) for line in [ x.strip() for x in p.readlines() ]: if ': Version=' in line: ver = line.split('=')[1] elif 'Name=' in line: name = line.split('=')[1] self._addRequirement(path, name, [ver], pkgFiles, deps.CILDependencies) p.close() elif self.CILPolicyRE.match(path): name, ver = self._CILPolicyProvides[path] self._addRequirement(path, name, [ver], pkgFiles, deps.CILDependencies) if self._isJava(m, 'requires'): self._addJavaRequirements(path, m, pkgFiles) db = self._getDb() if self._isPerl(path, m, f): perlReqs = self._getPerlReqs(path, fullpath) for req in perlReqs: thisReq = deps.parseDep('perl: ' + req) if db.getTrovesWithProvides([thisReq]) or [ x for x in self.recipe.autopkg.getComponents() if x.provides.satisfies(thisReq)]: self._addRequirement(path, req, [], pkgFiles, deps.PerlDependencies) self.whiteOut(path, pkgFiles) self.unionDeps(path, pkgFiles)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def whiteOut(self, path, pkgFiles):
    """Remove intentionally discarded requirements for path from each package."""
    for pkg, _ in pkgFiles:
        if not (self.exceptDeps and path in pkg.requiresMap):
            continue
        kept = deps.DependencySet()
        for depClass, dep in pkg.requiresMap[path].iterDeps():
            for filt, exceptRe in self.exceptDeps:
                if filt.match(path):
                    matchName = '%s: %s' %(depClass.tagName, str(dep))
                    if exceptRe.match(matchName):
                        # found one to not copy
                        dep = None
                        break
            if dep is not None:
                kept.addDep(depClass, dep)
        pkg.requiresMap[path] = kept
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addELFRequirements(self, path, m, pkgFiles):
    """
    Add ELF and abi dependencies, including paths when not shlibs
    """
    def appendUnique(ul, items):
        # extend ul with items, preserving order and skipping duplicates
        for item in items:
            if item not in ul:
                ul.append(item)

    def _canonicalRPATH(rpath, glob=False):
        # normalize all elements of RPATH
        l = [ util.normpath(x) for x in rpath.split(':') ] # CNY-3425
        # prune system paths and relative paths from RPATH
        l = [ x for x in l
              if x not in self.systemLibPaths and x.startswith('/') ]
        if glob:
            destdir = self.macros.destdir
            dlen = len(destdir)
            gl = []
            for item in l:
                # prefer destdir elements
                paths = util.braceGlob(destdir + item)
                paths = [ os.path.normpath(x[dlen:]) for x in paths ]
                appendUnique(gl, paths)
                # then look on system
                paths = util.braceGlob(item)
                paths = [ os.path.normpath(x) for x in paths ]
                appendUnique(gl, paths)
            l = gl
        return l

    rpathList = []
    def _findSonameInRpath(soname):
        for rpath in rpathList:
            destpath = '/'.join((self.macros.destdir, rpath, soname))
            if os.path.exists(destpath):
                return rpath
            destpath = '/'.join((rpath, soname))
            if os.path.exists(destpath):
                return rpath
        # didn't find anything
        return None

    # fixup should come first so that its path elements can override
    # the included RPATH if necessary
    if self.rpathFixup:
        for f, rpath in self.rpathFixup:
            if f.match(path):
                # synthetic RPATH items are globbed
                rpathList = _canonicalRPATH(rpath, glob=True)
                break

    if m and 'RPATH' in m.contents and m.contents['RPATH']:
        rpathList += _canonicalRPATH(m.contents['RPATH'])

    depSet = self._createELFDepSet(m, m.contents['requires'],
                                   libPathMap=self._privateDepMap,
                                   getRPATH=_findSonameInRpath,
                                   path=path, isProvides=False)
    for pkg, _ in pkgFiles:
        self._addDepSetToMap(path, pkg.requiresMap, depSet)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getPythonRequiresModuleFinder(self, pythonPath, destdir, libdir, sysPath, bootstrapPython): if self.recipe.isCrossCompiling(): return None if pythonPath not in self.pythonModuleFinderMap: try: self.pythonModuleFinderMap[pythonPath] = pydeps.moduleFinderProxy(pythonPath, destdir, libdir, sysPath, self.error) except pydeps.ModuleFinderInitializationError, e: if bootstrapPython: # another case, like isCrossCompiling, where we cannot # run pythonPath -- ModuleFinderInitializationError # is raised before looking at any path, so should # be consistent for any pythonPath self.pythonModuleFinderMap[pythonPath] = None else: raise return self.pythonModuleFinderMap[pythonPath]
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addPythonRequirements(self, path, fullpath, pkgFiles):
    """Discover python import requirements for one file via the module finder."""
    destdir = self.recipe.macros.destdir
    destDirLen = len(destdir)

    (sysPath, pythonModuleFinder, pythonVersion) = \
        self._getPythonRequiresSysPath(path)
    if not sysPath:
        # Probably a bad interpreter path
        return
    if not pythonModuleFinder:
        # We cannot (reliably) determine runtime python requirements
        # in the cross-compile case, so don't even try (for
        # consistency).
        return

    pythonModuleFinder.load_file(fullpath)
    data = pythonModuleFinder.getDepsForPath(fullpath)
    if data['result'] != 'ok':
        self.info('File %s is not a valid python file', path)
        return

    for depPath in data['paths']:
        if not depPath:
            continue
        flags = None
        absPath = None
        if depPath.startswith(destdir):
            depPath = depPath[destDirLen:]
            flags = self._getPythonFlagsFromPath(depPath)
            # The file providing this dependency is part of this package.
            absPath = depPath
        for sysPathEntry in sysPath:
            if depPath.startswith(sysPathEntry):
                newDepPath = depPath[len(sysPathEntry)+1:]
                if newDepPath not in ('__init__', '__init__.py'):
                    # we don't allow bare __init__'s as dependencies.
                    # hopefully we'll find this at deeper level in
                    # in the sysPath
                    if flags is None:
                        # this is provided by the system, so we have
                        # to see with which flags it is provided with
                        flags = self._getPythonFlags(depPath,
                            self.bootstrapPythonFlags)
                    depPath = newDepPath
                break

        if depPath.startswith('/'):
            # a python file not found in sys.path will not have been
            # provided, so we must not depend on it either
            return
        if not (depPath.endswith('.py') or depPath.endswith('.pyc') or
                depPath.endswith('.so')):
            # Not something we provide, so not something we can
            # require either.  Drop it and go on.  We have seen
            # this when a script in /usr/bin has ended up in the
            # requires list.
            continue

        if depPath.endswith('module.so'):
            # Strip 'module.so' from the end, make it a candidate
            cands = [ depPath[:-9] + '.so', depPath ]
            cands = [ self._normalizePythonDep(x) for x in cands ]
            if absPath:
                depName = self._checkPackagePythonDeps(pkgFiles, absPath,
                                                       cands, flags)
            else:
                depName = self._checkSystemPythonDeps(cands, flags)
        else:
            depName = self._normalizePythonDep(depPath)
            if depName == '__future__':
                continue
        self._addRequirement(path, depName, flags, pkgFiles,
                             deps.PythonDependencies)

    #if data['missing']:
    #    self.warn("Python file %s is missing requirements: %s" % (
    #        path, ', '.join(data['missing'])))
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _checkSystemPythonDeps(self, depNames, flags):
    """Return the first candidate dep name provided by the system, else the last."""
    if flags:
        flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in flags]
    for candidate in depNames:
        depSet = deps.DependencySet()
        depSet.addDep(deps.PythonDependencies,
                      deps.Dependency(candidate, flags))
        if self.depCache.getProvides([depSet]):
            return candidate
    return depNames[-1]
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _addRubyRequirements(self, path, fullpath, pkgFiles, script=False):
    """Discover ruby 'require' dependencies for one file."""
    macros = self.recipe.macros
    destdir = macros.destdir
    destDirLen = len(destdir)

    if self.rubyInterpreter is None:
        self.rubyInterpreter, bootstrap = self._getRuby(macros, path)
        if not self.rubyInterpreter:
            return
        self.rubyInvocation, self.rubyLoadPath = self._getRubyLoadPath(
            macros, self.rubyInterpreter, bootstrap)
        self.rubyVersion = self._getRubyVersion(macros)
    elif self.rubyInterpreter is False:
        return

    if not script:
        if not util.isregular(fullpath) or os.path.islink(fullpath):
            return
        foundInLoadPath = False
        for pathElement in self.rubyLoadPath:
            if path.startswith(pathElement):
                foundInLoadPath = True
                break
        if not foundInLoadPath:
            return

    # This is a very limited hack, but will work for the 90% case
    # better parsing may be written later
    # Note that we only honor "require" at the beginning of
    # the line and only requirements enclosed in single quotes
    # to avoid conditional requirements and requirements that
    # do any sort of substitution.  Because most ruby packages
    # contain multiple ruby modules, getting 90% of the ruby
    # dependencies will find most of the required packages in
    # practice
    depEntries = [x.strip() for x in file(fullpath)
                  if x.startswith('require ') or
                     x.startswith('require(')]
    depEntries = (x.split() for x in depEntries)
    depEntries = (x[1].strip("\"'") for x in depEntries
                  if len(x) == 2 and x[1].startswith("'") and
                     x[1].endswith("'"))
    depEntries = set(depEntries)

    # I know of no way to ask ruby to report deps from scripts
    # Unfortunately, so far it seems that there are too many
    # Ruby modules which have code that runs in the body; this
    # code runs slowly, has not been useful in practice for
    # filtering out bogus dependencies, and has been hanging
    # and causing other unintended side effects from modules
    # that have code in the main body.
    #if not script:
    #    depClosure = util.popen(r'''%s -e "require '%s'; puts $\""'''
    #        %(self.rubyInvocation%macros, fullpath)).readlines()
    #    depClosure = set([x.split('.')[0] for x in depClosure])
    #    # remove any entries from the guessed immediate requirements
    #    # that are not in the closure
    #    depEntries = set(x for x in depEntries if x in depClosure)

    def _getDepEntryPath(depEntry):
        # search destdir first, then the system, for a .rb or .so match
        for prefix in (destdir, ''):
            for pathElement in self.rubyLoadPath:
                for suffix in ('.rb', '.so'):
                    candidate = util.searchPath(
                        os.path.basename(depEntry) + suffix,
                        prefix + pathElement,
                    )
                    if candidate:
                        return candidate
        return None

    for depEntry in depEntries:
        depEntryPath = _getDepEntryPath(depEntry)
        if depEntryPath is None:
            continue
        if depEntryPath.startswith(destdir):
            depPath = depEntryPath[destDirLen:]
        else:
            depPath = depEntryPath
        flags = self._getRubyFlagsFromPath(depPath, self.rubyVersion)
        self._addRequirement(path, depEntry, flags, pkgFiles,
                             deps.RubyDependencies)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _getPerlReqs(self, path, fullpath):
    """Run perlreqs.pl against fullpath and return the perl module names it needs."""
    if self.perlReqs is None:
        self._fetchPerl()
        if not self.perlPath:
            # no perl == bootstrap, but print warning
            self.info('Unable to find perl interpreter,'
                      ' disabling perl: requirements')
            self.perlReqs = False
            return []
        # get the base directory where conary lives.  In a checked
        # out version, this would be .../conary/conary/build/package.py
        # chop off the last 3 directories to find where
        # .../conary/Scandeps and .../conary/scripts/perlreqs.pl live
        basedir = '/'.join(sys.modules[__name__].__file__.split('/')[:-3])
        scandeps = '/'.join((basedir, 'conary/ScanDeps'))
        if (os.path.exists(scandeps) and
                os.path.exists('%s/scripts/perlreqs.pl' % basedir)):
            perlreqs = '%s/scripts/perlreqs.pl' % basedir
        else:
            # we assume that conary is installed in
            # $prefix/$libdir/python?.?/site-packages.  Use this
            # assumption to find the prefix for
            # /usr/lib/conary and /usr/libexec/conary
            regexp = re.compile(
                r'(.*)/lib(64){0,1}/python[1-9].[0-9]/site-packages')
            match = regexp.match(basedir)
            if not match:
                # our regexp didn't work.  fall back to hardcoded paths
                prefix = '/usr'
            else:
                prefix = match.group(1)
            # ScanDeps is not architecture specific
            scandeps = '%s/lib/conary/ScanDeps' %prefix
            if not os.path.exists(scandeps):
                # but it might have been moved to lib64 for multilib
                scandeps = '%s/lib64/conary/ScanDeps' %prefix
            perlreqs = '%s/libexec/conary/perlreqs.pl' %prefix
        self.perlReqs = '%s -I%s %s %s' %(
            self.perlPath, scandeps, self.perlIncArgs, perlreqs)
    if self.perlReqs is False:
        return []

    cwd = os.getcwd()
    os.chdir(os.path.dirname(fullpath))
    try:
        p = os.popen('%s %s' %(self.perlReqs, fullpath))
    finally:
        try:
            os.chdir(cwd)
        except:
            pass
    reqlist = [x.strip().split('//') for x in p.readlines()]
    # make sure that the command completed successfully
    rc = p.close()
    if rc:
        # make sure that perl didn't blow up
        assert(os.WIFEXITED(rc))
        # Apparently ScanDeps could not handle this input
        return []

    # we care only about modules right now
    # throwing away the filenames for now, but we might choose
    # to change that later
    reqlist = [x[2] for x in reqlist if x[0] == 'module']
    # foo/bar/baz.pm -> foo::bar::baz
    reqlist = ['::'.join(x.split('/')).rsplit('.', 1)[0]
               for x in reqlist]
    return reqlist
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _checkInclusion(self, info, path): if info in self.excluded: for filt in self.excluded[info]: # exception handling is per-requirement, # so handled specially if filt.match(path): self.info('ignoring requirement match for %s: %s', path, info) return False return True
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def preProcess(self): # We want to inherit the exceptions from the Requires class, so we # need to peek into the Required policy object. We can still pass # explicit exceptions into the pluggable sub-policies, and they will # only apply to the sub-policy. exceptions = self.recipe._policyMap['Requires'].exceptions if exceptions: Requires.updateArgs(self, exceptions=exceptions, allowUnusedFilters = True) Requires.preProcess(self)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def error(self, *args, **kwargs): return self.recipe._policyMap['Requires'].error(*args, **kwargs)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def info(self, *args, **kwargs): return self.recipe._policyMap['Requires'].info(*args, **kwargs)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def doFile(self, path): pkgs = self.recipe.autopkg.findComponents(path) if not pkgs: return pkgFiles = [(x, x.getFile(path)) for x in pkgs] macros = self.recipe.macros fullpath = macros.destdir + path self.addPluggableRequirements(path, fullpath, pkgFiles, macros) self.whiteOut(path, pkgFiles) self.unionDeps(path, pkgFiles)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def do(self): if use.Use.bootstrap._get(): return for comp in self.recipe.autopkg.getComponents(): comp.requires -= comp.provides
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def preProcess(self): self.libRe = re.compile( '^(%(libdir)s' '|/%(lib)s' '|%(x11prefix)s/%(lib)s' '|%(krbprefix)s/%(lib)s)(/|$)' %self.recipe.macros) self.libReException = re.compile('^/usr/(lib|%(lib)s)/(python|ruby).*$') self.baseIsnset = use.Arch.getCurrentArch()._name self.baseArchFlavor = use.Arch.getCurrentArch()._toDependency() self.archFlavor = use.createFlavor(None, use.Arch._iterUsed()) self.packageFlavor = deps.Flavor() self.troveMarked = False self.componentMap = self.recipe.autopkg.componentMap ISD = deps.InstructionSetDependency TISD = deps.TargetInstructionSetDependency instructionDeps = list(self.recipe._buildFlavor.iterDepsByClass(ISD)) instructionDeps += list(self.recipe._buildFlavor.iterDepsByClass(TISD)) self.allowableIsnSets = [ x.name for x in instructionDeps ]
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def hasLibInPath(self, path): return self.libRe.match(path) and not self.libReException.match(path)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def doFile(self, path): autopkg = self.recipe.autopkg pkg = autopkg.findComponent(path) if pkg is None: return f = pkg.getFile(path) m = self.recipe.magic[path] if m and m.name == 'ELF' and 'isnset' in m.contents: isnset = m.contents['isnset'] elif self.hasLibInPath(path) or self.hasLibInDependencyFlag(path, f): # all possible paths in a %(lib)s-derived path get default # instruction set assigned if they don't have one already if f.hasContents: isnset = self.baseIsnset else: # this file can't be marked by arch, but the troves # and package must be. (e.g. symlinks and empty directories) # we don't need to union in the base arch flavor more # than once. if self.troveMarked: return self.packageFlavor.union(self.baseArchFlavor) self.troveMarked = True return else: return flv = deps.Flavor() flv.addDep(deps.InstructionSetDependency, deps.Dependency(isnset, [])) # get the Arch.* dependencies # set the flavor for the file to match that discovered in the # magic - but do not let that propagate up to the flavor of # the package - instead the package will have the flavor that # it was cooked with. This is to avoid unnecessary or extra files # causing the entire package from being flavored inappropriately. # Such flavoring requires a bunch of Flavor exclusions to fix. # Note that we need to set all shared paths between containers # to share flavors and ensure that fileIds are the same for pkg in autopkg.findComponents(path): f = pkg.getFile(path) f.flavor.set(flv) # get the Arch.* dependencies flv.union(self.archFlavor) if isnset in self.allowableIsnSets: self.packageFlavor.union(flv)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def preProcess(self): if self.exceptions: self.error('%s does not honor exceptions' % self.__class__.__name__) self.exceptions = None if self.inclusions: self.inclusions = None
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def parseInfoFile(self, path): infoname = "info-%s:%s" % (os.path.basename(path), self.component) data = {} try: data = dict([x.strip().split('=', 1) \ for x in open(path).readlines()]) extraKeys = set(data.keys()).difference(self.legalKeys) if extraKeys: for key in extraKeys: self.error("%s is not is not a valid value for %s" % \ (key, infoname)) self.parseError = True except ValueError: self.error("Unable to parse info file for '%s'" % infoname) self.parseError = True return data
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def addRequires(self, path): realpath, fileObj = self.recipe.autopkg.findComponent(path)[path] data = self.parseInfoFile(realpath) pkg = self.recipe.autopkg.componentMap[path] infoname = os.path.basename(path) if path in pkg.requiresMap: # only deps related to userinfo/troveinfo are allowed self.error("Illegal requirement on 'info-%s:%s': '%s'" % \ (infoname, self.component, str(pkg.requiresMap[path]))) pkg.requiresMap[path] = deps.DependencySet() depSet = self.getRequires(infoname, data) fileObj.requires.set(depSet) pkg.requiresMap[path].union(depSet) pkg.requires.union(depSet)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def parseInfoFile(self, path): if self.recipe._getCapsulePathsForFile(path): return {} data = _ProcessInfoPackage.parseInfoFile(self, path) if data: supplemental = data.get('SUPPLEMENTAL') if supplemental is not None: data['SUPPLEMENTAL'] = supplemental.split(',') return data
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def getRequires(self, infoname, data): groupname = data.get('GROUP', infoname) supp = data.get('SUPPLEMENTAL', []) depSet = deps.DependencySet() for grpDep in supp: depSet.addDep(deps.GroupInfoDependencies, deps.Dependency(grpDep, [])) if not self.recipe._provideGroup.get(infoname): depSet.addDep(deps.GroupInfoDependencies, deps.Dependency(groupname, [])) return depSet
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def getProvides(self, groupname, data): depSet = deps.DependencySet() depSet.addDep(deps.GroupInfoDependencies, deps.Dependency(groupname, [])) return depSet
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def __init__(self, *args, **keywords): self.found = set() policy.Policy.__init__(self, *args, **keywords)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def do(self):
    """Report buildRequires entries that appear to be excessive or
    redundant relative to what was actually used during the build."""
    # If absolutely no buildRequires were found automatically,
    # assume that the buildRequires list has been carefully crafted
    # for some reason that the buildRequires enforcement policy
    # doesn't yet support, and don't warn that all of the listed
    # buildRequires might be excessive.
    if self.found and self.recipe._logFile:
        r = self.recipe
        def getReqNames(key):
            # strip any "=version" suffix, keeping bare trove names
            return set(x.split('=')[0] for x in r._recipeRequirements[key])
        recipeReqs = getReqNames('buildRequires')
        superReqs = getReqNames('buildRequiresSuper')
        foundPackages = set(x.split(':')[0] for x in self.found)
        superClosure = r._getTransitiveDepClosure(superReqs)
        foundClosure = r._getTransitiveDepClosure(self.found)

        def removeCore(candidates):
            # conary, python, and setup are always required; gcc
            # is often an implicit requirement, and sqlite:lib is
            # listed explicitly make bootstrapping easier
            return set(x for x in candidates if
                       not x.startswith('conary')
                       and not x.startswith('python:')
                       and not x.startswith('gcc:')
                       and not x in ('libgcc:devellib',
                                     'setup:runtime',
                                     'sqlite:lib'))

        def removeSome(candidates):
            # at this point, we don't have good enough detection
            # of :runtime in particular to recommend getting rid
            # of it
            return set(x for x in removeCore(candidates) if
                       not x.endswith(':runtime'))

        def removeDupComponents(candidates):
            # If any component is required, we don't really need
            # to flag others as excessive in superclass excess
            return set(x for x in candidates
                       if x.split(':')[0] not in foundPackages)

        # for superclass reqs
        excessSuperReqs = superReqs - foundClosure
        if excessSuperReqs:
            # note that as this is for debugging only, we do not
            # remove runtime requirements
            deDupedSuperReqs = sorted(list(
                removeDupComponents(removeCore(excessSuperReqs))))
            if deDupedSuperReqs:
                self._reportExcessSuperclassBuildRequires(deDupedSuperReqs)

        excessReqs = recipeReqs - self.found
        redundantReqs = recipeReqs.intersection(superClosure)
        if excessReqs or redundantReqs:
            excessBuildRequires = sorted(list(
                removeSome(excessReqs.union(redundantReqs))))
            # all potential excess build requires might have
            # been removed by removeSome
            if excessBuildRequires:
                self._reportExcessBuildRequires(excessBuildRequires)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def _reportExcessSuperclassBuildRequires(self, reqList): self.recipe._logFile.reportExcessSuperclassBuildRequires( sorted(list(reqList)))
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def __init__(self, *args, **keywords): self.errors = set() policy.Policy.__init__(self, *args, **keywords)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def do(self): if self.errors and self.recipe._logFile: self.recipe._logFile.reportMissingBuildRequires( sorted(list(self.errors)))
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def __init__(self, *args, **keywords): self.errors = [] policy.Policy.__init__(self, *args, **keywords)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def do(self): if self.errors: msg = self.groupError and 'Group' or 'Package' raise policy.PolicyError, ('%s Policy errors found:\n%%s' % msg) \ % "\n".join(self.errors)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def __init__(self, *args, **keywords): policy.PackagePolicy.__init__(self, *args, **keywords)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def do(self): if not self.contents: return # Build component map availTroveNames = dict((x.name, None) for x in self.recipe.autopkg.getComponents()) availTroveNames.update(self.recipe.packages) troveNames = set(self.troveNames) & set(availTroveNames) # We don't support compatibility classes for troves (yet) self.recipe._addTroveScript(troveNames, self.contents, self._troveScriptName, None)
sassoftware/conary
[ 47, 9, 47, 4, 1396904066 ]
def setUp(self): self.user1 = db_utils.create_user() self.user2 = db_utils.create_user() self.friend1 = db_utils.create_user() db_utils.create_friendship(self.user1, self.friend1) self.count = 20 self.pivot = 5 s = db_utils.create_characters(self.count, self.user1, ratings.GENERAL.code) self.pivotid = s[self.pivot]
Weasyl/weasyl
[ 106, 28, 106, 115, 1462586006 ]
def test_count_nextid(self): self.assertEqual( self.pivot, character.select_count(self.user1, ratings.GENERAL.code, nextid=self.pivotid))
Weasyl/weasyl
[ 106, 28, 106, 115, 1462586006 ]
def test_cannot_see_non_friends_character(self): """ Should not be able to see a non-friend's friends-ony character in a listing. """ db_utils.create_character(self.user2, friends_only=True) self.assertEqual( self.count, character.select_count(self.user1, ratings.GENERAL.code))
Weasyl/weasyl
[ 106, 28, 106, 115, 1462586006 ]
def __init__(self): super(MenuBarItemStylesExample, self).__init__() self._menubar = MenuBar() menuCommand = MenuCommand(self) # Save reference to individual items so we can add sub-menu items to # them f = self._menubar.addItem('File', None) newItem = f.addItem('New', None) f.addItem('Open f...', menuCommand) f.addSeparator() # Add a style name for a menu item, then use CSS to alter the visuals f.setStyleName('file') newItem.addItem('File', menuCommand) newItem.addItem('Folder', menuCommand) newItem.addItem('Project...', menuCommand) f.addItem('Close', menuCommand) f.addItem('Close All', menuCommand).setStyleName('close-all') f.addSeparator() f.addItem('Save', menuCommand) f.addItem('Save As...', menuCommand) f.addItem('Save All', menuCommand) edit = self._menubar.addItem('Edit', None) edit.addItem('Undo', menuCommand) edit.addItem('Redo', menuCommand).setEnabled(False) edit.addSeparator() edit.addItem('Cut', menuCommand) edit.addItem('Copy', menuCommand) edit.addItem('Paste', menuCommand) edit.addSeparator() find = edit.addItem('Find/Replace', menuCommand) # Actions can be added inline as well, of course find.addItem('Google Search', SearchCommand(self)) find.addSeparator() find.addItem('Find/Replace...', menuCommand) find.addItem('Find Next', menuCommand) find.addItem('Find Previous', menuCommand) view = self._menubar.addItem('View', None) view.addItem('Show/Hide Status Bar', menuCommand) view.addItem('Customize Toolbar...', menuCommand) view.addSeparator() view.addItem('Actual Size', menuCommand) view.addItem('Zoom In', menuCommand) view.addItem('Zoom Out', menuCommand) self.addComponent(self._menubar)
rwl/muntjac
[ 43, 14, 43, 5, 1316308871 ]
def __init__(self, c): self._c = c
rwl/muntjac
[ 43, 14, 43, 5, 1316308871 ]
def __init__(self, c): self._c = c
rwl/muntjac
[ 43, 14, 43, 5, 1316308871 ]
def log(level: int, x: Any) -> None: if logging.getLogger(None).isEnabledFor(level): for line in pprint.pformat(x).split('\n'): logging.log(level, line)
project-chip/connectedhomeip
[ 5774, 1369, 5774, 982, 1583255110 ]
def test_box_nms_op():
    """Exercise contrib box_nms forward/backward on hand-built cases.

    Fix: the original center_to_corner helper assigned into ``out``
    before ever defining it (NameError if called); it now mirrors
    corner_to_center by copying the reshaped input first.
    """
    def test_box_nms_forward(data, expected, thresh=0.5, valid=0, topk=-1,
                             coord=2, score=1, cid=0, bid=-1, force=False,
                             in_format='corner', out_format='corner'):
        # forward must agree with the expected boxes for every float dtype
        for dtype in ['float16', 'float32', 'float64']:
            data = mx.nd.array(data, dtype=dtype)
            out = mx.contrib.nd.box_nms(data, overlap_thresh=thresh,
                                        valid_thresh=valid, topk=topk,
                                        coord_start=coord, score_index=score,
                                        id_index=cid, background_id=bid,
                                        force_suppress=force,
                                        in_format=in_format,
                                        out_format=out_format)
            assert_almost_equal(out.asnumpy(), expected.astype(dtype),
                                rtol=1e-3, atol=1e-3)

    def test_box_nms_backward(data, grad, expected, thresh=0.5, valid=0,
                              topk=-1, coord=2, score=1, cid=0, bid=-1,
                              force=False, in_format='corner',
                              out_format='corner'):
        # backward routes each output gradient back to its source box
        in_var = mx.sym.Variable('data')
        arr_data = mx.nd.array(data)
        arr_grad = mx.nd.empty(arr_data.shape)
        op = mx.contrib.sym.box_nms(in_var, overlap_thresh=thresh,
                                    valid_thresh=valid, topk=topk,
                                    coord_start=coord, score_index=score,
                                    id_index=cid, background_id=bid,
                                    force_suppress=force,
                                    in_format=in_format,
                                    out_format=out_format)
        exe = op.bind(ctx=default_context(), args=[arr_data],
                      args_grad=[arr_grad])
        exe.forward(is_train=True)
        exe.backward(mx.nd.array(grad))
        assert_almost_equal(arr_grad.asnumpy(), expected)

    def corner_to_center(data):
        # (l, t, r, b) -> (cx, cy, w, h); invalid rows (id < 0) become -1
        out = np.reshape(data, (-1, 6)).copy()
        out[:, 2] = (data[:, 2] + data[:, 4]) / 2.0
        out[:, 3] = (data[:, 3] + data[:, 5]) / 2.0
        out[:, 4] = data[:, 4] - data[:, 2]
        out[:, 5] = data[:, 5] - data[:, 3]
        invalid = np.where(data[:, 0] < 0)[0]
        out[invalid, :] = -1
        return out

    def center_to_corner(data):
        # (cx, cy, w, h) -> (l, t, r, b); invalid rows (id < 0) become -1
        data = np.reshape(data, (-1, 6))
        out = data.copy()  # FIX: 'out' was used before assignment
        out[:, 2] = data[:, 2] - data[:, 4] / 2.0
        out[:, 3] = data[:, 3] - data[:, 5] / 2.0
        out[:, 4] = data[:, 2] + data[:, 4] / 2.0
        out[:, 5] = data[:, 3] + data[:, 5] / 2.0
        invalid = np.where(data[:, 0] < 0)[0]
        out[invalid, :] = -1
        return out

    def swap_position(data, expected, coord=2, score=1, cid=0, new_col=0):
        # scatter the coord/score/id columns to random new positions
        data = np.reshape(data, (-1, 6))
        expected = np.reshape(expected, (-1, 6))
        new_coord = random.randint(0, 6 + new_col - 4)
        others = list(range(new_coord)) + list(range(new_coord + 4, 6 + new_col))
        random.shuffle(others)
        new_score = others[0]
        new_cid = others[1]
        new_data = np.full((data.shape[0], data.shape[1] + new_col), -1.0)
        new_expected = np.full((expected.shape[0], expected.shape[1] + new_col), -1.0)
        new_data[:, new_coord:new_coord + 4] = data[:, coord:coord + 4]
        new_data[:, new_score] = data[:, score]
        new_data[:, new_cid] = data[:, cid]
        new_expected[:, new_coord:new_coord + 4] = expected[:, coord:coord + 4]
        new_expected[:, new_score] = expected[:, score]
        new_expected[:, new_cid] = expected[:, cid]
        return new_data, new_expected, new_coord, new_score, new_cid

    # manually set up test cases
    boxes = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
             [0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]

    # case1
    force = True
    thresh = 0.5
    expected = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
                [0, 0.3, 0.1, 0.1, 0.14, 0.14], [-1, -1, -1, -1, -1, -1]]
    grad = np.random.rand(4, 6)
    expected_in_grad = grad[(1, 3, 2, 0), :]
    expected_in_grad[1, :] = 0
    test_box_nms_forward(np.array(boxes), np.array(expected),
                         force=force, thresh=thresh)
    test_box_nms_backward(np.array(boxes), grad, expected_in_grad,
                          force=force, thresh=thresh)

    # case2: multi batch
    boxes2 = [boxes] * 3
    expected2 = [expected] * 3
    grad2 = np.array([grad.tolist()] * 3)
    expected_in_grad2 = np.array([expected_in_grad.tolist()] * 3)
    test_box_nms_forward(np.array(boxes2), np.array(expected2),
                         force=force, thresh=thresh)
    test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2,
                          force=force, thresh=thresh)
    # another new dim
    boxes2 = [boxes2] * 2
    expected2 = [expected2] * 2
    grad2 = np.array([grad2.tolist()] * 2)
    expected_in_grad2 = np.array([expected_in_grad2.tolist()] * 2)
    test_box_nms_forward(np.array(boxes2), np.array(expected2),
                         force=force, thresh=thresh)
    test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2,
                          force=force, thresh=thresh)

    # case3: thresh
    thresh = 0.1
    boxes3 = boxes
    expected3 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
                 [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
    grad3 = np.random.rand(4, 6)
    expected_in_grad3 = grad3[(1, 3, 2, 0), :]
    expected_in_grad3[(1, 2), :] = 0
    test_box_nms_forward(np.array(boxes3), np.array(expected3),
                         force=force, thresh=thresh)
    test_box_nms_backward(np.array(boxes3), grad3, expected_in_grad3,
                          force=force, thresh=thresh)

    # case4: non-force
    boxes4 = boxes
    force = False
    expected4 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
                 [1, 0.4, 0.1, 0.1, 0.2, 0.2], [-1, -1, -1, -1, -1, -1]]
    grad4 = np.random.rand(4, 6)
    expected_in_grad4 = grad4[(1, 2, 3, 0), :]
    expected_in_grad4[2, :] = 0
    test_box_nms_forward(np.array(boxes4), np.array(expected4),
                         force=force, thresh=thresh)
    test_box_nms_backward(np.array(boxes4), grad4, expected_in_grad4,
                          force=force, thresh=thresh)

    # case5: different coding
    boxes5 = corner_to_center(np.array(boxes4))
    test_box_nms_forward(np.array(boxes5), np.array(expected4),
                         force=force, thresh=thresh, in_format='center')
    expected5 = corner_to_center(np.array(expected4))
    test_box_nms_forward(np.array(boxes4), np.array(expected5),
                         force=force, thresh=thresh, out_format='center')
    test_box_nms_forward(np.array(boxes5), np.array(expected5),
                         force=force, thresh=thresh,
                         in_format='center', out_format='center')

    # case6: different position
    boxes6, expected6, new_coord, new_score, new_id = swap_position(
        np.array(boxes4), np.array(expected4), new_col=2)
    test_box_nms_forward(np.array(boxes6), np.array(expected6),
                         force=force, thresh=thresh, coord=new_coord,
                         score=new_score, cid=new_id)

    # case7: no id, should be same with force=True
    force = False
    thresh = 0.5
    test_box_nms_forward(np.array(boxes), np.array(expected),
                         force=force, thresh=thresh, cid=-1)

    # case8: multi-batch thresh + topk
    boxes8 = [[[1, 1, 0, 0, 10, 10], [1, 0.4, 0, 0, 10, 10],
               [1, 0.3, 0, 0, 10, 10]],
              [[2, 1, 0, 0, 10, 10], [2, 0.4, 0, 0, 10, 10],
               [2, 0.3, 0, 0, 10, 10]],
              [[3, 1, 0, 0, 10, 10], [3, 0.4, 0, 0, 10, 10],
               [3, 0.3, 0, 0, 10, 10]]]
    expected8 = [[[1, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1],
                  [-1, -1, -1, -1, -1, -1]],
                 [[2, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1],
                  [-1, -1, -1, -1, -1, -1]],
                 [[3, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1],
                  [-1, -1, -1, -1, -1, -1]]]
    grad8 = np.random.rand(3, 3, 6)
    expected_in_grad8 = np.zeros((3, 3, 6))
    expected_in_grad8[(0, 1, 2), (0, 0, 0), :] = grad8[(0, 1, 2), (0, 0, 0), :]
    force = False
    thresh = 0.5
    valid = 0.5
    topk = 2
    test_box_nms_forward(np.array(boxes8), np.array(expected8), force=force,
                         thresh=thresh, valid=valid, topk=topk)
    test_box_nms_backward(np.array(boxes8), grad8, expected_in_grad8,
                          force=force, thresh=thresh, valid=valid, topk=topk)

    # case9: background id filter out
    # default background id -1
    boxes9 = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [0, 0.4, 0.1, 0.1, 0.2, 0.2],
              [1, 0.3, 0.1, 0.1, 0.14, 0.14], [-1, 0.6, 0.5, 0.5, 0.7, 0.8]]
    expected9 = [[0, 0.5, 0.1, 0.1, 0.2, 0.2],
                 [1, 0.3, 0.1, 0.1, 0.14, 0.14],
                 [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
    force = True
    thresh = 0.5
    grad9 = np.random.rand(4, 6)
    expected_in_grad9 = grad9[(0, 2, 1, 3), :]
    expected_in_grad9[(1, 3), :] = 0
    test_box_nms_forward(np.array(boxes9), np.array(expected9),
                         force=force, thresh=thresh)
    test_box_nms_backward(np.array(boxes9), grad9, expected_in_grad9,
                          force=force, thresh=thresh)
    # set background id
    background_id = 0
    expected9 = [[-1, 0.6, 0.5, 0.5, 0.7, 0.8],
                 [1, 0.3, 0.1, 0.1, 0.14, 0.14],
                 [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
    grad9 = np.random.rand(4, 6)
    expected_in_grad9 = grad9[(2, 3, 1, 0), :]
    expected_in_grad9[(0, 1), :] = 0
    test_box_nms_forward(np.array(boxes9), np.array(expected9), force=force,
                         thresh=thresh, bid=background_id)
    test_box_nms_backward(np.array(boxes9), grad9, expected_in_grad9,
                          force=force, thresh=thresh, bid=background_id)
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def numpy_box_iou(a, b, fmt='corner'): def area(left, top, right, bottom): return np.maximum(0, right - left) * np.maximum(0, bottom - top) assert a.shape[-1] == 4 assert b.shape[-1] == 4 oshape = a.shape[:-1] + b.shape[:-1] a = a.reshape((-1, 4)) ashape = a.shape b = b.reshape((-1, 4)) a = np.tile(a, reps=[1, b.shape[0]]).reshape((-1, 4)) b = np.tile(b, reps=[ashape[0], 1]).reshape((-1, 4)) if fmt == 'corner': al, at, ar, ab = np.split(a, 4, axis=-1) bl, bt, br, bb = np.split(b, 4, axis=-1) elif fmt == 'center': ax, ay, aw, ah = np.split(a, 4, axis=-1) bx, by, bw, bh = np.split(b, 4, axis=-1) al, at, ar, ab = ax - aw / 2, ay - ah / 2, ax + aw / 2, ay + ah / 2 bl, bt, br, bb = bx - bw / 2, by - bh / 2, bx + bw / 2, by + bh / 2 else: raise NotImplementedError("Fmt {} not supported".format(fmt)) width = np.maximum(0, np.minimum(ar, br) - np.maximum(al, bl)) height = np.maximum(0, np.minimum(ab, bb) - np.maximum(at, bt)) intersect = width * height union = area(al, at, ar, ab) + area(bl, bt, br, bb) - intersect union[np.where(intersect <= 0)] = 1e-12 iou = intersect / union return iou.reshape(oshape)
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def test_bipartite_matching_op(): def assert_match(inputs, x, y, threshold, is_ascend=False): for dtype in ['float16', 'float32', 'float64']: inputs = mx.nd.array(inputs, dtype=dtype) x = np.array(x, dtype=dtype) y = np.array(y, dtype=dtype) a, b = mx.nd.contrib.bipartite_matching(inputs, threshold=threshold, is_ascend=is_ascend) assert_array_equal(a.asnumpy().astype('int64'), x.astype('int64')) assert_array_equal(b.asnumpy().astype('int64'), y.astype('int64')) assert_match([[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]], [1, -1, 0], [2, 0], 1e-12, False) assert_match([[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]], [-1, 0, 1], [1, 2], 100, True)
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def test_gradient_multiplier_op(): # We use the quadratic function in combination with gradient multiplier def f(x, a, b, c): return a * x**2 + b * x + c a = np.random.random_sample() b = np.random.random_sample() c = np.random.random_sample() m = np.random.random_sample() - 0.5 data = mx.symbol.Variable('data') quad_sym = mx.sym.contrib.quadratic(data=data, a=a, b=b, c=c) gr_q_sym = mx.sym.contrib.gradientmultiplier(quad_sym, scalar=m) for dtype in [np.float16, np.float32, np.float64]: for ndim in range(1, 6): shape = rand_shape_nd(ndim, 5) data_np = np.random.randn(*shape).astype(dtype) expected = f(data_np, a, b, c) backward_expected = (2 * a * data_np + b) * m # check imperative forward output = mx.nd.contrib.quadratic(mx.nd.array(data_np), a=a, b=b, c=c) output = mx.nd.contrib.gradientmultiplier(output, scalar=m) assert_almost_equal(output.asnumpy(), expected, rtol=1e-2 if dtype is np.float16 else 1e-5, atol=1e-2 if dtype is np.float16 else 1e-5) # check forward check_symbolic_forward(gr_q_sym, [data_np], [expected], rtol=1e-2 if dtype is np.float16 else 1e-5, atol=1e-2 if dtype is np.float16 else 1e-5) # check backward check_symbolic_backward(gr_q_sym, [data_np], [np.ones(expected.shape)], [backward_expected], rtol=1e-2 if dtype is np.float16 else 1e-5, atol=1e-2 if dtype is np.float16 else 1e-5)
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def test_box_encode_op(): anchors = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4)) refs = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4)) samples = mx.nd.array([[0, 1]]) matches = mx.nd.array([[0, 1]]) means = mx.nd.array([0.0, 0.0, 0.0, 0.0]) stds = mx.nd.array([0.1, 0.1, 0.2, 0.2]) Y, mask = mx.nd.contrib.box_encode(samples, matches, anchors, refs, means, stds) assert_allclose(Y.asnumpy(), np.zeros((1, 2, 4)), atol=1e-5, rtol=1e-5) assert_allclose(mask.asnumpy(), np.array([[[0., 0., 0., 0.], [1., 1., 1., 1.]]]), atol=1e-5, rtol=1e-5)
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def test_op_mrcnn_mask_target(): if default_context().device_type != 'gpu': return num_rois = 2 num_classes = 4 mask_size = (3, 3) ctx = mx.gpu(0) # (B, N, 4) rois = mx.nd.array([[[2.3, 4.3, 2.2, 3.3], [3.5, 5.5, 0.9, 2.4]]], ctx=ctx) gt_masks = mx.nd.arange(0, 4*32*32, ctx=ctx).reshape(1, 4, 32, 32) # (B, N) matches = mx.nd.array([[2, 0]], ctx=ctx) # (B, N) cls_targets = mx.nd.array([[2, 1]], ctx=ctx) mask_targets, mask_cls = mx.nd.contrib.mrcnn_mask_target(rois, gt_masks, matches, cls_targets, num_rois=num_rois, num_classes=num_classes, mask_size=mask_size) # Ground truth outputs were generated with GluonCV's target generator # gluoncv.model_zoo.mask_rcnn.MaskTargetGenerator(1, num_rois, num_classes, mask_size) gt_mask_targets = mx.nd.array([[[[[2193.4 , 2193.7332 , 2194.0667 ], [2204.0667 , 2204.4 , 2204.7334 ], [2214.7334 , 2215.0667 , 2215.4 ]], [[2193.4 , 2193.7332 , 2194.0667 ], [2204.0667 , 2204.4 , 2204.7334 ], [2214.7334 , 2215.0667 , 2215.4 ]], [[2193.4 , 2193.7332 , 2194.0667 ], [2204.0667 , 2204.4 , 2204.7334 ], [2214.7334 , 2215.0667 , 2215.4 ]], [[2193.4 , 2193.7332 , 2194.0667 ], [2204.0667 , 2204.4 , 2204.7334 ], [2214.7334 , 2215.0667 , 2215.4 ]]], [[[ 185. , 185.33334, 185.66667], [ 195.66667, 196.00002, 196.33334], [ 206.33333, 206.66666, 207. ]], [[ 185. , 185.33334, 185.66667], [ 195.66667, 196.00002, 196.33334], [ 206.33333, 206.66666, 207. ]], [[ 185. , 185.33334, 185.66667], [ 195.66667, 196.00002, 196.33334], [ 206.33333, 206.66666, 207. ]], [[ 185. , 185.33334, 185.66667], [ 195.66667, 196.00002, 196.33334], [ 206.33333, 206.66666, 207. ]]]]]) gt_mask_cls = mx.nd.array([[0,0,1,0], [0,1,0,0]]) gt_mask_cls = gt_mask_cls.reshape(1,2,4,1,1).broadcast_axes(axis=(3,4), size=(3,3)) assert_almost_equal(mask_targets.asnumpy(), gt_mask_targets.asnumpy()) assert_almost_equal(mask_cls.asnumpy(), gt_mask_cls.asnumpy())
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def plugin(self): return plugins.get(self.plugin_name)
Netflix/lemur
[ 1615, 314, 1615, 125, 1434472567 ]
def get_attn(attn_type): if isinstance(attn_type, torch.nn.Module): return attn_type module_cls = None if attn_type is not None: if isinstance(attn_type, str): attn_type = attn_type.lower() # Lightweight attention modules (channel and/or coarse spatial). # Typically added to existing network architecture blocks in addition to existing convolutions. if attn_type == 'se': module_cls = SEModule elif attn_type == 'ese': module_cls = EffectiveSEModule elif attn_type == 'eca': module_cls = EcaModule elif attn_type == 'ecam': module_cls = partial(EcaModule, use_mlp=True) elif attn_type == 'ceca': module_cls = CecaModule elif attn_type == 'ge': module_cls = GatherExcite elif attn_type == 'gc': module_cls = GlobalContext elif attn_type == 'gca': module_cls = partial(GlobalContext, fuse_add=True, fuse_scale=False) elif attn_type == 'cbam': module_cls = CbamModule elif attn_type == 'lcbam': module_cls = LightCbamModule # Attention / attention-like modules w/ significant params # Typically replace some of the existing workhorse convs in a network architecture. # All of these accept a stride argument and can spatially downsample the input. elif attn_type == 'sk': module_cls = SelectiveKernel elif attn_type == 'splat': module_cls = SplitAttn # Self-attention / attention-like modules w/ significant compute and/or params # Typically replace some of the existing workhorse convs in a network architecture. # All of these accept a stride argument and can spatially downsample the input. elif attn_type == 'lambda': return LambdaLayer elif attn_type == 'bottleneck': return BottleneckAttn elif attn_type == 'halo': return HaloAttn elif attn_type == 'nl': module_cls = NonLocalAttn elif attn_type == 'bat': module_cls = BatNonLocalAttn # Woops! else: assert False, "Invalid attn module (%s)" % attn_type elif isinstance(attn_type, bool): if attn_type: module_cls = SEModule else: module_cls = attn_type return module_cls
rwightman/pytorch-image-models
[ 23978, 3956, 23978, 96, 1549086672 ]
def setUp(self): super(NotificationsTestCase, self).setUp() self.net_info = fake_network.fake_get_instance_nw_info(self.stubs, 1, 1) def fake_get_nw_info(cls, ctxt, instance): self.assertTrue(ctxt.is_admin) return self.net_info self.stubs.Set(network_api.API, 'get_instance_nw_info', fake_get_nw_info) fake_network.set_stub_network_methods(self.stubs) fake_notifier.stub_notifier(self.stubs) self.addCleanup(fake_notifier.reset) self.flags(compute_driver='nova.virt.fake.FakeDriver', network_manager='nova.network.manager.FlatManager', notify_on_state_change="vm_and_task_state", host='testhost') self.user_id = 'fake' self.project_id = 'fake' self.context = context.RequestContext(self.user_id, self.project_id) self.instance = self._wrapped_create()
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_send_api_fault_disabled(self): self.flags(notify_api_faults=False) notifications.send_api_fault("http://example.com/foo", 500, None) self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_notif_disabled(self): # test config disable of the notifications self.flags(notify_on_state_change=None) old = copy.copy(self.instance) self.instance["vm_state"] = vm_states.ACTIVE old_vm_state = old['vm_state'] new_vm_state = self.instance["vm_state"] old_task_state = old['task_state'] new_task_state = self.instance["task_state"] notifications.send_update_with_states(self.context, self.instance, old_vm_state, new_vm_state, old_task_state, new_task_state, verify_states=True) notifications.send_update(self.context, old, self.instance) self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_send_no_notif(self): # test notification on send no initial vm state: old_vm_state = self.instance['vm_state'] new_vm_state = self.instance['vm_state'] old_task_state = self.instance['task_state'] new_task_state = self.instance['task_state'] notifications.send_update_with_states(self.context, self.instance, old_vm_state, new_vm_state, old_task_state, new_task_state, service="compute", host=None, verify_states=True) self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_send_on_task_change(self): # pretend we just transitioned to task SPAWNING: params = {"task_state": task_states.SPAWNING} (old_ref, new_ref) = db.instance_update_and_get_original(self.context, self.instance['uuid'], params) notifications.send_update(self.context, old_ref, new_ref) self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_vm_update_with_states(self):
    """State-change notification carries both states and instance metadata."""
    notifications.send_update_with_states(
        self.context, self.instance,
        vm_states.BUILDING, vm_states.ACTIVE,
        task_states.SPAWNING, task_states.SPAWNING,
        verify_states=True)

    self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
    payload = fake_notifier.NOTIFICATIONS[0].payload

    # State transition fields.
    self.assertEqual(vm_states.BUILDING, payload["old_state"])
    self.assertEqual(vm_states.ACTIVE, payload["state"])
    self.assertEqual(task_states.SPAWNING, payload["old_task_state"])
    self.assertEqual(task_states.SPAWNING, payload["new_task_state"])

    # Instance metadata is mirrored straight into the payload.
    for field in ("access_ip_v4", "access_ip_v6", "display_name",
                  "hostname", "node"):
        self.assertEqual(payload[field], self.instance[field])
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_update_no_service_name(self):
    """Publisher id defaults to the 'compute' service on the local host."""
    notifications.send_update_with_states(
        self.context, self.instance,
        vm_states.BUILDING, vm_states.BUILDING,
        task_states.SPAWNING, None)
    self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))

    notif = fake_notifier.NOTIFICATIONS[0]
    self.assertEqual('compute.testhost', notif.publisher_id)
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_update_with_host_name(self):
    """An explicit host overrides the local host in the publisher id."""
    notifications.send_update_with_states(
        self.context, self.instance,
        vm_states.BUILDING, vm_states.BUILDING,
        task_states.SPAWNING, None, host="someotherhost")
    self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))

    notif = fake_notifier.NOTIFICATIONS[0]
    self.assertEqual('compute.someotherhost', notif.publisher_id)
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_send_access_ip_update(self):
    """The notification payload includes the instance's access IPs."""
    notifications.send_update(self.context, self.instance, self.instance)
    self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))

    payload = fake_notifier.NOTIFICATIONS[0].payload
    for field in ("access_ip_v4", "access_ip_v6"):
        self.assertEqual(payload[field], self.instance[field])
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def test_send_no_state_change(self):
    """send_update still notifies even when nothing actually changed."""
    invoked = []

    def record_call(context, instance, **kwargs):
        invoked.append(True)

    self.stubs.Set(notifications, '_send_instance_update_notification',
                   record_call)
    notifications.send_update(self.context, self.instance, self.instance)
    self.assertTrue(invoked)
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def fail_sending(context, instance, **kwargs):
    """Stub notification hook that always fails, for error-path tests."""
    raise Exception('failed to notify')
CiscoSystems/nova
[ 4, 9, 4, 1, 1323390460 ]
def start(self, action_name: str) -> None:
    """Begin recording the action identified by ``action_name``."""
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def stop(self, action_name: str) -> None:
    """Record the duration of ``action_name`` once the action finishes."""
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def summary(self) -> str:
    """Build the textual profiler report."""
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def setup(self, **kwargs: Any) -> None: """Execute arbitrary pre-profiling set-up steps as defined by subclass."""
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def teardown(self, **kwargs: Any) -> None: """Execute arbitrary post-profiling tear-down steps as defined by subclass."""
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def __init__( self, dirpath: Optional[Union[str, Path]] = None, filename: Optional[str] = None,
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def profile(self, action_name: str) -> Generator:
    """Yield a context manager scoping one profiled action.

    Example::

        with self.profile('load training data'):
            # load training data code

    Recording begins on entering the block and is guaranteed to stop on
    exit, even if the body (or ``start`` itself) raises.
    """
    try:
        # start() lives inside the try so stop() runs even if it fails.
        self.start(action_name)
        yield action_name
    finally:
        self.stop(action_name)
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def _rank_zero_info(self, *args, **kwargs) -> None:
    """Forward to ``log.info`` only when the local rank is unset or zero."""
    if self._local_rank not in (None, 0):
        return
    log.info(*args, **kwargs)
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def _prepare_streams(self) -> None: if self._write_stream is not None: return if self.filename: filepath = os.path.join(self.dirpath, self._prepare_filename()) fs = get_filesystem(filepath) file = fs.open(filepath, "a") self._output_file = file self._write_stream = file.write else: self._write_stream = self._rank_zero_info
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def _stats_to_str(self, stats: Dict[str, str]) -> str: stage = f"{self._stage.upper()} " if self._stage is not None else "" output = [stage + "Profiler Report"] for action, value in stats.items(): header = f"Profile stats for: {action}" if self._local_rank is not None: header += f" rank: {self._local_rank}" output.append(header) output.append(value) return os.linesep.join(output)
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def teardown(self, stage: Optional[str] = None) -> None: """ Execute arbitrary post-profiling tear-down steps. Closes the currently open file and stream. """ self._write_stream = None if self._output_file is not None: self._output_file.close() self._output_file = None # can't pickle TextIOWrapper
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def start(self, action_name: str) -> None:
    """Abstract hook: subclasses must implement action start recording."""
    raise NotImplementedError
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def summary(self) -> str:
    """Abstract hook: subclasses must produce the report text."""
    raise NotImplementedError
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def local_rank(self) -> int:
    """Process-local rank; an unset rank is reported as 0."""
    rank = self._local_rank
    return rank if rank is not None else 0
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def start(self, action_name: str) -> None:
    """No-op: this profiler records nothing."""
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def check_parameters(pjson):
    """Validate a request payload and build a client object from it.

    Requires the JSON body to carry 'app_account', 'card' and 'appid';
    logs and aborts the request with HTTP 400 otherwise.

    :param pjson: parsed request JSON (dict) or a falsy value
    :returns: a ``client`` built from the three required fields
    """
    required = ('app_account', 'card', 'appid')
    # Idiomatic membership tests replace the original `not 'x' in pjson`
    # chain; the redundant intermediate dict is dropped.
    if not pjson or any(key not in pjson for key in required):
        no_db_logger.info('No Parameter')
        abort(400)  # abort raises, so execution stops here on bad input
    return client(pjson['app_account'], pjson['card'], pjson['appid'])
zznn/futu-openAPI
[ 38, 18, 38, 1, 1462535088 ]
def hello_world():
    """Health-check endpoint: log server start and return a greeting."""
    no_db_logger.info('server start#####')
    return 'hello 22222222 world!'
zznn/futu-openAPI
[ 38, 18, 38, 1, 1462535088 ]
def trade_token():
    """Flask handler: obtain a trade token and persist it for the account.

    Reads app_account/card/appid plus trade_pswd from the request JSON,
    validates them via check_parameters (which aborts with 400 on bad
    input), then asks the client for a trade token.
    """
    trade_pswd = request.json['trade_pswd']
    account = request.json['app_account']
    card = request.json['card']
    appid = request.json['appid']

    cc = check_parameters(request.json)
    message = cc.get_trade_token(trade_pswd)

    # Specific failure: token service refused to hand out an access token.
    if message['result_code'] != 0 and message['error_msg'] == 'didn\'t get accesstoken':
        no_db_logger.info('didn\'t get accesstoken')
        return json.dumps({'result_code':2,'error_msg':'didn\'t get accesstoken'}, ensure_ascii=False)
    if message['result_code'] == 0:
        # Success: store/refresh the token for this account+card+appid.
        token = message['data']['trade_token']
        save_update_token(account, appid, None, token, card, True)
    # NOTE(review): other non-zero result codes fall through and are
    # returned to the caller verbatim — confirm this is intentional.
    return jsonify(**message)
zznn/futu-openAPI
[ 38, 18, 38, 1, 1462535088 ]