function
stringlengths 11
56k
| repo_name
stringlengths 5
60
| features
sequence |
---|---|---|
def __init__(self, root, cwd, badfn=None, relativeuipath=True):
self._root = root
self._cwd = cwd
if badfn is not None:
self.bad = badfn
self._relativeuipath = relativeuipath | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __call__(self, fn):
return self.matchfn(fn) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def bad(self, f, msg):
"""Callback from dirstate.walk for each explicit file that can't be
found/accessed, with an error message.""" | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def abs(self, f):
"""Convert a repo path back to path that is relative to the root of the
matcher."""
return f | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def uipath(self, f):
"""Convert repo path to a display path. If patterns or -I/-X were used
to create this matcher, the display path will be relative to cwd.
Otherwise it is relative to the root of the repo."""
return (self._relativeuipath and self.rel(f)) or self.abs(f) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _files(self):
return [] | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _fileset(self):
return set(self._files) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def matchfn(self, f):
return False | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def always(self):
"""Matcher will match everything and .files() will be empty --
optimization might be possible."""
return False | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def prefix(self):
"""Matcher will match the paths in .files() recursively --
optimization might be possible."""
return False | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, root, cwd, badfn=None, relativeuipath=False):
super(alwaysmatcher, self).__init__(
root, cwd, badfn, relativeuipath=relativeuipath
) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def matchfn(self, f):
return True | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __repr__(self):
return "<alwaysmatcher>" | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, root, cwd, badfn=None):
super(nevermatcher, self).__init__(root, cwd, badfn) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def isexact(self):
return True | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def visitdir(self, dir):
return False | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, root, cwd, badfn=None, gitignorepaths=None):
super(gitignorematcher, self).__init__(root, cwd, badfn)
gitignorepaths = gitignorepaths or []
self._matcher = pathmatcher.gitignorematcher(root, gitignorepaths) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def explain(self, f):
return self._matcher.explain(f, True) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __repr__(self):
return "<gitignorematcher>" | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, root, cwd, badfn=None, rules=[]):
super(treematcher, self).__init__(root, cwd, badfn)
rules = list(rules)
self._matcher = pathmatcher.treematcher(rules)
self._rules = rules | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def visitdir(self, dir):
matched = self._matcher.match_recursive(dir)
if matched is None:
return True
elif matched is True:
return "all"
else:
assert matched is False
return False | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def normalizerootdir(dir, funcname):
if dir == ".":
util.nouideprecwarn(
"match.%s() no longer accepts '.', use '' instead." % funcname, "20190805"
)
return ""
return dir | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _makeglobrecursive(pat):
"""Make a glob pattern recursive by appending "/**" to it"""
if pat.endswith("/") or not pat:
return pat + "**"
else:
return pat + "/**" | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _convertretoglobs(repat):
"""Attempt to convert a regular expression pattern to glob patterns.
A single regular expression pattern might be converted into multiple
glob patterns.
Return None if conversion is unsupported.
>>> _convertretoglobs("abc*") is None
True
>>> _convertretoglobs("xx/yy/(?!zz/kk)")
['xx/yy/**', '!xx/yy/zz/kk/**']
>>> _convertretoglobs("x/y/(?:.*/)?BUCK")
['x/y/**/BUCK']
"""
m = _repat1.match(repat)
if m:
prefix, excluded = m.groups()
return ["%s/**" % prefix, "!%s/%s/**" % (prefix, excluded)]
m = _repat2.match(repat)
if m:
prefix, name = m.groups()
return ["%s/**/%s" % (prefix, name)]
return None | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, root, cwd, kindpats, ctx=None, badfn=None):
super(patternmatcher, self).__init__(root, cwd, badfn)
# kindpats are already normalized to be relative to repo-root.
# Can we use tree matcher?
rules = _kindpatstoglobs(kindpats, recursive=False)
fallback = True
if rules is not None:
try:
matcher = treematcher(root, cwd, badfn=badfn, rules=rules)
# Replace self to 'matcher'.
self.__dict__ = matcher.__dict__
self.__class__ = matcher.__class__
fallback = False
except ValueError:
# for example, Regex("Compiled regex exceeds size limit of 10485760 bytes.")
pass
if fallback:
self._prefix = _prefix(kindpats)
self._pats, self.matchfn = _buildmatch(ctx, kindpats, "$", root)
self._files = _explicitfiles(kindpats) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _dirs(self):
return set(util.dirs(self._fileset)) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def prefix(self):
return self._prefix | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, root, cwd, kindpats, ctx=None, badfn=None):
super(includematcher, self).__init__(root, cwd, badfn)
# Can we use tree matcher?
rules = _kindpatstoglobs(kindpats, recursive=True)
fallback = True
if rules is not None:
try:
matcher = treematcher(root, cwd, badfn=badfn, rules=rules)
# Replace self to 'matcher'.
self.__dict__ = matcher.__dict__
self.__class__ = matcher.__class__
fallback = False
except ValueError:
# for example, Regex("Compiled regex exceeds size limit of 10485760 bytes.")
pass
if fallback:
self._pats, self.matchfn = _buildmatch(ctx, kindpats, "(?:/|$)", root)
# prefix is True if all patterns are recursive, so certain fast paths
# can be enabled. Unfortunately, it's too easy to break it (ex. by
# using "glob:*.c", "re:...", etc).
self._prefix = _prefix(kindpats)
roots, dirs = _rootsanddirs(kindpats)
# roots are directories which are recursively included.
# If self._prefix is True, then _roots can have a fast path for
# visitdir to return "all", marking things included unconditionally.
# If self._prefix is False, then that optimization is unsound because
# "roots" might contain entries that is not recursive (ex. roots will
# include "foo/bar" for pattern "glob:foo/bar/*.c").
self._roots = set(roots)
# dirs are directories which are non-recursively included.
# That is, files under that directory are included. But not
# subdirectories.
self._dirs = set(dirs)
# Try to use a more efficient visitdir implementation
visitdir = _buildvisitdir(kindpats)
if visitdir:
self.visitdir = visitdir | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __repr__(self):
return "<includematcher includes=%r>" % self._pats | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, root, cwd, files, badfn=None):
super(exactmatcher, self).__init__(root, cwd, badfn)
if isinstance(files, list):
self._files = files
else:
self._files = list(files) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _dirs(self):
return set(util.dirs(self._fileset)) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def isexact(self):
return True | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, m1, m2):
super(differencematcher, self).__init__(m1._root, m1._cwd)
self._m1 = m1
self._m2 = m2
self.bad = m1.bad
self.traversedir = m1.traversedir | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _files(self):
if self.isexact():
return [f for f in self._m1.files() if self(f)]
# If m1 is not an exact matcher, we can't easily figure out the set of
# files, because its files() are not always files. For example, if
# m1 is "path:dir" and m2 is "rootfileins:.", we don't
# want to remove "dir" from the set even though it would match m2,
# because the "dir" in m1 may not be a file.
return self._m1.files() | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def isexact(self):
return self._m1.isexact() | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def intersectmatchers(m1, m2):
"""Composes two matchers by matching if both of them match.
The second matcher's non-matching-attributes (root, cwd, bad, traversedir)
are ignored.
"""
if m1 is None or m2 is None:
return m1 or m2
if m1.always():
m = copy.copy(m2)
# TODO: Consider encapsulating these things in a class so there's only
# one thing to copy from m1.
m.bad = m1.bad
m.traversedir = m1.traversedir
m.abs = m1.abs
m.rel = m1.rel
m._relativeuipath |= m1._relativeuipath
return m
if m2.always():
m = copy.copy(m1)
m._relativeuipath |= m2._relativeuipath
return m
return intersectionmatcher(m1, m2) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, m1, m2):
super(intersectionmatcher, self).__init__(m1._root, m1._cwd)
self._m1 = m1
self._m2 = m2
self.bad = m1.bad
self.traversedir = m1.traversedir | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _files(self):
if self.isexact():
m1, m2 = self._m1, self._m2
if not m1.isexact():
m1, m2 = m2, m1
return [f for f in m1.files() if m2(f)]
# It neither m1 nor m2 is an exact matcher, we can't easily intersect
# the set of files, because their files() are not always files. For
# example, if intersecting a matcher "-I glob:foo.txt" with matcher of
# "path:dir2", we don't want to remove "dir2" from the set.
return self._m1.files() + self._m2.files() | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def visitdir(self, dir):
dir = normalizerootdir(dir, "visitdir")
visit1 = self._m1.visitdir(dir)
if visit1 == "all":
return self._m2.visitdir(dir)
# bool() because visit1=True + visit2='all' should not be 'all'
return bool(visit1 and self._m2.visitdir(dir)) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def isexact(self):
return self._m1.isexact() or self._m2.isexact() | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, path, matcher):
super(subdirmatcher, self).__init__(matcher._root, matcher._cwd)
self._path = path
self._matcher = matcher
self._always = matcher.always()
self._files = [
f[len(path) + 1 :] for f in matcher._files if f.startswith(path + "/")
]
# If the parent repo had a path to this subrepo and the matcher is
# a prefix matcher, this submatcher always matches.
if matcher.prefix():
self._always = any(f == path for f in matcher._files) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def abs(self, f):
return self._matcher.abs(self._path + "/" + f) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def uipath(self, f):
return self._matcher.uipath(self._path + "/" + f) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def visitdir(self, dir):
dir = normalizerootdir(dir, "visitdir")
if dir == "":
dir = self._path
else:
dir = self._path + "/" + dir
return self._matcher.visitdir(dir) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def prefix(self):
return self._matcher.prefix() and not self._always | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, matchers):
m1 = matchers[0]
super(unionmatcher, self).__init__(m1._root, m1._cwd)
self.traversedir = m1.traversedir
self._matchers = matchers | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def visitdir(self, dir):
r = False
for m in self._matchers:
v = m.visitdir(dir)
if v == "all":
return v
r |= v
return r | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, m1, m2):
super(xormatcher, self).__init__(m1._root, m1._cwd)
self.traversedir = m1.traversedir
self.m1 = m1
self.m2 = m2 | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def visitdir(self, dir):
m1dir = self.m1.visitdir(dir)
m2dir = self.m2.visitdir(dir)
# if both matchers return "all" then we know for sure we don't need
# to visit this directory. Same if all matchers return False. In all
# other case we have to visit a directory.
if m1dir == "all" and m2dir == "all":
return False
if not m1dir and not m2dir:
return False
return True | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def __init__(self, matcher):
self._matcher = matcher | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def visitdir(self, dir):
if self(dir):
return "all"
return self._matcher.visitdir(dir) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def patkind(pattern, default=None):
"""If pattern is 'kind:pat' with a known kind, return kind."""
return _patsplit(pattern, default)[0] | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _globre(pat):
r"""Convert an extended glob string to a regexp string.
>>> from . import pycompat
>>> def bprint(s):
... print(s)
>>> bprint(_globre(br'?'))
.
>>> bprint(_globre(br'*'))
[^/]*
>>> bprint(_globre(br'**'))
.*
>>> bprint(_globre(br'**/a'))
(?:.*/)?a
>>> bprint(_globre(br'a/**/b'))
a/(?:.*/)?b
>>> bprint(_globre(br'[a*?!^][^b][!c]'))
[a*?!^][\^b][^c]
>>> bprint(_globre(br'{a,b}'))
(?:a|b)
>>> bprint(_globre(br'.\*\?'))
\.\*\?
"""
i, n = 0, len(pat)
res = ""
group = 0
escape = util.re.escape
def peek():
return i < n and pat[i : i + 1]
while i < n:
c = pat[i : i + 1]
i += 1
if c not in "*?[{},\\":
res += escape(c)
elif c == "*":
if peek() == "*":
i += 1
if peek() == "/":
i += 1
res += "(?:.*/)?"
else:
res += ".*"
else:
res += "[^/]*"
elif c == "?":
res += "."
elif c == "[":
j = i
if j < n and pat[j : j + 1] in "!]":
j += 1
while j < n and pat[j : j + 1] != "]":
j += 1
if j >= n:
res += "\\["
else:
stuff = pat[i:j].replace("\\", "\\\\")
i = j + 1
if stuff[0:1] == "!":
stuff = "^" + stuff[1:]
elif stuff[0:1] == "^":
stuff = "\\" + stuff
res = "%s[%s]" % (res, stuff)
elif c == "{":
group += 1
res += "(?:"
elif c == "}" and group:
res += ")"
group -= 1
elif c == "," and group:
res += "|"
elif c == "\\":
p = peek()
if p:
i += 1
res += escape(p)
else:
res += escape(c)
else:
res += escape(c)
return res | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _buildmatch(ctx, kindpats, globsuffix, root):
"""Return regexp string and a matcher function for kindpats.
globsuffix is appended to the regexp of globs."""
matchfuncs = []
subincludes, kindpats = _expandsubinclude(kindpats, root)
if subincludes:
submatchers = {}
def matchsubinclude(f):
for prefix, matcherargs in subincludes:
if f.startswith(prefix):
mf = submatchers.get(prefix)
if mf is None:
mf = match(*matcherargs)
submatchers[prefix] = mf
if mf(f[len(prefix) :]):
return True
return False
matchfuncs.append(matchsubinclude)
fset, kindpats = _expandsets(kindpats, ctx)
if fset:
matchfuncs.append(fset.__contains__)
regex = ""
if kindpats:
regex, mf = _buildregexmatch(kindpats, globsuffix)
matchfuncs.append(mf)
if len(matchfuncs) == 1:
return regex, matchfuncs[0]
else:
return regex, lambda f: any(mf(f) for mf in matchfuncs) | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _patternrootsanddirs(kindpats):
"""Returns roots and directories corresponding to each pattern.
This calculates the roots and directories exactly matching the patterns and
returns a tuple of (roots, dirs) for each. It does not return other
directories which may also need to be considered, like the parent
directories.
"""
r = []
d = []
for kind, pat, source in kindpats:
if kind == "glob": # find the non-glob prefix
root = []
for p in pat.split("/"):
if "[" in p or "{" in p or "*" in p or "?" in p:
break
root.append(p)
r.append("/".join(root))
elif kind in ("relpath", "path"):
if pat == ".":
pat = ""
r.append(pat)
elif kind in ("rootfilesin",):
if pat == ".":
pat = ""
d.append(pat)
else: # relglob, re, relre
r.append("")
return r, d | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _rootsanddirs(kindpats):
"""Returns roots and exact directories from patterns.
roots are directories to match recursively, whereas exact directories should
be matched non-recursively. The returned (roots, dirs) tuple will also
include directories that need to be implicitly considered as either, such as
parent directories.
>>> _rootsanddirs(
... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
... (b'glob', b'g*', b'')])
(['g/h', 'g/h', ''], ['', 'g'])
>>> _rootsanddirs(
... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
([], ['g/h', '', '', 'g'])
>>> _rootsanddirs(
... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
... (b'path', b'', b'')])
(['r', 'p/p', ''], ['', 'p'])
>>> _rootsanddirs(
... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
... (b'relre', b'rr', b'')])
(['', '', ''], [''])
"""
r, d = _patternrootsanddirs(kindpats)
# Append the parents as non-recursive/exact directories, since they must be
# scanned to get to either the roots or the other exact directories.
d.extend(sorted(util.dirs(d)))
d.extend(sorted(util.dirs(r)))
return r, d | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def _prefix(kindpats):
"""Whether all the patterns match a prefix (i.e. recursively)"""
for kind, pat, source in kindpats:
if kind not in ("path", "relpath"):
return False
return True | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def readpatternfile(filepath, warn, sourceinfo=False):
"""parse a pattern file, returning a list of
patterns. These patterns should be given to compile()
to be validated and converted into a match function.
trailing white space is dropped.
the escape character is backslash.
comments start with #.
empty lines are skipped.
lines can be of the following formats:
syntax: regexp # defaults following lines to non-rooted regexps
syntax: glob # defaults following lines to non-rooted globs
re:pattern # non-rooted regular expression
glob:pattern # non-rooted glob
pattern # pattern of the current default type
if sourceinfo is set, returns a list of tuples:
(pattern, lineno, originalline). This is useful to debug ignore patterns.
"""
syntaxes = {
"re": "relre:",
"regexp": "relre:",
"glob": "relglob:",
"include": "include",
"subinclude": "subinclude",
}
syntax = "relre:"
patterns = []
fp = open(filepath, "rb")
for lineno, line in enumerate(util.iterfile(fp), start=1):
if "#" in line:
global _commentre
if not _commentre:
_commentre = util.re.compile(br"((?:^|[^\\])(?:\\\\)*)#.*")
# remove comments prefixed by an even number of escapes
m = _commentre.search(line)
if m:
line = line[: m.end(1)]
# fixup properly escaped comments that survived the above
line = line.replace("\\#", "#")
line = line.rstrip()
if not line:
continue
if line.startswith("syntax:"):
s = line[7:].strip()
try:
syntax = syntaxes[s]
except KeyError:
if warn:
warn(_("%s: ignoring invalid syntax '%s'\n") % (filepath, s))
continue
linesyntax = syntax
for s, rels in pycompat.iteritems(syntaxes):
if line.startswith(rels):
linesyntax = rels
line = line[len(rels) :]
break
elif line.startswith(s + ":"):
linesyntax = rels
line = line[len(s) + 1 :]
break
if sourceinfo:
patterns.append((linesyntax + line, lineno, line))
else:
patterns.append(linesyntax + line)
fp.close()
return patterns | facebookexperimental/eden | [
4737,
192,
4737,
106,
1462467227
] |
def list(request, nick = None):
template = loader.get_template("list.html")
from django.core.urlresolvers import reverse
from forms import PresenceForm
form = PresenceForm()
if nick is not None:
form.initial['nick'] = nick
form_target = reverse(list, kwargs = {'nick': nick})
else:
form_target = reverse(list)
if request.POST.get('nick', '') != '':
context = {
'address': request.META['REMOTE_ADDR'],
'uri': request.META['REQUEST_URI'],
}
if 'enter' in request.POST:
presence.person_entered(request.POST['nick'], context)
else: # 'leave' in request.POST
presence.person_left(request.POST['nick'], context)
# tell the browser to reload the page, but with GET request
return django.shortcuts.redirect(request.path)
context = RequestContext(request, {
'form_target': form_target,
'form': form,
'present': presence.list_people(),
'sensors': presence.list_simple_sensors(),
'complex_sensors': presence.list_complex_sensors(),
})
return HttpResponse(template.render(context)) | HackerspaceWroclaw/wlokalu | [
4,
3,
4,
3,
1402168066
] |
def get_test_client(nowait=False):
client = get_es_connection('default')
# wait for yellow status
for _ in range(1 if nowait else 5):
try:
client.cluster.health(wait_for_status="yellow")
return client
except ConnectionError:
time.sleep(0.1)
else:
# timeout
raise SkipTest("Elasticsearch failed to start") | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def _get_client():
return get_test_client() | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def setUpClass(cls):
if cls._overridden_settings:
cls._cls_overridden_context = override_settings(**cls._overridden_settings)
cls._cls_overridden_context.enable()
connections.configure(**settings.ELASTICSEARCH_CONNECTIONS)
cls.es_client = cls._get_client()
IngestClient(cls.es_client).put_pipeline(id='ingest_attachment', body={
'description': "Extract attachment information",
'processors': [
{
"attachment": {
"field": "data",
"indexed_chars": "-1"
},
"remove": {
"field": "data"
}
}
]
})
super().setUpClass() | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def tearDown(self):
self.es_client.indices.delete(index="*", ignore=404)
self.es_client.indices.delete_template(name="*", ignore=404) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def setUpTestData(cls):
cls.url = reverse('search-list')
Feature.objects.create(name='archival descriptions', enabled=True)
cls.user = User.objects.create()
permission = Permission.objects.get(codename='search')
cls.user.user_permissions.add(permission)
org_group_type = GroupType.objects.create(codename='organization')
cls.group1 = Group.objects.create(name='group1', group_type=org_group_type)
cls.group1.add_member(cls.user.essauth_member)
cls.group2 = Group.objects.create(name='group2', group_type=org_group_type)
cls.group2.add_member(cls.user.essauth_member)
cls.component_type = TagVersionType.objects.create(name='component', archive_type=False)
cls.archive_type = TagVersionType.objects.create(name='archive', archive_type=True) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def create_agent():
return Agent.objects.create(
type=AgentType.objects.create(main_type=MainAgentType.objects.create()),
ref_code=RefCode.objects.create(
country=Country.objects.get(iso='SE'),
repository_code='repo',
),
level_of_detail=0,
record_status=0,
script=0,
language=Language.objects.get(iso_639_1='sv'),
create_date=timezone.now(),
) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def test_filter_on_component_agent(self):
agent = self.create_agent()
component_tag = Tag.objects.create()
component_tag_version = TagVersion.objects.create(
tag=component_tag,
type=self.component_type,
elastic_index="component",
)
structure_type = StructureType.objects.create()
structure_template = Structure.objects.create(type=structure_type, is_template=True)
archive_tag = Tag.objects.create()
archive_tag_version = TagVersion.objects.create(
tag=archive_tag,
type=self.archive_type,
elastic_index="archive",
)
structure, archive_tag_structure = structure_template.create_template_instance(archive_tag)
Archive.from_obj(archive_tag_version).save(refresh='true')
TagStructure.objects.create(tag=component_tag, parent=archive_tag_structure, structure=structure)
AgentTagLink.objects.create(
agent=agent,
tag=component_tag_version,
type=AgentTagLinkRelationType.objects.create(),
)
Component.from_obj(component_tag_version).save(refresh='true')
res = self.client.get(self.url, {'agents': str(agent.pk)})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 1)
self.assertEqual(res.data['hits'][0]['_id'], str(component_tag_version.pk)) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def test_filter_appraisal_date(self):
component_tag = Tag.objects.create(appraisal_date=make_aware(datetime(year=2020, month=1, day=1)))
component_tag_version = TagVersion.objects.create(
tag=component_tag,
type=self.component_type,
elastic_index="component",
)
doc = Component.from_obj(component_tag_version)
doc.save(refresh='true')
with self.subTest('2020-01-01 is after or equal to 2020-01-01'):
res = self.client.get(self.url, data={'appraisal_date_after': '2020-01-01'})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 1)
with self.subTest('2020-01-01 not after 2020-01-02'):
res = self.client.get(self.url, data={'appraisal_date_after': '2020-01-02'})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 0)
with self.subTest('2020-01-01 not before 2019-12-31'):
res = self.client.get(self.url, data={'appraisal_date_before': '2019-12-31'})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 0)
with self.subTest('2020-01-01 between 2019-01-01 and 2020-01-01'):
res = self.client.get(self.url, data={
'appraisal_date_after': '2019-01-01',
'appraisal_date_before': '2020-01-01',
})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 1)
with self.subTest('2020-01-01 between 2020-01-01 and 2020-12-31'):
res = self.client.get(self.url, data={
'appraisal_date_after': '2020-01-01',
'appraisal_date_before': '2020-12-31',
})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 1)
with self.subTest('2020-01-01 not between 2020-01-02 and 2020-12-31'):
res = self.client.get(self.url, data={
'appraisal_date_after': '2020-01-02',
'appraisal_date_before': '2020-12-31',
})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 0)
with self.subTest('2020-01-01 not between 2019-01-01 and 2019-12-31'):
res = self.client.get(self.url, data={
'appraisal_date_after': '2019-01-01',
'appraisal_date_before': '2019-12-31',
})
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 0)
with self.subTest('invalid range 2020-12-31 - 2020-01-01'):
res = self.client.get(self.url, data={
'appraisal_date_after': '2020-12-31',
'appraisal_date_before': '2020-01-01',
})
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def setUpTestData(cls):
cls.url = reverse('search-list')
Feature.objects.create(name='archival descriptions', enabled=True)
org_group_type = GroupType.objects.create(codename='organization')
cls.group = Group.objects.create(group_type=org_group_type)
cls.component_type = TagVersionType.objects.create(name='component', archive_type=False)
cls.archive_type = TagVersionType.objects.create(name='archive', archive_type=True) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def test_search_document_in_ip_with_other_user_responsible_without_permission_to_see_it(self):
other_user = User.objects.create(username='other')
self.group.add_member(other_user.essauth_member)
ip = InformationPackage.objects.create(responsible=other_user)
self.group.add_object(ip)
document_tag = Tag.objects.create(information_package=ip)
document_tag_version = TagVersion.objects.create(
tag=document_tag,
type=self.component_type,
elastic_index="document",
)
File.from_obj(document_tag_version).save(refresh='true')
res = self.client.get(self.url)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 0) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def setUpTestData(cls):
cls.url = reverse('search-list')
Feature.objects.create(name='archival descriptions', enabled=True)
cls.component_type = TagVersionType.objects.create(name='component', archive_type=False)
cls.security_levels = [1, 2, 3, 4, 5] | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def test_user_with_no_security_level(self):
component_tag = Tag.objects.create()
component_tag_version = TagVersion.objects.create(
tag=component_tag,
type=self.component_type,
elastic_index="component",
security_level=None,
)
Component.from_obj(component_tag_version).save(refresh='true')
with self.subTest('no security level'):
res = self.client.get(self.url)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 1)
self.assertEqual(res.data['hits'][0]['_id'], str(component_tag_version.pk))
for lvl in self.security_levels[1:]:
with self.subTest(f'security level {lvl}'):
component_tag_version.security_level = lvl
component_tag_version.save()
Component.from_obj(component_tag_version).save(refresh='true')
res = self.client.get(self.url)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data['hits']), 0) | ESSolutions/ESSArch_Core | [
16,
12,
16,
33,
1478000556
] |
def translate_unity(unity):
return UNITIES.get(unity, UNITIES["NONE"]) | CroissanceCommune/autonomie | [
21,
14,
21,
284,
1381245170
] |
def upgrade():
from autonomie.models.task import WorkUnit
from autonomie.models.task.estimation import EstimationLine
from autonomie.models.task.invoice import InvoiceLine
from autonomie.models.task.invoice import CancelInvoiceLine
from autonomie_base.models.base import DBSESSION
# Adding some characters to the Lines
for table in "estimation_line", "invoice_line", "cancelinvoice_line":
op.alter_column(table, "unity", type_=sa.String(100))
for value in UNITS:
unit = WorkUnit(label=value)
DBSESSION().add(unit)
for factory in (EstimationLine, InvoiceLine, CancelInvoiceLine):
for line in factory.query():
line.unity = translate_unity(line.unity)
DBSESSION().merge(line) | CroissanceCommune/autonomie | [
21,
14,
21,
284,
1381245170
] |
def loop():
return asyncio.get_event_loop() | chireiden/shanghai | [
4,
2,
4,
6,
1461341617
] |
def evt():
return event.build_event("event") | chireiden/shanghai | [
4,
2,
4,
6,
1461341617
] |
def sample_plugin():
class TestPlugin(BasePlugin):
@event.event
def on_test(self):
pass
return TestPlugin | chireiden/shanghai | [
4,
2,
4,
6,
1461341617
] |
def test_type(self):
assert isinstance(event.Priority.DEFAULT, int) | chireiden/shanghai | [
4,
2,
4,
6,
1461341617
] |
def test_lookup(self):
assert event.Priority.lookup(event.Priority.CORE) is event.Priority.CORE
assert event.Priority.lookup(event.Priority.CORE.value) is event.Priority.CORE
assert event.Priority.lookup(-12312412) == -12312412 | chireiden/shanghai | [
4,
2,
4,
6,
1461341617
] |
def test_build_event(self):
evt = event.build_event("evt_name", arg1="val1", arg2=None)
assert evt.name == "evt_name"
assert evt.args == {'arg1': "val1", 'arg2': None} | chireiden/shanghai | [
4,
2,
4,
6,
1461341617
] |
def test_bool(self):
prio_set_list = event._PrioritizedSetList()
assert bool(prio_set_list) is False
prio_set_list.add(0, None)
assert bool(prio_set_list) is True | chireiden/shanghai | [
4,
2,
4,
6,
1461341617
] |
def test_add_already_added(self):
    """Adding the same object twice raises, even at a different priority."""
    prio_set_list = event._PrioritizedSetList()
    obj = object()
    prio_set_list.add(0, obj)
    # Re-adding at the same priority is rejected ...
    with pytest.raises(ValueError) as excinfo:
        prio_set_list.add(0, obj)
    excinfo.match(r"has already been added")
    # ... and so is re-adding at any other priority.
    with pytest.raises(ValueError) as excinfo:
        prio_set_list.add(1, obj)
    excinfo.match(r"has already been added")
4,
2,
4,
6,
1461341617
] |
def test_iter(self):
    """Iterating yields ``(priority, set_of_objects)`` pairs ordered from
    highest priority to lowest."""
    pslist = event._PrioritizedSetList()
    items = [(n,) for n in range(5)]
    for n, item in enumerate(items):
        pslist.add(-n, item)
    for position, entry in enumerate(pslist):
        assert entry == (-position, {items[position]})
4,
2,
4,
6,
1461341617
] |
def test_no_param_usage(self):
    """Bare ``@event.event`` (no call parentheses) attaches handler info
    with defaults, deriving the event name from the function name."""
    @event.event
    def func_name(self):
        pass

    @event.event
    def on_test(self):
        pass

    assert hasattr(on_test, '_h_info')
    h_info = on_test._h_info
    # An "on_" prefix is stripped from the event name ...
    assert h_info.event_name == "test"
    # ... but a name without the prefix is used verbatim.
    assert func_name._h_info.event_name == "func_name"
    assert h_info.handler is on_test
    assert h_info.priority is event.Priority.DEFAULT
    assert h_info.should_enable
    assert not h_info.is_async
4,
2,
4,
6,
1461341617
] |
def on_test(self):
    # Placeholder handler body; presumably decorated with @event.event in
    # the surrounding test (decorator not visible in this extract).
    pass
4,
2,
4,
6,
1461341617
] |
def test_async_handler(self):
    """``@event.event(enable=False)`` on a coroutine function records an
    async, initially-disabled handler."""
    @event.event(enable=False)
    async def on_async_test(self):
        pass

    assert hasattr(on_async_test, '_h_info')
    h_info = on_async_test._h_info
    # "on_" prefix stripped from the derived event name.
    assert h_info.event_name == 'async_test'
    assert h_info.handler is on_async_test
    assert h_info.priority is event.Priority.DEFAULT
    # enable=False is reflected in should_enable.
    assert not h_info.should_enable
    # Coroutine functions are detected as async handlers.
    assert h_info.is_async
4,
2,
4,
6,
1461341617
] |
def on_test(self):
    # Placeholder handler body; presumably decorated with @event.event in
    # the surrounding test (decorator not visible in this extract).
    pass
4,
2,
4,
6,
1461341617
] |
def test_core_event_deco(self):
    """``@event.core_event`` registers the handler at CORE priority."""
    @event.core_event
    def on_test(self):
        pass

    assert hasattr(on_test, '_h_info')
    h_info = on_test._h_info
    assert h_info.priority is event.Priority.CORE
4,
2,
4,
6,
1461341617
] |
def test_from_handler(self):
    """``HandlerInstance.from_handler`` wraps a decorated handler, reusing
    its ``_h_info`` and starting out enabled."""
    @event.event
    def handler():
        pass

    h_inst = event.HandlerInstance.from_handler(handler)
    assert h_inst.info is handler._h_info
    assert h_inst.enabled
    assert h_inst.handler is handler._h_info.handler
4,
2,
4,
6,
1461341617
] |
def func():
    # Placeholder; likely a minimal callable used by a surrounding test
    # whose context is not visible in this extract.
    pass
4,
2,
4,
6,
1461341617
] |
def test_hash(self):
    """Two HandlerInstances built from the same handler are distinct,
    unequal objects that nevertheless hash identically."""
    @event.event
    def handler():
        pass

    h_inst = event.HandlerInstance.from_handler(handler)
    h_inst2 = event.HandlerInstance.from_handler(handler)
    assert h_inst is not h_inst2
    # Hash is derived from the shared handler info, not object identity.
    assert hash(h_inst) == hash(h_inst2)
    assert h_inst != h_inst2
4,
2,
4,
6,
1461341617
] |
def test_extend(self, evt, loop):
    """``ResultSet.extend`` accumulates ReturnValue/ResultSet/None inputs:
    ``eat`` is OR-ed, event lists are concatenated, schedules unioned."""
    async def corofunc():
        pass

    coro = corofunc()
    coro2 = corofunc()
    # silence "coroutine never awaited" warnings
    loop.run_until_complete(coro)
    loop.run_until_complete(coro2)

    rval = event.ReturnValue(append_events=[evt])
    rval2 = event.ReturnValue(eat=True, schedule={coro})
    rval3 = event.ReturnValue(append_events=[evt], insert_events=[evt],
                              schedule={coro, coro2})
    rset = event.ResultSet()
    rset2 = event.ResultSet()

    # A single ReturnValue contributes its events without setting eat.
    rset.extend(rval)
    assert not rset.eat
    assert rset.append_events == [evt]
    # eat and schedule merge in from the second value.
    rset.extend(rval2)
    assert rset.eat
    assert rset.schedule == {coro}
    # Extending with another ResultSet and with None is also supported;
    # None is a no-op.
    rset2.extend(rval3)
    rset.extend(rset2)
    rset.extend(None)
    assert rset.eat
    assert rset.append_events == [evt, evt]
    assert rset.insert_events == [evt]
    assert rset.schedule == {coro, coro2}
4,
2,
4,
6,
1461341617
] |
def test_type(self):
    """``ResultSet.extend`` rejects unsupported argument types."""
    rset = event.ResultSet()
    with pytest.raises(NotImplementedError):
        rset.extend([])
    with pytest.raises(NotImplementedError):
        rset.extend(False)
4,
2,
4,
6,
1461341617
] |
def dispatcher(self):
    """Test fixture: a fresh, empty EventDispatcher per call."""
    fresh_dispatcher = event.EventDispatcher()
    return fresh_dispatcher
4,
2,
4,
6,
1461341617
] |
def test_register_plugin(self, dispatcher):
    """``register_plugin`` collects every decorated handler method on the
    object and files them all under their (shared) event name."""
    name = "some_name"

    class AClass:
        @event.event(name)
        def handler(self):
            pass

        # Fixed typo: was "hander".  The name must still differ from
        # "handler" so both methods survive on the class and register as
        # two separate handlers.
        @event.event(name)
        async def async_handler(self):
            pass

    obj = AClass()
    h_insts = dispatcher.register_plugin(obj)
    # Both handlers share one event name -> a single event_map entry ...
    assert len(dispatcher.event_map) == 1
    # ... holding two handler instances (one sync, one async).
    assert len(h_insts) == 2
    for h_inst in h_insts:
        assert h_inst in dispatcher.event_map[name]
4,
2,
4,
6,
1461341617
] |
def test_dispatch_priority(self, dispatcher, loop, evt):
    """Handlers run in descending priority order; sync and async handlers
    mix freely in one dispatch."""
    called = list()

    @event.event(evt.name, priority=0)
    async def corofunc():
        called.append(corofunc)

    @event.event(evt.name, priority=1)
    def corofunc2():
        called.append(corofunc2)

    h_inst = event.HandlerInstance.from_handler(corofunc)
    h_inst2 = event.HandlerInstance.from_handler(corofunc2)
    # Registration order is deliberately the reverse of priority order.
    dispatcher.register(h_inst)
    dispatcher.register(h_inst2)
    loop.run_until_complete(dispatcher.dispatch(evt))
    # Higher priority (1) runs before lower (0), regardless of
    # registration order.
    assert called == [corofunc2, corofunc]
4,
2,
4,
6,
1461341617
] |
def test_dispatch_exception(self, loop, evt):
    """A raising handler is logged via ``logger.exception`` and does not
    prevent the remaining handlers from running."""
    logger = mock.Mock(Logger)
    dispatcher = event.EventDispatcher(logger=logger)
    called = 0

    @event.event(evt.name)
    async def corofunc():
        nonlocal called
        called += 1
        raise ValueError("yeah async")

    @event.event(evt.name)
    def handler():
        nonlocal called
        called += 1
        raise ValueError("yeah sync")

    dispatcher.register(event.HandlerInstance.from_handler(corofunc))
    dispatcher.register(event.HandlerInstance.from_handler(handler))
    assert not logger.exception.called
    loop.run_until_complete(dispatcher.dispatch(evt))
    # Both handlers ran despite each raising ...
    assert called == 2
    # ... and each exception was logged exactly once.
    assert logger.exception.call_count == 2
4,
2,
4,
6,
1461341617
] |
def test_dispatch_eat(self, loop, evt):
    """A handler returning ``ReturnValue(eat=True)`` stops dispatch to all
    lower-priority handlers."""
    dispatcher = event.EventDispatcher()
    called = [False] * 3

    @event.event(evt.name, priority=1)
    def corofunc():
        called[0] = True

    @event.event(evt.name, priority=0)
    async def corofunc2():
        called[1] = True
        return event.ReturnValue(eat=True)

    @event.event(evt.name, priority=-1)
    async def corofunc3():
        called[2] = True

    dispatcher.register(event.HandlerInstance.from_handler(corofunc))
    dispatcher.register(event.HandlerInstance.from_handler(corofunc2))
    dispatcher.register(event.HandlerInstance.from_handler(corofunc3))
    result = loop.run_until_complete(dispatcher.dispatch(evt))
    assert result.eat
    # Priorities 1 and 0 ran; priority -1 was eaten.
    assert called == [True, True, False]
4,
2,
4,
6,
1461341617
] |
def corofunc1():
    # Fragment of a larger dispatch test: `called`, `evt` and `evt2` come
    # from an enclosing scope not visible in this extract.
    called[0] += 1
    # Queue evt2 for immediate (inserted) dispatch and evt for later.
    return event.ReturnValue(insert_events=[evt2], append_events=[evt])
4,
2,
4,
6,
1461341617
] |
def corofunc2():
    # Fragment of a larger dispatch test: `called`, `evt2` and `evt3` come
    # from an enclosing scope not visible in this extract.
    called[1] += 1
    # Queue evt3 for immediate (inserted) dispatch and evt2 for later.
    return event.ReturnValue(insert_events=[evt3], append_events=[evt2])
4,
2,
4,
6,
1461341617
] |
def corofunc3():
    # Fragment of a larger dispatch test: `called` and `evt3` come from an
    # enclosing scope not visible in this extract.
    called[2] += 1

    async def corofunc():
        pass

    # Append evt3 again and hand a coroutine to the dispatcher to schedule.
    return event.ReturnValue(append_events=[evt3], schedule={corofunc()})
4,
2,
4,
6,
1461341617
] |