text (stringlengths 67 to 7.88k) |
---|
<|fim_prefix|>def <|fim_suffix|>(self) -> WebElement:
"""Locator for temperature module on deckmap."""
return self.base.clickable_wrapper(ModuleSetup.temperature_module)<|fim_middle|>get_temperature_module<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(query, args, **kwargs):
r = self.get('/httpbin' + query, **kwargs)
self.assertEqual(r.json()['args'], args)<|fim_middle|>f<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
# test the remaining errors and warnings
fs_1 = 48000
fs_2 = 96000
# use power signal, which needs match_amplitude="time"
signal = pf.signals.sine(3000, 128, full_period=True, sampling_rate=fs_1)
signal.fft_norm = "amplitude"
# test ValueError with wrong value for match_amplitude
with pytest.raises(ValueError, match='match_amplitude must be "time"'):
pf.dsp.resample(signal, fs_2, match_amplitude="freq")
# test TypeError for input is not a pyfar.Signal
with pytest.raises(TypeError,
match="Input data has to be of type pyfar.Signal"):
pf.dsp.resample([0, 1, 0], fs_2, match_amplitude="freq")
# test ValueError for invalid match_amplitude, must be "time" or "freq"
with pytest.raises(ValueError,
match="match_amplitude is 'invalid_match_amplitude'"):
pf.dsp.resample(signal, fs_2,
match_amplitude="invalid_match_amplitude")
# test warning for target sampling rate is not divisible by 10
with pytest.warns(UserWarning,
match="At least one sampling rate is not divisible"):
pf.dsp.resample(signal, 12345, match_amplitude="time")
# test warning for target sampling rate realisation with an error
signal2 = pf.signals.impulse(128, 64, sampling_rate=48000)
with pytest.warns(UserWarning,
match="The target sampling rate was realized with"):
pf.dsp.resample(signal2, 420, frac_limit=100)<|fim_middle|>test_resample_assertions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, task, script, err):
line_number, error_line = self.get_error_line_number_and_content(script, err)
if isinstance(err, SpiffWorkflowException):
err.line_number = line_number
err.error_line = error_line
err.add_note(f"Python script error on line {line_number}: '{error_line}'")
return err
detail = err.__class__.__name__
if len(err.args) > 0:
detail += ":" + err.args[0]
return WorkflowTaskException(detail, task=task, exception=err, line_number=line_number, error_line=error_line)<|fim_middle|>create_task_exec_exception<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
wb.wb_supervisor_simulation_reset()<|fim_middle|>simulation_reset<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional[Sequence['outputs.DatabaseBackupSettingResponse']]:
"""
Databases included in the backup.
"""
return pulumi.get(self, "databases")<|fim_middle|>databases<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>():
a1 = analysis.analyzer(
"my_analyzer1",
tokenizer="keyword",
filter=[
"lowercase",
analysis.token_filter("my_filter1", "stop", stopwords=["a", "b"]),
],
)
a2 = analysis.analyzer(
"my_analyzer2",
tokenizer=analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3),
filter=[analysis.token_filter("my_filter2", "stop", stopwords=["c", "d"])],
)
m = mapping.AsyncMapping()
m.field("title", "text", analyzer=a1, search_analyzer=a2)
m.field(
"text",
"text",
analyzer=a1,
fields={
"english": Text(analyzer=a1),
"unknown": Keyword(analyzer=a1, search_analyzer=a2),
},
)
assert {
"analyzer": {
"my_analyzer1": {
"filter": ["lowercase", "my_filter1"],
"tokenizer": "keyword",
"type": "custom",
},
"my_analyzer2": {
"filter": ["my_filter2"],
"tokenizer": "trigram",
"type": "custom",
},
},
"filter": {
"my_filter1": {"stopwords": ["a", "b"], "type": "stop"},
"my_filter2": {"stopwords": ["c", "d"], "type": "stop"},
},
"tokenizer": {"trigram": {"max_gram": 3, "min_gram": 3, "type": "nGram"}},
} == m._collect_analysis()<|fim_middle|>test_mapping_can_collect_multiple_analyzers<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, request: Request, organization) -> Response:
"""
List one or more plugin configurations, including a `projectList` for each plugin which contains
all the projects that have that specific plugin both configured and enabled.
- similar to the `OrganizationPluginsEndpoint`, and can eventually replace it
:qparam plugins array[string]: an optional list of plugin ids (slugs) if you want specific plugins.
If not set, will return configurations for all plugins.
"""
desired_plugins = []
for slug in request.GET.getlist("plugins") or ():
# if the user requests a plugin that doesn't exist, throw 404
try:
desired_plugins.append(plugins.METHOD_NAME(slug))
except KeyError:
return Response({"detail": "Plugin %s not found" % slug}, status=404)
# if no plugins were specified, grab all plugins but limit by those that have the ability to be configured
if not desired_plugins:
desired_plugins = list(plugins.plugin_that_can_be_configured())
# `keys_to_check` are the ProjectOption keys that tell us if a plugin is enabled (e.g. `plugin:enabled`) or are
# configured properly, meaning they have the required information - plugin.required_field - needed for the
# plugin to work (ex:`opsgenie:api_key`)
keys_to_check = []
for plugin in desired_plugins:
keys_to_check.append("%s:enabled" % plugin.slug)
if plugin.required_field:
keys_to_check.append(f"{plugin.slug}:{plugin.required_field}")
# Get all the project options for org that have truthy values
project_options = ProjectOption.objects.filter(
key__in=keys_to_check, project__organization=organization
).exclude(value__in=[False, ""])
"""
This map stores info about whether a plugin is configured and/or enabled
{
"plugin_slug": {
"project_id": { "enabled": True, "configured": False },
},
}
"""
info_by_plugin_project = {}
for project_option in project_options:
[slug, field] = project_option.key.split(":")
project_id = project_option.project_id
# first add to the set of all projects by plugin
info_by_plugin_project.setdefault(slug, {}).setdefault(
project_id, {"enabled": False, "configured": False}
)
# next check if enabled
if field == "enabled":
info_by_plugin_project[slug][project_id]["enabled"] = True
# if the projectoption is not the enabled field, it's a configuration field
else:
info_by_plugin_project[slug][project_id]["configured"] = True
# get the IDs of all projects for found project options and grab them from the DB
project_id_set = {project_option.project_id for project_option in project_options}
projects = Project.objects.filter(id__in=project_id_set, status=ObjectStatus.ACTIVE)
# create a key/value map of our projects
project_map = {project.id: project for project in projects}
# iterate through the desired plugins and serialize them
serialized_plugins = []
for plugin in desired_plugins:
serialized_plugin = serialize(plugin, request.user, PluginSerializer())
if serialized_plugin["isDeprecated"]:
continue
serialized_plugin["projectList"] = []
info_by_project = info_by_plugin_project.METHOD_NAME(plugin.slug, {})
# iterate through the projects
for project_id, plugin_info in info_by_project.items():
# if the project is being deleted
if project_id not in project_map:
continue
project = project_map[project_id]
# only include plugins which are configured
if not plugin_info["configured"]:
continue
serialized_plugin["projectList"].append(
{
"projectId": project.id,
"projectSlug": project.slug,
"projectName": project.name, # TODO(steve): do we need?
"enabled": plugin_info["enabled"],
"configured": plugin_info["configured"], # TODO(steve): do we need?
"projectPlatform": project.platform,
}
)
# sort by the projectSlug
serialized_plugin["projectList"].sort(key=lambda x: x["projectSlug"])
serialized_plugins.append(serialized_plugin)
if not serialized_plugins:
raise Http404
return Response(serialized_plugins)<|fim_middle|>get<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
staff_api_client, app, permission_manage_products
):
# given
app.is_active = False
app.save()
app_extension = AppExtension.objects.create(
app=app,
label="Create product with App",
url="https://www.example.com/app-product",
mount=AppExtensionMount.PRODUCT_OVERVIEW_MORE_ACTIONS,
)
app_extension.permissions.add(permission_manage_products)
variables = {}
# when
response = staff_api_client.post_graphql(
QUERY_APP_EXTENSIONS,
variables,
)
# then
content = get_graphql_content(response)
extensions_data = content["data"]["appExtensions"]["edges"]
assert len(extensions_data) == 0<|fim_middle|>test_app_extensions_app_not_active<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
q = Q0()
result = q.answer("carbetocin")
assert result["id"] == "KEGG:C18365" and result["category"] == "metabolite"
result = q.answer("DOID:9281")
assert result["id"] == "DOID:9281" and result["category"] == "disease"
result = q.answer("lovastatin")
assert result["id"] == "CHEMBL.COMPOUND:CHEMBL503" and result["category"] == "chemical_substance"
result = q.answer("blobfoodle")
assert result == None<|fim_middle|>test_q0_answer<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self) -> None:
await self._client.METHOD_NAME()<|fim_middle|>close<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, other: AcceptableFValOtherInput, third: AcceptableFValOtherInput) -> 'FVal':
"""
Fused multiply-add. Return self*other+third with no rounding of the
intermediate product self*other
"""
evaluated_other = _evaluate_input(other)
evaluated_third = _evaluate_input(third)
return FVal(self.num.METHOD_NAME(evaluated_other, evaluated_third))<|fim_middle|>fma<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(diff_level=0):
def replace_heading(match):
if diff_level == 0:
return match.group(0)
else:
return '\n' + '#' * (match.group(0).count('#') + diff_level) + ' '
return replace_heading<|fim_middle|>replace_heading_func<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(*args, **kwargs):
try:
return fun(*args, **kwargs)
except ResourcePathNotFound:
return paramiko.SFTP_NO_SUCH_FILE
except ValidationException:
return paramiko.SFTP_FAILURE
except AccessException:
return paramiko.SFTP_PERMISSION_DENIED
except Exception:
logger.exception('SFTP server internal error')
return paramiko.SFTP_FAILURE<|fim_middle|>wrapped<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, username, work_id, edition_id=NULL_EDITION_VALUE):
"""Remove a patron's specific booknote by work_id.
Technical note: work_id is not an optional argument and
intentionally does not default to None (to reduce
accidents/risk), however if one passes None as a value to
work_id, this method will remove all booknotes for a patron
(useful for a patron who may decide to close their account).
Q: Is there a way to add a dryrun=False param to make this safer?
return: a list of the IDs affected
"""
oldb = db.get_db()
where = {
'username': username,
'work_id': int(work_id),
'edition_id': edition_id,
}
try:
return oldb.delete(
'booknotes',
where=(
'work_id=$work_id AND username=$username AND edition_id=$edition_id'
),
vars=where,
)
except: # we want to catch no entry exists
return None<|fim_middle|>remove<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
issues: List[LinterIssue], *, lint: Optional[projects.Lint]
) -> None:
"""Mark any remaining filename match as ignored."""
if lint is None:
return
for issue in issues:
files = lint.ignored_files(issue.name)
for pattern in files:
if (
issue.filename
and issue.result != LinterResult.IGNORED
and fnmatch.fnmatch(issue.filename, pattern)
):
emit.verbose(
f"Ignore {issue.name!r} linter issue ({issue.filename!r} "
f"matches {pattern!r})"
)
issue.result = LinterResult.IGNORED
break<|fim_middle|>ignore_matching_filenames<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, dp_id, dp_name, event_dict):
"""Notify of an event."""
assert isinstance(event_dict, dict)
self.event_id += 1
event = {
"version": 1,
"time": time.time(),
"dp_id": dp_id,
"dp_name": dp_name,
"event_id": self.event_id,
}
for header_key in list(event):
assert header_key not in event_dict
event.update(event_dict)
self.metrics.faucet_event_id.set(event["event_id"])
if self.event_q.full():
self.event_q.get()
self.event_q.put(event)<|fim_middle|>notify<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(domain, info):
"""
Determine the URL corresponding to Python object
Notes
-----
From https://github.com/numpy/numpy/blob/v1.15.1/doc/source/conf.py, 7c49cfa
on Jul 31. License BSD-3. https://github.com/numpy/numpy/blob/v1.15.1/LICENSE.txt
"""
if domain != "py":
return None
modname = info["module"]
fullname = info["fullname"]
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split("."):
try:
obj = getattr(obj, part)
except Exception: # NOQA: PERF203
return None
# strip decorators, which would resolve to the source of the decorator
# possibly an upstream bug in getsourcefile, bpo-1764286
try:
unwrap = inspect.unwrap
except AttributeError:
pass
else:
obj = unwrap(obj)
try:
fn = inspect.getsourcefile(obj)
except Exception:
fn = None
if not fn:
return None
try:
source, lineno = inspect.getsourcelines(obj)
except Exception:
lineno = None
linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1) if lineno else ""
fn = relpath(fn, start=pathlib.Path(tmuxp.__file__).parent)
if "dev" in about["__version__"]:
return "{}/blob/master/{}/{}/{}{}".format(
about["__github__"],
"src",
about["__package_name__"],
fn,
linespec,
)
else:
return "{}/blob/v{}/{}/{}/{}{}".format(
about["__github__"],
about["__version__"],
"src",
about["__package_name__"],
fn,
linespec,
)<|fim_middle|>linkcode_resolve<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self, provider: ExternalProviders, value: NotificationSettingOptionValues
) -> None:
type = NotificationSettingTypes.ISSUE_ALERTS
NotificationSetting.objects.update_settings(provider, type, value, user_id=self.user.id)
NotificationSetting.objects.update_settings(
provider, type, value, user_id=self.user.id, project=self.project
)<|fim_middle|>set_setting<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
if self._cmake is not None:
return self._cmake
self._cmake = CMake(self)
self._cmake.definitions["CMAKE_INSTALL_DATAROOTDIR"] = "lib"
self._cmake.definitions["SKIP_component_name"] = False
self._cmake.configure(source_folder=self._source_subfolder)
return self._cmake<|fim_middle|>configure_cmake<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.iface_re = re.compile(r"\d+: (?P<iface>[\w\-@]+):")
self.ip_re = re.compile(r"\s+inet (?P<ip4>[\d.]+)(?:/| )")
self.ip6_re = re.compile(r"\s+inet6 (?P<ip6>[\da-f:]+)(?:/\d{1,3}| ) scope global dynamic")<|fim_middle|>post_config_hook<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(vw, fva):
'''
this analysis module will identify thunk_reg functions, which place the .GOT pointer
into some register which is then accessed later.
doing so allows for position-independent code.
store funcva in "thunk_reg" VaSet in case we identify multiples (not likely) or misidentify
something.
then store the module base in metadata as "PIE_GOT", accessible by other analysis modules.
then store the register used in this function in function metadata as "PIE_reg"
'''
got = None
for segva, segsz, segnm, segimg in vw.getSegments():
if segnm == '.got':
got = segva
break
# if we don't have a segment named ".got" we fail.
if got is None:
return
# roll through the first few opcodes looking for one to load a register with .got's address
success = 0
tva = fva
emu = vw.getEmulator(va=fva)
emu._prep(tva)
for x in range(MAX_INIT_OPCODES):
op = emu.parseOpcode(tva)
tmode = emu.getFlag(PSR_T_bit)
emu.executeOpcode(op)
newtmode = emu.getFlag(PSR_T_bit)
if newtmode != tmode:
emu.setFlag(PSR_T_bit, tmode)
if op.iflags & (envi.IF_BRANCH_COND) == (envi.IF_BRANCH_COND):
break
if not len(op.opers):
continue
operval = op.getOperValue(0, emu)
if operval == got:
success = True
reg = op.opers[0].reg
vw.setVaSetRow('thunk_reg', (fva, reg, 0))
if vw.getFunctionMeta(fva, 'PIE_reg') is None:
vw.setFunctionMeta(fva, 'PIE_reg', reg)
vw.setComment(op.va, 'Position Indendent Code Register Set: %s' % vw.arch._arch_reg.getRegisterName(reg))
if vw.getMeta('PIE_GOT') is None:
vw.setMeta('PIE_GOT', got)
break
tva += len(op)
if op.isReturn():
logger.debug("thunk_reg: returning before finding PIE data")
break
if not success:
return
logger.debug('funcva 0x%x using thunk_reg for PIE', fva)
# now check through all the functions and track references
emumon = AnalysisMonitor(vw, fva)
emu.setEmulationMonitor(emumon)
try:
emu.runFunction(fva, maxhit=1)
except Exception:
logger.exception("Error emulating function 0x%x\n\t%r", fva, emumon.emuanom)
# now roll through tracked references and make xrefs/comments
items = list(emumon.tracker.items())
items.sort()
for va, tgt in items:
# if we already have xrefs, don't make more...
if vw.getLocation(tgt) is None:
try:
vw.followPointer(tgt)
except envi.SegmentationViolation:
logger.debug("SegV: %x (va:0x%x)", tgt, va)
emumon.emuanom.append("SegV: %x (va:0x%x)" % (tgt, va))
continue
nogo = False
for xfr, xto, xtype, xflag in vw.getXrefsFrom(va):
if xto == tgt:
nogo = True
if not nogo:
logger.debug("PIE XREF: 0x%x -> 0x%x", va, tgt)
try:
vw.addXref(va, tgt, REF_DATA, 0)
except Exception as e:
logger.exception('error adding XREF: %s', e)
## FIXME: force analysis of the xref. very likely string for current example code.
# set comment. if existing comment, by default, don't... otherwise prepend the info before the existing comment
curcmt = vw.getComment(va)
cmt = "0x%x: %s" % (tgt, vw.reprPointer(tgt))
if curcmt is None or not len(curcmt):
vw.setComment(va, cmt)
elif cmt not in curcmt:
cmt = "0x%x: %s ;\n %s" % (tgt, vw.reprPointer(tgt), curcmt)
vw.setComment(va, cmt)
logger.debug("PIE XREF: %x %s", va, cmt)
logger.debug("ANOMS: \n%r", emumon.emuanom)<|fim_middle|>analyze_function<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
return make_response(jsonify({"message": "signed out."}), 302)<|fim_middle|>signout<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
admin_client, environment, environment_api_key, slack_environment_config
):
# Given
url = reverse(
"api-v1:environments:integrations-slack-detail",
args=[environment_api_key, slack_environment_config],
)
# When
response = admin_client.get(url)
# Then
assert response.status_code == status.HTTP_200_OK
assert response.json()["id"] == slack_environment_config<|fim_middle|>test_get_environment_config_returns_200<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
diag = Diagnosis()
def foo_diagnostic():
diag.fail("foo", "a")
def bar_diagnostic():
diag.success("bar", "b")
diag.register(foo_diagnostic, bar_diagnostic)
my_results = diag.run_explicit()
assert my_results == [
Diagnosis.Result(Diagnosis.DIAGNOSIS_FAIL, "foo", "a", None, None, None, None),
Diagnosis.Result(Diagnosis.DIAGNOSIS_SUCCESS, "bar", "b", None, None, None, None),
]
# Explicit diagnostics must not be stored
assert not diag.diagnoses<|fim_middle|>test_registering_and_running_explicit_diagnostics<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(xs: List[Tensor], aggr: Optional[str]) -> Optional[Tensor]:
if len(xs) == 0:
return None
elif aggr is None:
return torch.stack(xs, dim=1)
elif len(xs) == 1:
return xs[0]
elif aggr == "cat":
return torch.cat(xs, dim=-1)
else:
out = torch.stack(xs, dim=0)
out = getattr(torch, aggr)(out, dim=0)
out = out[0] if isinstance(out, tuple) else out
return out<|fim_middle|>group<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# AES128 encrypt and decrypt
kb = h('9062430C8CDA3388922E6D6A509F5B7A')
conf = h('94B491F481485B9A0678CD3C4EA386AD')
keyusage = 2
plain = b'9 bytesss'
ctxt = h('68FB9679601F45C78857B2BF820FD6E53ECA8D42FD4B1D7024A09205ABB7CD2E'
'C26C355D2F')
k = Key(Enctype.AES128, kb)
self.assertEqual(encrypt(k, keyusage, plain, conf), ctxt)
self.assertEqual(decrypt(k, keyusage, ctxt), plain)<|fim_middle|>test_ae_s128<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(user, threshold):
"""When a comment is marked as spam, make appropriate status update to user model
"""
total_spam = ProposalComment.objects.filter(commenter=user, is_spam=True).count()
if total_spam >= threshold:
if user.is_active is True:
user.is_active = False
user.save()
else:
if user.is_active is False:
user.is_active = True
user.save()<|fim_middle|>user_action_for_spam<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, queryset, name, value):
try:
parts = value.split(",", 1)
lat, lng = float(parts[0]), float(parts[1])
return queryset.filter(geom__covers=Point(lng, lat))
except (ValueError, IndexError):
pass
return queryset<|fim_middle|>latlng_filter<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
torch.manual_seed(0)
transformer = Transformer2DModel(
sample_size=16,
num_layers=2,
patch_size=4,
attention_head_dim=8,
num_attention_heads=2,
in_channels=4,
out_channels=8,
attention_bias=True,
activation_fn="gelu-approximate",
num_embeds_ada_norm=1000,
norm_type="ada_norm_zero",
norm_elementwise_affine=False,
)
vae = AutoencoderKL()
scheduler = DDIMScheduler()
components = {"transformer": transformer.eval(), "vae": vae.eval(), "scheduler": scheduler}
return components<|fim_middle|>get_dummy_components<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(i):
"""Allow getting Unicode character on narrow python builds."""
try:
return unichar(i)
except ValueError: # pragma: no cover
return struct.pack('i', i).decode('utf-32')<|fim_middle|>uchr<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(tmp_path):
t = Table()
t["a"] = [1, 2, 3, 4, 5]
t["b"] = ["<b>a</b>", "b", "c", "d", "e"]
t["a"].unit = "m"
tmpfile = tmp_path / "test.html"
t.write(
tmpfile,
format="jsviewer",
table_id="test",
max_lines=3,
jskwargs={"display_length": 5},
table_class="display hover",
htmldict=dict(raw_html_cols="b"),
)
ref = REFERENCE % dict(
lines=format_lines(t["a"][:3], t["b"][:3]),
table_class="display hover",
table_id="test",
length="5",
display_length="5, 10, 25, 50, 100, 500, 1000",
datatables_css_url=(
"https://cdn.datatables.net/1.10.12/css/jquery.dataTables.css"
),
datatables_js_url=(
"https://cdn.datatables.net/1.10.12/js/jquery.dataTables.min.js"
),
jquery_url="https://code.jquery.com/" + JQUERY_MIN_JS,
)
with open(tmpfile) as f:
assert f.read().strip() == ref.strip()<|fim_middle|>test_write_jsviewer_options<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
msg = ('test', 1)
dbus_object = DummyDBusObject()
device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
device_object.notify_child(msg)
self.assertEqual(dbus_object.notify_msg, msg)<|fim_middle|>test_device_notify_child<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, lang, file_path):
"""
{
"original": "artigo02.pdf",
"en": "artigo02-en.pdf",
}
"""
self._renditions[lang] = self.file_path(file_path)<|fim_middle|>add_rendition<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional[str]:
"""
Etag of the azure resource
"""
return pulumi.get(self, "etag")<|fim_middle|>etag<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
lookup_type: str, handler_or_path: Union[str, Type[LookupHandler]]
) -> None:
"""Register a lookup handler.
Args:
lookup_type: Name to register the handler under.
handler_or_path: A function or a path to a handler.
"""
handler = handler_or_path
LOGGER.debug("registering CFNgin lookup: %s=%s", lookup_type, handler_or_path)
if isinstance(handler_or_path, str):
handler = cast(type, load_object_from_string(handler_or_path))
else:
handler = handler_or_path
try:
if issubclass(handler, LookupHandler):
CFNGIN_LOOKUP_HANDLERS[lookup_type] = handler
return
# Handler is a not a new-style handler
except Exception: # pylint: disable=broad-except
LOGGER.debug("failed to validate lookup handler", exc_info=True)
LOGGER.error(
'lookup "%s" uses an unsupported format; to learn how to write '
"lookups visit %s/page/cfngin/lookups/index.html#writing-a-custom-lookup",
lookup_type,
DOC_SITE,
)
raise TypeError(
f"lookup {handler_or_path} must be a subclass of "
"runway.lookups.handlers.base.LookupHandler"
)<|fim_middle|>register_lookup_handler<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(webcam):
webcam_dict = webcam.config.dict()
webcam_dict["provider"] = webcam.providerIdentifier
return webcam_dict<|fim_middle|>to_dict<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, organization, data, actor=None):
raise NotImplementedError<|fim_middle|>create_repository<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, p):
"fields : ID"
p[0] = [p[1]]<|fim_middle|>p_fields_id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
tnow = Time.now()
iers_b = iers.IERS_B.open()
delta1, status1 = tnow.get_delta_ut1_utc(iers_b, return_status=True)
assert status1 == iers.TIME_BEYOND_IERS_RANGE
with iers.earth_orientation_table.set(iers.IERS_B.open()):
delta2, status2 = tnow.get_delta_ut1_utc(return_status=True)
assert status2 == status1
with pytest.raises(iers.IERSRangeError):
tnow.ut1<|fim_middle|>test_ut1_iers_b<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
state = [
'module.commcarehq.module.servers.aws_instance.server[0]',
'module.commcarehq.module.servers.aws_instance.server[1]',
'module.commcarehq.module.servers.aws_instance.server[2]',
'module.commcarehq.module.servers.aws_instance.server[3]',
'module.commcarehq.module.servers.aws_instance.server[4]',
'module.commcarehq.module.Users.aws_iam_account_alias.alias',
'aws_key_pair.droberts',
]
server_names = ['web0', 'celery0', 'proxy0', 'couch0', 'es0']
def change_root(environment, old_resource_address):
parts = old_resource_address.split('.')
if parts[:2] == ['module', 'commcarehq']:
parts = parts[2:]
return '.'.join(parts)
def rotate_servers(environment, old_resource_address):
parts = old_resource_address.split('.')
address_index_syntax_matcher = re.compile(r'\[(\d+)\]$')
if parts[:3] == ['module', 'servers', 'aws_instance'] and parts[3].startswith(
'server'):
index = int(address_index_syntax_matcher.search(parts[3]).group(1))
if index == 2:
index = 4
elif index > 2:
index -= 1
parts = ['module', 'servers', 'aws_instance', 'server[{}]'.format(index)]
return '.'.join(parts)
def name_servers(environment, old_resource_address):
parts = old_resource_address.split('.')
address_index_syntax_matcher = re.compile(r'\[(\d+)\]$')
if parts[:3] == ['module', 'servers', 'aws_instance'] and parts[3].startswith('server'):
index = int(address_index_syntax_matcher.search(parts[3]).group(1))
name = server_names[index]
parts = ['aws_instance', name]
return '.'.join(parts)
migration = [
Migration(number=1, slug='change-root', get_new_resource_address=change_root),
Migration(number=2, slug='rotate-servers', get_new_resource_address=rotate_servers),
Migration(number=3, slug='name-servers', get_new_resource_address=name_servers),
]
environment = namedtuple('MockEnv', 'env_name')(env_name='test')
migration_plans = make_migration_plans(environment, state, migration)
expected_state_0 = [
'module.commcarehq.module.servers.aws_instance.server[0]',
'module.commcarehq.module.servers.aws_instance.server[1]',
'module.commcarehq.module.servers.aws_instance.server[2]',
'module.commcarehq.module.servers.aws_instance.server[3]',
'module.commcarehq.module.servers.aws_instance.server[4]',
'module.commcarehq.module.Users.aws_iam_account_alias.alias',
'aws_key_pair.droberts',
]
expected_moves_0 = [
['module.commcarehq.module.servers.aws_instance.server[0]', 'module.servers.aws_instance.server[0]'],
['module.commcarehq.module.servers.aws_instance.server[1]', 'module.servers.aws_instance.server[1]'],
['module.commcarehq.module.servers.aws_instance.server[2]', 'module.servers.aws_instance.server[2]'],
['module.commcarehq.module.servers.aws_instance.server[3]', 'module.servers.aws_instance.server[3]'],
['module.commcarehq.module.servers.aws_instance.server[4]', 'module.servers.aws_instance.server[4]'],
['module.commcarehq.module.Users.aws_iam_account_alias.alias', 'module.Users.aws_iam_account_alias.alias'],
]
expected_state_1 = [
'module.servers.aws_instance.server[0]',
'module.servers.aws_instance.server[1]',
'module.servers.aws_instance.server[2]',
'module.servers.aws_instance.server[3]',
'module.servers.aws_instance.server[4]',
'module.Users.aws_iam_account_alias.alias',
'aws_key_pair.droberts',
]
expected_moves_1 = [
['module.servers.aws_instance.server[2]', 'module.servers.aws_instance.server-tmp-0[4]'],
['module.servers.aws_instance.server[3]', 'module.servers.aws_instance.server[2]'],
['module.servers.aws_instance.server[4]', 'module.servers.aws_instance.server[3]'],
['module.servers.aws_instance.server-tmp-0[4]', 'module.servers.aws_instance.server[4]'],
]
expected_state_2 = [
'module.servers.aws_instance.server[0]',
'module.servers.aws_instance.server[1]',
'module.servers.aws_instance.server[4]',
'module.servers.aws_instance.server[2]',
'module.servers.aws_instance.server[3]',
'module.Users.aws_iam_account_alias.alias',
'aws_key_pair.droberts',
]
expected_moves_2 = [
['module.servers.aws_instance.server[0]', 'aws_instance.web0'],
['module.servers.aws_instance.server[1]', 'aws_instance.celery0'],
['module.servers.aws_instance.server[4]', 'aws_instance.es0'],
['module.servers.aws_instance.server[2]', 'aws_instance.proxy0'],
['module.servers.aws_instance.server[3]', 'aws_instance.couch0'],
]
expected_state_3 = [
'aws_instance.web0',
'aws_instance.celery0',
'aws_instance.es0',
'aws_instance.proxy0',
'aws_instance.couch0',
'module.Users.aws_iam_account_alias.alias',
'aws_key_pair.droberts',
]
assert_equal(len(migration_plans), 3)
assert_equal(migration_plans[0], MigrationPlan(
migration=migration[0],
start_state=expected_state_0,
moves=expected_moves_0,
end_state=expected_state_1,
))
assert_equal(migration_plans[1], MigrationPlan(
migration=migration[1],
start_state=expected_state_1,
moves=expected_moves_1,
end_state=expected_state_2,
))
assert_equal(migration_plans[2], MigrationPlan(
migration=migration[2],
start_state=expected_state_2,
moves=expected_moves_2,
end_state=expected_state_3,
))<|fim_middle|>test_make_migration_plans<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Process an upload."""
if self.response_builder.get("chunked_id", None) is None:
return self.process_file()
return self.process_chunked_upload()<|fim_middle|>process_upload<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Check if quantization can be done."""
node = self.node
if not self.quantizer.is_valid_quantize_weight(node.input[0]):
return False
return True<|fim_middle|>quantize_check<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(stack, include_func_start_lineno=False):
"""Converts a stack extracted using extract_stack() to a traceback stack.
Args:
stack: A list of n 5-tuples,
(filename, lineno, name, frame_globals, func_start_lineno).
include_func_start_lineno: True if function start line number should be
included as the 5th entry in return tuples.
Returns:
A tuple of n 4-tuples or 5-tuples
(filename, lineno, name, code, [optional: func_start_lineno]), where the
code tuple element is calculated from the corresponding elements of the
input tuple.
"""
def _tuple_generator(): # pylint: disable=missing-docstring
for frame in stack:
filename = frame.filename
lineno = frame.lineno
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, frame.globals)
if line:
line = line.strip()
else:
line = None
if include_func_start_lineno:
yield (filename, lineno, frame.name, line, frame.func_start_lineno)
else:
yield (filename, lineno, frame.name, line)
return tuple(_tuple_generator())<|fim_middle|>convert_stack<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *args, **kwdict):
self.extension_manager.suspend()
retval = fnc(self, *args, **kwdict)
self.extension_manager.resume()
return retval<|fim_middle|>inner<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, num):
try:
int(num)
except (TypeError, ValueError):
return False
return True<|fim_middle|>isintlike<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_scaled_mode<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"Compare with another gridcell (-1,0,1)"
return<|fim_middle|>compare<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
os.makedirs('output_data')
generate_test_dir_1()
generate_test_dir_2()<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
runner = nnUNetV2Runner(input_config=self.data_src_cfg, trainer_class_name="nnUNetTrainer_1epoch")
with skip_if_downloading_fails():
runner.run(run_train=False, run_find_best_configuration=False, run_predict_ensemble_postprocessing=False)
runner.train(configs="3d_fullres")
runner.find_best_configuration(configs="3d_fullres")
runner.predict_ensemble_postprocessing()<|fim_middle|>test_nnunetv2runner<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""test attribute label"""
# get object
analystmemo_1 = Analystmemo.objects.get(analystmemo_note='lorem ipsum')
# get label
field_label = analystmemo_1._meta.get_field('analystmemo_id').verbose_name
# compare
self.assertEqual(field_label, 'analystmemo id')<|fim_middle|>test_analystmemo_id_attribute_label<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
mock = MagicMock(return_value=0)
with patch.dict(debian_service.__salt__, {"cmd.retcode": mock}):
# Test successful command (0 retcode)
assert debian_service.disable("foo")
# Confirm expected command was run
mock.assert_called_once_with("update-rc.d foo disable")
# Test unsuccessful command (nonzero retcode)
mock.return_value = 1
assert not debian_service.disable("foo")<|fim_middle|>test_disable<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(symbol: str) -> pd.DataFrame:
"""Returns funding rates by exchange for a certain symbol
[Source: https://coinglass.github.io/API-Reference/]
Parameters
----------
symbol : str
Crypto Symbol to search funding rates (e.g., BTC)
Returns
-------
pd.DataFrame
funding rate per exchange
"""
url = api_url + f"futures/funding_rates_chart?symbol={symbol.upper()}&type=C"
headers = {"coinglassSecret": get_current_user().credentials.API_COINGLASS_KEY}
response = request(url, headers=headers)
df = pd.DataFrame()
if response.status_code == 200:
res_json = json.loads(response.text)
if res_json["success"]:
if "data" in res_json:
data = res_json["data"]
time = data["dateList"]
time_new = []
for elem in time:
time_actual = dt.datetime.utcfromtimestamp(elem / 1000)
time_new.append(time_actual)
df = pd.DataFrame(
data={
"date": time_new,
"price": data["priceList"],
**data["dataMap"],
}
)
df = df.set_index("date")
else:
console.print(f"No data found for {symbol}.\n")
elif "secret invalid" in res_json["msg"]:
console.print("[red]Invalid API Key[/red]\n")
else:
console.print(res_json["msg"])
elif response.status_code == 429:
console.print("[red]Exceeded number of calls per minute[/red]\n")
elif response.status_code == 429:
console.print(
"[red]IP address autobanned for exceeding calls limit multiple times.[/red]\n"
)
return df<|fim_middle|>get_funding_rate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
""" Save any outstanding setting changes to the :class:`~plexapi.server.PlexServer`. This
performs a full reload() of Settings after complete.
"""
params = {}
for setting in self.all():
if setting._setValue:
log.info('Saving PlexServer setting %s = %s', setting.id, setting._setValue)
params[setting.id] = quote(setting._setValue)
if not params:
raise BadRequest('No setting have been modified.')
querystr = '&'.join(f'{k}={v}' for k, v in params.items())
url = f'{self.key}?{querystr}'
self._server.query(url, self._server._session.put)
self.reload()<|fim_middle|>save<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
with open(TTPROGRAM_TTX, "r", encoding="utf-8") as f:
ttProgramXML = f.read()
p = Program()
p.fromBytecode(BYTECODE)
ttfont = TestFont()
buf = StringIO()
writer = XMLWriter(buf)
try:
p.toXML(writer, ttfont)
finally:
output_string = buf.getvalue()
assert output_string == ttProgramXML<|fim_middle|>test_xml_indentation<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, ctx, op):
(cond, x, y), device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
with device(device_id):
ctx[op.outputs[0].key] = xp.where(cond, x, y)<|fim_middle|>execute<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Write a restart file for a continuous simulation divided into multiple jobs.
"""
with open("restart.out", "w") as f:
f.write("count {0} \n".format(str(self._count)))<|fim_middle|>write_restart<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return """\
flip
Determines if the positions obtained from solver are
flipped on each axis.
packing
Determines d3 treemap solver. For more info please
refer to https://github.com/d3/d3-hierarchy#treemap-
tiling
pad
Sets the inner padding (in px).
squarifyratio
When using "squarify" `packing` algorithm, according to
https://github.com/d3/d3-
hierarchy/blob/v3.1.1/README.md#squarify_ratio this
option specifies the desired aspect ratio of the
generated rectangles. The ratio must be specified as a
number greater than or equal to one. Note that the
orientation of the generated rectangles (tall or wide)
is not implied by the ratio; for example, a ratio of
two will attempt to produce a mixture of rectangles
whose width:height ratio is either 2:1 or 1:2. When
using "squarify", unlike d3 which uses the Golden Ratio
i.e. 1.618034, Plotly applies 1 to increase squares in
treemap layouts.
"""<|fim_middle|>prop_descriptions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> tuple[int, ...]:
return self.array.METHOD_NAME<|fim_middle|>shape<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self, posterior: AbstractPosterior, dataset: Dataset, key: KeyArray
) -> AbstractPosterior:
"""
Takes a posterior and corresponding dataset and optimizes the posterior using the
GPJax `fit` method.
Args:
posterior: Posterior being optimized.
dataset: Dataset used for optimizing posterior.
key: A JAX PRNG key for generating random numbers.
Returns:
Optimized posterior.
"""
opt_posterior, _ = gpx.fit(
model=posterior,
objective=self.optimization_objective,
train_data=dataset,
optim=self.optimizer,
num_iters=self.num_optimization_iters,
safe=True,
key=key,
verbose=False,
)
return opt_posterior<|fim_middle|>optimize_posterior<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, interface):
'''See if the selected brightness was applied
Note: this doesn't guarantee that screen brightness
changed.
'''
if (
abs(self.get_actual_brightness(interface) -
self.get_last_set_brightness(interface)) > 1
):
return 1
else:
return 0<|fim_middle|>was_brightness_applied<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, d, guard):
m = dict(self)
if guard == true:
return Guards(m)
m[d] = guard
return m<|fim_middle|>impose<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
codebuild_client = mock.MagicMock
project_name = "test-project"
project_arn = f"arn:aws:codebuild:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:project/{project_name}"
codebuild_client.projects = [
Project(
name=project_name,
arn=project_arn,
region="eu-west-1",
last_invoked_time=None,
buildspec=None,
)
]
with mock.patch(
"prowler.providers.aws.services.codebuild.codebuild_service.Codebuild",
codebuild_client,
):
from prowler.providers.aws.services.codebuild.codebuild_project_user_controlled_buildspec.codebuild_project_user_controlled_buildspec import (
codebuild_project_user_controlled_buildspec,
)
check = codebuild_project_user_controlled_buildspec()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search(
"does not use an user controlled buildspec",
result[0].status_extended,
)
assert result[0].resource_id == project_name
assert result[0].resource_arn == project_arn
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION<|fim_middle|>test_project_not_buildspec<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(_request, locationid):
"""
Get rooms for presentation in OSM map based on location
"""
location = Location.objects.get(pk=locationid)
rooms = location.get_all_rooms().filter(position__isnull=False)
return _process_room_position(rooms)<|fim_middle|>get_rooms_with_position_for_location<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, warn_logger):
optimizer = self.get_optimizer()
self.mock_popen.return_value.returncode = 1
self.mock_popen.return_value.communicate.return_value = (
"Output",
"Error",
)
optimizer.run_optimizer(".jpg", "garbage")
warn_logger.assert_called_once()<|fim_middle|>test_should_log_warning_when_failed<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, prefix: str = ''):
raise NotImplementedError<|fim_middle|>wipe<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
client = boto3.client("logs", "us-east-1")
client.create_log_group(logGroupName="test1")
assert client.describe_queries(logGroupName="test1")["queries"] == []<|fim_middle|>test_describe_queries_on_log_group_without<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
super().METHOD_NAME()
self.person = PersonFactory.create(name="John Doe")<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(storage: StorageProvider, chunk_key: str):
return hash_inputs(storage.root, chunk_key)<|fim_middle|>get_stream_key<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
perms = S_IRUSR|S_IWUSR|S_IXUSR|S_IRGRP|S_IWGRP|S_IXGRP|S_IROTH|S_IWOTH|S_IXOTH|((S_IWGRP|S_ISGID) if self.coursedir.groupshared else 0)
# if root doesn't exist, create it and set permissions
if not os.path.exists(self.root):
self.log.warning("Creating exchange directory: {}".format(self.root))
try:
os.makedirs(self.root)
os.chmod(self.root, perms)
except PermissionError:
self.fail("Could not create {}, permission denied.".format(self.root))
else:
old_perms = oct(os.stat(self.root)[ST_MODE] & 0o777)
new_perms = oct(perms & 0o777)
if old_perms != new_perms:
self.log.warning(
"Permissions for exchange directory ({}) are invalid, changing them from {} to {}".format(
self.root, old_perms, new_perms))
try:
os.chmod(self.root, perms)
except PermissionError:
self.fail("Could not change permissions of {}, permission denied.".format(self.root))<|fim_middle|>ensure_root<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self, node_uuid: NodeID) -> RunningDynamicServiceDetails:
return await self._scheduler.METHOD_NAME(node_uuid)<|fim_middle|>get_stack_status<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(model):
"""Disables calibration in the whole network. Should always be run before running inference."""
for name, module in model.named_modules():
if isinstance(module, quant_nn.TensorQuantizer):
if module._calibrator is not None:
module.enable_quant()
module.disable_calib()
else:
module.enable()<|fim_middle|>disable_calibration<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self):
request_context = RequestContext.test_context()
request_context.message_receipt = MessageReceipt()
request_context.connection_record = async_mock.MagicMock()
with async_mock.patch.object(
test_module, "PresentationManager", autospec=True
) as mock_pres_mgr:
mock_pres_mgr.return_value.receive_proposal = async_mock.CoroutineMock()
request_context.message = PresentationProposal()
request_context.connection_ready = False
handler = test_module.PresentationProposalHandler()
responder = MockResponder()
with self.assertRaises(test_module.HandlerException) as err:
await handler.handle(request_context, responder)
assert (
err.exception.message
== "Connection used for presentation proposal not ready"
)
assert not responder.messages<|fim_middle|>test_called_not_ready<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
if self.size_limit is not None:
while len(self) > self.size_limit:
self.popitem(last=False)<|fim_middle|>check_size_limit<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request):
return render(request, 'landing/password_reset_complete.html', {'debug': settings.DEBUG})<|fim_middle|>password_reset_complete<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(api_mock_tftp, reset_singleton):
# Arrange
manager_obj = in_tftpd.get_manager(api_mock_tftp)
# Act
result = manager_obj.write_boot_files_distro(api_mock_tftp.distros()[0])
# Assert
assert result == 0<|fim_middle|>test_manager_write_boot_files_distro<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
before = Mesh.from_vertices_and_faces([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]], [[0, 1, 2, 3]])
after = compas.json_loads(compas.json_dumps(before))
assert before.dtype == after.dtype
assert before.attributes == after.attributes
assert all(before.has_vertex(vertex) for vertex in after.vertices())
assert all(after.has_vertex(vertex) for vertex in before.vertices())
assert all(before.has_face(face) for face in after.faces())
assert all(after.has_face(face) for face in before.faces())
assert all(before.has_edge(edge) for edge in after.edges())
assert all(after.has_edge(edge) for edge in before.edges())
assert all(before.face_vertices(a) == after.face_vertices(b) for a, b in zip(before.faces(), after.faces()))
assert before.guid == after.guid<|fim_middle|>test_json_mesh<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, dashboard_id, create_cell, **kwargs): ...<|fim_middle|>post_dashboards_id_cells_with_http_info<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
d = paddle.to_tensor(list(self.deque), dtype="float32")
return d.mean().item()<|fim_middle|>avg<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
### use a sin-gaussian as a signal
sigt = TimeSeries(self.sig2, self.del_t)
sig_tilde = make_frequency_series(sigt)
del_f = sig_tilde.get_delta_f()
psd = FrequencySeries(self.Psd, del_f)
flow = self.low_frequency_cutoff
with _context:
hautocor, hacorfr, hnrm = matched_filter_core(self.htilde, self.htilde, psd=psd, \
low_frequency_cutoff=flow, high_frequency_cutoff=self.fmax)
hautocor = hautocor * float(np.real(1./hautocor[0]))
snr, cor, nrm = matched_filter_core(self.htilde, sig_tilde, psd=psd, \
low_frequency_cutoff=flow, high_frequency_cutoff=self.fmax)
hacor = Array(hautocor.real(), copy=True)
indx = np.array([301440, 301450, 301460])
snr = snr*nrm
with _context:
dof, achisq, indices= \
autochisq_from_precomputed(snr, snr, hacor, indx, stride=3,
num_points=20)
obt_snr = abs(snr[indices[1]])
obt_ach = achisq[1]
self.assertTrue(obt_snr > 12.0 and obt_snr < 15.0)
self.assertTrue(obt_ach > 6.8e3)
self.assertTrue(achisq[0] > 6.8e3)
self.assertTrue(achisq[2] > 6.8e3)<|fim_middle|>test_sg<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Return job exit code"""
return self._exitcode<|fim_middle|>exitcode<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
Device.load("test_config.json")
if skip_runtime_test():
return
device = Device()
np.random.seed(0)
for dtype, low, high, atol, rtol, op, op_params in [
("float32", -127, 128, 1e-7, 1e-7, relay.add, {}),
("uint8", 0, 255, 1.0, 0.0, relay.qnn.op.add, _qnn_params),
("int8", -127, 128, 1.0, 0.0, relay.qnn.op.add, _qnn_params),
]:
shape = (2, 2)
for inputs in [
{
"a": tvm.nd.array(np.random.uniform(low, high, shape).astype(dtype)),
"b": tvm.nd.array(np.random.uniform(low, high, shape).astype(dtype)),
}
]:
outputs = []
func = _get_model(shape, dtype, iter(inputs), op, op_params)
for acl in [True, False]:
outputs.append(build_and_run(func, inputs, 1, None, device, enable_acl=acl)[0])
config = {
"shape": shape,
"dtype": dtype,
"inputs": inputs,
"operation": op,
"op_params": op_params,
}
verify(outputs, atol=atol, rtol=rtol, config=config, verify_saturation=False)<|fim_middle|>test_runtime_add<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(group_name, username):
return seaserv.ccnet_threaded_rpc.METHOD_NAME(group_name, username)<|fim_middle|>create_group<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.generate_message: gapic_v1.method.wrap_method(
self.generate_message,
default_timeout=None,
client_info=client_info,
),
self.count_message_tokens: gapic_v1.method.wrap_method(
self.count_message_tokens,
default_timeout=None,
client_info=client_info,
),
}<|fim_middle|>prep_wrapped_messages<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, k, v=1):
"""Helper for incrementing counters"""
self._current[k] += v<|fim_middle|>increment<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(command, ignore_exit_code=False, **kwargs):
try:
result = run_command(command, **kwargs)
except Exception:
if not isinstance(command, str):
command = ' '.join(command)
abort(f'Error running command: {command}')
else:
if not ignore_exit_code and result.code:
abort(result.stdout + result.stderr, code=result.code)
return result<|fim_middle|>run_or_abort<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
'''list all settings'''
return self._keys<|fim_middle|>list<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> int:
sub_menu = self.current_submenu
option_chosen = print_menu_and_get_desired_option_index(
sub_menu, default_option_index=EXIT_OPTION, default_str="Back"
)
if option_chosen == EXIT_OPTION:
self.location = TOP_LEVEL
return TRAVERSING_MENU
else:
return option_chosen<|fim_middle|>propose_options_and_get_input_at_sub<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, record, obj):
"""Set the access object."""
# We accept both dicts and access class objects.
if isinstance(obj, dict):
obj = self._access_obj_class.from_dict(obj)
assert isinstance(obj, self._access_obj_class)
# We do not dump the object until the pre_commit hook
# I.e. record.access != record['access']
self._set_cache(record, obj)<|fim_middle|>set_obj<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
#hack for the gtk calendar widget
if self.calendar is not None:
year,month,day = self.calendar.get_date()
# Selected day might be larger than current month's number of days.
# Iterate backwards until we find valid date.
while day >= 1:
try:
# months from gtk calendar are numbered from 0 to 11
return datetime.date(year, month + 1, day)
except ValueError :
day -= 1
raise ValueError("Invalid date supplied: "
"day is before 1st of month.")
else:
return datetime.date.today()<|fim_middle|>get_date<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""
Raise error if start_point is not None,
and end_point is None.
"""
with pytest.raises(TypeError, match="end_point should be one.+"):
io.xlsx_cells(wb, "clean", start_point="A1", end_point=None)<|fim_middle|>test_check_end_none<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.contract.write({"partner_id": self.partner.id})
self.contract.on_change_partner_id()
self.assertEqual(
self.contract.payment_mode_id,
self.contract.partner_id.customer_payment_mode_id,
)
self.contract.write(
{
"line_recurrence": True,
"contract_type": "sale",
"recurring_interval": 1,
"recurring_rule_type": "monthly",
"date_start": "2018-01-15",
"contract_line_ids": [
(
0,
0,
{
"product_id": self.product.id,
"name": "Database Administration 25",
"quantity": 2.0,
"uom_id": self.product.uom_id.id,
"price_unit": 200.0,
},
)
],
}
)
self.contract.recurring_create_invoice()
new_invoice = self.contract._get_related_invoices()
self.assertTrue(new_invoice)
self.assertEqual(new_invoice.partner_id, self.contract.partner_id)
self.assertEqual(new_invoice.payment_mode_id, self.contract.payment_mode_id)
self.assertEqual(len(new_invoice.ids), 1)
self.contract.recurring_create_invoice()
self.assertEqual(self.contract.payment_mode_id, new_invoice.payment_mode_id)<|fim_middle|>test_contract_and_invoices<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
from azure.data.tables import TableClient
# [START delete_table_from_table_client]
with TableClient.from_connection_string(
conn_str=self.connection_string, table_name=self.table_name
) as table_client:
table_client.delete_table()
print("Deleted table {}!".format(table_client.table_name))
# [END delete_table_from_table_client]<|fim_middle|>delete_from_table_client<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(uri: str, include_dir: bool = True) -> bool:
"""Check if a file exists.
Args:
uri: The URI to check
include_dir: Include directories in check, if this file_system
supports directory reads. Otherwise only return true if a single
file exists at the URI.
"""
pass<|fim_middle|>file_exists<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
element = FiniteElement("Lagrange", triangle, 1)
return Coefficient(element)<|fim_middle|>f<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
setup_ip_route_commands,
setup_single_bgp_instance):
show = setup_ip_route_commands
runner = CliRunner()
result = runner.invoke(
show.cli.commands["ip"].commands["route"], ["20c0:a8c7:0:81::", "json"])
print("{}".format(result.output))
assert result.exit_code == 0
assert result.output == show_ip_route_common.show_ipv6_route_single_json_expected_output<|fim_middle|>test_show_specific_ipv6_route_json<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, *args, **kwargs) -> None:
cls._session = Session()
cls._cleanup_files.append(cls._session.uid)<|fim_middle|>set_up_class<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(clientName, expectedResult, expectedIssuer, expectedClientId, expectedClientSecret):
"""Test getIdProvider"""
setupConfig(config)
result = idps.getIdProvider(clientName)
assert result["OK"] == expectedResult["OK"]
if not result["OK"]:
assert expectedResult["Message"] in result["Message"]
else:
idProvider = result["Value"]
assert expectedIssuer in idProvider.server_metadata_url
if expectedClientId:
assert idProvider.client_id == expectedClientId
else:
assert idProvider.client_id is None
if expectedClientSecret:
assert idProvider.client_secret == expectedClientSecret
else:
assert idProvider.client_secret is None<|fim_middle|>test_get_id_provider<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(factories):
group = factories.Group()
with pytest.raises(AttributeError, match="can't set attribute"):
group.type = "open"<|fim_middle|>test_you_cannot_set_type<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(in_file, out_file=None, mode=None, quiet=False):
"""Decode uuencoded file"""
#
# Open the input file, if needed.
#
opened_files = []
if in_file == '-':
in_file = sys.stdin.buffer
elif isinstance(in_file, str):
in_file = open(in_file, 'rb')
opened_files.append(in_file)
try:
#
# Read until a begin is encountered or we've exhausted the file
#
while True:
hdr = in_file.readline()
if not hdr:
raise Error('No valid begin line found in input file')
if not hdr.startswith(b'begin'):
continue
hdrfields = hdr.split(b' ', 2)
if len(hdrfields) == 3 and hdrfields[0] == b'begin':
try:
int(hdrfields[1], 8)
break
except ValueError:
pass
if out_file is None:
# If the filename isn't ASCII, what's up with that?!?
out_file = hdrfields[2].rstrip(b' \t\r\n\f').METHOD_NAME("ascii")
if os.path.exists(out_file):
raise Error('Cannot overwrite existing file: %s' % out_file)
if mode is None:
mode = int(hdrfields[1], 8)
#
# Open the output file
#
if out_file == '-':
out_file = sys.stdout.buffer
elif isinstance(out_file, str):
fp = open(out_file, 'wb')
os.chmod(out_file, mode)
out_file = fp
opened_files.append(out_file)
#
# Main decoding loop
#
s = in_file.readline()
while s and s.strip(b' \t\r\n\f') != b'end':
try:
data = binascii.a2b_uu(s)
except binascii.Error as v:
# Workaround for broken uuencoders by /Fredrik Lundh
nbytes = (((s[0]-32) & 63) * 4 + 5) // 3
data = binascii.a2b_uu(s[:nbytes])
if not quiet:
sys.stderr.write("Warning: %s\n" % v)
out_file.write(data)
s = in_file.readline()
if not s:
raise Error('Truncated input file')
finally:
for f in opened_files:
f.close()<|fim_middle|>decode<|file_separator|> |