text (stringlengths 67 – 7.88k) |
---|
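Each row below is a single fill-in-the-middle (FIM) sample: a function with its name masked out, followed by the masked-out name after the `<|fim_middle|>` sentinel. As a minimal sketch only (assuming the sentinels are exactly the literal `<|fim_prefix|>`, `<|fim_suffix|>`, `<|fim_middle|>`, and `<|file_separator|>` strings shown in the rows, and that the trailing `|` is table residue; the dataset's real tooling is not shown here), one way such a row could be split back into its parts and reassembled:

```python
# Illustrative parse of one FIM row; sentinel strings copied from the rows in this dump.
FIM_PREFIX = "<|fim_prefix|>"
FIM_SUFFIX = "<|fim_suffix|>"
FIM_MIDDLE = "<|fim_middle|>"
FILE_SEP = "<|file_separator|>"


def parse_fim_row(row: str) -> dict:
    """Split a FIM-formatted row into its prefix, suffix, and middle (the masked name)."""
    row = row.rstrip(" |\n")                    # drop trailing table-cell residue
    body, _, _ = row.partition(FILE_SEP)        # ignore everything after the file separator
    rest, _, middle = body.rpartition(FIM_MIDDLE)
    prefix, _, suffix = rest.partition(FIM_SUFFIX)
    if prefix.startswith(FIM_PREFIX):
        prefix = prefix[len(FIM_PREFIX):]
    return {"prefix": prefix, "suffix": suffix, "middle": middle}


# Reassembling prefix + middle + suffix recovers the original source.
row = '<|fim_prefix|>def <|fim_suffix|>(self):\n    return "GET"<|fim_middle|>method<|file_separator|> |'
parts = parse_fim_row(row)
print(parts["prefix"] + parts["middle"] + parts["suffix"])
# prints:
# def method(self):
#     return "GET"
```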
<|fim_prefix|>def <|fim_suffix|>(self, X):
# MSIWarp discards peaks outside the node_mzs range, so add a safety margin at either end
# in case some other spectra in the dataset have a wider m/z range than the sample spectra.
# Also, round to the nearest 10 or 1 Da for consistency and interpretability, and only pick
# unique values in case n_segments is too high or the mass range is too small
min_mz = np.floor(X.mz.min() / 10 - 1) * 10
max_mz = np.ceil(X.mz.max() / 10 + 1) * 10
node_mzs = np.unique(np.round(np.linspace(min_mz, max_mz, self.n_segments + 1)))
node_slacks = peak_width(node_mzs, self.analyzer, self.recal_sigma_1) / 2
return mx.initialize_nodes(node_mzs, node_slacks, self.n_steps)<|fim_middle|>make_mx_nodes<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
cr = bahc.CodeRunner("This is a syntax error", "path", [])
assert cr.failed is True
assert cr.error is not None
assert cr.error_detail is not None
assert "Invalid syntax in" in cr.error<|fim_middle|>test_syntax_error_init<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(apps, schema_editor):
Consent = apps.get_model("consents", "Consent")
TermOption = apps.get_model("consents", "TermOption")
term_slug = "may-publish-name"
try:
options = TermOption.objects.filter(term__slug=term_slug, archived_at=None)
may_publish_name_github = [
option
for option in options
if "only use my GitHub Handle" in option.content
][0]
may_publish_name_profile = [
option
for option in options
if "use the name associated with my profile" in option.content
][0]
may_publish_name_orcid = [
option
for option in options
if "use the name associated with my ORCID profile" in option.content
][0]
may_publish_name_disagree = [
option for option in options if option.option_type == DECLINE
][0]
except IndexError:
return
for old_consent in Consent.objects.filter(
term__slug=term_slug, archived_at=None
).select_related("person"):
old_option = old_consent.person.lesson_publication_consent
option: Optional[TermOption] = None
if old_option == "yes-profile":
option = may_publish_name_profile
if old_option == "yes-orcid":
option = may_publish_name_orcid
if old_option == "yes-github":
option = may_publish_name_github
if old_option == "no":
option = may_publish_name_disagree
if option:
reconsent(Consent, old_consent, option)<|fim_middle|>copy_may_publish_name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, source_langs: bool) -> list[Language]:
"""
Reverse-language is supported as the only target language.
:param source_langs: See documentation on TranslationService.
:return: See documentation on TranslationService.
"""
if source_langs:
return Language.query_all()
else:
reverse_lang = Language.query_by_code(REVERSE_LANG["lang_code"])
if reverse_lang is not None:
return [reverse_lang]
raise Exception(
f"Test-language is not found in database with the code '{REVERSE_LANG['lang_code']}'."
)<|fim_middle|>get_languages<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(app, doctree):
# first generate figure numbers for each figure
env = app.builder.env
docname_figs = getattr(env, 'docname_figs', {})
docnames_by_figname = getattr(env, 'docnames_by_figname', {})
for figure_info in doctree.traverse(lambda n: isinstance(n, nodes.figure) or \
isinstance(n, subfig.subfigend) or \
isinstance(n, figtable.figtable)):
for id in figure_info['ids']:
fig_docname = docnames_by_figname[id] = env.docname
if fig_docname not in docname_figs:
docname_figs[fig_docname] = OrderedDict()
if isinstance(figure_info.parent, subfig.subfig):
mainid = figure_info.parent['mainfigid']
else:
mainid = id
if mainid not in docname_figs[fig_docname]:
docname_figs[fig_docname][mainid] = OrderedSet()
if isinstance(figure_info.parent, subfig.subfig):
docname_figs[fig_docname][mainid].add(id)
env.docnames_by_figname = docnames_by_figname
env.docname_figs = docname_figs<|fim_middle|>doctree_read<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
if settings.URL_PREFIX:
return settings.URL_PREFIX
return "/"<|fim_middle|>site_url<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(src, dest, opts=None):
if os.path.exists(dest):
logging.debug('Removing existing directory: %s' % dest)
shutil.rmtree(dest)
logging.info('Syncing directory: %s -> %s.' % (src, dest))
shutil.copytree(src, dest, ignore=get_filter(opts))
ensure_init(dest)<|fim_middle|>sync_directory<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, alias):
"""
Returns the phase number of an alias, if it exists.
Otherwise, returns the alias.
"""
return self.aliasPhaseMap.get(alias, alias)<|fim_middle|>get_alias_phase<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return (self.start or 0) * 512<|fim_middle|>start_in_bytes<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, name):
"""See BaseTask.paths.
"""
props = self.name_split(name)
camera = "{}{}".format(props["band"], props["spec"])
return [ findfile("preproc", night=props["night"], expid=props["expid"],
camera=camera, groupname=None, nside=None, band=props["band"],
spectrograph=props["spec"]) ]<|fim_middle|>paths<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super().METHOD_NAME(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.gallery_application_name = AAZStrArg(
options=["-n", "--name", "--application-name", "--gallery-application-name"],
help="The name of the gallery application.",
required=True,
id_part="child_name_1",
)
_args_schema.gallery_name = AAZStrArg(
options=["-r", "--gallery-name"],
help="Gallery name.",
required=True,
id_part="name",
)
_args_schema.resource_group = AAZResourceGroupNameArg(
help="Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.",
required=True,
)
return cls._args_schema<|fim_middle|>build_arguments_schema<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self, task_id, **kwargs): ...<|fim_middle|>get_tasks_id_owners_async<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
config = OmegaConf.structured(encoders.ResNet18AudioEncoder.Config())
encoder = encoders.ResNet18AudioEncoder(config)
x = torch.rand((1, 1, 4778, 224))
output = encoder(x)
self.assertEqual(output.size(-1), config.out_dim)<|fim_middle|>test_resnet18_audio_encoder<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, resource_group):
self.kwargs.update({
"san_name": self.create_random_name('elastic-san', 24)
})
self.cmd('az elastic-san create -n {san_name} -g {rg} --tags {{key1810:aaaa}} -l southcentralusstg '
'--base-size-tib 23 --extended-capacity-size-tib 14 '
'--sku {{name:Premium_LRS,tier:Premium}}',
checks=[JMESPathCheck('name', self.kwargs.get('san_name', '')),
JMESPathCheck('location', "southcentralusstg"),
JMESPathCheck('tags', {"key1810": "aaaa"}),
JMESPathCheck('baseSizeTiB', 23),
JMESPathCheck('extendedCapacitySizeTiB', 14),
JMESPathCheck('sku', {"name": "Premium_LRS",
"tier": "Premium"})
])
self.cmd('az elastic-san show -g {rg} -n {san_name}',
checks=[JMESPathCheck('name', self.kwargs.get('san_name', '')),
JMESPathCheck('location', "southcentralusstg"),
JMESPathCheck('tags', {"key1810": "aaaa"}),
JMESPathCheck('baseSizeTiB', 23),
JMESPathCheck('extendedCapacitySizeTiB', 14),
JMESPathCheck('sku', {"name": "Premium_LRS",
"tier": "Premium"})
])
self.cmd('az elastic-san list -g {rg}', checks=[JMESPathCheck('length(@)', 1)])
self.cmd('az elastic-san list-sku')
self.cmd('az elastic-san update -n {san_name} -g {rg} --tags {{key1710:bbbb}} '
'--base-size-tib 25 --extended-capacity-size-tib 15',
checks=[JMESPathCheck('name', self.kwargs.get('san_name', '')),
JMESPathCheck('tags', {"key1710": "bbbb"}),
JMESPathCheck('baseSizeTiB', 25),
JMESPathCheck('extendedCapacitySizeTiB', 15)])
self.cmd('az elastic-san delete -g {rg} -n {san_name} -y')
time.sleep(20)
self.cmd('az elastic-san list -g {rg}', checks=[JMESPathCheck('length(@)', 0)])<|fim_middle|>test_elastic_san_scenarios<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
oci_resource: gci.componentmodel.Resource,
clamav_client: clamav.client.ClamAVClient,
oci_client: oc.Client,
):
access: gci.componentmodel.OciAccess = oci_resource.access
try:
clamav_findings = _scan_oci_image(
clamav_client=clamav_client,
oci_client=oci_client,
image_reference=access.imageReference,
)
return clamav.model.MalwareScanResult(
resource=oci_resource,
scan_state=clamav.model.MalwareScanState.FINISHED_SUCCESSFULLY,
findings=[
f'{path}: {scan_result.virus_signature()}'
for scan_result, path in clamav_findings
],
)
except (requests.exceptions.RequestException, socket.gaierror) as e:
# log warning and include it as finding to document it via the generated report-mails
warning = f'error while scanning {oci_resource.access.imageReference} {e=}'
logger.warning(warning)
traceback.print_exc()
return clamav.model.MalwareScanResult(
resource=oci_resource,
scan_state=clamav.model.MalwareScanState.FINISHED_WITH_ERRORS,
findings=[warning],
)<|fim_middle|>try_scan_image<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(event_manager, element_factory, modeling_language):
main_window = None
return FileManager(event_manager, element_factory, modeling_language, main_window)<|fim_middle|>file_manager<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, fl_ctx: FLContext):
# get app root
app_root = fl_ctx.get_prop(FLContextKey.APP_ROOT)
# Load model configuration to initialize training NeMo environment
self.config = OmegaConf.load(os.path.join(app_root, self.config_path))
self.config.trainer.devices = self.devices
self.config.model.language_model_path = os.path.join(app_root, self.gpt_file_name)
# Using defaults from `init_prompt_encoder` in `MegatronBasePromptLearningModel`
_encoder_type = PromptEncoderType(self.config.model.p_tuning.get("encoder_type", "mlp").lower())
if _encoder_type == PromptEncoderType.TPMLP:
self._init_environment()
PromptEncoder.__init__(
self,
encoder_type=_encoder_type,
total_virtual_tokens=self.total_virtual_tokens,
token_dim=self.hidden_size,
hidden_size=self.config.model.p_tuning.get("encoder_hidden", self.hidden_size // 2),
lstm_dropout=self.config.model.p_tuning.get("dropout", 0.0),
num_layers=self.config.model.p_tuning.get("num_layers", 2),
init_std=self.config.model.p_tuning.get("init_std", 0.023),
taskname=self.taskname,
)
self.log_info(fl_ctx, f"Initialized prompt encoder type {_encoder_type}")<|fim_middle|>initialize<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters<|fim_middle|>header_parameters<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
field,
query_string="",
query_dsl="",
supported_charts="table",
start_time="",
end_time="",
limit=10,
):
"""Run the aggregation.
Args:
field (str): this denotes the event attribute that is used
for aggregation.
query_string (str): the query field to run on all documents prior to
aggregating the results.
query_dsl (str): the query DSL field to run on all documents prior
to aggregating the results. Either a query string or a query
DSL has to be present.
supported_charts: Chart type to render. Defaults to table.
start_time: Optional ISO formatted date string that limits the time
range for the aggregation.
end_time: Optional ISO formatted date string that limits the time
range for the aggregation.
limit (int): How many buckets to return, defaults to 10.
Returns:
Instance of interface.AggregationResult with aggregation result.
Raises:
ValueError: if neither query_string or query_dsl is provided.
"""
if not (query_string or query_dsl):
raise ValueError("Both query_string and query_dsl are missing")
self.field = field
formatted_field_name = self.format_field_by_type(field)
aggregation_spec = get_spec(
field=formatted_field_name,
limit=limit,
query=query_string,
query_dsl=query_dsl,
)
aggregation_spec = self._add_query_to_aggregation_spec(
aggregation_spec, start_time=start_time, end_time=end_time
)
# Encoding information for Vega-Lite.
encoding = {
"x": {
"field": field,
"type": "nominal",
"sort": {"op": "sum", "field": "count", "order": "descending"},
},
"y": {"field": "count", "type": "quantitative"},
"tooltip": [
{"field": field, "type": "nominal"},
{"field": "count", "type": "quantitative"},
],
}
response = self.opensearch_aggregation(aggregation_spec)
aggregations = response.get("aggregations", {})
aggregation = aggregations.get("aggregation", {})
buckets = aggregation.get("buckets", [])
values = []
for bucket in buckets:
d = {field: bucket.get("key", "N/A"), "count": bucket.get("doc_count", 0)}
values.append(d)
if query_string:
extra_query_url = "AND {0:s}".format(query_string)
else:
extra_query_url = ""
return interface.AggregationResult(
encoding=encoding,
values=values,
chart_type=supported_charts,
sketch_url=self._sketch_url,
field=field,
extra_query_url=extra_query_url,
)<|fim_middle|>run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self._file_upload_envelope = FetchFileUploadS3Data(self._pubnub).\
channel(self._channel).\
file_name(self._file_name).METHOD_NAME()
response_envelope = super(SendFileNative, self).METHOD_NAME()
publish_file_response = PublishFileMessage(self._pubnub).\
channel(self._channel).\
meta(self._meta).\
message(self._message).\
file_id(response_envelope.result.file_id).\
file_name(response_envelope.result.name).\
should_store(self._should_store).\
ttl(self._ttl).\
replicate(self._replicate).\
ptto(self._ptto).\
cipher_key(self._cipher_key).METHOD_NAME()
response_envelope.result.timestamp = publish_file_response.result.timestamp
return response_envelope<|fim_middle|>sync<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
vel_pids[LEFT].setpoint = 0
vel_pids[RIGHT].setpoint = 0<|fim_middle|>stop<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(env):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
static_obj.add_action('.d', SCons.Defaults.DAction)
shared_obj.add_action('.d', SCons.Defaults.ShDAction)
static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter)
env['DC'] = env.Detect(['dmd', 'ldmd2', 'gdmd']) or 'dmd'
env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of$TARGET $SOURCES'
env['_DINCFLAGS'] = '${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)}'
env['_DVERFLAGS'] = '${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)}'
env['_DDEBUGFLAGS'] = '${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)}'
env['_DFLAGS'] = '${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)}'
env['SHDC'] = '$DC'
env['SHDCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -fPIC -of$TARGET $SOURCES'
env['DPATH'] = ['#/']
env['DFLAGS'] = []
env['DVERSIONS'] = []
env['DDEBUG'] = []
if env['DC']:
DCommon.addDPATHToEnv(env, env['DC'])
env['DINCPREFIX'] = '-I'
env['DINCSUFFIX'] = ''
env['DVERPREFIX'] = '-version='
env['DVERSUFFIX'] = ''
env['DDEBUGPREFIX'] = '-debug='
env['DDEBUGSUFFIX'] = ''
env['DFLAGPREFIX'] = '-'
env['DFLAGSUFFIX'] = ''
env['DFILESUFFIX'] = '.d'
env['DLINK'] = '$DC'
env['DLINKFLAGS'] = SCons.Util.CLVar('')
env['DLINKCOM'] = '$DLINK -of$TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS'
env['SHDLINK'] = '$DC'
env['SHDLINKFLAGS'] = SCons.Util.CLVar('$DLINKFLAGS -shared -defaultlib=libphobos2.so')
env[
'SHDLINKCOM'] = '$DLINK -of$TARGET $SHDLINKFLAGS $__SHDLIBVERSIONFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS'
env['DLIBLINKPREFIX'] = '' if env['PLATFORM'] == 'win32' else '-L-l'
env['DLIBLINKSUFFIX'] = '.lib' if env['PLATFORM'] == 'win32' else ''
env['_DLIBFLAGS'] = '${_stripixes(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}'
env['DLIBDIRPREFIX'] = '-L-L'
env['DLIBDIRSUFFIX'] = ''
env['_DLIBDIRFLAGS'] = '${_concat(DLIBDIRPREFIX, LIBPATH, DLIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)}'
env['DLIB'] = 'lib' if env['PLATFORM'] == 'win32' else 'ar cr'
env['DLIBCOM'] = '$DLIB $_DLIBFLAGS {0}$TARGET $SOURCES $_DLIBFLAGS'.format(
'-c ' if env['PLATFORM'] == 'win32' else '')
# env['_DLIBFLAGS'] = '${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)}'
env['DLIBFLAGPREFIX'] = '-'
env['DLIBFLAGSUFFIX'] = ''
# __RPATH is set to $_RPATH in the platform specification if that
# platform supports it.
env['DRPATHPREFIX'] = '-L-rpath,' if env['PLATFORM'] == 'darwin' else '-L-rpath='
env['DRPATHSUFFIX'] = ''
env['_DRPATH'] = '${_concat(DRPATHPREFIX, RPATH, DRPATHSUFFIX, __env__)}'
# Support for versioned libraries
env['_SHDLIBVERSIONFLAGS'] = '$SHDLIBVERSIONFLAGS -L-soname=$_SHLIBSONAME'
env['BUILDERS']['ProgramAllAtOnce'] = SCons.Builder.Builder(
action='$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -of$TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS',
emitter=DCommon.allAtOnceEmitter,
)<|fim_middle|>generate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
pkg_dir, dist = self.create_dist()
cmd = build_clib(dist)
cmd.include_dirs = 'one-dir'
cmd.finalize_options()
self.assertEqual(cmd.include_dirs, ['one-dir'])
cmd.include_dirs = None
cmd.finalize_options()
self.assertEqual(cmd.include_dirs, [])
cmd.distribution.libraries = 'WONTWORK'
self.assertRaises(DistutilsSetupError, cmd.finalize_options)<|fim_middle|>test_finalize_options<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self, *scopes: str, claims: Optional[str] = None, tenant_id: Optional[str] = None, **kwargs: Any
) -> AccessToken:
"""Request an access token for `scopes`.
This method is called automatically by Azure SDK clients.
:param str scopes: desired scopes for the access token. This method requires at least one scope.
For more information about scopes, see
https://learn.microsoft.com/azure/active-directory/develop/scopes-oidc.
:keyword str claims: additional claims required in the token, such as those returned in a resource provider's
claims challenge following an authorization failure.
:keyword str tenant_id: optional tenant to include in the token request.
:keyword bool enable_cae: indicates whether to enable Continuous Access Evaluation (CAE) for the requested
token. Defaults to False.
:return: An access token with the desired scopes.
:rtype: ~azure.core.credentials.AccessToken
:raises CredentialUnavailableError: the credential is unable to attempt authentication because it lacks
required data, state, or platform support
:raises ~azure.core.exceptions.ClientAuthenticationError: authentication failed. The error's ``message``
attribute gives a reason.
"""
if not scopes:
raise ValueError('"get_token" requires at least one scope')
try:
token = self._acquire_token_silently(*scopes, claims=claims, tenant_id=tenant_id, **kwargs)
if not token:
self._last_request_time = int(time.time())
token = self._request_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs)
elif self._should_refresh(token):
try:
self._last_request_time = int(time.time())
token = self._request_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs)
except Exception: # pylint:disable=broad-except
pass
_LOGGER.log(
logging.DEBUG if within_credential_chain.get() else logging.INFO,
"%s.get_token succeeded",
self.__class__.__name__,
)
return token
except Exception as ex:
_LOGGER.log(
logging.DEBUG if within_credential_chain.get() else logging.WARNING,
"%s.get_token failed: %s",
self.__class__.__name__,
ex,
exc_info=_LOGGER.isEnabledFor(logging.DEBUG),
)
raise<|fim_middle|>get_token<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(raw):
# header
headerstr = "IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII"
data = (magic, version,
off_Entities, len_Entities,
off_Planes, len_Planes,
off_Vertices, len_Vertices,
off_Visibility, len_Visibility,
off_Nodes, len_Nodes,
off_Texture_Information, len_Texture_Information,
off_Faces, len_Faces,
off_Lightmaps, len_Lightmaps,
off_Leaves, len_Leaves,
off_Leaf_Face_Table, len_Leaf_Face_Table,
off_Leaf_Brush_Table, len_Leaf_Brush_Table,
off_Edges, len_Edges,
off_Face_Edge_Table, len_Face_Edge_Table,
off_Models, len_Models,
off_Brushes, len_Brushes,
off_Brush_Sides, len_Brush_Sides,
off_Pop, len_Pop,
off_Areas, len_Areas,
off_Area_Portals, len_Area_Portals) = struct.unpack_from(headerstr, raw)
if struct.pack("BBBB", magic>>24, (magic>>16)&0xff, (magic>>8)&0xff, magic&0xff) != "PSBI": die("Bad header")
if version != 38: die("Bad version")
Leaves = []
leaf_Size = 28
for i in range(len_Leaves / leaf_Size):
(brush_or,
cluster,
area,
bbox_minX, bbox_minY, bbox_minZ,
bbox_maxX, bbox_maxY, bbox_maxZ,
first_leaf_face, num_leaf_faces,
first_leaf_brush, num_leaf_brushes) = struct.unpack_from("IHHhhhhhhHHHH", raw, off_Leaves + i*leaf_Size)
Leaves.append((first_leaf_face, num_leaf_faces))
print "Leaves: %d" % len(Leaves)
Leaf_Face_Table = []
leafface_Size = 2
for i in range(len_Leaf_Face_Table / leafface_Size):
Leaf_Face_Table.append(struct.unpack_from("H", raw, off_Leaf_Face_Table + i*leafface_Size)[0])
Faces = []
face_Size = 20
for i in range(len_Faces / face_Size):
(plane, plane_Size,
first_edge, num_edges,
texture_info,
style0, style1, style2, style3,
lightmap_offset) = struct.unpack_from("HHIHHBBBBI", raw, off_Faces + i*face_Size)
Faces.append((first_edge, num_edges, lightmap_offset))
print "Faces: %d" % len(Faces)
Face_Edge_Table = []
faceedge_Size = 4
for i in range(len_Face_Edge_Table / faceedge_Size):
Face_Edge_Table.append(struct.unpack_from("i", raw, off_Face_Edge_Table + i*faceedge_Size)[0])
Edges = []
edge_Size = 4
for i in range(len_Edges / edge_Size):
(v0, v1) = struct.unpack_from("HH", raw, off_Edges + i*edge_Size)
Edges.append((v0, v1))
print "Edges: %d" % len(Edges)
Vertices = []
vert_Size = 12
for i in range(len_Vertices / vert_Size):
v = struct.unpack_from("fff", raw, off_Vertices + i*vert_Size)
Vertices.append(convCoord(v))
print "Vertices: %d" % len(Vertices)
ents = struct.unpack_from("%ds" % len_Entities, raw, off_Entities)[0][1:-3] # opening { and final }+nul
Entities = []
for ent in ents.split("}\n{"):
entdata = {}
for line in ent.lstrip("\n").rstrip("\n").split("\n"):
k,v = line.lstrip('"').rstrip('"').split('" "')
entdata[k] = v
Entities.append(entdata)
return Leaves, Leaf_Face_Table, Faces, Face_Edge_Table, Edges, Vertices, Entities<|fim_middle|>load_raw_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(live_eventhub, uamqp_transport):
credential = EnvironmentCredential()
producer_client = EventHubProducerClient(fully_qualified_namespace=live_eventhub['hostname'],
eventhub_name=live_eventhub['event_hub'],
credential=credential,
user_agent='customized information',
uamqp_transport=uamqp_transport)
consumer_client = EventHubConsumerClient(fully_qualified_namespace=live_eventhub['hostname'],
eventhub_name=live_eventhub['event_hub'],
consumer_group='$default',
credential=credential,
user_agent='customized information',
uamqp_transport=uamqp_transport
)
with producer_client:
batch = producer_client.create_batch(partition_id='0')
batch.add(EventData(body='A single message'))
producer_client.send_batch(batch)
def on_event(partition_context, event):
on_event.called = True
on_event.partition_id = partition_context.partition_id
on_event.event = event
on_event.called = False
with consumer_client:
worker = threading.Thread(target=consumer_client.receive, args=(on_event,),
kwargs={
"partition_id": '0',
"starting_position": '-1'
})
worker.start()
time.sleep(15)
worker.join()
assert on_event.called is True
assert on_event.partition_id == "0"
assert list(on_event.event.body)[0] == 'A single message'.encode('utf-8')<|fim_middle|>test_client_secret_credential<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, n):
if n < 0:
raise ValueError('fstring size must be nonnegative')
i = self.__pos
j = i + (n+3)//4*4
if j > len(self.__buf):
raise EOFError
self.__pos = j
return self.__buf[i:i+n]<|fim_middle|>unpack_fstring<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self, inter: discord.Interaction, btn: discord.Button):
await inter.response.defer()
if not self.teams:
return await self.ctx.error("No teams to remove.", 5)
v = QuotientView(self.ctx)
v.add_item(TeamSelector(self.teams))
v.message = await inter.followup.send("", view=v, ephemeral=True)
await v.wait()
for _ in self.teams:
if str(_.id) in v.custom_id:
self.teams.remove(_)
await self.refresh_view()<|fim_middle|>remove_team<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, file_name):
self.create_and_verify_stack(file_name)
rest_api_id = self.get_physical_id_by_type("AWS::ApiGateway::RestApi")
apigw_client = self.client_provider.api_client
response = apigw_client.get_models(restApiId=rest_api_id)
request_models = response["items"]
self.assertEqual(request_models[0]["name"], "user")
self.assertEqual(
request_models[0]["schema"],
'{\n "type" : "object",\n'
+ ' "properties" : {\n "username" : {\n "type" : "string"\n }\n'
+ " }\n}",
)<|fim_middle|>test_request_models<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, child) -> None: ...<|fim_middle|>on_remove_child<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, iter_: int, stage_id: int) -> None:
lr = self._lr * _get_multiplier(self._stages[stage_id], iter_)
for param_group in self.param_groups:
# pyre-ignore [16]
param_group[self._lr_param] = lr<|fim_middle|>set_lr<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
response: Response = self.client.post(
path=reverse(viewname="v1:host-component", kwargs={"cluster_id": self.cluster_pk}),
data={
"cluster_id": self.cluster_pk,
"hc": [
{
"component_id": ServiceComponent.objects.first().pk,
"host_id": self.host_pk,
"service_id": self.service.pk,
}
],
},
content_type=APPLICATION_JSON,
)
self.assertEqual(response.status_code, HTTP_201_CREATED)<|fim_middle|>create_host_component<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> IO[Any]: ...<|fim_middle|>make_file<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_empty_indices_and_params_ok_but<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Don't load data from syncthing daemon, it knows nothing...
copy = {k: self.app.config[k] for k in self.app.config}
self.cb_data_loaded(copy)
self.cb_check_value()<|fim_middle|>load_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
expected_instructions = (
"Gather the ingredients.\n"
"Position a rack in the center of the oven and heat to 350 F. Grease an 8-inch square cake pan with softened butter. Line the bottom with a large sheet of parchment paper that extends up and over two opposite sides of the pan.\n"
"To make the peppermint swirl, place the cream cheese, confectioners' sugar, salt, egg, extract, and food coloring in a medium bowl and beat with a hand mixer on medium to medium high speed until combined and fluffy. You can also use a stand mixer.\n"
"To make the brownies, microwave the butter, cocoa powder, and sugar on high in a large microwave-safe bowl in a few 30-second bursts, whisking after each, until combined. The batter will be quite thick and fudgy-looking. If you do not have a microwave, you may place the ingredients in a heat-proof bowl over a pot of simmering water on the stovetop until combined.\n"
"Whisk in the extract. Gently whisk in the eggs one at a time.\n"
"Sprinkle the salt over the wet ingredients and whisk it in.\n"
"Finally, fold in the flour and 1/3 cup of the crushed candy until the last streak of flour disappears. Don’t overmix.\n"
"Scrape the batter into the prepared pan, reserving 3/4 cup of it.\n"
"Dollop the swirl mixture over the brownie batter in the pan—about 9 dollops, using a 1 1/2 tablespoon portion scoop, if you have it.\n"
"Dollop the reserved brownie batter in between the swirl dollops. Run a long wooden skewer through both the brownie batter and the pink cream cheese mixture, until the top is completely covered in decorative swirls. Don’t swirl too much: big swirls are what you want, as they are most dramatic. And don’t swirl too deep or the swirl can get lost in the batter.\n"
"Bake for 30 to 35 minutes, rotating the pan at the halfway point. At the 25 minute mark, sprinkle the remaining 1/3 cup of candy over the top of the brownies. The brownies are ready when a cake tester inserted into the brownie—not the swirl—comes out with a few moist crumbs—do not over bake.\n"
"Let cool to room temperature. Lift the brownies out of the pan by the parchment handles. Run a butter knife around the edges not covered by the paper, if there’s resistance. Slice into 9 equal sized squares and serve room-temperature, or refrigerate for an hour and serve chilled (my preference)."
)
self.assertEqual(expected_instructions, self.harvester_class.instructions())<|fim_middle|>test_instructions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(root: ModelWithMetadata, _info: ResolveInfo, *, keys=None):
return _filter_metadata(root.metadata, keys)<|fim_middle|>resolve_metafields<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return "GET"<|fim_middle|>method<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, str):
return "--" + str<|fim_middle|>section_divider<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, parameter_set="default"):
"""Return testing parameter settings for the estimator.
Parameters
----------
parameter_set : str, default="default"
Name of the set of test parameters to return, for use in tests. If no
special parameters are defined for a value, will return `"default"` set.
Returns
-------
params : dict or list of dict, default = {}
Parameters to create testing instances of the class
Each dict are parameters to construct an "interesting" test instance, i.e.,
`MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance.
`create_test_instance` uses the first (or only) dictionary in `params`
"""
return {
"n_lags": 4,
}<|fim_middle|>get_test_params<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Reverse children in-place"""
self.__children.reverse()<|fim_middle|>reverse_children<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.run_test("def sort_key(n):\n b=[(1,3),(5,4),(2,n)]\n b.sort(key=lambda x: x[1])\n return b", 1, sort_key=[int])<|fim_middle|>test_sort_key<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(d, k, m, x, y):
"""d: {k: {m: [(x,y)]}}"""
mt = d.get(k)
if mt is None:
d[k] = {m: [(x,y)]}
else:
klist = mt.get(m)
if klist is None:
mt[m] = [(x,y)]
else:
klist.append((x, y))<|fim_middle|>add_metric<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(
self,
channel_id: "Snowflake_Type",
message_ids: list["Snowflake_Type"],
reason: str | None = None,
) -> None:
"""
Delete multiple messages in a single request.
Args:
channel_id: The id of the channel these messages are in
message_ids: A list of message ids to delete
reason: The reason for this action
"""
payload = {"messages": [int(message_id) for message_id in message_ids]}
await self.request(
Route("POST", "/channels/{channel_id}/messages/bulk-delete", channel_id=channel_id),
payload=payload,
reason=reason,
)<|fim_middle|>bulk_delete_messages<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, event=None):
"""Toggle enabling."""
self.enabled = not self.enabled<|fim_middle|>toggle<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, x):
return self.preds<|fim_middle|>forward<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
r"""
>>> element = ET.XML("<html><body>text</body></html>")
>>> ET.ElementTree(element).write(sys.stdout)
<html><body>text</body></html>
>>> element = ET.fromstring("<html><body>text</body></html>")
>>> ET.ElementTree(element).write(sys.stdout)
<html><body>text</body></html>
>>> print ET.tostring(element)
<html><body>text</body></html>
>>> print ET.tostring(element, "ascii")
<?xml version='1.0' encoding='ascii'?>
<html><body>text</body></html>
>>> _, ids = ET.XMLID("<html><body>text</body></html>")
>>> len(ids)
0
>>> _, ids = ET.XMLID("<html><body id='body'>text</body></html>")
>>> len(ids)
1
>>> ids["body"].tag
'body'
"""<|fim_middle|>parseliteral<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
parameters = {
**self.serialize_url_param(
"namespaceName", self.ctx.args.namespace_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
**self.serialize_url_param(
"subscriptionName", self.ctx.args.subscription_name,
required=True,
),
**self.serialize_url_param(
"topicName", self.ctx.args.topic_name,
required=True,
),
}
return parameters<|fim_middle|>url_parameters<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertCallableWorks(process_dataset_function)<|fim_middle|>test_function<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, **kwargs):
"""Populate a fake Segment event with data of interest"""
action = kwargs.get('action', 'Track')
sample_event = {
"userId": kwargs.get('user_id', SEGMENTIO_TEST_USER_ID),
"event": "Did something",
"properties": {
'name': kwargs.get('name', str(sentinel.name)),
'data': kwargs.get('data', {}),
'context': {
'course_id': kwargs.get('course_id') or '',
'app_name': 'edx.mobile.android',
}
},
"channel": 'server',
"context": {
"library": {
"name": kwargs.get('library_name', 'test-app'),
"version": "unknown"
},
"app": {
"version": "1.0.1",
},
'userAgent': str(sentinel.user_agent),
},
"receivedAt": "2014-08-27T16:33:39.100Z",
"timestamp": "2014-08-27T16:33:39.215Z",
"type": action.lower(),
"projectId": "u0j33yjkr8",
"messageId": "qy52hwp4",
"version": 2,
"integrations": {},
"options": {
"library": "unknown",
"providers": {}
},
"action": action
}
if 'context' in kwargs:
sample_event['properties']['context'].update(kwargs['context'])
if 'label' in kwargs:
sample_event['properties']['label'] = kwargs['label']
if kwargs.get('exclude_name') is True:
del sample_event['properties']['name']
return sample_event<|fim_middle|>create_segmentio_event<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, previous_description):
if previous_description != [""]:
print("(--> Type enter directly to keep previous value !)")
description = [""]
while True:
user_input = input()
# 👇️ if user pressed Enter without a value, break out of loop
if user_input == "":
break
else:
description.append(user_input)
if description == [""]:
description = previous_description
return description<|fim_middle|>get_description<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(event):
mount(make_next_count_constructor(count))<|fim_middle|>on_click<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, test_config):
test_config['ckan.plugins'] += ' s3filestore'
## AWS S3 settings
test_config['ckanext.s3filestore.aws_access_key_id'] = 'aws_access_key_id'
test_config['ckanext.s3filestore.aws_secret_access_key'] = 'aws_secret_access_key'
test_config['ckanext.s3filestore.aws_bucket_name'] = cls.bucket_name
test_config['ckanext.s3filestore.host_name'] = 'http://s3.eu-central-1.amazonaws.com'
test_config['ckanext.s3filestore.region_name'] = 'eu-central-1'
test_config['ckanext.s3filestore.signature_version'] = 's3v4'
test_config['ckanext.s3filestore.link_expires_in_seconds'] = 180
test_config.pop('hdx.s3filestore', None)<|fim_middle|>change_config<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(fd):
"""
Some file descriptors may be closed 'silently' - that is, by the garbage
collector, by an external library, etc. When the OS returns a file descriptor
from an open call (or something similar), this may be the only indication we
have that the FD has been closed and then recycled.
We let the hub know that the old file descriptor is dead; any stuck listeners
will be disabled and notified in turn.
"""
hub = get_hub()
hub.mark_as_reopened(fd)<|fim_middle|>notify_opened<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(inputs: Tensor, dim: int, pad: int, outer_pad: int = 0) -> Tensor:
"""Apply single-dimension reflection padding to match scipy implementation.
Args:
inputs: Input image
dim: A dimension the image should be padded over
pad: Number of pads
outer_pad: Number of outer pads
Return:
Image padded over a single dimension
"""
_max = inputs.shape[dim]
x = torch.index_select(inputs, dim, torch.arange(pad - 1, -1, -1).to(inputs.device))
y = torch.index_select(inputs, dim, torch.arange(_max - 1, _max - pad - outer_pad, -1).to(inputs.device))
return torch.cat((x, inputs, y), dim)<|fim_middle|>single_dimension_pad<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(locations, first=None, last=None, force_update=False, load_email=True):
# download and prepare data first
qd = quidel.QuidelData(DATAPATH, load_email)
if not qd.need_update and not force_update:
print("Data not updated, nothing needs change.")
return
qd_data = qd.load_csv()
qd_measurements = qd.prepare_measurements(qd_data, start_weekday=4)
qd_ts = quidel.measurement_to_ts(qd_measurements, 7, startweek=first, endweek=last)
# connect to the database
u, p = secrets.db.epi
cnx = mysql.connector.connect(user=u, password=p, database="epidata")
cur = cnx.cursor()
def get_num_rows():
cur.execute("SELECT count(1) `num` FROM `quidel`")
for (num,) in cur:
pass
return num
    # check from 4 weeks preceding the last week with data through this week
cur.execute("SELECT max(`epiweek`) `ew0`, yearweek(now(), 6) `ew1` FROM `quidel`")
for (ew0, ew1) in cur:
ew0 = 200401 if ew0 is None else flu.add_epiweeks(ew0, -4)
ew0 = ew0 if first is None else first
ew1 = ew1 if last is None else last
print(f"Checking epiweeks between {int(ew0)} and {int(ew1)}...")
# keep track of how many rows were added
rows_before = get_num_rows()
# check Quidel for new and/or revised data
sql = """
INSERT INTO
`quidel` (`location`, `epiweek`, `value`)
VALUES
(%s, %s, %s)
ON DUPLICATE KEY UPDATE
`value` = %s
"""
total_rows = 0
for location in locations:
if location not in qd_ts:
continue
ews = sorted(qd_ts[location].keys())
num_missing = 0
for ew in ews:
v = qd_ts[location][ew]
sql_data = (location, ew, v, v)
cur.execute(sql, sql_data)
total_rows += 1
if v == 0:
num_missing += 1
if num_missing > 0:
print(f" [{location}] missing {int(num_missing)}/{len(ews)} value(s)")
# keep track of how many rows were added
rows_after = get_num_rows()
print(f"Inserted {int(rows_after - rows_before)}/{int(total_rows)} row(s)")
# cleanup
cur.close()
cnx.commit()
cnx.close()<|fim_middle|>update<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(try_new_cert: bool, try_new_metadata: bool) -> Saml2Config:
saml_path = Path(app.config["SAML_PATH"])
if try_new_cert:
saml_path /= "new"
config_file = saml_path / "config.py"
saml2_config = Saml2Config()
# We load the config file manually so that we can fill it with the extra info
try:
globals_dict = globals().copy()
locals_dict: dict = {}
exec(config_file.read_text(), globals_dict, locals_dict)
saml2_config.load_file(str(config_file))
config_dict = locals_dict["CONFIG"]
except FileNotFoundError:
raise FileNotFoundError(f"Could not find SAML config file.")
except KeyError:
raise KeyError(f"Could not find CONFIG dict in SAML config file.")
metadata_cert_file_name = (
"metadata_new.crt" if try_new_metadata else "metadata.crt"
)
metadata_cert_file = saml_path / "certs" / metadata_cert_file_name
sp_key = saml_path / "certs" / "sp.key"
sp_cert = saml_path / "certs" / "sp.crt"
if sp_key.exists() and sp_cert.exists():
sp_key_str = str(sp_key)
sp_cert_str = str(sp_cert)
config_dict["key_file"] = sp_key_str
config_dict["cert_file"] = sp_cert_str
# Encryption keypairs seem to be a different option, but e.g., in HAKA the same keys are used for encrypting
# requests and decrypting responses
config_dict["encryption_keypairs"] = [
{
"key_file": sp_key_str,
"cert_file": sp_cert_str,
}
]
config_dict["metadata"] = {
"loadex": [
{
"loader": metadata_loader,
"cert": str(saml_path / "certs" / metadata_cert_file)
if app.config["SAML_VERIFY_METADATA"]
else None,
}
]
}
config_dict["attribute_map_dir"] = str(saml_path.parent / "attributemaps")
config_dict["allow_unknown_attributes"] = True
saml2_config.load(config_dict)
return saml2_config<|fim_middle|>do_get_saml_config<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
assert ["error1", "error2"] == merge_errors(["error1", "error2"], None)<|fim_middle|>test_merging_list_and_none<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
...<|fim_middle|>reloader<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_no_env_var_no_flag<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional[str]:
"""
The URI that can be used to request the next set of paged results.
"""
return pulumi.get(self, "next_link")<|fim_middle|>next_link<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self):
"""
Ping the Data Source
:return: Response object
"""
ping_endpoint = self.host + "/" + self.QUERY_ENDPOINT
if not self.http_client:
self.create_http_client()
return await self.client_api(ping_endpoint, 'GET')<|fim_middle|>ping_box<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(text: str) -> str:
# escaped = text.replace('\\', '\\\\')
escaped = []
lines = text.splitlines()
for line in lines:
before = line
after = simple_escape_line(line)
        # keep escaping until the line stops changing (fixed point)
        while before != after:
            before = after
            after = simple_escape_line(after)
escaped.append(after)
return "<br/>".join(escaped)<|fim_middle|>escape<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request, equivalent_id):
"""
Instructors view to remove teaching equivalent
"""
instructor = get_object_or_404(Person, userid=request.user.username)
if request.method != 'POST':
raise Http404
equivalent = get_object_or_404(TeachingEquivalent, pk=equivalent_id, instructor=instructor, status='UNCO')
equivalent.delete()
messages.add_message(request, messages.SUCCESS, "Teaching Equivalent successfully removed")
return HttpResponseRedirect(reverse('planning.views.view_teaching_credits_inst'))<|fim_middle|>remove_teaching_equiv_inst<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(mod):
tgen = Topogen(build_topo, mod.__name__)
tgen.start_topology()
router_list = tgen.routers()
for routern in range(1, 4):
tgen.gears["r{}".format(routern)].cmd("ip link add vrf1 type vrf table 10")
tgen.gears["r{}".format(routern)].cmd("ip link set vrf1 up")
tgen.gears["r{}".format(routern)].cmd("ip address add dev vrf1 {}.{}.{}.{}/32".format(routern, routern, routern,routern))
tgen.gears["r2"].cmd("ip address add dev vrf1 192.0.2.8/32")
tgen.gears["r3"].cmd("ip address add dev vrf1 192.0.2.8/32")
for i, (rname, router) in enumerate(router_list.items(), 1):
router.load_config(
TopoRouter.RD_ZEBRA, os.path.join(CWD, "{}/zebra.conf".format(rname))
)
router.load_config(
TopoRouter.RD_BGP, os.path.join(CWD, "{}/bgpd.conf".format(rname))
)
router.load_config(
TopoRouter.RD_LDP, os.path.join(CWD, "{}/ldpd.conf".format(rname))
)
tgen.start_router()
tgen.gears["r1"].cmd("ip route add 192.0.2.2 via 192.168.1.2 metric 20")
tgen.gears["r1"].cmd("ip route add 192.0.2.3 via 192.168.2.2 metric 20")<|fim_middle|>setup_module<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return False<|fim_middle|>immutable<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, context, layout):
layout.prop(self, 'out_mode', expand=True)
layout.prop(self, 'noise_type', text="Type")
if self.noise_type in noise_numpy_types.keys():
row = layout.row(align=True)
row.prop(self, 'smooth', toggle=True)
row.prop(self, 'interpolate', toggle=True)<|fim_middle|>draw_buttons<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, instruction, sudo_plugins):
"""Execute with special instructions.
EXAMPLE instruction (Powershell):
powershell -ExecutionPolicy Unrestricted $plugin_name $plugin_args
EXAMPLE instruction (VBS):
wscript $plugin_name $plugin_args
"""
command = []
# Add sudo for commands that need to run as sudo
if os.name == 'posix':
if self.name in sudo_plugins:
command.append('sudo')
# Set shlex to use posix mode on posix machines (so that we can pass something like
# --metric='disk/logical/|' and have it properly format quotes)
mode = False
if os.name == 'posix':
mode = True
lexer = shlex.shlex(instruction, posix=mode)
lexer.whitespace_split = True
for x in lexer:
if '$plugin_name' in x:
replaced = x.replace('$plugin_name', self.plugin_abs_path)
command.append(replaced)
elif '$plugin_args' == x:
if self.arguments:
args = shlex.shlex(' '.join(self.arguments), posix=mode)
args.whitespace_split = True
for a in args:
command.append(a)
else:
command.append(x)
return command<|fim_middle|>get_cmdline<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
keytype: Optional[str] = None,
md5: Optional[str] = None,
sha1: Optional[str] = None,
sha256: Optional[str] = None,
subject: Optional[str] = None,
issuer: Optional[str] = None,
self_signed: Optional[bool] = None,
pkmd5: Optional[str] = None,
pksha1: Optional[str] = None,
pksha256: Optional[str] = None,
cacert: bool = False,
) -> Filter:
...<|fim_middle|>searchcert<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, request, format=None):
annotation_id = request.POST.get('annotationId', None)
age = request.POST.get('ageGroup', None)
gender = request.POST.get('gender', None)
newSubjectId = request.POST.get('newSubjectId', None)
person_album = None
if newSubjectId and newSubjectId != '':
person_album = Album.objects.filter(pk=newSubjectId).first()
new_rectangle = FaceRecognitionRectangle.objects.get(pk=annotation_id)
if (person_album and not AlbumPhoto.objects.filter(photo=new_rectangle.photo, album=person_album).exists()):
albumPhoto = AlbumPhoto(album=person_album, photo=new_rectangle.photo, type=AlbumPhoto.FACE_TAGGED,
profile=request.user.profile)
albumPhoto.save()
person_album.set_calculated_fields()
person_album.save()
additional_subject_data = AddAdditionalSubjectData(subject_rectangle_id=annotation_id, age=age, gender=gender,
newSubjectId=newSubjectId)
return self.add_subject_data(additional_subject_data=additional_subject_data, request=request)<|fim_middle|>post<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)<|fim_middle|>on_200<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
single_temporal = pd.DateOffset(months=3)
single_td = _check_timedelta(single_temporal)
assert single_td.get_value("mo") == 3
assert single_td.delta_obj == pd.DateOffset(months=3)
mult_temporal = pd.DateOffset(years=10, months=3, days=5)
mult_td = _check_timedelta(mult_temporal)
expected = {"Y": 10, "mo": 3, "d": 5}
assert mult_td.get_value() == expected
assert mult_td.delta_obj == mult_temporal
# get_name() for multiple values is not deterministic
assert len(mult_td.get_name()) == len("10 Years 3 Months 5 Days")
special_dateoffset = pd.offsets.BDay(100)
special_td = _check_timedelta(special_dateoffset)
assert special_td.get_value("businessdays") == 100
assert special_td.delta_obj == special_dateoffset<|fim_middle|>test_pd_dateoffset_to_timedelta<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(x, y, z=_SENTINEL):
"""
pow(x, y[, z]) -> number
With two arguments, equivalent to x**y. With three arguments,
equivalent to (x**y) % z, but may be more efficient (e.g. for ints).
"""
# Handle newints
if isinstance(x, newint):
x = long(x)
if isinstance(y, newint):
y = long(y)
if isinstance(z, newint):
z = long(z)
try:
if z == _SENTINEL:
return _builtin_pow(x, y)
else:
return _builtin_pow(x, y, z)
except ValueError:
if z == _SENTINEL:
return _builtin_pow(x+0j, y)
else:
return _builtin_pow(x+0j, y, z)<|fim_middle|>pow<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")<|fim_middle|>type<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self, label, start, num_iters, device, batch_size):<|fim_middle|>report<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(tmp_path, process, disable_extractors_dict):
disable_extractors_dict.update({"USE_READABILITY": "true"})
add_process = subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'],
capture_output=True, env=disable_extractors_dict)
archived_item_path = list(tmp_path.glob("archive/**/*"))[0]
output_file = archived_item_path / "readability" / "content.html"
assert output_file.exists()<|fim_middle|>test_readability_works<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_nested_multiple_withs_and_statements<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request_cls):
""" Extract a request's response class using the mapping found in the module defining the request's service """
for req_cls in request_cls.mro():
module = sys.modules[req_cls.__module__]
if hasattr(module, 'action_mapping'):
return module.action_mapping[(request_cls._action, request_cls._version)][1]
elif hasattr(module, 'response_mapping'):
return module.response_mapping[req_cls]
raise TypeError('no response class!')<|fim_middle|>get_response_cls<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(content, version):
regex = re.compile(r"graphql-dgs-platform-dependencies:([0-9\w\-.]+)")
return re.sub(regex, f"graphql-dgs-platform-dependencies:{version}", content)<|fim_middle|>find_replace_version<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, kube_apis, crd_ingress_controller, virtual_server_setup):
patch_virtual_server_from_yaml(
kube_apis.custom_objects,
virtual_server_setup.vs_name,
f"{TEST_DATA}/virtual-server-advanced-routing/virtual-server-complex.yaml",
virtual_server_setup.namespace,
)
ensure_response_from_backend(virtual_server_setup.backend_1_url, virtual_server_setup.vs_host)
wait_before_test()
global resp_1, resp_2, resp_3
resp_1.status_code = resp_2.status_code = resp_3.status_code = 502
while resp_1.status_code == 502 and resp_2.status_code == 502 and resp_3.status_code == 502:
resp_1 = requests.get(
virtual_server_setup.backend_1_url + "?arg1=v1",
headers={"host": virtual_server_setup.vs_host, "x-version": "future"},
cookies={"user": "some"},
)
resp_2 = requests.post(
virtual_server_setup.backend_1_url + "?arg1=v2",
headers={"host": virtual_server_setup.vs_host, "x-version": "deprecated"},
cookies={"user": "bad"},
)
resp_3 = requests.get(
virtual_server_setup.backend_1_url + "?arg1=v2",
headers={"host": virtual_server_setup.vs_host, "x-version": "deprecated"},
cookies={"user": "bad"},
)
execute_assertions(resp_1, resp_2, resp_3)<|fim_middle|>test_flow_with_complex_conditions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
for year in range(1963, 2050):
self.assertHoliday(
f"{year}-01-01",
f"{year}-05-01",
f"{year}-10-20",
f"{year}-12-12",
f"{year}-12-25",
f"{year}-12-26",
)<|fim_middle|>test_holidays<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
mod = self.make_module("""
#include <Python.h>
HPyDef_METH(f, "f", HPyFunc_NOARGS)
static HPy f_impl(HPyContext *ctx, HPy self)
{
HPy h = HPy_FromPyObject(ctx, NULL);
if (HPy_IsNull(h)) {
return HPy_Dup(ctx, ctx->h_True);
}
else {
return HPy_Dup(ctx, ctx->h_False);
}
}
@EXPORT(f)
@INIT
""")
assert mod.f()<|fim_middle|>test_frompyobject_null<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, event=None, server=None): ...<|fim_middle|>start_server<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
pat_str = self.config_get(self.CFG_PAT_PAUSED, self.DEFAULT_PAT_PAUSED)
pattern = Pattern(pat_str)
self.status = pattern.format(self.song) if self.song else ""
self._set_status(self.status)<|fim_middle|>plugin_on_paused<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(address_format_type: str, address_data_dict: dict, address_key_prefix: str):
result = ''
if address_format_type == AddressFormatType.FOREIGN:
addr_line_2_key = f'{address_key_prefix}addr_line_2'
addr_line_3_key = f'{address_key_prefix}addr_line_3'
addr_line_2 = address_data_dict[addr_line_2_key]
addr_line_3 = address_data_dict[addr_line_3_key]
addr_line_2 = addr_line_2 if addr_line_2 else ''
addr_line_2 = addr_line_2.strip()
addr_line_3 = addr_line_3 if addr_line_3 else ''
result = f'{addr_line_2} {addr_line_3}'
elif address_format_type == AddressFormatType.ADVANCED:
route_service_type_key = f'{address_key_prefix}route_service_type'
lock_box_no_key = f'{address_key_prefix}lock_box_no'
route_service_no_key = f'{address_key_prefix}route_service_no'
installation_type_key = f'{address_key_prefix}installation_type'
installation_name_key = f'{address_key_prefix}installation_name'
street_additional_elements = [address_data_dict[route_service_type_key],
address_data_dict[lock_box_no_key],
address_data_dict[route_service_no_key],
address_data_dict[installation_type_key],
address_data_dict[installation_name_key]]
for element in street_additional_elements:
if element:
result += f' {element}'
result = result.strip()
return result<|fim_middle|>get_street_additional<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> list[GenericRevision]:
"""Load all Alembic revisions."""
revisions = []
for path in get_alembic_path().iterdir():
if path.suffix == ".py":
module = load_python_file(str(path.parent), str(path.name))
revisions.append(
GenericRevision(
alembic_downgrade=getattr(module, "down_revision", None),
created_at=get_revision_create_date(path),
id=getattr(module, "revision"),
name=get_revision_name(path),
source=RevisionSource.ALEMBIC,
upgrade=None,
virtool_downgrade=None,
)
)
return revisions<|fim_middle|>load_alembic_revisions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
command_io = _example(
"seqtk convert -i '1.bed' --output '1.bam'", example_outputs=["1.bam"], example_inputs=["1.bed"]
)
cwl_properties = command_io.cwl_properties()
assert_equal(cwl_properties["base_command"], ["seqtk", "convert"])
assert_equal(cwl_properties["inputs"][0].position, 1)
assert_equal(cwl_properties["inputs"][0].prefix.prefix, "-i")
assert_equal(cwl_properties["inputs"][0].prefix.separated, True)
assert_equal(cwl_properties["outputs"][0].glob, "$(inputs.output1)")
assert_equal(cwl_properties["outputs"][0].prefix.prefix, "--output")
assert_equal(cwl_properties["outputs"][0].prefix.separated, True)
assert_equal(cwl_properties["stdout"], None)<|fim_middle|>test_prefixes_separated<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *_args):
tasks = asyncio.all_tasks()
logger.warning(
"Application shutdown detected!\n %s",
pformat([t.get_name() for t in tasks]),
)
self.kill_now = True
assert self.worker # nosec
self.task = asyncio.create_task(
self.worker.close(timeout=5), name="close_dask_worker_task"
)<|fim_middle|>exit_gracefully<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, node_id: str) -> Union[None, Vertex]:
"""Returns a node by id."""
return next((node for node in self.nodes if node.id == node_id), None)<|fim_middle|>get_node<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> AirbyteMessage:
return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={}))<|fim_middle|>airbyte_state_message_fixture<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
pass<|fim_middle|>pre_operations<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(value):
from string import digits, hexdigits # [unused-import]
return value if value in digits else "Nope"<|fim_middle|>unused_import_in_function<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.next_link = AAZStrType(
serialized_name="nextLink",
)
_schema_on_200.value = AAZListType(
flags={"required": True},
)
value = cls._schema_on_200.value
value.Element = AAZStrType()
return cls._schema_on_200<|fim_middle|>build_schema_on_200<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(operation, param):
"""Provide updates to the model that are specific to the CLI."""
overrides = {
"create-cluster": {"clusterConfiguration": {"type": "file"}},
"update-cluster": {"clusterConfiguration": {"type": "file"}},
"build-image": {"imageConfiguration": {"type": "file"}},
}
try:
return overrides[to_kebab_case(operation["operationId"])][param]
except KeyError:
return {}<|fim_middle|>param_overrides<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, obj):
populator = DefaultValuePopulator(obj.get("schema_etree"))
etree = obj.get("scenario_etree")
populator.transform(etree)
obj.set("scenario_etree", etree)<|fim_middle|>run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/projects/1/pipelines/1/test_report",
json=test_report_content,
content_type="application/json",
status=200,
)
yield rsps<|fim_middle|>resp_get_pipeline_test_report<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(session: api.Session, ntsc_type: api.NTSC_Kind, eid: str) -> None:
"""
Use to wait for a notebook, tensorboard, or shell command to become ready.
"""
name = ntsc_type.value
loading_animator = render.Animator(f"Waiting for {name} to become ready")
err_msg = api.task_is_ready(session, eid, loading_animator.next)
msg = f"{name} (id: {eid}) is ready." if not err_msg else f"Waiting stopped: {err_msg}"
loading_animator.clear(msg)
if err_msg:
raise errors.CliError(err_msg)<|fim_middle|>wait_ntsc_ready<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(svc: bentoml.Service, **kwargs) -> Deployment:
@serve.deployment(name=f"bento-svc-{svc.name}", **kwargs)
class BentoDeployment:
def __init__(self, **runner_deployments: dict[str, Deployment]):
from ..server.http_app import HTTPAppFactory
# Use Ray's metrics setup. Ray has already initialized a prometheus client
# for multi-process and this conflicts with BentoML's prometheus setup
self.app = HTTPAppFactory(svc, enable_metrics=False)()
for runner in svc.runners:
assert runner.name in runner_deployments
runner._set_handle(RayRunnerHandle, runner_deployments[runner.name])
async def __call__(self, request: requests.Request):
sender = ASGIHTTPSender()
await self.app(request.scope, receive=request.receive, send=sender)
return sender.build_asgi_response()
return BentoDeployment<|fim_middle|>get_service_deployment<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(setup_db_for_xpub_tests):
"""Test that edits bitcoin xpub label and tries to add non existing tag"""
db, xpub, _, _, _ = setup_db_for_xpub_tests
with db.user_write() as cursor:
with pytest.raises(InputError):
db.edit_bitcoin_xpub(
cursor,
XpubData(
xpub=xpub.xpub,
blockchain=SupportedBlockchain.BITCOIN_CASH,
derivation_path=xpub.derivation_path,
label='123',
tags=['test'],
),
)
result = db.get_bitcoin_xpub_data(cursor, SupportedBlockchain.BITCOIN_CASH)
assert result[0].xpub == xpub.xpub
assert result[0].label == xpub.label
assert result[0].derivation_path == xpub.derivation_path
assert result[0].tags != {'test'}<|fim_middle|>test_edit_bitcoin_xpub_not_existing_tag<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return TOTP(self.secret)<|fim_middle|>make_otp<|file_separator|> |