text (stringlengths 67–7.88k) |
---|
<|fim_prefix|>def <|fim_suffix|>(self):
return self.formatter.generate_response(self.queryset)<|fim_middle|>generate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(axis, loc, lab, offsetx=None, offsety=None):
text = '{"axis": "' + axis + '", "pos": ' + str(loc) + ', "lab": "' + lab + '" '
if offsetx is not None:
text += ', "offsetx": ' + str(offsetx)
if offsety is not None:
text += ', "offsety": ' + str(offsety)
text += "}"
return text<|fim_middle|>create_dict_labels<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, obs_vars: list) -> tuple:
"""
Get lists of obs / mod variables to be processed
Parameters
----------
obs_vars : list
list of observation variables
Returns
-------
list
list of observation variables (potentially extended from input
list)
list
corresponding model variables which are mapped based on content
of :attr:`model_add_vars` and :attr:`model_use_vars`.
"""
obsout, modout = [], []
for obsvar in obs_vars:
obsout.append(obsvar)
if obsvar in self.model_use_vars:
modout.append(self.model_use_vars[obsvar])
else:
modout.append(obsvar)
for ovar, mvars in self.model_add_vars.items():
if not isinstance(mvars, list):
raise AttributeError(
f"values of model_add_vars need to be lists, even if "
f"only single variables are to be added: "
f"{self.model_add_vars}"
)
for mvar in mvars:
obsout.append(ovar)
modout.append(mvar)
return (obsout, modout)<|fim_middle|>get_vars_to_process<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> list[str]:
return [
"FIPS_mode_set",
"FIPS_mode",
]<|fim_middle|>cryptography_has_fips<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Entrypoint for kubernetes webhook
"""
req = self.get_request_body()
log.info(req)
try:
req = json.loads(req)
except Exception as e:
self.send_response(400)
self.send_header("Content-Type", "application/json")
self.end_headers()
self.wfile.write(json.dumps({"error": str(e)}).encode("utf-8"))
return
failed_policies, warn_policies, patches = self.run_policies(req)
self.send_response(200)
self.send_header("Content-Type", "application/json")
self.end_headers()
if patches:
patches = base64.b64encode(json.dumps(patches).encode("utf-8")).decode()
response = self.create_admission_response(
uid=req["request"]["uid"],
failed_policies=failed_policies,
warn_policies=warn_policies,
patches=patches,
)
log.info(response)
self.wfile.write(response.encode("utf-8"))<|fim_middle|>do_post<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Test :func:`colour.models.rgb.transfer_functions.\<|fim_middle|>test_nan_oetf_da_vinci_intermediate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, server, backend, weight=0):
"""
Mock of getWeight method
"""
self.backend = backend
self.server = server
self.weight = weight
return "server weight"<|fim_middle|>get_weight<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str | None:
return self._jwk_data.get("kty", None)<|fim_middle|>key_type<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return set(os.listdir(self.path))<|fim_middle|>get_listing<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self, request, context):<|fim_middle|>is_self_allowed<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, command_args):
super().METHOD_NAME(command_args)
self._execute_operations()
return self._output()<|fim_middle|>handler<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, command_args):
super().METHOD_NAME(command_args)
self._execute_operations()
return self._output()<|fim_middle|>handler<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(test, timeout=DEFAULT_TIMEOUT, interval=1):
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
if test():
return True
except AssertionError:
pass
time.sleep(interval)
return False<|fim_middle|>wait_for<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, spec, prefix):
"""
Build and install APComp
"""
with working_dir("spack-build", create=True):
host_cfg_fname = self.create_host_config(spec, prefix)
cmake_args = []
# if we have a static build, we need to avoid any of
# spack's default cmake settings related to rpaths
# (see: https://github.com/LLNL/spack/issues/2658)
if "+shared" in spec:
cmake_args.extend(std_cmake_args)
else:
for arg in std_cmake_args:
if arg.count("RPATH") == 0:
cmake_args.append(arg)
cmake_args.extend(["-C", host_cfg_fname, "../src"])
print("Configuring APComp...")
cmake(*cmake_args)
print("Building APComp...")
make()
print("Installing APComp...")
make("install")
# install copy of host config for provenance
METHOD_NAME(host_cfg_fname, prefix)<|fim_middle|>install<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return self.voltage[2]<|fim_middle|>get_voltage_3<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional['outputs.PrivateEndpointResponse']:
"""
The private endpoint resource.
"""
return pulumi.get(self, "private_endpoint")<|fim_middle|>private_endpoint<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
audit_info = self.set_mocked_audit_info()
redshift = Redshift(audit_info)
assert redshift.session.__class__.__name__ == "Session"<|fim_middle|>test_get_session<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(signature):
"""Check the signature of a quantum transform: (tape, ...) - > (Sequence(tape), fn)"""
# Check that the arguments of the transforms follows: (tape: qml.tape.QuantumTape, ...)
tape = signature.get("tape", None)
if tape is None:
raise TransformError("The first argument of a transform must be tape.")
if tape != qml.tape.QuantumTape:
raise TransformError("The type of the tape argument must be a QuantumTape.")
# Check return is (qml.tape.QuantumTape, callable):
ret = signature.get("return", None)
if ret is None or not isinstance(ret, tuple):
raise TransformError(
"The return of a transform must match (collections.abc.Sequence["
"pennylane.tape.tape.QuantumTape], <built-in function callable>)"
)
if ret[0] not in (
Sequence[qml.tape.QuantumTape],
List[qml.tape.QuantumTape],
Tuple[qml.tape.QuantumTape],
): # pylint:disable=unsubscriptable-object
raise TransformError(
"The first return of a transform must be a sequence of tapes: collections.abc.Sequence["
"pennylane.tape.tape.QuantumTape]"
)
if ret[1] != Callable:
raise TransformError(
"The second return of a transform must be a callable: <built-in function callable>"
)<|fim_middle|>transform_signature_check<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Set members based on inventory.
"""
MeshGenerator.METHOD_NAME(self)<|fim_middle|>configure<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, platform, fragment,
bootaddr = 0,
spimode = None,
freq = None,
compress = True,
**kwargs):
# Validate options
ecp5_mclk_freqs = [
2.4,
4.8,
9.7,
19.4,
38.8,
62.0,
]
if freq is not None:
assert freq in ecp5_mclk_freqs, "Invalid MCLK frequency. Valid frequencies: " + str(ecp5_mclk_freqs)
# prepare ecppack opts
self._packer_opts += " --bootaddr {bootaddr} {spimode} {freq} {compress} ".format(
bootaddr = bootaddr,
spimode = "" if spimode is None else f"--spimode {spimode}",
freq = "" if freq is None else "--freq {}".format(freq),
compress = "" if not compress else "--compress"
)
return YosysNextPNRToolchain.METHOD_NAME(self, platform, fragment, **kwargs)<|fim_middle|>build<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
# We keep the connection open during `callback`. Closing the connection is the signal to the
# caller that processing completed
with redirect_stdout_stderr_exceptions_to_log():
with connection:
self.__callback(read_all_bytes(connection))<|fim_middle|>handle_connection<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)<|fim_middle|>send_request<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
import argparse
import ast
parser = argparse.ArgumentParser()
parser.add_argument(
"--auto", required=True, help="Auto download, convert, compile and infer if True")
parser.add_argument(
"--pp_detect_path", default='/workspace/PaddleDetection', help="Path of PaddleDetection folder")
parser.add_argument(
"--model_file", required=True, help="Path of sophgo model.")
parser.add_argument("--config_file", required=True, help="Path of config.")
parser.add_argument(
"--image", type=str, required=True, help="Path of test image file.")
return parser.parse_args()<|fim_middle|>parse_arguments<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(f, module_name, entity_name):
"""Tests whether the function or method is an instance of a known type."""
if (module_name not in sys.modules or
not hasattr(sys.modules[module_name], entity_name)):
return False
type_entity = getattr(sys.modules[module_name], entity_name)
if isinstance(f, type_entity):
# The method is of this type. Example:
#
# o = ClassType()
# function(o.method)()
return True
# Note: inspect is required here, to avoid unpacking tf.function decorators.
if inspect.ismethod(f):
# The unbound method is of this type. Example:
#
# class ClassType:
# @function
# def method(self):
# ...
# o = ClassType()
# o.method()
if isinstance(f.__func__, type_entity):
return True
return False<|fim_middle|>is_known_loaded_type<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(args, flist):
"""read names in flist and append to args"""
expanded = args[:]
if flist:
try:
if flist == '-':
fd = sys.stdin
else:
fd = open(flist)
while 1:
line = fd.readline()
if not line:
break
expanded.append(line[:-1])
except IOError:
print "Error reading file list %s" % flist
raise
return expanded<|fim_middle|>expand_args<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, file_path):
"""Initialize with a path."""
node, _ = get_data_class('vasp.potcar').get_or_create_from_file(file_path=file_path)
self.sha512 = node.sha512<|fim_middle|>init_with_path<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
from ilastik.workflows.carving.carvingTools import parallel_watershed
shape = (400,) * 2
x = numpy.random.rand(*shape).astype("float32")
seg, max_id = parallel_watershed(x, max_workers=4)
max_expected = seg.max()
assert max_id == max_expected, f"Expect {max_expected} but got {max_id}"
assert max_id > 1, f"Expect more than one segment in watershed result"
# TODO test that labels in individual blocks are indeed different<|fim_middle|>test_parallel_watershed_2d<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, username: str, key: PKey, event: Event | None = None) -> list[str]: ...<|fim_middle|>auth_publickey<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *args: Any, **kwds: Any) -> Optional[Dict[str, object]]:
config = super().METHOD_NAME(*args, **kwds)
if config is None:
raise Exception("Specify local_path XOR secret")
return config<|fim_middle|>configure<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(file_extension, tmpdir):
x = np.arange(N_rows, dtype='i8')
df = vaex.from_arrays(x=x, x2=-x)
df['y'] = df.x**2
path = tmpdir / f'test.{file_extension}'
df.export(path)
df = vaex.open(path)
yield df<|fim_middle|>df_file<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, gpt_model, test_text):
assert isinstance(gpt_model.tokenizer, AutoTokenizer)
assert gpt_model.tokenizer.name == 'GPT2Tokenizer'
assert gpt_model.tokenizer.vocab_size == 50257
ids = [gpt_model.tokenizer.text_to_ids(text) for text in test_text]
true_ids = [
[31373, 11, 995],
[14337, 4776, 290, 3598, 812, 2084],
[7120, 640, 318, 3614],
[1532, 345, 900, 4661, 2266, 291, 18117, 1029],
]
assert sum([id_list == true_id_list for id_list, true_id_list in zip(ids, true_ids)]) == 4<|fim_middle|>test_tokenizer<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cmd, force_info_log=False, suppress_warning=False,
env=None):
"""Tries running the provided command once.
Args:
cmd: A list of strings such as is given to the subprocess.Popen()
constructor.
env: A dict of key/value strings, such as is given to the subprocess.Popen()
constructor, that contains environment variables to be injected.
Returns:
A tuple of stdout, and retcode from running the provided command.
"""
print('=== Running: %s' % ' '.join(cmd))
process = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE)
stdout = ''
while True:
output = process.stdout.readline()
if not output and process.poll() is not None:
break
if output:
stdout += str(output)
print('= ' + str(output.strip()))
rc = process.poll()
print('=== Finished with code %d' % rc)
return stdout, rc<|fim_middle|>issue_command<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(data: Dict[str, Any]) -> AirbyteStateMessage:
return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data))<|fim_middle|>state<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(regex: str) -> str:
# Fixes structure error produced by named groups like
# ^simcore/services/comp/(?P<subdir>[a-z0-9][a-z0-9_.-]*/)*(?P<name>[a-z0-9-_]+[a-z0-9])$
# into
# ^simcore/services/comp/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$
return re.sub(r"\(\?P<[^>]+>", "(", regex)<|fim_middle|>remove_named_groups<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return False<|fim_middle|>supports_auto_limit<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_backward_walk_ops<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, method, params):
j = {
'method': method,
'params': params,
'id': 'dontcare',
'jsonrpc': '2.0'
}
r = requests.post(self.get_rpc_node_address(), json=j, timeout=30)
return json.loads(r.content)<|fim_middle|>json_rpc<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, values):
# Search missing values in values and validate them.
values = dict((v.name, v) for v in values)
for name in list(self.unvalidated_specs):
try:
value = values[name]
except KeyError:
continue
spec = self.specs[name]
value = spec.validate(value)
section = self.setdefault(spec.section, {})
section[spec.name] = value
self.unvalidated_specs.remove(name)<|fim_middle|>add_values<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None:
"""
Accept current search query. Focus original `BufferControl` again.
"""
layout = get_app().layout
search_control = layout.current_control
target_buffer_control = layout.search_target_buffer_control
from prompt_toolkit.layout.controls import BufferControl
if not isinstance(search_control, BufferControl):
return
if target_buffer_control is None:
return
search_state = target_buffer_control.search_state
# Update search state.
if search_control.buffer.text:
search_state.text = search_control.buffer.text
# Apply search.
target_buffer_control.buffer.apply_search(
search_state, include_current_position=True
)
# Add query to history of search line.
search_control.buffer.append_to_history()
# Stop search and focus previous control again.
stop_search(target_buffer_control)<|fim_middle|>accept_search<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request):
return Response(define.webpage(request.userid, "help/markdown.html",
title="Markdown"))<|fim_middle|>help_markdown<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(index: str, query: dict, timeout: int = 600, _source: Iterable[str] = ()) -> dict:
return elastic_handler.search(
index=index, body=query, scroll=SCROLL_TIME, size=SCROLL_SIZE, request_timeout=timeout, _source=_source
)<|fim_middle|>scroll_docs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(start_idx, count, initial_string):
"""
Return the substring of length `count` starting at `start_idx`.
:param start_idx: starting index of the substring
:param count: length of the substring in bytes
:param initial_string: original string
:return: the substring
"""
new_value = initial_string.value[start_idx.value : start_idx.value + count.value]
return StringV(new_value)<|fim_middle|>str_substr<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())<|fim_middle|>to_str<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, request: Request, organization) -> Response:
stats_period = request.GET.METHOD_NAME("groupStatsPeriod")
try:
start, end = get_date_range_from_params(request.GET)
except InvalidParams as e:
raise ParseError(detail=str(e))
if stats_period not in (None, "", "24h", "14d", "auto"):
return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
environments = self.get_environments(request, organization)
projects = self.get_projects(request, organization)
if not projects:
return Response([])
is_fetching_replay_data = request.headers.METHOD_NAME("X-Sentry-Replay-Request") == "1"
if (
len(projects) > 1
and not features.has("organizations:global-views", organization, actor=request.user)
and not is_fetching_replay_data
):
return Response(
{"detail": "You do not have the multi project stream feature enabled"}, status=400
)
queries = request.GET.getlist("query")
response = {}
for query in queries:
try:
count = self._count(
request,
query,
organization,
projects,
environments,
{"count_hits": True, "date_to": end, "date_from": start},
)
response[query] = count
except (ValidationError, discover.InvalidSearchQuery) as exc:
return Response({"detail": str(exc)}, status=400)
return Response(response)<|fim_middle|>get<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super().METHOD_NAME(*args, **kwargs)
# define Arg Group ""
return cls._args_schema<|fim_middle|>build_arguments_schema<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
parameters = {
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"routeFilterName", self.ctx.args.filter_name,
required=True,
),
**self.serialize_url_param(
"ruleName", self.ctx.args.name,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters<|fim_middle|>url_parameters<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(func):
def wrapper():
# Try to connect to DB
for i in range(RETRY_COUNT):
try:
func()
return
except Exception as ex: # pylint: disable=broad-except
logger.error(
"waiting for %s, retry %d/%d [%s]",
func.__name__,
i + 1,
RETRY_COUNT,
ex,
)
time.sleep(RETRY_INTERVAL)
raise Exception(f"waiting for {func.__name__} failed")
return wrapper<|fim_middle|>retryable<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
parser = argparse.ArgumentParser( description='Metagenemark GFF -> GFF3 conversion script')
## output file to be written
parser.add_argument('-i', '--input', type=str, required=True, help='Path to a GFF file created by Metagenemark' )
parser.add_argument('-o', '--output', type=str, required=True, help='Path to an output file to be created' )
parser.add_argument('-p', '--prefix', type=str, required=True, help='Prefix to use in ID generation')
parser.add_argument('-pf', '--protein_fasta', type=str, required=False, help='Optional protein FASTA to be written')
args = parser.parse_args()
assemblies = dict()
current_assembly = None
# key like 2 = SRS014890.polypeptide.2
polypeptide_lookup = dict()
writing_protein = False
gene = None
mRNAs = dict()
current_sequence = None
current_gene_comment_lines = list()
fout = open(args.output, mode='wt', encoding='utf-8')
fout.write("##gff-version 3\n")
if args.protein_fasta is not None:
protein_out = open(args.protein_fasta, mode='wt', encoding='utf-8')
for line in open(args.input):
if line.startswith("#"):
if line.startswith("##FASTA"):
current_gene_comment_lines.append("#{0}".format(line))
elif line.startswith("##end-Protein"):
writing_protein = False
current_gene_comment_lines.append(line)
# since we're already doing our own header, don't duplicate the old one
elif line.startswith("##gff-version"):
continue
else:
if line.startswith("##Protein "):
m = re.match("##Protein (\d+)", line)
if m:
writing_protein = True
protein_out.write(">{0}\n".format(polypeptide_lookup[m.group(1)]))
else:
raise Exception("ERROR: Expected line to match: ##Protein N")
elif writing_protein == True:
protein_out.write(line[2:])
current_gene_comment_lines.append(line)
else:
cols = line.split("\t")
if len(cols) != 9:
continue
mol_id = cols[0]
mol_id_m = re.match('^(\S+) ', mol_id)
if mol_id_m:
print("MATCH!")
mol_id = mol_id_m.group(1)
feat_type = cols[2]
## we expect only gene types here
if feat_type not in ['gene', 'CDS']:
raise Exception("ERROR: expected only 'gene' or 'CDS' feature types as input (depending on metagenemark version).")
m_gene = re.match('gene_id[ =](\d+)', cols[8])
if m_gene:
gene_num = m_gene.group(1)
else:
raise Exception("ERROR: expected 9th column to have gene ids like: gene_id 5")
## initialize this assembly if we haven't seen it yet
if mol_id not in assemblies:
assemblies[mol_id] = things.Assembly(id=mol_id)
current_assembly = assemblies[mol_id]
gene = things.Gene(id="{0}.gene.{1}".format(args.prefix, gene_num))
gene.locate_on( target=current_assembly, fmin=int(cols[3]) - 1, fmax=int(cols[4]), strand=cols[6] )
mRNA = things.mRNA(id="{0}.mRNA.{1}".format(args.prefix, gene_num), parent=gene.id)
mRNA.locate_on( target=current_assembly, fmin=int(cols[3]) - 1, fmax=int(cols[4]), strand=cols[6] )
gene.add_mRNA(mRNA)
CDS = things.CDS(id="{0}.CDS.{1}".format(args.prefix, gene_num), parent=mRNA.id)
CDS.locate_on( target=current_assembly, fmin=int(cols[3]) - 1, fmax=int(cols[4]), strand=cols[6], phase=int(cols[7]) )
mRNA.add_CDS(CDS)
exon = things.Exon(id="{0}.exon.{1}".format(args.prefix, gene_num), parent=mRNA.id)
exon.locate_on( target=current_assembly, fmin=int(cols[3]) - 1, fmax=int(cols[4]), strand=cols[6] )
mRNA.add_exon(exon)
polypeptide_id = "{0}.polypeptide.{1}".format(args.prefix, gene_num)
polypeptide = things.Polypeptide(id=polypeptide_id, parent=mRNA.id)
polypeptide.locate_on( target=current_assembly, fmin=int(cols[3]) - 1, fmax=int(cols[4]), strand=cols[6] )
mRNA.add_polypeptide(polypeptide)
polypeptide_lookup[gene_num] = polypeptide_id
gene.print_as(fh=fout, source='GeneMark.hmm', format='gff3')
fout.write( "".join(current_gene_comment_lines) )
current_gene_comment_lines = list()<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertTrue(_is_whitespace(" "))
self.assertTrue(_is_whitespace("\t"))
self.assertTrue(_is_whitespace("\r"))
self.assertTrue(_is_whitespace("\n"))
self.assertTrue(_is_whitespace("\u00A0"))
self.assertFalse(_is_whitespace("A"))
self.assertFalse(_is_whitespace("-"))<|fim_middle|>test_is_whitespace<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *args, **kwargs):
# let lightning clip
super().METHOD_NAME(*args, **kwargs)
# check clipping worked as expected
self.check_grads_clipped()<|fim_middle|>configure_gradient_clipping<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, F, x):
x = self.features(x)
x = self.output(x)
return x<|fim_middle|>hybrid_forward<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
api_model = ConfigurableAPI(
domain=self.domain_name,
transform_expression=UCRExpression(definition={})
)
user = MockUser()
context = get_evaluation_context(user, 'post', {}, {}, {})
with self.assertRaises(BadSpecError):
_execute_generic_api(self.domain_name, user, "device_id", context, api_model)<|fim_middle|>test_spec_error<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")<|fim_middle|>name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(testdir, app_schema, openapi3_base_url):
# When the user registers auth from `requests`
testdir.make_test(
f"""<|fim_middle|>test_requests_auth<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, nodes):
return self._get_mappings(nodes, transport="fc")<|fim_middle|>get_mappings<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(root_url):
"""
Returns an UrlMap class that is bound to a specific root url
"""
properties = {"root_url": root_url}
return type("UrlMap", (UrlMapBase,), properties)<|fim_middle|>url_map_maker<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, alert):
return GenericAlert(
timestamp=alert['discoverydate'],
url=f'{TNS_BASE_URL}object/' + alert['objname'],
id=alert['objname'],
name=alert['name_prefix'] + alert['objname'],
ra=alert['radeg'],
dec=alert['decdeg'],
mag=alert['discoverymag'],
score=alert['name_prefix'] == 'SN'
)<|fim_middle|>to_generic_alert<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
# Mimic a real test runner, for better compat 2.7 / 3.x
suite = unittest.TestSuite()
suite.addTest(_CachedTestClassSample("example_test"))
suite.addTest(_CachedTestClassSample("example_test_2"))
suite.addTest(_CachedTestClassSample("example_test_3"))
unittest.TextTestRunner().run(suite)
assert len(traces) == 5
assert traces[0] == "create A"
assert traces[1] == "create B"
assert traces[2] == "create C"
assert traces[3] == "create C"
assert traces[4] == "remove C" # One of the C's is cached, one is not.
# Note: unit test runner doesn't trigger the pytest session fixture that deletes resources when all tests are done.
# let's run that manually now to test it.
AbstractPreparer._perform_pending_deletes()
assert len(traces) == 8
# we're technically relying on an implementation detail (for earlier versions of python
# dicts did not guarantee ordering by insertion order, later versions do)
# to order removal by relying on dict ordering.
assert traces[5] == "remove C"
assert traces[6] == "remove B"
assert traces[7] == "remove A"<|fim_middle|>test_cached_preparer_order<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
spawn_test_server(ServerStub, "default", "Fake")
assert core.get_databus().initialized.is_set()<|fim_middle|>test_spawn_test_server_initializes_databus<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
expected = [
"On the fifth day of Christmas my true love gave to me: "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(5, 5), expected)<|fim_middle|>test_fifth_day_five_gold_rings<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Register template under one domain admin(root/domain1->account 1)
template_register = Template.register(
self.apiclient,
self.testdata["privatetemplate"],
zoneid=self.zone.id,
hypervisor=self.hypervisor,
account=self.account1.name,
domainid=self.domain1.id)
template_register.download(self.apiclient)
self.download(self.apiclient, template_register.id)
listtemplate = Template.list(
self.apiclient,
zoneid=self.zone.id,
hypervisor=self.hypervisor,
account=self.account2.name,
domainid=self.account2.domainid,
templatefilter="executable")
self.assertEqual(
listtemplate,
None,
"Check templates are not listed - CLOUDSTACK-10149"
)
return<|fim_middle|>test_listtemplate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
from frappe.desk.treeview import make_tree_args
args = frappe.form_dict
args = make_tree_args(**args)
if args.parent_location == "All Locations":
args.parent_location = None
frappe.get_doc(args).insert()<|fim_middle|>add_node<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
env_helper = Mock()
spark_session_factory = SparkSessionFactory(env_helper)
spark_config = spark_session_factory._get_spark_configs(is_training_job=True)
assert all(tup[0] != "spark.jars" for tup in spark_config)
assert all(tup[0] != "spark.jars.packages" for tup in spark_config)<|fim_middle|>test_spark_session_factory_configuration_on_training<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cfg, testfile, update=False):
"""Compare test results."""
extension = []
extension_config = {}
wrapper = "%s"
for k, v in cfg['extensions'].items():
extension.append(k)
if v:
extension_config[k] = v
if 'css' in cfg and len(cfg['css']):
wrapper = WRAPPER % '\n'.join([CSS_LINK % css for css in cfg['css']])
check_markdown(testfile, extension, extension_config, wrapper, update)<|fim_middle|>compare_results<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(*args, **kwds):
tokenobject_list_from = get_tokens(serial=args[0])
tokenobject_list_to = get_tokens(serial=args[1])
if len(tokenobject_list_from) != 1:
log.error("not a unique token to copy from found")
raise(TokenAdminError("No unique token to copy from found",
id=1016))
if len(tokenobject_list_to) != 1:
log.error("not a unique token to copy to found")
raise(TokenAdminError("No unique token to copy to found",
id=1017))
f_result = func(*args, **kwds)
return f_result<|fim_middle|>check_serial_wrapper<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Specify what celestial object is causing an eclipse message.
"""
self.eclipseObject.ModelTag = "eclipseObject"
self.eclipseObject.sunInMsg.subscribeTo(self.gravFactory.spiceObject.planetStateOutMsgs[self.sun])
# add all celestial objects in spiceObjects except for the sun (0th object)
for item in range(1, len(self.gravFactory.spiceObject.planetStateOutMsgs)):
self.eclipseObject.addPlanetToModel(self.gravFactory.spiceObject.planetStateOutMsgs[item])<|fim_middle|>set_eclipse_object<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(data):
"""Construct correct locator subclass from dictionary,
which should contain a 'type' field and at least all
required fields of that locator.
"""
type_ = data.pop("type", None)
if not type_:
raise ValueError("Missing locator type field")
class_ = TYPES.get(type_)
if not class_:
raise ValueError(f"Unknown locator type: {type_}")
# Check for missing parameters
required = set(
field.name for field in fields(class_) if field.default is MISSING
)
missing = set(required) - set(data)
if missing:
raise ValueError("Missing locator field(s): {}".format(", ".join(missing)))
# Ignore extra data
required_or_optional = [field.name for field in fields(class_)]
kwargs = {k: v for k, v in data.items() if k in required_or_optional}
return class_(**kwargs)<|fim_middle|>from_dict<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
x = relay.var("x", shape=[128, 64], dtype="int8")
w = relay.var("w", shape=[256, 64], dtype="int8")
zero = relay.const(0)
op = relay.op.nn.dense(
relay.qnn.op.dequantize(x, relay.const(2.0), zero),
relay.qnn.op.dequantize(w, relay.const(0.5), zero),
)
op = relay.qnn.op.quantize(op, relay.const(1.0), zero, out_dtype="int8")
mod = tvm.IRModule.from_expr(op)
fake_quantized_op_freqs = relay.analysis.list_fake_quantized_op_freqs(mod)
assert dict(fake_quantized_op_freqs) == {"nn.dense": 1}<|fim_middle|>test_fake_quantize_dense<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, index: int) -> xr.Dataset:
"""Get or compute the dataset for the level at given *index*.
:param index: the level index
:return: the dataset for the level at *index*.
"""
if index not in self._level_datasets:
with self._lock:
# noinspection PyTypeChecker
level_dataset = self._get_dataset_lazily(index,
self._parameters)
self.set_dataset(index, level_dataset)
# noinspection PyTypeChecker
return self._level_datasets[index]<|fim_middle|>get_dataset<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(subject, charsets):
for charset in charsets[:-1]:
try:
return tonative(subject, charset)
except ValueError:
pass
return tonative(subject, charsets[-1])<|fim_middle|>try_decode<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""Load mathics-three-package.json. It contains version information."""
global mathics_threejs_backend_data
if not mathics_threejs_backend_data:
try:
with builtin_open(
settings.MATHICS_BACKEND_THREEJS_JSON_PATH, "rb"
) as version_json_fp:
mathics_threejs_backend_data = json.load(version_json_fp)
except Exception:
pass
return mathics_threejs_backend_data<|fim_middle|>get_mathics_threejs_backend_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None: ...<|fim_middle|>check_handshake_timeout<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>(self, filename):<|fim_middle|>tryfind<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(ProvName: str, ProvType, Flags) -> None: ...<|fim_middle|>crypt_set_provider_ex<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(batch):
def padding(indice, max_length, pad_idx=0):
pad_indice = [
item + [pad_idx] * max(0, max_length - len(item)) for item in indice
]
return torch.tensor(pad_indice)
input_ids = [data["input_ids"] for data in batch]
max_length = max([len(t) for t in input_ids])
input_ids = padding(input_ids, max_length)[:,:maxlen]
data = {
"input_ids": input_ids,
"labels": input_ids
}
return data<|fim_middle|>collate_fn<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(hostname, timeout=20, username=None, password=None):
"""
Reboot a server using the Dell DRAC
CLI Example:
.. code-block:: bash
salt-run drac.reboot example.com
"""
client = __connect(hostname, timeout, username, password)
if isinstance(client, paramiko.SSHClient):
(stdin, stdout, stderr) = client.exec_command("racadm serveraction powercycle")
if "successful" in stdout.readline():
log.info("powercycle successful")
else:
log.error("powercycle racadm command failed")
return False
else:
log.error("client was not of type paramiko.SSHClient")
return False
return True<|fim_middle|>reboot<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request):
"""
IMPORTANT: Do not change limits.
Please respect the APIs that provide this data for free.
Returns None if NOT overlimit. Otherwise returns dict containing totals in request.
"""
if len(request) <= min(LIBRARY_ADD_LIMIT_TVSHOWS, LIBRARY_ADD_LIMIT_MOVIES):
return
totals = {}
for i in request:
totals[i.get('type', 'none')] = totals.get(i.get('type', 'none'), 0) + 1
if totals.get('show', 0) <= LIBRARY_ADD_LIMIT_TVSHOWS:
if totals.get('movie', 0) <= LIBRARY_ADD_LIMIT_MOVIES:
return
return totals<|fim_middle|>check_overlimit<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, name=None, **kwargs) -> Generator[BaseSegment, None, None]:
"""Return a subsegment context manger.
Parameters
----------
name: str
Subsegment name
kwargs: Optional[dict]
Optional parameters to be propagated to segment
"""<|fim_middle|>in_subsegment<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(config):
stop_gunicorn()<|fim_middle|>pytest_unconfigure<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(klass: Type):
_types[klass.__jsii_type__] = klass<|fim_middle|>register_type<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> bool:
"""
General implementation of checking if we are in a proper thread.
If Qt is available and Application is created then assign :py:func:`in_qt_main_thread` to `in_main_thread`.
If Qt libs are not available then assign :py:func:`in_main_thread_py` to `in_main_thread`.
If Qt libs are available but there is no Application, it will emit a warning and return the result of in_main_thread_py.
Returns
-------
thread_flag : bool
True if we are in the main thread, False otherwise.
"""
global in_main_thread
try:
from napari._qt.utils import in_qt_main_thread
res = in_qt_main_thread()
in_main_thread = in_qt_main_thread
except ImportError:
in_main_thread = in_main_thread_py
return in_main_thread_py()
except AttributeError:
warnings.warn(
"Qt libs are available but no QtApplication instance is created"
)
return in_main_thread_py()
return res<|fim_middle|>in_main_thread<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(address):
async with BleakClient(address, winrt=dict(use_cached_services=True)) as client:
print(f"Connected: {client.is_connected}")
system_id = await client.read_gatt_char(SYSTEM_ID_UUID)
print(
"System ID: {0}".format(
":".join(["{:02x}".format(x) for x in system_id[::-1]])
)
)
model_number = await client.read_gatt_char(MODEL_NBR_UUID)
print("Model Number: {0}".format("".join(map(chr, model_number))))
try:
device_name = await client.read_gatt_char(DEVICE_NAME_UUID)
print("Device Name: {0}".format("".join(map(chr, device_name))))
except Exception:
pass
manufacturer_name = await client.read_gatt_char(MANUFACTURER_NAME_UUID)
print("Manufacturer Name: {0}".format("".join(map(chr, manufacturer_name))))
firmware_revision = await client.read_gatt_char(FIRMWARE_REV_UUID)
print("Firmware Revision: {0}".format("".join(map(chr, firmware_revision))))
hardware_revision = await client.read_gatt_char(HARDWARE_REV_UUID)
print("Hardware Revision: {0}".format("".join(map(chr, hardware_revision))))
software_revision = await client.read_gatt_char(SOFTWARE_REV_UUID)
print("Software Revision: {0}".format("".join(map(chr, software_revision))))
battery_level = await client.read_gatt_char(BATTERY_LEVEL_UUID)
print("Battery Level: {0}%".format(int(battery_level[0])))
async def notification_handler(characteristic, data):
print(f"{characteristic.description}: {data}")
# Turn on the red light on the Sensor Tag by writing to I/O Data and I/O Config.
write_value = bytearray([0x01])
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Pre-Write Value: {0}".format(value))
await client.write_gatt_char(IO_DATA_CHAR_UUID, write_value, response=True)
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Post-Write Value: {0}".format(value))
assert value == write_value
write_value = bytearray([0x01])
value = await client.read_gatt_char(IO_CONFIG_CHAR_UUID)
print("I/O Config Pre-Write Value: {0}".format(value))
await client.write_gatt_char(IO_CONFIG_CHAR_UUID, write_value, response=True)
value = await client.read_gatt_char(IO_CONFIG_CHAR_UUID)
print("I/O Config Post-Write Value: {0}".format(value))
assert value == write_value
# Try notifications with key presses.
await client.start_notify(KEY_PRESS_UUID, notification_handler)
await asyncio.sleep(5.0)
await client.stop_notify(KEY_PRESS_UUID)<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, series, requestContext=None):
return self.request('POST', '/tags/delSeries', {'path': series}, requestContext)<|fim_middle|>del_series<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> List[str]:
with open(self._version_file_path, "r") as f:
file_content = json.load(f)
return file_content[self._PRODUCTION_VERSION_KEY]<|fim_middle|>get_production_versions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
spec = self.spec
args = []
args.append("-DISPC_NO_DUMPS=ON") # otherwise, LLVM needs patching
args.append("-DCURSES_NEED_NCURSES=TRUE")
args.append("-DISPC_INCLUDE_EXAMPLES=OFF")
args.append("-DISPC_INCLUDE_TESTS=OFF")
args.append("-DISPC_INCLUDE_UTILS=OFF")
if spec.satisfies("target=x86_64:") or spec.satisfies("target=x86:"):
args.append("-DARM_ENABLED=OFF")
elif spec.satisfies("target=aarch64:"):
args.append("-DARM_ENABLED=ON")
return args<|fim_middle|>cmake_args<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional[str]:
"""
The resource ID of the scope map to which the token will be associated with.
"""
return pulumi.get(self, "scope_map_id")<|fim_middle|>scope_map_id<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(
request: Request,
response: Response,
opentrons_version: Union[int, Literal["*"]] = Header(
...,
description=(
"The HTTP API version to use for this request. Must be "
f"'{MIN_API_VERSION}' or higher. To use the latest "
f"version unconditionally, specify '{LATEST_API_VERSION_HEADER_VALUE}'"
),
),<|fim_middle|>check_version_header<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self, inputs):<|fim_middle|>tokenize<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(mocker, mongo, config):
m = mocker.patch(
"virtool.history.db.patch_to_version",
make_mocked_coro((None, {"_id": "foo"}, None)),
)
manifest = {"foo": 2, "bar": 10, "baz": 4}
patched_otus = await get_patched_otus(mongo, config, manifest)
assert list(patched_otus) == [{"_id": "foo"}, {"_id": "foo"}, {"_id": "foo"}]
m.assert_has_calls(
[
mocker.call(config.data_path, mongo, "foo", 2),
mocker.call(config.data_path, mongo, "bar", 10),
mocker.call(config.data_path, mongo, "baz", 4),
]
)<|fim_middle|>test_get_patched_otus<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
tgen = get_topogen()
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
r1 = tgen.gears["r1"]
r2 = tgen.gears["r2"]
def _bgp_converge():
output = json.loads(r1.vtysh_cmd("show bgp neighbor json"))
expected = {
"192.168.1.2": {
"bgpState": "Established",
"neighborCapabilities": {
"dynamic": "advertisedAndReceived",
"softwareVersion": {
"advertisedSoftwareVersion": None,
"receivedSoftwareVersion": None,
},
},
"connectionsEstablished": 1,
"connectionsDropped": 0,
}
}
return topotest.json_cmp(output, expected)
test_func = functools.partial(
_bgp_converge,
)
_, result = topotest.run_and_expect(test_func, None, count=30, wait=1)
assert result is None, "Can't converge"
step("Enable software version capability and check if it's exchanged dynamically")
r1.vtysh_cmd(
"""
configure terminal
router bgp
neighbor 192.168.1.2 capability software-version
"""
)
r2.vtysh_cmd(
"""
configure terminal
router bgp
neighbor 192.168.1.1 capability software-version
"""
)
def _bgp_check_if_session_not_reset():
def _bgp_software_version():
try:
versions = output["192.168.1.2"]["neighborCapabilities"][
"softwareVersion"
]
adv = versions["advertisedSoftwareVersion"]
rcv = versions["receivedSoftwareVersion"]
if not adv and not rcv:
return ""
pattern = "FRRouting/\\d.+"
if re.search(pattern, adv) and re.search(pattern, rcv):
return adv, rcv
except:
return ""
output = json.loads(r1.vtysh_cmd("show bgp neighbor json"))
adv, rcv = _bgp_software_version()
expected = {
"192.168.1.2": {
"bgpState": "Established",
"neighborCapabilities": {
"dynamic": "advertisedAndReceived",
"softwareVersion": {
"advertisedSoftwareVersion": adv,
"receivedSoftwareVersion": rcv,
},
},
"connectionsEstablished": 1,
"connectionsDropped": 0,
}
}
return topotest.json_cmp(output, expected)
test_func = functools.partial(
_bgp_check_if_session_not_reset,
)
_, result = topotest.run_and_expect(test_func, None, count=30, wait=1)
assert (
result is None
), "Session was reset after enabling software version capability"<|fim_middle|>test_bgp_dynamic_capability_software_version<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
if cache_folder.is_dir():
shutil.rmtree(cache_folder)
cache_folder.mkdir(parents=True, exist_ok=True)
recording, sorting = toy_example(
duration=60, num_channels=4, num_units=5, num_segments=2, average_peak_amplitude=-1000
)
recording = bandpass_filter(recording, freq_min=300, freq_max=6000)
recording = recording.save(folder=cache_folder / "recording")
sorting = sorting.save(folder=cache_folder / "sorting")
wvf_extractor = extract_waveforms(
recording, sorting, folder=cache_folder / "wvf_extractor", ms_before=10.0, ms_after=10.0
)<|fim_middle|>setup_module<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, query: str):
params = {"keywords": query}
self.visit(f"{self.home_url}search?{urlencode(params)}")
self.last_soup_url = self.browser.current_url
for li in self.browser.soup.select(".search-result-container li"):
a = li.find("a")
yield SearchResult(
url=self.absolute_url(a.get("href")),
title=a.get("data-bookname"),
info=li.find(".g_star_num small").text.strip(),
)<|fim_middle|>search_novel_in_browser<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
np.random.seed(0)
pts = np.random.rand(50, 3)
tri = op.network.Delaunay(points=pts, shape=[1, 1, 0])
assert tri.coords.shape == (50, 3)
assert np.all(tri.coords[:, :2] == pts[:, :2])
assert np.all(tri.coords[:, -1] != pts[:, -1])
assert np.all(tri.coords[:, -1] == 0.0)<|fim_middle|>test_delaunay_square_with_3_d_points<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Test that reading less than 1 full character works when stdin
# contains multibyte UTF-8 sequences
source = 'ϼўТλФЙ\r\n'.encode('utf-16-le')
expected = 'ϼўТλФЙ\r\n'.encode('utf-8')
for read_count in range(1, 16):
with open('CONIN$', 'rb', buffering=0) as stdin:
write_input(stdin, source)
actual = b''
while not actual.endswith(b'\n'):
b = stdin.read(read_count)
actual += b
self.assertEqual(actual, expected, 'stdin.read({})'.format(read_count))<|fim_middle|>test_partial_reads<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, section, settings):
"""
Save all the settings in a given section.
:param section: section name
:param settings: settings to save (dict)
"""
changed = False
if section not in self._config:
self._config[section] = {}
changed = True
for name, value in settings.items():
if name not in self._config[section] or self._config[section][name] != str(value):
self._config[section][name] = str(value)
changed = True
if changed:
self.writeConfig()<|fim_middle|>save_settings<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, obj):
"""
This method is needed to support proper serialization. While its name is value_to_string()
the real meaning of the method is to convert the value to some serializable format.
Since most of the enum values are strings or integers we WILL NOT convert it to string
to enable integers to be serialized natively.
"""
if django.VERSION >= (2, 0):
value = self.value_from_object(obj)
else:
value = self._get_val_from_obj(obj)
return value.value if value else None<|fim_middle|>value_to_string<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, params: Mapping[str, Any]) -> Iterable:
yield from []<|fim_middle|>list_objects<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(vm):
vm.Install('mutilate')<|fim_middle|>install_mutilate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(filters: Optional[Sequence[pulumi.InputType['GetRouteTablesFilterArgs']]] = None,
tags: Optional[Mapping[str, str]] = None,
vpc_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRouteTablesResult:
"""
This resource can be useful for getting back a list of route table ids to be referenced elsewhere.
:param Sequence[pulumi.InputType['GetRouteTablesFilterArgs']] filters: Custom filter block as described below.
:param Mapping[str, str] tags: Map of tags, each pair of which must exactly match
a pair on the desired route tables.
More complex filters can be expressed using one or more `filter` sub-blocks,
which take the following arguments:
:param str vpc_id: VPC ID that you want to filter from.
"""
__args__ = dict()
__args__['filters'] = filters
__args__['tags'] = tags
__args__['vpcId'] = vpc_id
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('aws:ec2/getRouteTables:getRouteTables', __args__, opts=opts, typ=GetRouteTablesResult).value
return AwaitableGetRouteTablesResult(
filters=pulumi.get(__ret__, 'filters'),
id=pulumi.get(__ret__, 'id'),
ids=pulumi.get(__ret__, 'ids'),
tags=pulumi.get(__ret__, 'tags'),
vpc_id=pulumi.get(__ret__, 'vpc_id'))<|fim_middle|>get_route_tables<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(data: Any, handler: Callable[[list[str], str], None], tags: list[str] = []) -> None:
# processes arbitrary nested structure of dicts and lists which
# is possible in conandata.yaml files
if isinstance(data, list):
for item in data:
METHOD_NAME(item, handler, tags)
elif isinstance(data, dict):
for key, item in data.items():
METHOD_NAME(item, handler, tags + [key])
elif isinstance(data, str):
handler(tags, data)<|fim_middle|>traverse_arbitrary_structure<|file_separator|> |