text (string, lengths 67 to 7.88k) |
---|
<|fim_prefix|>def <|fim_suffix|>():
with open(cmdargs.manifest, 'r') as f:
for line in f:
line = line.rstrip();
if is_comment.match(line): continue;
components = line.split(": ", 2);
guestpath = components[0].strip();
hostpath = components[1].strip()
if is_test.match(guestpath):
test_files.append(guestpath);
add_tests((TestRunnerTest(os.path.basename(x)) for x in test_files))<|fim_middle|>collect_tests<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_distribution_of_stateless_random_uniform<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, parser):
parser.add_argument("-l", "--location", dest="location", help="(OPTIONAL) location of the TAS file to load")<|fim_middle|>add_arguments<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, gc, points, icon_mode=False):
if len(points) < 1:
return
with gc:
gc.clip_to_rect(self.x, self.y, self.width, self.height)
gc.set_stroke_color(self.line_color_)
gc.set_line_width(self.line_width)
# Draw the body of the arrow
starts = points
ends = points + self._cached_vector_data
gc.begin_path()
gc.line_set(starts, ends)
gc.stroke_path()
if self.arrow_size > 0:
vec = self._cached_vector_data
unit_vec = (
vec / sqrt(vec[:, 0] ** 2 + vec[:, 1] ** 2)[:, newaxis]
)
a = 0.707106781 # sqrt(2)/2
# Draw the left arrowhead (for an arrow pointing straight up)
arrow_ends = (
ends
- array(unit_vec * matrix([[a, a], [-a, a]]))
* self.arrow_size
)
gc.begin_path()
gc.line_set(ends, arrow_ends)
gc.stroke_path()
# Draw the right arrowhead (for an arrow pointing straight up)
arrow_ends = (
ends
- array(unit_vec * matrix([[a, -a], [a, a]]))
* self.arrow_size
)
gc.begin_path()
gc.line_set(ends, arrow_ends)
gc.stroke_path()<|fim_middle|>render<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request: pytest.FixtureRequest) -> Callable:
return request.param<|fim_middle|>wsgi_middleware<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(benchmark_time: float, request: pytest.FixtureRequest) -> None:
path = "s3://covid19-lake/covid_knowledge_graph/json/edges/paper_to_concept/*.json"
with ExecutionTimer(request, data_paths=path) as timer:
file_paths = wr.s3.list_objects(path)
ray_ds = ray.data.read_json(file_paths)
ray_ds.to_modin()
assert timer.elapsed_time < benchmark_time<|fim_middle|>test_modin_s3_read_json_simple<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Test elided text"""
raw = "mmmmmmm"
self.label.setText(raw)
self.label.setFixedWidth(200)
displayedText = qt.QLabel.text(self.label)
self.assertNotIn("…", displayedText)
self.assertEqual(raw, displayedText)<|fim_middle|>test_not_elided_value<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(pi_cls):
"""Wrap a python class as pass instrument"""
# No additional wrapping needed if the user class already
# inherits.
if issubclass(pi_cls, PassInstrument):
return pi_cls
class PyPassInstrument(pi_cls, PassInstrument):
"""Internal wrapper class to create a class instance."""
def __init__(self, *args, **kwargs):
# initialize handle in case pi_cls creation failed.
self.handle = None
pi_cls.__init__(self, *args, **kwargs)
PassInstrument.__init__(self)
functools.update_wrapper(PyPassInstrument.__init__, pi_cls.__init__)
PyPassInstrument.__name__ = pi_cls.__name__
PyPassInstrument.__doc__ = pi_cls.__doc__
PyPassInstrument.__module__ = pi_cls.__module__
return PyPassInstrument<|fim_middle|>wrap_class_pass_instrument<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(arch: str) -> List[str]:
arch_prefix, arch_sep, arch_suffix = arch.partition("_")
if arch.startswith("macosx"):
arches = _mac_platforms(arch)
elif arch_prefix in ["manylinux2014", "manylinux2010"]:
arches = _custom_manylinux_platforms(arch)
else:
arches = [arch]
return arches<|fim_middle|>get_custom_platforms<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(src):
module = []
src, ext = os.path.splitext(src)
while src and src != '/':
head, tail = os.path.split(os.path.abspath(src))
if (tail == 'src' or any(os.path.exists(os.path.join(head, tail, f))
for f in ('setup.py', 'pyproject.toml'))):
return '.'.join(module)
module.insert(0, tail)
src = head
return None<|fim_middle|>guess_module_name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(scene):
a = Arc(radius=1, start_angle=PI)
scene.add(a)<|fim_middle|>test_arc<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(direction, amount=1, stream=sys.stdout, erase=False):
"""Move the cursor.
If the stream is a TTY, print characters that will move the cursor
in the given direction and optionally erase the line. Else do nothing.
direction (int): the direction as a directions constant, like
directions.UP.
stream (fileobj): a file-like object (that adheres to the API
declared in the `io' package). Defaults to sys.stdout.
erase (bool): True if the line the cursor ends on should be erased.
"""
if stream.isatty():
if direction == directions.UP:
print(curses.tparm(curses.tigetstr("cuu"), amount).decode('ascii'),
file=stream, end='')
elif direction == directions.DOWN:
print(curses.tparm(curses.tigetstr("cud"), amount).decode('ascii'),
file=stream, end='')
elif direction == directions.LEFT:
print(curses.tparm(curses.tigetstr("cub"), amount).decode('ascii'),
file=stream, end='')
elif direction == directions.RIGHT:
print(curses.tparm(curses.tigetstr("cuf"), amount).decode('ascii'),
file=stream, end='')
if erase:
print(curses.tparm(curses.tigetstr("el")).decode('ascii'),
file=stream, end='')
stream.flush()<|fim_middle|>move_cursor<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertMarkdownRenders(
"""![Text](http://link.com/with spaces'.png"and quotes" 'and title") more text""",
"""<p><img alt="Text" src="http://link.com/with spaces'.png" title="and quotes" 'and title" />"""
""" more text</p>"""
)<|fim_middle|>test_mixed_title_quotes4<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
return str(self.__config[NOTIFICATION_JOB_LOG_URL_PATTERN])<|fim_middle|>get_job_log_url_pattern<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
obj = ClassFactory.create()
self.assertFalse(obj.has_suffix_attr)
obj.attrs.append(AttrFactory.create())
obj.attrs.append(AttrFactory.create())
self.assertFalse(obj.has_suffix_attr)
obj.attrs[1].index = sys.maxsize
self.assertTrue(obj.has_suffix_attr)<|fim_middle|>test_property_has_suffix_attr<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> List[str]:
"""Extracts the script section from the CI config files.
Returns:
list(str). An alphabetically-sorted list of names of test suites
from the script section in the CI config files.
"""
suites = []
file_contents = read_and_parse_ci_config_files()
for file_content in file_contents:
workflow_dict = yaml.load(file_content, Loader=yaml.Loader)
suites += workflow_dict[
'jobs']['e2e_test']['strategy']['matrix']['suite']
return sorted(suites)<|fim_middle|>get_e2e_suite_names_from_ci_config<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(plugin: Plugin, target_host: TargetHost):
result = plugin.run(
host=target_host,
servers=SERVERS,
current_depth=1,
options={},
interrupt=Event(),
)
assert result == EXPLOITER_RESULT<|fim_middle|>test_run_returns_exploiter_result<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(client):
response = client.get("/tomchristie")
assert response.status_code == 200
assert response.text == "Hello, tomchristie!"<|fim_middle|>test_http_endpoint_route_path_params<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
run_symm(dace.dtypes.DeviceType.CPU)<|fim_middle|>test_cpu<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(module):
out ="""<|fim_middle|>gen_doc_stub<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>( # noqa: PLR0912
entry: Custom,
options: FavaOptions,
) -> None:
"""Parse a single custom fava-option entry and set option accordingly."""
key = entry.values[0].value.replace("-", "_")
if key not in All_OPTS:
raise ValueError(f"unknown option `{key}`")
if key == "default_file":
options.default_file = entry.meta["filename"]
return
value = entry.values[1].value
if not isinstance(value, str):
raise TypeError(f"expected string value for option `{key}`")
if key == "insert_entry":
try:
pattern = re.compile(value)
except re.error as err:
raise TypeError(
f"Should be a regular expression: '{value}'.",
) from err
opt = InsertEntryOption(
entry.date,
pattern,
entry.meta["filename"],
entry.meta["lineno"],
)
options.insert_entry.append(opt)
elif key == "collapse_pattern":
try:
pattern = re.compile(value)
except re.error as err:
raise TypeError(
f"Should be a regular expression: '{value}'.",
) from err
options.collapse_pattern.append(pattern)
elif key == "locale":
try:
Locale.parse(value)
options.locale = value
except UnknownLocaleError as err:
raise ValueError(f"Unknown locale: '{value}'.") from err
elif key == "fiscal_year_end":
fye = parse_fye_string(value)
if fye is None:
raise ValueError("Invalid 'fiscal_year_end' option.")
options.fiscal_year_end = fye
elif key in STR_OPTS:
setattr(options, key, value)
elif key in BOOL_OPTS:
setattr(options, key, value.lower() == "true")
elif key in INT_OPTS:
setattr(options, key, int(value))
else: # key in TUPLE_OPTS
setattr(options, key, tuple(value.strip().split(" ")))<|fim_middle|>parse_option_custom_entry<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Test Ethernet setters"""
self.eth.set_ether_type(0x88cc)
self.assertEqual(self.eth.get_ether_type(), 0x88cc)
# Swap source and destination MACs
dhost = self.eth.get_ether_dhost()
shost = self.eth.get_ether_shost()
self.eth.set_ether_dhost(shost)
self.eth.set_ether_shost(dhost)
self.assertEqual(self.eth.get_ether_dhost(), array('B', self.frame[6:12]))
self.assertEqual(self.eth.get_ether_shost(), array('B', self.frame[0:6]))<|fim_middle|>test_02<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request):
mp = request.getfixturevalue("monkeypatch")
mp.setattr(MiriadeClass, '_request', nonremote_request)
return mp<|fim_middle|>patch_request<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self, blackboard_api_client, plugin, grouping_service, course
):
api_groups = plugin.get_groups_for_instructor(
grouping_service, course, sentinel.group_set_id
)
blackboard_api_client.group_set_groups.assert_called_once_with(
course.lms_id, sentinel.group_set_id
)
assert api_groups == blackboard_api_client.group_set_groups.return_value<|fim_middle|>test_get_groups_for_instructor<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(obj):
"""
Are we being asked to look up a list of things, instead of a single thing?
We check for the `__iter__` attribute so that this can cover types that
don't have to be known by this module, such as NumPy arrays.
Strings, however, should be considered as atomic values to look up, not
iterables. The same goes for tuples, since they are immutable and therefore
valid entries.
We don't need to check for the Python 2 `unicode` type, because it doesn't
have an `__iter__` attribute anyway.
"""
return (
hasattr(obj, "__iter__")
and not isinstance(obj, str)
and not isinstance(obj, tuple)
)<|fim_middle|>is_iterable<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
disable_component_obj = DisabledComponent.objects.create(
component_code=random.choice(self.components).code,
action=DisabledComponent.ACTION_TYPE_RETRIEVE,
scope=DisabledComponent.SCOPE_TYPE_FLOW,
)
cases = [
{
"action": DisabledComponent.ACTION_TYPE_RETRIEVE,
"scope": DisabledComponent.SCOPE_TYPE_FLOW,
"query_scope": DisabledComponent.SCOPE_TYPE_FLOW,
"not_found": True,
},
{
"action": DisabledComponent.ACTION_TYPE_RETRIEVE,
"scope": DisabledComponent.SCOPE_TYPE_FLOW,
"query_scope": DisabledComponent.SCOPE_TYPE_TASK,
"not_found": False,
},
{
"action": DisabledComponent.ACTION_TYPE_RETRIEVE,
"scope": DisabledComponent.SCOPE_TYPE_ALL,
"query_scope": DisabledComponent.SCOPE_TYPE_FLOW,
"not_found": True,
},
{
"action": DisabledComponent.ACTION_TYPE_LIST,
"scope": DisabledComponent.SCOPE_TYPE_FLOW,
"query_scope": DisabledComponent.SCOPE_TYPE_FLOW,
"not_found": False,
},
]
for case in cases:
disable_component_obj.scope = case["scope"]
disable_component_obj.action = case["action"]
disable_component_obj.save()
try:
code = self.call_by_action(
"detail", query_params={"scope": case["query_scope"]}, code=disable_component_obj.component_code
)["data"]["code"]
except AssertionError:
self.assertTrue(case["not_found"], True)
continue
self.assertEqual(code, disable_component_obj.component_code)<|fim_middle|>test_retrieve_disable<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
return result<|fim_middle|>output<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(db, results, nrep, compress, data, detailed, minimize, threadno=None):
for j in range(nrep):
for r in 1, 10, 100, 1000:
t = time.time()
conflicts = 0
jar = db.open()
while 1:
try:
transaction.begin()
rt = jar.root()
key = 's%s' % r
if key in rt:
p = rt[key]
else:
rt[key] = p = P()
for i in range(r):
v = getattr(p, str(i), P())
if compress is not None:
v.d = compress(data)
else:
v.d = data
setattr(p, str(i), v)
transaction.commit()
except ConflictError:
conflicts = conflicts + 1
else:
break
jar.close()
t = time.time() - t
if detailed:
if threadno is None:
print("%s\t%s\t%.4f\t%d" % (j, r, t, conflicts))
else:
print("%s\t%s\t%.4f\t%d\t%d" % (j, r, t, conflicts,
threadno))
results[r].append((t, conflicts))
rt = p = v = None # release all references
if minimize:
time.sleep(3)
jar.cacheMinimize()<|fim_middle|>work<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, datum):
inp = datum
text = self.column_delimiter().join(
[self.string_delimiter().join(inp[0]), self.string_delimiter().join(inp[1])]
)
key = self.arbitrary_identifier.encode(text)
result = {"key": key, "input": inp, "text": text}
return result<|fim_middle|>parse_pair_of_lists<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Returns the first date of the month represented by this form instance"""
return date(self.cleaned_year, self.cleaned_month, 1)<|fim_middle|>this_month_start<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
doctype: str,
data: Dict):
"""Creates a new document of the given doctype.
See https://frappeframework.com/docs/v14/user/en/api/rest#listing-documents
Args:
doctype (str): Type of the document to create.
data (Dict): Document object.
"""
post_response = requests.post(
f'{self.base_url}/resource/{doctype}',
json=data,
headers=self.headers)
if not post_response.ok:
if 400 <= post_response.status_code < 600:
raise requests.HTTPError(f'{post_response.reason}: {post_response.text}', response=post_response)
return post_response.json()['data']<|fim_middle|>post_document<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, browser_instance):
browser_instance.visit(self.host)
browser_instance.find_link_by_text('Log in').first.click()
browser_instance.fill('username', 'jessamyn@archive.org')
browser_instance.fill('password', 'openlibrary')
browser_instance.find_by_value('Log In').first.click()<|fim_middle|>login<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
from litex.build.parser import LiteXArgumentParser
parser = LiteXArgumentParser(platform=antmicro_lpddr4_test_board.Platform, description="LiteX SoC on LPDDR4 Test Board.")
parser.add_target_argument("--flash", action="store_true", help="Flash bitstream.")
parser.add_target_argument("--sys-clk-freq", default=50e6, type=float, help="System clock frequency.")
parser.add_target_argument("--iodelay-clk-freq", default=200e6, type=float, help="IODELAYCTRL frequency.")
ethopts = parser.target_group.add_mutually_exclusive_group()
ethopts.add_argument("--with-ethernet", action="store_true", help="Add Ethernet.")
ethopts.add_argument("--with-etherbone", action="store_true", help="Add EtherBone.")
parser.add_target_argument("--eth-ip", default="192.168.1.50", help="Ethernet/Etherbone IP address.")
parser.add_target_argument("--eth-dynamic-ip", action="store_true", help="Enable dynamic Ethernet IP addresses setting.")
parser.add_target_argument("--with-hyperram", action="store_true", help="Add HyperRAM.")
parser.add_target_argument("--with-sdcard", action="store_true", help="Add SDCard.")
parser.add_target_argument("--with-jtagbone", action="store_true", help="Add JTAGBone.")
parser.add_target_argument("--with-uartbone", action="store_true", help="Add UartBone on 2nd serial.")
args = parser.parse_args()
assert not (args.with_etherbone and args.eth_dynamic_ip)
soc = BaseSoC(
sys_clk_freq = args.sys_clk_freq,
iodelay_clk_freq = args.iodelay_clk_freq,
with_ethernet = args.with_ethernet,
with_etherbone = args.with_etherbone,
eth_ip = args.eth_ip,
eth_dynamic_ip = args.eth_dynamic_ip,
with_hyperram = args.with_hyperram,
with_sdcard = args.with_sdcard,
with_jtagbone = args.with_jtagbone,
with_uartbone = args.with_uartbone,
**parser.soc_argdict)
builder = Builder(soc, **parser.builder_argdict)
if args.build:
builder.build(**parser.toolchain_argdict)
if args.load:
prog = soc.platform.create_programmer()
prog.load_bitstream(builder.get_bitstream_filename(mode="sram"))
if args.flash:
prog = soc.platform.create_programmer()
prog.flash(0, builder.get_bitstream_filename(mode="flash"))<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
return None<|fim_middle|>discriminator<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
question_freeze_after_option_before_deadline,
):
assert question_freeze_after_option_before_deadline.read_only is False<|fim_middle|>test_question_property_freeze_after_option_before<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, query, url):
query['size'] = 50
query['from'] = 0
for page_num in itertools.count(1):
results = self._call_api(
'search', None, query, url,
'Downloading search JSON page %d' % page_num)
sl = try_get(results, lambda x: x['sectionList'][0], dict)
if sl:
results = sl
items = results.get('items')
if not items or not isinstance(items, list):
break
for item in items:
if not isinstance(item, dict):
continue
if not self._check_bc_id_exists(item):
continue
yield self._parse_rf_item(item)
size = int_or_none(results.get('size'))
if size is not None:
query['size'] = size
total = int_or_none(results.get('total'))
if total is not None and query['from'] + query['size'] > total:
break
query['from'] += query['size']<|fim_middle|>entries<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(fxn):
"""Decorator to handle selecting the proper module for loaders.
The decorated function is passed the module to use instead of the module
name. The module passed in to the function is either from sys.modules if
it already exists or is a new module. If the module is new, then __name__
is set the first argument to the method, __loader__ is set to self, and
__package__ is set accordingly (if self.is_package() is defined) will be set
before it is passed to the decorated function (if self.is_package() does
not work for the module it will be set post-load).
If an exception is raised and the decorator created the module it is
subsequently removed from sys.modules.
The decorator assumes that the decorated function takes the module name as
the second argument.
"""
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
@functools.wraps(fxn)
def module_for_loader_wrapper(self, fullname, *args, **kwargs):
with _module_to_load(fullname) as module:
module.__loader__ = self
try:
is_package = self.is_package(fullname)
except (ImportError, AttributeError):
pass
else:
if is_package:
module.__package__ = fullname
else:
module.__package__ = fullname.rpartition('.')[0]
# If __package__ was not set above, __import__() will do it later.
return fxn(self, module, *args, **kwargs)
return module_for_loader_wrapper<|fim_middle|>module_for_loader<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, data):
self._p("%s DECL" % data)<|fim_middle|>handle_decl<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(get_contract):
code = """<|fim_middle|>test_single_type_tuple_address<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
**kwargs: Any
) -> AsyncIterable["_models.ResourceProviderOperationList"]:
"""List all the available operations the KubernetesConfiguration resource provider supports, in
this api-version.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceProviderOperationList or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.kubernetesconfiguration.v2021_09_01.models.ResourceProviderOperationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceProviderOperationList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
template_url=self.METHOD_NAME.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceProviderOperationList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)<|fim_middle|>list<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
page = OptionsPage.from_json(
{
'actions': {
'POST': {'username': {'type': 'string', 'help_text': 'Please specify a username', 'required': True}},
}
}
)
options = ResourceOptionsParser(None, page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
out = StringIO()
self.parser.choices['create'].print_help(out)
assert '--username TEXT Please specify a username' in out.getvalue()
<|fim_prefix|>def <|fim_suffix|>(self):
self.press('b', 'shift')
bcp_command = ('switch', None, {'name': 'shift_b', 'state': 1})
self.assertIn(bcp_command, self.sent_bcp_commands)<|fim_middle|>test_mod_key_with_plus<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
max_strength = 0
max_axis = -1
for dim in range(3):
sig = self.sigs[dim]
sd = sig[:-2] - 2.0 * sig[1:-1] + sig[2:]
center = int((self.flagged.shape[dim] - 1) / 2)
strength = zero_strength = zero_cross = 0
for i in range(1, sig.size - 2):
# Note that sd is offset by one
if sd[i - 1] * sd[i] < 0:
strength = np.abs(sd[i - 1] - sd[i])
# TODO this differs from what I could find in ENZO
# there's |center - i| < |center - zero_cross| instead
# additionally zero_cross is undefined in first pass
if strength > zero_strength or (
strength == zero_strength
and np.abs(center - i) < np.abs(zero_cross - i)
):
zero_strength = strength
zero_cross = i
if zero_strength > max_strength:
max_axis = dim
dims = self.dimensions.copy()
li = self.left_index.copy()
dims[max_axis] = zero_cross
psg1 = ProtoSubgrid(self.flagged, li, dims)
li[max_axis] += zero_cross
dims[max_axis] = self.dimensions[max_axis] - zero_cross
offset = np.zeros(3)
offset[max_axis] = zero_cross
psg2 = ProtoSubgrid(self.flagged, li, dims, offset)
return [psg1, psg2]<|fim_middle|>find_by_second_derivative<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
current_dir = os.path.dirname(os.path.realpath(__file__))
valid_dir_path = os.path.join(current_dir, "resources", "gitlab_conf", "pass")
runner = Runner()
runner.gitlab.gitlab_conf_dir_path = valid_dir_path
checks = ["CKV_GITLAB_1", "CKV_GITLAB_2"]
report = runner.run(
root_folder=valid_dir_path,
runner_filter=RunnerFilter(checks=checks)
)
self.assertEqual(len(report.failed_checks), 0)
self.assertEqual(report.parsing_errors, [])
self.assertEqual(len(report.passed_checks), 2)
self.assertEqual(report.skipped_checks, [])<|fim_middle|>test_runner_object_passing_check<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional['outputs.GetInsightsResultsMetadataResponse']:
"""
The metadata from the get insights operation results.
"""
return pulumi.get(self, "meta_data")<|fim_middle|>meta_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(a, b):
return torch.mm(a, b)<|fim_middle|>torch_mm<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
ts = Touchstone("./tests/data/valid.s1p")
ts.load()
self.assertEqual(str(ts.opts), "# HZ S RI R 50")
self.assertEqual(len(ts.s11), 1010)
self.assertEqual(len(ts.s21), 0)
self.assertEqual(ts.r, 50)
ts = Touchstone("./tests/data/valid.s2p")
ts.load()
ts.gen_interpolation()
self.assertEqual(str(ts.opts), "# HZ S RI R 50")
self.assertEqual(len(ts.s11), 1020)
self.assertEqual(len(ts.s21), 1020)
self.assertEqual(len(ts.s12), 1020)
self.assertEqual(len(ts.s22), 1020)
self.assertIn("! Vector Network Analyzer VNA R2", ts.comments)
self.assertEqual(ts.min_freq(), 500000)
self.assertEqual(ts.max_freq(), 900000000)
self.assertEqual(ts.s_freq("11", 1),
Datapoint(1, -3.33238E-001, 1.80018E-004))
self.assertEqual(ts.s_freq("11", 750000),
Datapoint(750000, -0.3331754099382822,
0.00032433255669243524))
ts = Touchstone("./tests/data/ma.s2p")
ts.load()
self.assertEqual(str(ts.opts), "# MHZ S MA R 50")
ts = Touchstone("./tests/data/db.s2p")
ts.load()
self.assertEqual(str(ts.opts), "# HZ S DB R 50")
ts = Touchstone("./tests/data/broken_pair.s2p")
with self.assertLogs(level=logging.ERROR) as cm:
ts.load()
self.assertRegex(cm.output[0], "Data values aren't pairs")
ts = Touchstone("./tests/data/missing_pair.s2p")
with self.assertLogs(level=logging.ERROR) as cm:
ts.load()
self.assertRegex(cm.output[0], "Inconsistent number")
ts = Touchstone("./tests/data/nonexistent.s2p")
with self.assertLogs(level=logging.ERROR) as cm:
ts.load()
self.assertRegex(cm.output[0], "No such file or directory")<|fim_middle|>test_load<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(device_name: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetBandwidthScheduleResult]:
"""
Gets the properties of the specified bandwidth schedule.
:param str device_name: The device name.
:param str name: The bandwidth schedule name.
:param str resource_group_name: The resource group name.
"""
...<|fim_middle|>get_bandwidth_schedule_output<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
if module.needs_reduction:
module.needs_reduction = False
buckets = {}
for param in module.parameters():
if param.requires_grad and param.grad is not None:
tp = type(param.data)
if tp not in buckets:
buckets[tp] = []
buckets[tp].append(param)
if module.warn_on_half:
if torch.cuda.HalfTensor in buckets:
print("WARNING: gloo dist backend for half parameters may be extremely slow." +
" It is recommended to use the NCCL backend in this case. This currently requires" +
"PyTorch built from top of tree master.")
module.warn_on_half = False
for tp in buckets:
bucket = buckets[tp]
grads = [param.grad.data for param in bucket]
coalesced = _flatten_dense_tensors(grads)
dist.all_reduce(coalesced)
coalesced /= dist.get_world_size()
for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)):
buf.copy_(synced)<|fim_middle|>allreduce_params<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
engine = sqlalchemy.create_engine(DATABASE_URL)
metadata.drop_all(engine)
metadata.create_all(engine)
yield
metadata.drop_all(engine)<|fim_middle|>create_test_database<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
super(type(self), self).METHOD_NAME()
initializeTestModule_SingleInstance(self)
# Put test based setup code here. it is called once before every test<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
size = 2
for tile in self.tile_list:
size += 3
if not isinstance(tile.tiletype, expression.ConstantNumeric):
size += 2
return size<|fim_middle|>get_size<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
manifest = get_manifest()
endpoints = manifest.get("communicationEndpoints", [])
# prepend the DT_SAAS_URL because the communication endpoints might not be correct
endpoints.insert(0, _join_url(os.environ.get("DT_SAAS_URL"), "communication"))
return ";".join(endpoints)<|fim_middle|>get_connection_endpoint<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(dltensor):
dltensor = ctypes.py_object(dltensor)
if ctypes.pythonapi.PyCapsule_IsValid(dltensor, _c_str_dltensor):
ptr = ctypes.pythonapi.PyCapsule_GetPointer(dltensor, _c_str_dltensor)
# XXX(minjie): The below cast should be unnecessary given the code to
# set restype of PyCapsule calls. But weirdly, this does not
# work out always.
ptr = ctypes.cast(ptr, ctypes.c_void_p)
handle = DGLArrayHandle()
check_call(_LIB.DGLArrayFromDLPack(ptr, ctypes.byref(handle)))
ctypes.pythonapi.PyCapsule_SetName(dltensor, _c_str_used_dltensor)
ctypes.pythonapi.PyCapsule_SetDestructor(
dltensor, DGLPyCapsuleDestructor(0)
)
return _make_array(handle, False)
raise ValueError(
"Expect a dltensor field, PyCapsule can only be consumed once"
)<|fim_middle|>from_dlpack<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertIs(ct.get_entity('int'), int)<|fim_middle|>test_good_entity<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
The Azure location to which the resources in the service belong or should be deployed.
"""
return pulumi.get(self, "target_location")<|fim_middle|>target_location<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Verify that we truncate the preference value if it is too long.
"""
MAX_STRING_LENGTH = 12500
OVERSIZE_STRING_LENGTH = MAX_STRING_LENGTH + 10
self.user_preference.value = "z" * OVERSIZE_STRING_LENGTH
self.user_preference.save()
self.assert_user_setting_event_emitted(
setting=self.TEST_KEY, old=self.TEST_VALUE, new="z" * MAX_STRING_LENGTH, truncated=["new"]
)
self.user_preference.value = "x" * OVERSIZE_STRING_LENGTH
self.user_preference.save()
self.assert_user_setting_event_emitted(
setting=self.TEST_KEY, old="z" * MAX_STRING_LENGTH, new="x" * MAX_STRING_LENGTH, truncated=["old", "new"]
)<|fim_middle|>test_truncated_user_preference_event<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
A weird record with only a name results in nothing.
The attachment may contain bogus "name:" records with no info and those
should be eaten silently as we have no real data.
"""
stream = StringIO(MODINFO3)
parser = MultipleModinfoParser(stream)
result = ModinfoResult()
parser.run(result)
self.assertEqual(result.mod_data, {})<|fim_middle|>test_name_only_parse<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, media_dict, out_size=(16, 16), size_per_component=True, return_linear=True):
"""
Basic media-dict blurhash decoding.
out_size is the desired result size in pixels, either absolute or per blurhash
component (this is the default).
By default, this function will return the image as linear RGB, ready for further
scaling operations. If you want to display the image directly, set return_linear
to False.
Returns the decoded blurhash image as a three-dimensional list: [height][width][3],
with the last dimension being RGB colours.
For further info and tips for advanced usage, refer to the documentation for the
blurhash module: https://github.com/halcy/blurhash-python
"""
if not IMPL_HAS_BLURHASH:
raise NotImplementedError(
'To use the blurhash functions, please install the blurhash Python module.')
# Figure out what size to decode to
decode_components_x, decode_components_y = blurhash.components(media_dict["blurhash"])
if size_per_component:
decode_size_x = decode_components_x * out_size[0]
decode_size_y = decode_components_y * out_size[1]
else:
decode_size_x = out_size[0]
decode_size_y = out_size[1]
# Decode
decoded_image = blurhash.decode(media_dict["blurhash"], decode_size_x, decode_size_y, linear=return_linear)
# And that's pretty much it.
return decoded_image<|fim_middle|>decode_blurhash<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(alias_urn: str, result=result):
print(f"validating {alias_urn}")
if alias_urn != result:
raise Exception(f"expected {result} but got {alias_urn}")<|fim_middle|>validate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.session.close()<|fim_middle|>terminate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.METHOD_NAME() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.METHOD_NAME()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].METHOD_NAME())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Port, dict):
for key, value in self.items():
result[key] = value
return result<|fim_middle|>to_dict<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
config = self.default_config()
# This isn't a real configuration option but is used to provide the main
# homeserver and worker homeserver different options.
main_replication_secret = config.pop("main_replication_secret", None)
if main_replication_secret:
config["worker_replication_secret"] = main_replication_secret
return self.setup_test_homeserver(config=config)<|fim_middle|>make_homeserver<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(a: dace.float32[N]):
a *= 2
a *= 3<|fim_middle|>multiply<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self):
ui = utilities.make_ui()
cmd = g_commands.ExternalCommand('test -t 0', refocus=False)
await cmd.apply(ui)
ui.notify.assert_not_called()<|fim_middle|>test_no_spawn_no_stdin_attached<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
class User:
is_authenticated = True
self.assertEqual(user_is_authenticated(User()), True)<|fim_middle|>test_user_has_is_authenticated<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(val: Any) -> Optional[bool]:
"""Attempts to infer a value's unitary-ness via its _unitary_ method."""
getter = getattr(val, '_unitary_', None)
if getter is None:
return None
result = getter()
return result is not NotImplemented and result is not None<|fim_middle|>strat_has_unitary_from_unitary<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(ceph_cluster, **kw):
"""
CEPH-83574001 - Make the necessary changes in the config file to export the NFS share with RO and RW
access, mount the share accordingly, and verify that the permissions set on the export path work.
Pre-requisites:
1. Create cephfs volume
ceph fs volume create <vol_name>
2. Create nfs cluster
ceph nfs cluster create <nfs_name> <nfs_server>
Test operation:
1. Create nfs cluster with same name
ceph nfs cluster create <nfs_name> <nfs_server>
2. Create cephfs nfs export
ceph nfs export create cephfs <fs_name> <nfs_name> <nfs_export_name> path=<export_path>
3. Make export Readonly
4. Mount nfs mount with cephfs export
mount -t nfs -o port=2049 <nfs_server>:<nfs_export> <nfs_mounting_dir>
5. Create a file; it should fail with a permission denied error
6. Make the export ReadWrite (RW)
7. Mount it and try creating the file again; it should pass
Clean-up:
1. Remove data in cephfs
2. Remove cephfs nfs export
3. Remove all nfs mounts
"""
try:
tc = "CEPH-83574001"
log.info(f"Running cephfs {tc} test case")
config = kw["config"]
build = config.get("build", config.get("rhbuild"))
fs_util = FsUtils(ceph_cluster)
clients = ceph_cluster.get_ceph_objects("client")
client1 = clients[0]
fs_util.prepare_clients(clients, build)
fs_util.auth_list(clients)
mon_node_ip = fs_util.get_mon_node_ips()
mon_node_ip = ",".join(mon_node_ip)
rhbuild = config.get("rhbuild")
nfs_servers = ceph_cluster.get_ceph_objects("nfs")
nfs_server = nfs_servers[0].node.hostname
nfs_name = "cephfs-nfs"
clients = ceph_cluster.get_ceph_objects("client")
client1 = clients[0]
nfs_mounting_dir = "/mnt/nfs_" + "".join(
secrets.choice(string.ascii_uppercase + string.digits) for i in range(5)
)
out, rc = client1.exec_command(
sudo=True, cmd=f"ceph nfs cluster create {nfs_name} {nfs_server}"
)
if not wait_for_process(client=client1, process_name=nfs_name, ispresent=True):
raise CommandFailed("Cluster has not been created")
out, rc = client1.exec_command(sudo=True, cmd="ceph nfs cluster ls")
output = out.split()
if nfs_name in output:
log.info("ceph nfs cluster created successfully")
else:
raise CommandFailed("Failed to create nfs cluster")
nfs_export_name = "/export_" + "".join(
secrets.choice(string.digits) for i in range(3)
)
export_path = "/"
fs_name = "cephfs"
if "5.0" in rhbuild:
client1.exec_command(
sudo=True,
cmd=f"ceph nfs export create cephfs {fs_name} {nfs_name} "
f"{nfs_export_name} path={export_path}",
)
else:
client1.exec_command(
sudo=True,
cmd=f"ceph nfs export create cephfs {nfs_name} "
f"{nfs_export_name} {fs_name} path={export_path}",
)
client1.exec_command(
sudo=True,
cmd=f"ceph nfs export get {nfs_name} {nfs_export_name} > export.conf",
)
out, rc = client1.exec_command(
sudo=True,
cmd="cat export.conf",
)
log.info(f"config file for {nfs_export_name}: {out}")
log.info("Make export as Readonly")
client1.exec_command(
sudo=True,
cmd="sed -i 's/RW/RO/g' export.conf",
)
out, rc = client1.exec_command(
sudo=True,
cmd="cat export.conf",
)
log.info(f"config file for {nfs_export_name}: {out}")
log.info("Apply the config")
client1.exec_command(
sudo=True,
cmd=f"ceph nfs export apply {nfs_name} -i export.conf",
)
rc = fs_util.cephfs_nfs_mount(
client1, nfs_server, nfs_export_name, nfs_mounting_dir
)
if not rc:
log.error("cephfs nfs export mount failed")
return 1
out, err = client1.exec_command(
sudo=True, cmd=f"touch {nfs_mounting_dir}/file", check_ec=False
)
log.info(err)
if not err:
raise CommandFailed("NFS export has permission to write")
log.info("umount the export")
client1.exec_command(sudo=True, cmd=f"umount {nfs_mounting_dir}")
log.info("Make export as ReadWrite")
client1.exec_command(
sudo=True,
cmd="sed -i 's/RO/RW/g' export.conf",
)
out, rc = client1.exec_command(
sudo=True,
cmd="cat export.conf",
)
log.info("Apply the config")
client1.exec_command(
sudo=True,
cmd=f"ceph nfs export apply {nfs_name} -i export.conf",
)
rc = fs_util.cephfs_nfs_mount(
client1, nfs_server, nfs_export_name, nfs_mounting_dir
)
client1.exec_command(sudo=True, cmd=f"touch {nfs_mounting_dir}/file")
return 0
except Exception as e:
log.error(e)
log.error(traceback.format_exc())
return 1
finally:
log.info("Cleaning up the system")
commands = [
f"rm -rf {nfs_mounting_dir}/*",
f"umount {nfs_mounting_dir}",
f"ceph nfs export delete {nfs_name} {nfs_export_name}",
]
for command in commands:
client1.exec_command(sudo=True, cmd=command)
client1.exec_command(
sudo=True, cmd=f"rm -rf {nfs_mounting_dir}/", check_ec=False
)
client1.exec_command(sudo=True, cmd="rm -rf export.conf", check_ec=False)<|fim_middle|>run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"Given a bad override, the get_parsed_config_args_context() should leave the configuration untouched"
with self.assertRaises(SystemExit) as caught:
with self._cli(["-o", "string_value=bla", "-o", "int_value=hello"]):
pass
self.assertNotEqual(caught.exception.code, 0)
self.assertEqual(self.config.root.int_value, 0)
self.assertEqual(self.config.root.string_value, "")<|fim_middle|>test_config_assign_wrong_path_restores_config<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.setup_clean_chain = True
self.num_nodes = 3
# discardfee is used to make change outputs less likely in the change_pos test
self.extra_args = [
[],
["-discardfee=1"],
["-avoidpartialspends", "-discardfee=1"]
]<|fim_middle|>set_test_params<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(lf, rf):
"""Creates a filter that keeps the union of two filters.
Args:
lf: first filter
rf: second filter
Returns:
A filter function that keeps the union of the paths kept by the two filters.
"""
def keep(paths):
l = set(lf(paths))
r = set(rf(paths))
return sorted(list(l|r))
return keep<|fim_middle|>union<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(user_id):
METHOD_NAME = User.query.filter_by(id=user_id).one()
permissions = Permission.query.all()
class UserForm(Form):
note = StringField("Check-in note (will be shown to check-in operator)")
add_note = SubmitField("Save Note")
change_permissions = SubmitField("Change")
for permission in permissions:
setattr(
UserForm,
"permission_" + permission.name,
BooleanField(
permission.name, default=METHOD_NAME.has_permission(permission.name, False)
),
)
form = UserForm()
if form.validate_on_submit():
if form.change_permissions.data:
for permission in permissions:
field = getattr(form, "permission_" + permission.name)
if METHOD_NAME.has_permission(permission.name, False) != field.data:
app.logger.info(
"user %s (%s) %s: %s -> %s",
METHOD_NAME.name,
METHOD_NAME.id,
permission.name,
METHOD_NAME.has_permission(permission.name, False),
field.data,
)
if field.data:
METHOD_NAME.grant_permission(permission.name)
else:
METHOD_NAME.revoke_permission(permission.name)
db.session.commit()
elif form.add_note.data:
METHOD_NAME.checkin_note = form.note.data
db.session.commit()
return redirect(url_for(".user", user_id=METHOD_NAME.id))
form.note.data = METHOD_NAME.checkin_note
return render_template(
"admin/users/user.html", METHOD_NAME=METHOD_NAME, form=form, permissions=permissions
)<|fim_middle|>user<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, queryset):
"""
Return most recent to least recent badge.
"""
return queryset.order_by('-created')<|fim_middle|>filter_queryset<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters<|fim_middle|>header_parameters<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(data):
"""Check that there is a valid schema and all FtM conform to it."""
schema = model.get(data.get("schema"))
if schema is None:
raise InvalidData(gettext("No schema on entity"))
# This isn't strictly required because the proxy will contain
# only those values that can be inserted for each property,
# making it valid -- all this does, therefore, is to raise an
# exception that notifies the user.
schema.validate(data)<|fim_middle|>validate_entity<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>(self):<|fim_middle|>set_select_path<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(_: dt.Date, value, name):
return bq.ScalarQueryParameter(
name, "DATE", pd.Timestamp(value).to_pydatetime().date()
)<|fim_middle|>bq_param_date<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
@with_meta
class Test:
class Meta:
foo = 'foo'
def __init__(self, foo):
self.foo = foo
t = Test(foo='bar')
assert t.foo == 'bar'<|fim_middle|>test_args_override_by_name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, msg: MessageFromBackend) -> None:
with self._response_lock:
super().METHOD_NAME(msg)<|fim_middle|>send_message<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
result = self.vm.qmp('x-debug-block-dirty-bitmap-sha256',
node='drive0', name='bitmap0')
return result['return']['sha256']<|fim_middle|>get_sha256<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self) -> AsyncIterator[Record]:
# Execute the query to get all records
await self.conn.execute(self.parent.config.METHOD_NAME)
# Grab records batch by batch until none are left
result = [True]
while result:
# Grab another batch
result = await self.conn.fetchmany()
if not result:
continue
# Convert row objects to Record objects
for row in result:
yield self.row_to_record(row)<|fim_middle|>records<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
is_success = False
error = '-'
try:
recipients_part_one = self.execution.snapshot.get('recipients_part_one', [])
recipients_part_two = self.execution.snapshot.get('recipients_part_two', [])
if not recipients_part_one and not recipients_part_two:
print(
'\n'
'\033[32m>>> No recipients assigned to this backup task\033[0m'
''
)
if recipients_part_one and recipients_part_two:
files = self.create_excel(section='front')
self.send_backup_mail(files, recipients_part_one)
files = self.create_excel(section='back')
self.send_backup_mail(files, recipients_part_two)
else:
recipients = recipients_part_one or recipients_part_two
files = self.create_excel()
self.send_backup_mail(files, recipients)
except Exception as e:
self.is_frozen = True
print('Task execution was interrupted by an exception')
print('The traceback of the exception is printed below:')
print(e)
error = str(e)
else:
is_success = True
finally:
reason = error
self.step_perform_task_update(is_success, reason)
self.step_finished(is_success)<|fim_middle|>run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
support.run_unittest(StructSeqTest)<|fim_middle|>test_main<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>(self): # Called by wizardvideo.xml.<|fim_middle|>rate_selection_moved<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(dgt, qtindex):
if utool.VERBOSE:
print('pressed button')
butkw = dgt.get_index_butkw(qtindex)
callback = butkw['clicked']
callback()<|fim_middle|>on_button_click<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Return a basic dictionary with information about the channel.
"""
simple_dict = super().METHOD_NAME()
epoch = datetime.utcfromtimestamp(0)
simple_dict.update(
{
"infohash": hexlify(self.infohash),
"size": self.size,
"num_seeders": self.health.seeders,
"num_leechers": self.health.leechers,
"last_tracker_check": self.health.last_check,
"created": int((self.torrent_date - epoch).total_seconds()),
"tag_processor_version": self.tag_processor_version,
}
)
return simple_dict<|fim_middle|>to_simple_dict<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, asset):
"""
Get hierarchy from Asset entity
:param asset: Asset entity
:type asset: dict
:returns: hierarchy string
:rtype: str
"""
return asset['data']['hierarchy']<|fim_middle|>get_hierarchy<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(private_key: str, *reports: dict):
"""Decrypt all issue tracker credentials in the reports."""
for report in reports:
for secret_attribute in ("password", "private_token"):
if secret_attribute in report.get("issue_tracker", {}).get("parameters", {}):
credential = decrypt_credential(private_key, report["issue_tracker"]["parameters"][secret_attribute])
report["issue_tracker"]["parameters"][secret_attribute] = credential<|fim_middle|>decrypt_issue_tracker_credentials<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(argv):
test_suite_name = argv[2]
Monoids = argv[3].split(";")
Binops = argv[4].split(";")
Semirings = argv[5]
DataTypes = argv[6].split(";")
# Hard-coding data shapes for now
Kernels= argv[7]
return argv[1], test_suite_name, Monoids, Binops, Semirings, DataTypes, DataShapes, Kernels<|fim_middle|>load_types<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Version 2.0.1 uses make.sys,
# other verions use make.inc
if self.spec.satisfies("@2.0.1"):
filename = "make.sys"
else:
filename = "make.inc"
abspath = join_path(self.stage.source_path, filename)
return abspath<|fim_middle|>makefile_name<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>(self):<|fim_middle|>store_state<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
boto3mod.cache_id(
self.service, self.resource_name, resource_id=self.resource_id
)
self.assertEqual(
boto3mod.cache_id(self.service, self.resource_name), self.resource_id
)<|fim_middle|>test_set_and_get_with_no_auth<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request):
data = json.loads(request.body)
action_id = data["action"]
resources = data.get("resources", [])
subject = Subject("user", request.user.username)
action = Action(action_id)
resource = [Resource(r["system"], r["type"], str(r["id"]), r["attributes"]) for r in resources]
iam = get_iam_client()
try:
METHOD_NAME = iam.is_allowed(Request(conf.SYSTEM_ID, subject, action, resource, None))
except (AuthInvalidRequest, AuthAPIError) as e:
return standard_response(False, str(e))
return standard_response(True, "success", {"is_allow": METHOD_NAME})<|fim_middle|>is_allow<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(meta_data: MetaDataSet) -> None:
image_coordinates = {}
for image, lat, lon in meta_data.images_with_gps():
image_coordinates[image] = [lon, lat]
features = []
images, positions, labels, centers = meta_data.load_clusters()
for image, label in zip(images, labels):
features.append(
{
"type": "Feature",
"geometry": {"type": "Point", "coordinates": image_coordinates[image]},
"properties": {"name": image, "submodel": int(label)}, # cluster_idx
}
)
geojson = {"type": "FeatureCollection", "features": features}
meta_data.save_clusters_geojson(geojson)<|fim_middle|>save_clusters_geojson<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
sleep(get_cool_off().total_seconds())<|fim_middle|>cool_off<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(account_summary, relevant_tag):
result = dict(
[
(summary_item.currency, summary_item.value)
for summary_item in account_summary
if summary_item.tag == relevant_tag
]
)
return result<|fim_middle|>extract_currency_dict_for_tag_from_account<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(path, field_name, content_type):
# adapted from here: http://groups.google.com/group/django-users/browse_th\
# read/thread/834f988876ff3c45/
f = open(path, 'rb')
return InMemoryUploadedFile(
file=f,
field_name=field_name,
name=f.name,
content_type=content_type,
size=os.path.getsize(path),
charset=None
)<|fim_middle|>django_file<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return 'off' if self.data['is-wanted'] is False else self.data['priority']<|fim_middle|>get_raw_value<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(SMTP, *a, **kwargs):
return self._on_quit(SMTP, *a, **kwargs)<|fim_middle|>unbound_on_quit<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Check misc parameters of news models."""
self.assertEqual(str(self.entry1), self.entry1.title)<|fim_middle|>test_news_other<|file_separator|> |