text: string column, lengths 67 to 7.88k
<|fim_prefix|>def <|fim_suffix|>(self, path, args=None, headers=None):
# http://www.akleg.gov/apptester.html
# http://www.akleg.gov/basis/BasisPublicServiceAPI.pdf
# http://www.legis.state.ak.us/publicservice/basis/meetings?minifyresult=false&session=31
# X-Alaska-Legislature-Basis-Version:1.2
# X-Alaska-Legislature-Basis-Query:meetings;details
headers["X-Alaska-Legislature-Basis-Version"] = "1.2"
url = "{}{}".format(self.API_BASE, path)
page = self.get(url, params=args, headers=headers, verify=False)
page = lxml.etree.fromstring(page.content)
return page<|fim_middle|>api_request<|file_separator|> |
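A hypothetical usage sketch for this record's masked method (the name comes from its <|fim_middle|> token; the `scraper` instance and the query values are assumptions taken from the commented URLs above):

    # Fetch the meetings feed for session 31 of the Alaska Legislature BASIS API.
    page = scraper.api_request("meetings", args={"minifyresult": "false", "session": "31"})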
<|fim_prefix|>def <|fim_suffix|>(self):
    import tvm
    import numpy as np  # needed for the random test data below
    from topi.util import get_const_tuple
# TVM does not support batched matmuls, so we fake it
I = tvm.placeholder((self.bs * self.i, self.k), name='I')
W = tvm.placeholder((self.j, self.k), name='W')
B = tvm.placeholder((self.j,), name='B')
i_shape = get_const_tuple(I.shape)
w_shape = get_const_tuple(W.shape)
b_shape = get_const_tuple(B.shape)
dtype = I.dtype
i_np = np.random.uniform(size=i_shape).astype(dtype)
w_np = np.random.uniform(size=w_shape).astype(dtype)
b_np = np.random.uniform(size=b_shape).astype(dtype)
return I, W, B, i_np, w_np, b_np<|fim_middle|>create_dataset_tvm<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.client.force_login(self.proofreader)
response = self.client.get(
reverse(
'do_proofing',
kwargs={
'proofing_task_id': self.archived_proofing_task.pk
}
)
)
    self.assertEqual(
        response.status_code,
        200,
)<|fim_middle|>test_active_article_task_200s<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""
Test getting a list of minions
"""
ret = {
"id": "test-id-1",
"jid": "1",
}
etcd_return.returner(ret)
ret = {
"id": "test-id-2",
"jid": "2",
}
etcd_return.returner(ret)
retval = etcd_return.get_minions()
assert len(retval) == 2
assert "test-id-1" in retval
assert "test-id-2" in retval<|fim_middle|>test_get_minions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, host):
# self.payload = {'image': '', 'shape': []}
new_instance = cls(host)
return new_instance<|fim_middle|>init_gui<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
:return:
"""
pipeline = self.transforms.get_equivalent_preprocessing()
params = dict(
conf=0.05,
image_processor={Processings.ComposeProcessing: {"processings": pipeline}},
edge_links=self.edge_links,
edge_colors=self.edge_colors,
keypoint_colors=self.keypoint_colors,
)
return params<|fim_middle|>get_dataset_preprocessing_params<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None:
d1 = {"a": 0, "b": 1}
assert FrozenDict(d1).items() == d1.items()<|fim_middle|>test_items<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, __flags: int = 0): ...<|fim_middle|>dooneevent<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(keyword_cmds_re, builtin_cmds_re, context=""):
return [
(keyword_cmds_re, Keyword, 'params' + context),
(builtin_cmds_re, Name.Builtin, 'params' + context),
(r'([\w.-]+)', Name.Variable, 'params' + context),
(r'#', Comment, 'comment'),
]<|fim_middle|>gen_command_rules<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(name, data, path, font, is_module):
"""
Create screenshot of py3status output and save to path
"""
desktop_color = get_color_for_name(name)
# if this screenshot is for a module then add modules name etc
if is_module:
data.append({"full_text": name.split("-")[0], "color": desktop_color, "separator": True})
data.append({"full_text": "py3status", "color": COLOR_PY3STATUS, "separator": True})
img = Image.new("RGB", (WIDTH, HEIGHT), COLOR_BG)
d = ImageDraw.Draw(img)
# top bar
d.rectangle((0, 0, WIDTH, TOP_BAR_HEIGHT), fill=desktop_color)
x = X_OFFSET
# add text and separators
for part in reversed(data):
text = part.get("full_text")
color = part.get("color", COLOR)
background = part.get("background")
separator = part.get("separator")
urgent = part.get("urgent")
# urgent background
if urgent:
color = COLOR_URGENT
background = COLOR_URGENT_BG
size = font.getbbox(text)[-2:]
if background:
d.rectangle(
(
WIDTH - x - (size[0] // SCALE),
TOP_BAR_HEIGHT + PADDING,
WIDTH - x - 1,
HEIGHT - PADDING,
),
fill=background,
)
x += size[0] // SCALE
txt = Image.new("RGB", size, background or COLOR_BG)
d_text = ImageDraw.Draw(txt)
d_text.text((0, 0), text, font=font, fill=color)
# resize to actual size wanted and add to image
txt = txt.resize((size[0] // SCALE, size[1] // SCALE), Image.LANCZOS)
img.paste(txt, (WIDTH - x, TOP_BAR_HEIGHT + PADDING))
if separator:
x += SEP_PADDING_RIGHT
d.line(
(
(WIDTH - x, TOP_BAR_HEIGHT + PADDING),
(WIDTH - x, TOP_BAR_HEIGHT + 1 + PADDING + FONT_SIZE),
),
fill=COLOR_SEP,
width=1,
)
x += SEP_PADDING_LEFT
img.save(path / f"{name}.png")
print(f" {name}.png")<|fim_middle|>create_screenshot<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(checked: Iterable):
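    # NOTE: `i` below is a free variable captured from an enclosing scope
    # (not shown in this snippet); assumed to be an iterable of allowed values.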
return bool(set(checked) & frozenset(i))<|fim_middle|>at_least_one_in<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, input_shape):
return input_shape<|fim_middle|>compute_output_shape<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
if not any(output.is_linked for output in self.outputs):
return
vertices_s = self.inputs['Vertices'].sv_get()
edges_s = self.inputs['Edges'].sv_get(default=[[]])
faces_s = self.inputs['Faces'].sv_get()
init_masks_s = self.inputs['InitMask'].sv_get() # mandatory input
    obstacle_masks_s = self.inputs['ObstacleMask'].sv_get(default=[[]])
wave_front_out = []
wave_distances_out = []
start_idxs_out = []
meshes = zip_long_repeat(vertices_s, edges_s, faces_s, init_masks_s, obstacle_masks_s)
for vertices, edges, faces, init_mask, obstacle_masks in meshes:
bm = bmesh_from_pydata(vertices, edges, faces)
if obstacle_masks:
if self.mode == 'face':
fullList(obstacle_masks, len(faces))
bm.faces.layers.int.new("wave_obstacle")
bm.faces.ensure_lookup_table()
fill_faces_layer(bm, obstacle_masks, "wave_obstacle", int, 1)
else: #verts
fullList(obstacle_masks, len(vertices))
bm.verts.layers.int.new("wave_obstacle")
bm.verts.ensure_lookup_table()
fill_verts_layer(bm, obstacle_masks, "wave_obstacle", int, 1)
if self.mode == 'face':
by_vert = self.face_mode == 'vertex'
new_wave_front = wave_markup_faces(bm, init_mask, neighbour_by_vert=by_vert, find_shortest_path=True)
distance = bm.faces.layers.float.get("wave_path_distance")
start = bm.faces.layers.int.get("wave_start_index")
new_distances = [face[distance] for face in bm.faces]
new_starts = [face[start] for face in bm.faces]
else: # verts
by_edge = self.vert_mode == 'edge'
new_wave_front = wave_markup_verts(bm, init_mask, neighbour_by_edge=by_edge, find_shortest_path=True)
distance = bm.verts.layers.float.get("wave_path_distance")
start = bm.verts.layers.int.get("wave_start_index")
new_distances = [vert[distance] for vert in bm.verts]
new_starts = [vert[start] for vert in bm.verts]
bm.free()
wave_front_out.append(new_wave_front)
wave_distances_out.append(new_distances)
start_idxs_out.append(new_starts)
self.outputs['WaveFront'].sv_set(wave_front_out)
self.outputs['WaveDistance'].sv_set(wave_distances_out)
self.outputs['StartIdx'].sv_set(start_idxs_out)<|fim_middle|>process<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cause_error: bool):
keyboard = create_keyboard(cause_error)
sent_string = "ABC"
if cause_error:
# Verify the test setup and swallow the exception created by the Mock
assert_that(
calling(keyboard.send_keys).with_args(sent_string, send_mode=keyboard.SendMode.KEYBOARD),
raises(Exception))
else:
keyboard.send_keys(sent_string, send_mode=keyboard.SendMode.KEYBOARD)
mock_mediator: MagicMock = keyboard.mediator
mock_mediator.send_string.assert_called_once_with(sent_string)
mock_mediator.paste_string.assert_not_called()
mock_mediator.interface.finish_send.assert_called_once()<|fim_middle|>test_send_keys_send_mode_keyboard<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
""":return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
return self._append_app_name_and_version(cast(str, _android_folder()), "files")<|fim_middle|>user_data_dir<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(testapp, functional_characterization_experiment_item):
testapp.post_json('/functional_characterization_experiment', functional_characterization_experiment_item, status=201)<|fim_middle|>test_valid_functional_characterization_experiment<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(geom_input: AnyStr) -> OGRGeometry: ...<|fim_middle|>from_json<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(f0, dur, ratio, frm_tol=5):
if f0 is None:
seg_f0 = torch.zeros_like(dur, dtype=torch.float)
else:
seg_f0 = align_f0_to_durations(f0, dur, ratio, tol=frm_tol * ratio)
    return seg_f0.numpy()  # admittedly a hacky conversion to numpy<|fim_middle|>align_f0<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
settings: Settings, kafka: str, kafka_topics: Tuple[str, str]
) -> Settings:
input_topic, output_topic = kafka_topics
settings.kafka_enabled = True
settings.kafka_servers = kafka
settings.kafka_topic_input = input_topic
settings.kafka_topic_output = output_topic
return settings<|fim_middle|>kafka_settings<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(array):
    host, port = array.split(':')
# convert host from mangled-per-four-bytes form as used by kernel
host = unhexlify(host)
host_out = ''
for x in range(0, len(host)//4):
(val,) = struct.unpack('=I', host[x*4:(x+1)*4])
host_out += '%08x' % val
    return host_out, int(port, 16)<|fim_middle|>convert_ip_port<|file_separator|> |
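A quick hand-check of the conversion (hypothetical input in the /proc/net/tcp hex "HOST:PORT" style; '=I' unpacks in native byte order, so this assumes a little-endian host):

    # "0100007F" unhexlifies to b'\x01\x00\x00\x7f', unpacked as 0x7f000001 (127.0.0.1);
    # port "0050" is 0x50 == 80.
    host, port = convert_ip_port("0100007F:0050")
    assert host == "7f000001" and port == 80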
<|fim_prefix|>def <|fim_suffix|>(*args, **kwargs):
with self._mutex:
# Dereference args corresponding to params, leaving others unchanged.
args_with_dereferenced_params = [
self._dereference_params(arg) for arg in args
]
kwargs_with_dereferenced_params = {
key: self._dereference_params(value)
for key, value in kwargs.items()
}
self._call_cnt += 1
# Maybe update params, depending on client configuration.
if self._variable_client is not None:
self._variable_client.update()
return handler(*args_with_dereferenced_params,
**kwargs_with_dereferenced_params)<|fim_middle|>dereference_params_and_call_handler<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, node: ASTExpressionNode) -> str:
"""
Prints a bit operator in NEST syntax.
:param op: a bit operator object
:return: a string representation
"""
op = node.get_binary_operator()
lhs = self.print(node.get_lhs())
rhs = self.print(node.get_rhs())
if op.is_bit_shift_left:
return lhs + " << " + rhs
if op.is_bit_shift_right:
return lhs + " >> " + rhs
if op.is_bit_and:
return lhs + " & " + rhs
if op.is_bit_or:
return lhs + " | " + rhs
if op.is_bit_xor:
return lhs + " ^ " + rhs
raise RuntimeError("Cannot determine bit operator!")<|fim_middle|>print_bit_operator<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(clr, grad, group_grad_clip):
if group_grad_clip > 0:
norm = grad.norm(2).item()
if norm > group_grad_clip:
clr *= group_grad_clip / (norm + 1e-10)
return clr<|fim_middle|>clip_grad<|file_separator|> |
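A small numeric sketch of the scaling (hypothetical values):

    import torch
    grad = torch.full((4,), 5.0)     # L2 norm = sqrt(4 * 25) = 10
    clr = clip_grad(0.1, grad, 5.0)  # norm 10 > clip 5, so clr *= 5 / 10
    assert abs(clr - 0.05) < 1e-6    # up to the 1e-10 stabiliser in the denominator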
<|fim_prefix|>def <|fim_suffix|>(self):
r"""
Check that the link to a random curve works.
"""
homepage = self.tc.get("/EllipticCurve/Q/").get_data(as_text=True)
self.check(homepage, "/EllipticCurve/Q/random",
'Minimal Weierstrass equation')<|fim_middle|>test_random<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(doc):
c = doc['location']['coords']
    # Is c a list with exactly two elements?
    return type(c) is list and len(c) == 2<|fim_middle|>old_coordinates_format<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Return the hook type of the decorator.
:rtype: HookType
"""
raise NotImplementedError('No hook_type defined for class.')<|fim_middle|>hook_type<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(metadata):
if not metadata:
return
uri = metadata.get('uri', '')
text = metadata.get('title', '')
artists = metadata.get('artists', [])
if artists:
text += ' - '
text += fmt_artists_names(artists)
if uri:
if text:
print(f'Playing: {uri} # {text}')
else:
print(f'Playing: {uri}')
else:
print(f'Playing: {text}')<|fim_middle|>on_metadata_changed<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
raise NotImplementedError()<|fim_middle|>uses_complex<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> rclpy.node.Client:
return self.create_client(WaypointPush, (self._plugin_ns, "push"))<|fim_middle|>cli_push<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
zone_key: str = "JP-KY",
session: Optional[Session] = None,
target_datetime: Optional[datetime] = None,
logger: Logger = getLogger(__name__),<|fim_middle|>fetch_production<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
assert self.auth.perform(self.achalls) == \
[achall.response(achall.account_key) for achall in self.achalls]
assert self.mock_get_display().notification.call_count == len(self.achalls)
for i, (args, kwargs) in enumerate(self.mock_get_display().notification.call_args_list):
achall = self.achalls[i]
assert achall.validation(achall.account_key) in args[0]
assert kwargs['wrap'] is False<|fim_middle|>test_manual_perform<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(hass, config, async_add_entities, discovery_info=None):
hass.data.setdefault(DATA_KEY, {})
hass.data[DOMAIN]['add_entities'][ENTITY_DOMAIN] = async_add_entities
config['hass'] = hass
model = str(config.get(CONF_MODEL) or '')
spec = hass.data[DOMAIN]['miot_specs'].get(model)
entities = []
if isinstance(spec, MiotSpec):
for srv in spec.get_services('ir_aircondition_control'):
if not srv.actions:
continue
entities.append(MiotActionsEntity(config, srv))
for entity in entities:
hass.data[DOMAIN]['entities'][entity.unique_id] = entity
async_add_entities(entities, update_before_add=True)
bind_services_to_entries(hass, SERVICE_TO_METHOD)<|fim_middle|>async_setup_platform<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(make_stubber, monkeypatch, error_code):
s3_resource = boto3.resource('s3')
s3_stubber = make_stubber(s3_resource.meta.client)
user_key = MagicMock(id='test-access-key-id', secret='test-secret')
def get_s3(client, aws_access_key_id, aws_secret_access_key):
assert aws_access_key_id == user_key.id
assert aws_secret_access_key == user_key.secret
return s3_resource
monkeypatch.setattr(boto3, 'resource', get_s3)
s3_stubber.stub_list_buckets([], error_code)
if error_code is None:
with pytest.raises(RuntimeError):
assume_role.show_access_denied_without_role(user_key)
elif error_code == 'AccessDenied':
assume_role.show_access_denied_without_role(user_key)
elif error_code == 'TestException':
with pytest.raises(ClientError):
assume_role.show_access_denied_without_role(user_key)<|fim_middle|>test_show_access_denied_without_role<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(url: str, retry_on_404: bool = True, num_retries: int = 10, timeout: int | None = None) -> str: ...<|fim_middle|>retry_url<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(week_range: Tuple[int, int]) -> int:
start = time_value_to_week(week_range[0])
end = time_value_to_week(week_range[1])
acc = end.week - start.week
# accumulate the number of weeks in the years between
for y in range(start.year, end.year):
year = Year(y)
acc += year.totalweeks()
return acc + 1 # same week should lead to 1 week that will be queried<|fim_middle|>weeks_in_range<|file_separator|> |
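A worked example, assuming MMWR/epi week numbering (as in the epiweeks-style helpers used above), where 2020 has 53 weeks:

    # weeks 50, 51, 52, 53 of 2020 plus weeks 1, 2 of 2021 -> 6 weeks
    assert weeks_in_range((202050, 202102)) == 6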
<|fim_prefix|>def <|fim_suffix|>(model: onnx.ModelProto, nncf_graph: NNCFGraph) -> onnx.ModelProto:
return METHOD_NAME(model, nncf_graph)<|fim_middle|>remove_fq_from_inputs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, a0: QtGui.QResizeEvent) -> None: ...<|fim_middle|>resize_event<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(exercise: Dict[str, Any]) -> datetime.datetime:
deadlines = _get_exercise_common_deadlines(exercise)
personal_deadline = exercise['personal_deadline']
if personal_deadline is not None:
deadlines.append(personal_deadline)
return max(deadlines)<|fim_middle|>get_exercise_deadline<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
with self.assertRaisesRegex(
Exception,
(
"manifest file test section 'manifest' contains "
"unknown field 'invalid.field'"
),
):
ManifestParser(
"test",
"""<|fim_middle|>test_unknown_field<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# End-to-end test to verify the load is in fact lazy.
importer = TestingImporter()
assert importer.loaded is None
with test_util.uncache(importer.module_name):
with test_util.import_state(meta_path=[importer]):
module = importlib.import_module(importer.module_name)
self.assertIsNone(importer.loaded)
# Trigger load.
self.assertEqual(module.__loader__, importer)
self.assertIsNotNone(importer.loaded)
self.assertEqual(module, importer.loaded)<|fim_middle|>test_e2e<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
name: str, relative_to: str | Path | None = SOURCE_DIR
) -> Path:
path = _get_root_example_path_by_name(name, relative_to)
if path.is_dir():
return path / "main.py"
else:
return path.with_suffix(".py")<|fim_middle|>get_main_example_file_by_name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, data):
"""
Callback that processes the input data and publishes to the corresponding topics.
:param data: input message
:type data: sensor_msgs.msg.Image
"""
# Convert sensor_msgs.msg.Image into OpenDR Image
image = self.bridge.from_ros_image(data, encoding='bgr8')
# Run object detection
boxes = self.object_detector.infer(image, threshold=0.45, keep_size=False)
# Publish detections in ROS message
ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes
if self.object_publisher is not None:
self.object_publisher.publish(ros_boxes)
if self.image_publisher is not None:
# Get an OpenCV image back
image = image.opencv()
# Annotate image with object detection boxes
image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes)
# Convert the annotated OpenDR image to ROS2 image message using bridge and publish it
self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8'))<|fim_middle|>callback<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cursor, default_owner_id):
current_resources = Redis.hgetall("resources")
cursor.execute(""" SELECT
ProbeSetFreeze.Id, ProbeSetFreeze.Name, ProbeSetFreeze.confidentiality, ProbeSetFreeze.public
FROM
ProbeSetFreeze""")
resource_results = cursor.fetchall()
    for resource in resource_results:
resource_ob = {}
resource_ob['name'] = resource[1]
resource_ob['owner_id'] = default_owner_id
resource_ob['data'] = {"dataset": str(resource[0])}
resource_ob['type'] = "dataset-probeset"
if resource[2] < 1 and resource[3] > 0:
resource_ob['default_mask'] = {"data": "view",
"metadata": "view",
"admin": "not-admin"}
else:
resource_ob['default_mask'] = {"data": "no-access",
"metadata": "no-access",
"admin": "not-admin"}
resource_ob['group_masks'] = {}
add_resource(resource_ob, update=False)<|fim_middle|>insert_probeset_resources<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
check: BaseCheck, entity_configuration: dict[str, Any], runner_filter: RunnerFilter, report_type: str
) -> bool:
check_id_allowlist = runner_filter.checks
check_id_denylist = runner_filter.skip_checks
if check_id_allowlist or runner_filter.check_threshold:
# Allow list provides namespace-only allows, check-only allows, or both
# If namespaces not specified, all namespaces are scanned
# If checks not specified, all checks are scanned
if any("_" in check_id for check_id in check_id_allowlist) or runner_filter.check_threshold:
# a Kubernetes namespace can't have an '_' in its name,
# therefore we assume it is a built-in or custom check
if not runner_filter.should_run_check(check=check, report_type=report_type):
return False
allowed_namespaces = [check_id for check_id in check_id_allowlist if "_" not in check_id]
if allowed_namespaces:
# Check if namespace in allowed namespaces
if "metadata" in entity_configuration and "namespace" in entity_configuration["metadata"]:
if entity_configuration["metadata"]["namespace"] in allowed_namespaces:
return True
elif "parent_metadata" in entity_configuration and "namespace" in entity_configuration["parent_metadata"]:
if entity_configuration["parent_metadata"]["namespace"] in allowed_namespaces:
return True
else:
if "default" in allowed_namespaces:
return True
else:
# No namespaces to filter
return True
elif check_id_denylist or runner_filter.skip_check_threshold or runner_filter.use_enforcement_rules:
namespace_skip = False
if "metadata" in entity_configuration and "namespace" in entity_configuration["metadata"]:
if entity_configuration["metadata"]["namespace"] in check_id_denylist:
namespace_skip = True
elif "parent_metadata" in entity_configuration and "namespace" in entity_configuration["parent_metadata"]:
if entity_configuration["parent_metadata"]["namespace"] in check_id_denylist:
namespace_skip = True
else:
if "default" in check_id_denylist:
namespace_skip = True
if runner_filter.should_run_check(check=check, report_type=report_type) and not namespace_skip:
return True
else:
return True
return False<|fim_middle|>should_run_scan<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(message: WildValue) -> str:
"""
Construct the response to a canary or canarytoken request.
"""
title = canary_kind(message).title()
name = canary_name(message)
METHOD_NAME = f"**:alert: {title} *{name}* has been triggered!**\n\n{message['Intro'].tame(check_string)}\n\n"
if "IncidentHash" in message:
METHOD_NAME += f"**Incident ID:** `{message['IncidentHash'].tame(check_string)}`\n"
if "Token" in message:
METHOD_NAME += f"**Token:** `{message['Token'].tame(check_string)}`\n"
if "Description" in message:
METHOD_NAME += f"**Kind:** {message['Description'].tame(check_string)}\n"
if "Timestamp" in message:
METHOD_NAME += f"**Timestamp:** {message['Timestamp'].tame(check_string)}\n"
if "CanaryIP" in message:
METHOD_NAME += f"**Canary IP:** `{message['CanaryIP'].tame(check_string)}`\n"
if "CanaryLocation" in message:
METHOD_NAME += f"**Canary location:** {message['CanaryLocation'].tame(check_string)}\n"
if "Triggered" in message:
unit = "times" if message["Triggered"].tame(check_int) > 1 else "time"
METHOD_NAME += f"**Triggered:** {message['Triggered'].tame(check_int)} {unit}\n"
source_ip, reverse_dns = source_ip_and_reverse_dns(message)
if source_ip:
METHOD_NAME += f"**Source IP:** `{source_ip}`\n"
if reverse_dns:
METHOD_NAME += f"**Reverse DNS:** `{reverse_dns}`\n"
if "AdditionalDetails" in message:
for detail in message["AdditionalDetails"]:
key = detail[0].tame(check_string)
value = detail[1].tame(check_union([check_string, check_int]))
if isinstance(value, str) and "*" in value:
# Thinkst sends passwords as a series of stars which can mess with
# formatting, so wrap these in backticks.
METHOD_NAME += f"**{key}:** `{value}`\n"
else:
METHOD_NAME += f"**{key}:** {value}\n"
return METHOD_NAME<|fim_middle|>body<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(message: str, choices: Union[List[str], List[Tuple[str, str]]],
default: Optional[int] = None, cli_flag: Optional[str] = None,
force_interactive: bool = False) -> Tuple[str, int]:
"""Display a menu.
.. todo:: This doesn't enable the help label/button (I wasn't sold on
any interface I came up with for this). It would be a nice feature.
:param str message: title of menu
:param choices: Menu lines, len must be > 0
:type choices: list of tuples (tag, item) or
list of descriptions (tags will be enumerated)
:param default: default value to return, if interaction is not possible
:param str cli_flag: option used to set this value with the CLI
:param bool force_interactive: True if it's safe to prompt the user
because it won't cause any workflow regressions
:returns: tuple of (`code`, `index`) where
`code` - str display exit code
`index` - int index of the user's selection
:rtype: tuple
"""
return obj.get_display().METHOD_NAME(message, choices, default=default, cli_flag=cli_flag,
force_interactive=force_interactive)<|fim_middle|>menu<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
# given
img_data = BytesIO()
image = Image.new("RGB", size=(1, 1))
image.save(img_data, format="JPEG")
field = "image"
# when
img = SimpleUploadedFile("product.jpg", img_data.getvalue(), "image/jpeg")
# then
clean_image_file({field: img}, field, ProductErrorCode)<|fim_middle|>test_clean_image_file<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(bytesequence):
img = []
for i in range(len(bytesequence) // 4):
offset = i * 4
byte1 = bytesequence[offset + 0]
byte2 = bytesequence[offset + 1]
byte3 = bytesequence[offset + 2]
byte4 = bytesequence[offset + 3]
value = byte1 * 0x100 + byte2
r = (value & 0x7C00) >> 10
g = (value & 0x03e0) >> 5
b = (value & 0x001f) >> 0
img.append(r)
img.append(g)
img.append(b)
value = byte3 * 0x100 + byte4
        r = (value & 0x7C00) >> 10
g = (value & 0x03e0) >> 5
b = (value & 0x001f) >> 0
img.append(r)
img.append(g)
img.append(b)
return img<|fim_middle|>rgb555_to_rgb<|file_separator|> |
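A hand-check of the RGB555 unpacking (hypothetical 4-byte input; the expected channel values follow from the masks above):

    # 0x7FFF -> all three 5-bit channels maxed (31); 0x7C00 -> red only.
    assert rgb555_to_rgb(bytes([0x7F, 0xFF, 0x7C, 0x00])) == [31, 31, 31, 31, 0, 0]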
<|fim_prefix|>def <|fim_suffix|>(evs):
ts = (evs[:, 0] * 1e6).astype("uint64")
ad = (evs[:, 1:]).astype("uint64")
return ts, ad<|fim_middle|>cast_evs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
:avocado: tags=arch:mips64el
:avocado: tags=machine:malta
:avocado: tags=cpu:I6400
:avocado: tags=mips:smp
"""
self.do_test_i6400_framebuffer_logo(7)<|fim_middle|>test_mips_malta_i6400_framebuffer_logo_7cores<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
body, headers = _replicate_requests_body_args(json="invalid_but_ignored")
assert body == "invalid_but_ignored"
assert headers == {"Content-Type": "application/json"}<|fim_middle|>test_json_as_string<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, item):
"""Parse any additional notes on the timing of the meeting"""
return "See agenda to confirm exact times"<|fim_middle|>parse_time_notes<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
if os.path.exists(self.jobdir):
shutil.rmtree(self.jobdir)
self.status = "W"<|fim_middle|>clean<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, evaluation):
return (
"--custom graphics--: I should plot " + self.elements.__str__() + " items"
)<|fim_middle|>boxes_to_svg<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, x: np.ndarray) -> np.ndarray:
return np.tanh(x)<|fim_middle|>f<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> bool:
"""
Check if the document lists known types of activation that can be done
through the web. If this returns ``False``, it means that the endpoint
is of a highly unusual type, and you should directly inspect the
response's ``data`` attribute to see what is required. Sending users to
the web page for activation is also a fairly safe action to take.
Note that ``ActivationRequirementsResponse.supports_auto_activation``
directly implies
``ActivationRequirementsResponse.supports_web_activation``, so these
are *not* exclusive.
For example,
>>> tc = TransferClient(...)
>>> reqs_doc = tc.endpoint_get_activation_requirements(...)
>>> if not reqs_doc.supports_web_activation:
>>> # use `from __future__ import print_function` in py2
>>> print("Highly unusual endpoint. " +
>>> "Cannot webactivate. Raw doc: " +
>>> str(reqs_doc), file=sys.stderr)
>>> print("Sending user to web anyway, just in case.",
>>> file=sys.stderr)
>>> ...
:rtype: ``bool``
"""
return (
self.supports_auto_activation
or self["oauth_server"] is not None
or any(
x for x in self["DATA"] if x["type"] in ("myproxy", "delegate_myproxy")
)
)<|fim_middle|>supports_web_activation<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
super().METHOD_NAME()
# Test organizations
self.organization1 = self.organizations['from']
self.organization2 = self.organizations['to']
# Create some Credit Trades with different effective dates
self.trade1 = CreditTrade.objects.create(
status=self.statuses['approved'],
type=self.credit_trade_types['sell'],
initiator=self.organization1,
respondent=self.organization2,
is_rescinded=False,
trade_effective_date=timezone.now() - datetime.timedelta(days=1), # A past date
number_of_credits=10,
)
self.trade2 = CreditTrade.objects.create(
status=self.statuses['submitted'],
type=self.credit_trade_types['sell'],
initiator=self.organization1,
respondent=self.organization2,
is_rescinded=False,
trade_effective_date=timezone.now() + datetime.timedelta(days=1), # A future date
number_of_credits=20,
)
self.trade3 = CreditTrade.objects.create(
status=self.statuses['recommended'],
type=self.credit_trade_types['sell'],
initiator=self.organization1,
respondent=self.organization2,
is_rescinded=False,
trade_effective_date=timezone.now() - datetime.timedelta(days=1), # A past date
number_of_credits=30,
)<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
The name of the resource.
"""
return pulumi.get(self, "name")<|fim_middle|>name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(client, match):
resp = client.post(
"/predictions",
json={
"input": {
"text": "baz",
"num1": 5,
"path": "data:text/plain;base64,"
+ base64.b64encode(b"wibble").decode("utf-8"),
}
},
)
assert resp.status_code == 200
assert resp.json() == match({"output": "baz 50 wibble", "status": "succeeded"})<|fim_middle|>test_multiple_arguments<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(crv3_to_add=1e6*1e18):
crv3Liquidity = crv3_to_add / threepool_swap.get_virtual_price() * 1e18
ousd_to_add = crv3_to_add * 2
# add liquidity
lp_before = ousd_metapool.balanceOf(me)
ousd_metapool.add_liquidity([ousd_to_add, crv3Liquidity], 0, me, OPTS)
lp_after = ousd_metapool.balanceOf(me)
return [crv3Liquidity, ousd_to_add, lp_after-lp_before]<|fim_middle|>add_twice_the_ousd<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
all_embedding_angles = [[0], [0, 10, 20], [0, 40, 60]]
all_labels = [
torch.LongTensor([0]),
torch.LongTensor([0, 0, 0]),
torch.LongTensor([1, 2, 3]),
]
temperature = 0.1
for loss_class in [NTXentLoss, SupConLoss]:
loss_funcA = loss_class(temperature)
loss_funcB = loss_class(temperature, distance=LpDistance())
for loss_func in [loss_funcA, loss_funcB]:
for dtype in TEST_DTYPES:
for embedding_angles, labels in zip(
all_embedding_angles, all_labels
):
embeddings = torch.tensor(
[angle_to_coord(a) for a in embedding_angles],
requires_grad=True,
dtype=dtype,
).to(
TEST_DEVICE
) # 2D embeddings
loss = loss_func(embeddings, labels)
loss.backward()
self.assertEqual(loss, 0)<|fim_middle|>test_with_no_valid_pairs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(left: Dict[str, Any], right: Dict[str, Any]) -> Dict[str, Any]:
result = left.copy()
for key, value in right.items():
if key in left and isinstance(left[key], dict) and isinstance(value, dict):
result[key] = METHOD_NAME(left[key], value)
else:
result[key] = value
return result<|fim_middle|>patch_internal<|file_separator|> |
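A usage sketch of the deep merge (METHOD_NAME is this dataset's mask for the recursive self-call; the name below comes from the record's <|fim_middle|> token):

    merged = patch_internal({"a": {"x": 1}, "b": 2}, {"a": {"y": 3}, "b": 4})
    assert merged == {"a": {"x": 1, "y": 3}, "b": 4}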
<|fim_prefix|>def <|fim_suffix|>():
headers = make_headers(content_type="text/csv")
data = "1.0,2.0,5.0"
responses = []
for MODEL_NAME in MODEL_NAMES:
response = requests.post(
INVOCATION_URL.format(MODEL_NAME), data=data, headers=headers
).json()
responses.append(response)
error = responses[0]["error"]
assert "unsupported content type text/csv" in error
assert responses[1] == {"predictions": [2.5, 3.0, 4.5]}<|fim_middle|>test_csv_input<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, port_idx, ledMode):
with open(self.f_led.format("port{}".format(port_idx)), 'w') as led_file:
led_file.write(str(ledMode))
(port, ctlid) = (self.led_mapping[port_idx][0], self.led_mapping[port_idx][1])
data = struct.pack('=HHHBBH', 0, 7, 4, ctlid, ledMode, port)
self.udpClient.sendto(data, ('localhost', 8101))<|fim_middle|>port_led_mode_update<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self, wrong_sample_rate):<|fim_middle|>test_create_for_inference_fails_with_wrong<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(item, label: QLabel, dialog: GlobalHotkeyDialog, clear_button: QPushButton):
dialog.load(item)
if item.enabled:
key = item.hotKey
label.setText(item.get_hotkey_string(key, item.modifiers))
clear_button.setEnabled(True)
return True
else:
label.setText("(None configured)")
clear_button.setEnabled(False)
return False<|fim_middle|>load_hotkey<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
with pytest.raises(ValueError):
SqliteAccountInfo._get_user_account_info_path(file_name='foo', profile='bar')<|fim_middle|>test_profile_and_file_name_conflict<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# From stored _Damping objects, instantiate the dissipation/damping
# inplace : https://stackoverflow.com/a/1208792
self._dampers[:] = [
(damper.id(), damper(self._systems[damper.id()]))
for damper in self._dampers
]
# Sort from lowest id to highest id for potentially better memory access
# _dampers contains list of tuples. First element of tuple is rod number and
# following elements are the type of damping.
# Thus using lambda we iterate over the list of tuples and use rod number (x[0])
# to sort dampers.
self._dampers.sort(key=lambda x: x[0])<|fim_middle|>finalize_dampers<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(filters):
data = []
order_details = {}
get_work_order_details(filters, order_details)
get_purchase_order_details(filters, order_details)
get_production_plan_item_details(filters, data, order_details)
return data<|fim_middle|>get_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, device_id: str, file_name: str, kwargs: Mapping[str, str]) -> None:
"""Request that the current selection is written to the output device.
The output device to write with will be selected based on the device_id.
A file format is chosen from the list of available file formats by the
output device.
:param device_id: The handle of the device to write to.
:param file_name: A suggestion for the file name to write
to. Can be freely ignored if providing a file name makes no sense.
:param kwargs: Keyword arguments:
limit_mimetypes: Limit the possible mimetypes to use for writing to these types.
"""
if not Selection.hasSelection():
return
limit_mimetypes = kwargs.get("limit_mimetypes", False)
preferred_mimetypes = kwargs.get("preferred_mimetypes", None)
# On Windows, calling requestWrite() on LocalFileOutputDevice crashes when called from a signal
# handler attached to a QML MenuItem. So instead, defer the call to the next run of the event
# loop, since that does work.
Application.getInstance().callLater(self._writeToDevice, Selection.getAllSelectedObjects(), device_id, file_name, limit_mimetypes, preferred_mimetypes = preferred_mimetypes)<|fim_middle|>request_write_selection_to_device<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return "MgmtErrorFormat"<|fim_middle|>error_format<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
expected = dns.rrset.from_text('ns.secure.example.', 0, dns.rdataclass.IN, 'A', '{prefix}.9'.format(prefix=self._PREFIX))
query = dns.message.make_query('ns.secure.example', 'A', want_dnssec=True)
query.flags |= dns.flags.AD
res = self.sendUDPQuery(query)
self.assertMessageIsAuthenticated(res)
self.assertRRsetInAnswer(res, expected)
self.assertMatchingRRSIGInAnswer(res, expected)<|fim_middle|>test_a<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
notification: BaseNotification,
recipient: RpcActor,
shared_context: Mapping[str, Any] | None = None,
extra_context_by_actor: Mapping[RpcActor, Mapping[str, Any]] | None = None,
) -> Mapping[str, Any]:
# TODO: move context logic to single notification class method
extra_context = (
extra_context_by_actor[recipient] if extra_context_by_actor and recipient else {}
)
context = get_context(notification, recipient, shared_context or {}, extra_context)
return get_builder_args_from_context(notification, context)<|fim_middle|>get_builder_args<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional['outputs.ApplicationLogsConfigResponse']:
"""
Application logs configuration.
"""
return pulumi.get(self, "application_logs")<|fim_middle|>application_logs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Update grid lines based upon current grid spacing and grid size
# First reset existing grid lines
self.minorLines.reset()
self.majorLines.reset()
self.centerLines.reset()
# Now redraw lines
numLines = int(math.ceil(self.gridSize/self.gridSpacing))
scaledSize = numLines * self.gridSpacing
center = self.centerLines
minor = self.minorLines
major = self.majorLines
for i in range(-numLines,numLines + 1):
if i == 0:
center.moveTo(i * self.gridSpacing, -scaledSize, 0)
center.drawTo(i * self.gridSpacing, scaledSize, 0)
center.moveTo(-scaledSize, i * self.gridSpacing, 0)
center.drawTo(scaledSize, i * self.gridSpacing, 0)
else:
if (i % 5) == 0:
major.moveTo(i * self.gridSpacing, -scaledSize, 0)
major.drawTo(i * self.gridSpacing, scaledSize, 0)
major.moveTo(-scaledSize, i * self.gridSpacing, 0)
major.drawTo(scaledSize, i * self.gridSpacing, 0)
else:
minor.moveTo(i * self.gridSpacing, -scaledSize, 0)
minor.drawTo(i * self.gridSpacing, scaledSize, 0)
minor.moveTo(-scaledSize, i * self.gridSpacing, 0)
minor.drawTo(scaledSize, i * self.gridSpacing, 0)
center.create()
minor.create()
major.create()
self.gridBack.setScale(scaledSize)<|fim_middle|>update_grid<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self, chat_id, default_bot, contact):
tasks = asyncio.gather(
default_bot.send_contact(chat_id, contact=contact),
default_bot.send_contact(chat_id, contact=contact, protect_content=False),
)
protected, unprotected = await tasks
assert protected.has_protected_content
assert not unprotected.has_protected_content<|fim_middle|>test_send_contact_default_protect_content<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
t = BoostBuild.Tester(use_test_config=False)
t.write("jamroot.jam", """\<|fim_middle|>test_building_missing_file_from_specific_target<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> "Config":
"""
Returns the global config, this function should be used with care!
There are only a few cases where this function should be called, please read the description at the top and
make sure you have to call this function.
:return: the global config as a utility.Config object
"""
if GlobalStorage._global_config is not None:
return GlobalStorage._global_config
raise RuntimeError("The global config was not initialized!")<|fim_middle|>get_global_config<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(iterable: tp.Iterable[tp.Any]) -> tp.Iterator[tp.Tuple[tp.Any, tp.Any]]:
"""Returns an iterator over sliding pairs of the input iterator
s -> (s0,s1), (s1,s2), (s2, s3), ...
Note
----
Nothing will be returned if length of iterator is strictly less
than 2.
""" # From itertools documentation
a, b = itertools.tee(iterable)
next(b, None)
return zip(a, b)<|fim_middle|>pairwise<|file_separator|> |
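A concrete check of the sliding pairs (consistent with the docstring's s0..s3 sketch):

    assert list(pairwise("abcd")) == [("a", "b"), ("b", "c"), ("c", "d")]
    assert list(pairwise([42])) == []  # fewer than 2 items yields nothing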
<|fim_prefix|>def <|fim_suffix|>(name: str) -> Logger:
"""Create or get a logger by its name. This is essentially a wrapper of python's native logger.
Parameters
----------
name : str
The name of the logger.
Returns
-------
logger : Logger
The logger instance.
"""
return logging.getLogger(name)<|fim_middle|>get_logger<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
self._tmpdir = tempfile.TemporaryDirectory(dir=os.getcwd())
self._tmpfile = tempfile.NamedTemporaryFile(dir=self._tmpdir.name, delete=False)
self._tmp_js_file = tempfile.NamedTemporaryFile(
dir=self._tmpdir.name, suffix="script.js", delete=False
)
self._tmp_png_image_file = tempfile.NamedTemporaryFile(
dir=self._tmpdir.name, suffix="image.png", delete=False
)
self._tmp_webp_image_file = tempfile.NamedTemporaryFile(
dir=self._tmpdir.name, suffix="image.webp", delete=False
)
self._symlink_outside_directory = "symlink_outside"
self._symlink_inside_directory = "symlink_inside"
os.symlink(
"/", os.path.join(self._tmpdir.name, self._symlink_outside_directory)
)
os.symlink(
self._tmpfile.name,
os.path.join(self._tmpdir.name, self._symlink_inside_directory),
)
self._filename = os.path.basename(self._tmpfile.name)
self._js_filename = os.path.basename(self._tmp_js_file.name)
self._png_image_filename = os.path.basename(self._tmp_png_image_file.name)
self._webp_image_filename = os.path.basename(self._tmp_webp_image_file.name)
super().METHOD_NAME()<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""
This test case is for testing initialization of rigid cylinder and it checks the
validity of the members of Cylinder class.
Returns
-------
"""
# setting up test params
start = np.random.rand(3)
direction = 5 * np.random.rand(3)
direction_norm = np.linalg.norm(direction)
direction /= direction_norm
normal = np.array((direction[1], -direction[0], 0))
base_length = 10
base_radius = np.random.uniform(1, 10)
density = np.random.uniform(1, 10)
mass = density * np.pi * base_radius ** 2 * base_length
# Second moment of inertia
A0 = np.pi * base_radius * base_radius
I0_1 = A0 * A0 / (4.0 * np.pi)
I0_2 = I0_1
I0_3 = 2.0 * I0_2
I0 = np.array([I0_1, I0_2, I0_3])
# Mass second moment of inertia for disk cross-section
mass_second_moment_of_inertia = np.zeros((3, 3), np.float64)
np.fill_diagonal(mass_second_moment_of_inertia, I0 * density * base_length)
# Inverse mass second of inertia
inv_mass_second_moment_of_inertia = np.linalg.inv(mass_second_moment_of_inertia)
test_rod = Cylinder(start, direction, normal, base_length, base_radius, density)
# checking origin and length of rod
assert_allclose(
test_rod.position_collection[..., -1],
start + base_length / 2 * direction,
atol=Tolerance.atol(),
)
# element lengths are equal for all rod.
# checking velocities, omegas and rest strains
# density and mass
rod_length = np.linalg.norm(test_rod.length)
assert_allclose(rod_length, base_length, atol=Tolerance.atol())
assert_allclose(
test_rod.velocity_collection, np.zeros((3, 1)), atol=Tolerance.atol()
)
assert_allclose(test_rod.omega_collection, np.zeros((3, 1)), atol=Tolerance.atol())
assert_allclose(test_rod.density, density, atol=Tolerance.atol())
# Check mass at each node. Note that, node masses is
# half of element mass at the first and last node.
assert_allclose(test_rod.mass, mass, atol=Tolerance.atol())
# checking directors, rest length
# and shear, bend matrices and moment of inertia
assert_allclose(
test_rod.inv_mass_second_moment_of_inertia[..., -1],
inv_mass_second_moment_of_inertia,
atol=Tolerance.atol(),
)<|fim_middle|>test_cylinder_initialization<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Initialize options."""
self.repository = None
self.realm = None
self.show_response = 0<|fim_middle|>initialize_options<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(**kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-10-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-10-01-preview")
)
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.ServiceBus/operations")
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)<|fim_middle|>build_list_request<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, ykman_cli):
output = ykman_cli(
"piv", "access", "change-management-key", "-m", DEFAULT_MANAGEMENT_KEY, "-g"
).output
assert re.match(
r"^Generated management key: [a-f0-9]{48}$", output, re.MULTILINE
)
output = ykman_cli("piv", "info").output
assert "Management key is stored on the YubiKey" not in output<|fim_middle|>test_change_management_key_no_protect_generate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# A few upper/lowercase tests
self.assertEqual(0b0, 0B0)
self.assertEqual(0b1, 0B1)
self.assertEqual(0b10101010101, 0B10101010101)
# Baseline tests
self.assertEqual(0b0, 0)
self.assertEqual(0b10000, 16)
self.assertEqual(0b1111111111111111111111111111111, 2147483647)
self.assertEqual(0b111111111111111111111111111111111111111111111111111111111111111, 9223372036854775807)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0b0), 0)
self.assertEqual(-(0b10000), -16)
self.assertEqual(-(0b1111111111111111111111111111111), -2147483647)
self.assertEqual(-(0b111111111111111111111111111111111111111111111111111111111111111), -9223372036854775807)
# Ditto with a minus sign and NO parentheses
self.assertEqual(-0b0, 0)
self.assertEqual(-0b10000, -16)
self.assertEqual(-0b1111111111111111111111111111111, -2147483647)
self.assertEqual(-0b111111111111111111111111111111111111111111111111111111111111111, -9223372036854775807)<|fim_middle|>test_bin_baseline<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
cached = []
for entry in TESTS:
filename = os.path.join(TEST_PATH, entry) + '.py'
cached.append(filename)
linecache.getline(filename, 1)
# Are all files cached?
cached_empty = [fn for fn in cached if fn not in linecache.cache]
self.assertEqual(cached_empty, [])
# Can we clear the cache?
linecache.clearcache()
cached_empty = [fn for fn in cached if fn in linecache.cache]
self.assertEqual(cached_empty, [])<|fim_middle|>test_clearcache<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertEqual(str(self.organizations['open']), 'open')<|fim_middle|>test_str<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(dataframe):
"""Test if DataFrame has clean_method method"""
assert dataframe.__getattr__("clean_names")<|fim_middle|>test_clean_names_registration<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *args, **kwargs):
self.run_and_statis(quant=False, max_examples=100)<|fim_middle|>test<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
if self.error_list is not None and self.url in self.error_list:
http_error_msg = "%s Client Error: %s for url: %s" % (
400,
"Simulate error",
self.url,
)
raise HTTPError(http_error_msg, response=self)<|fim_middle|>raise_for_status<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
return '' if settings.STATIC_CDN else 'http://' + get_site_domain()<|fim_middle|>get_static_url_prefix<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(test_case):
os.environ["ONEFLOW_BOXING_DISABLE_MIDDLE_NODE_AND_CHECK"] = "1"
arg_dict = OrderedDict()
arg_dict["src_nd_sbp"] = gen_1d_sbp()
arg_dict["dst_nd_sbp"] = gen_1d_sbp()
for arg in GenArgList(arg_dict):
_test_nccl_logical_send_recv_1d(test_case, *arg)<|fim_middle|>test_nccl_logical_send_recv_1d<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(old_request, new_request):
transport = RequestsTransport()
pipeline = Pipeline(transport)
old_response = transport.send(old_request, stream=True)
old_string = b"".join(old_response.stream_download(pipeline=pipeline))
new_response = transport.send(new_request, stream=True)
new_string = b"".join(new_response.stream_download(pipeline))
assert old_string == new_string == b"Hello, world!"<|fim_middle|>test_response_stream_download<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, config):
if config is None:
return
if not isinstance(config, dict):
raise ValidationError('IOI-styled contest expects no config or dict as config')
for key, value in config.items():
if key not in cls.config_defaults:
raise ValidationError('unknown config key "%s"' % key)
if not isinstance(value, type(cls.config_defaults[key])):
raise ValidationError('invalid type for config key "%s"' % key)<|fim_middle|>validate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Fully qualified resource Id for the resource
"""
return pulumi.get(self, "id")<|fim_middle|>id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, this_op):<|fim_middle|>is_in_group<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
""" Shows all the existing primitives that can be rendered, press a key to go through them
1) 4 shapes in a vertical 2 unit x 2 unit grid - as cylinder and cone have r=1, they should touch
2) 27 cubes in 3x3x3 grid, with scaling changing (1/2, 1, 2) in the same dimension as the translation
3) As above but rotations by 45 degrees in around axis in the same dimension as the translation
"""
import os
os.environ["QT_ENABLE_HIGHDPI_SCALING"] = "1"
app = QtWidgets.QApplication([])
cube = Cube(edge_colors=uniform_coloring(1, 1, 1), colors=uniform_coloring(0.7, 0.2, 0))
cone = Cone(edge_colors=uniform_coloring(1, 1, 1), colors=uniform_coloring(0, 0.7, 0.2))
cylinder = Cylinder(edge_colors=uniform_coloring(1, 1, 1), colors=uniform_coloring(0, 0.2, 0.7))
icos = Icosahedron(edge_colors=uniform_coloring(1, 1, 1), colors=uniform_coloring(0.7, 0, 0.7))
# Translations
translate_test = \
SceneGraphNode(
Translation(0,0,1,
Translation(0,-1,0,cube),
Translation(0,1,0,cone)),
Translation(0,0,-1,
Translation(0,-1,0,cylinder),
Translation(0,1,0,icos)))
# Scaling
scaling_components = []
for i in range(3):
for j in range(3):
for k in range(3):
component = Translation(-2 * (i-1), -2 * (j-1), -2 * (k-1), Scaling(2 ** (i - 1), 2 ** (j - 1), 2 ** (k - 1), cube))
scaling_components.append(component)
scaling_test = Scaling(0.5, 0.5, 0.5, *scaling_components)
# Rotations
cone_sphere = Scaling(0.5, 0.5, 0.5,
cone,
Translation(0, 1, 1,
Scaling(0.5, 0.5, 0.5,
icos)))
scaling_components = []
for i in range(3):
for j in range(3):
for k in range(3):
component = Translation(-2 * (i-1), -2 * (j-1), -2 * (k-1),
Rotation(45*(i - 1), 1, 0, 0,
Rotation(45*(j-1), 0, 1, 0,
Rotation(45*(k-1), 0, 0, 1,
cone_sphere))))
scaling_components.append(component)
rotation_test = Scaling(0.5, 0.5, 0.5, *scaling_components)
#
# Thing to iterate through the different tests
#
item_list = [
translate_test,
scaling_test,
rotation_test
]
# Turn off all of them
for item in item_list:
item.solid_render_enabled = False
item.wireframe_render_enabled = False
    # Thing for going through each of the draw types of the primitives
def item_states(item: SceneGraphNode):
item.solid_render_enabled = True
item.wireframe_render_enabled = True
yield None
item.solid_render_enabled = False
item.wireframe_render_enabled = False
def scan_states():
while True:
for item in item_list:
for _ in item_states(item):
yield None
state = scan_states()
next(state)
#
# Set up and show window
#
mainWindow = QtWidgets.QMainWindow()
viewer = Scene(parent=mainWindow)
# Keyboard callback
def enable_disable(key):
next(state)
viewer.update()
viewer.on_key = enable_disable
for item in item_list:
viewer.add(item)
mainWindow.setCentralWidget(viewer)
mainWindow.show()
mainWindow.resize(600, 600)
app.exec_()<|fim_middle|>transform_tests<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self,
uri: str,
data: str or dict,
query: Query = tuple(),
status: Status = 200,
content_type: str = JSON,
item: str = None,
accept: str = JSON,
headers: dict = None,
**kw) -> Res:
"""
Performs a PATCH.
See the parameters in :meth:`ereuse_utils.test.Client.open`.
"""
return super().METHOD_NAME(uri, item=item, data=data, status=status, content_type=content_type,
accept=accept, headers=headers, query=query, **kw)<|fim_middle|>patch<|file_separator|> |