text (string, lengths 67 to 7.88k) |
---|
<|fim_prefix|>def <|fim_suffix|>(self):
length = self.nvert + 1
# parameters for both BFS and DFS
self._processed = length * [False]
self._discovered = length * [False]
self._parent = length * [None]
# additional parameters for DFS
self._finished = False
self._entry_time = length * [0]
self._exit_time = length * [0]
self._time = 0
self.cycle = [] # cycle path if a cycle is found during DFS<|fim_middle|>initialize_search<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self, message_id: str) -> models.DbMessage:
query = (
sqlmodel.select(models.DbMessage)
.options(sqlalchemy.orm.selectinload(models.DbMessage.reports))
.where(models.DbMessage.id == message_id, models.DbMessage.role == "assistant")
)
message = (await self.session.exec(query)).one()
return message<|fim_middle|>get_assistant_message_by_id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.global_selection.METHOD_NAME()<|fim_middle|>go_back_to_issues<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
args = parse_args()
with open("rockset/prodVersions.json") as f:
prod_versions = json.load(f)
client = RocksetClient(
api_key=ROCKSET_API_KEY,
host="https://api.usw2a1.rockset.com",
)
response = client.QueryLambdas.execute_query_lambda(
query_lambda="correlation_matrix",
version=prod_versions["metrics"]["correlation_matrix"],
workspace="metrics",
parameters=[
{
"name": "workflowNames",
"type": "string",
"value": "pull,trunk,periodic,windows-binary-libtorch-debug,windows-binary-libtorch-release",
},
],
)
pivot = defaultdict(dict)
# Results look like (is_green, head_sha, name)
# Turn results into a nested dict of head_sha => name => is_green
for result in response.results:
# skip pending jobs
if result["is_green"] is None:
continue
head_sha = result["head_sha"]
if head_sha not in pivot:
pivot[head_sha] = {}
name = result["name"]
name = name.split("/", 1)[1].strip()
if name not in pivot[head_sha]:
pivot[head_sha][name] = 1
pivot[head_sha][name] *= result["is_green"]
pd.options.display.max_columns = None
pd.options.display.max_rows = None
pd.options.display.width = 0
df = pd.DataFrame(pivot).transpose().fillna(0)
if args.ignore_flaky:
# Ignore flaky results
df = ignore_flaky(df)
# TLDR; Use hamming distance to calculate the similarity between jobs instead
# of the default pearson correlation provided by pandas df.corr()
#
# We should not use the default pearson correlation for categorical values here
# because the result makes little sense. As an example, I gathered macOS data for
# x86-64 and arm64 functorch. They rarely fail except for flaky runs, and
# the two data series are mostly 1. I would expect a value indicating a high
# correlation between the two, but the calculation returns 0 (no correlation).
# Correlation metrics for continuous data measure how the change (increase or
# decrease) in one correlates with the other. Here there are just 0s and 1s.
correlation_matrix = pd.DataFrame(
1 - pairwise_distances(df.transpose(), metric="hamming"),
index=df.columns,
columns=df.columns,
)
# Prepare for rendering in json:
# Turn the nested dict of name => name => corr to Array<xAxis, yAxis, corr>
correlation_matrix = correlation_matrix.to_dict()
data = []
for xIdx, xName in enumerate(correlation_matrix):
for yIdx, yName in enumerate(correlation_matrix[xName]):
value = correlation_matrix[xName][yName]
# nans mean we couldn't find any examples with both jobs populated.
if math.isnan(value):
continue
data.append((xIdx, yIdx, value))
with open("lib/correlation_matrix.json", "w") as f:
json.dump({"names": list(correlation_matrix.keys()), "data": data}, f, indent=4)<|fim_middle|>compute<|file_separator|> |
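The comment in the example above argues that Pearson correlation is uninformative for mostly-constant 0/1 job outcomes, while a Hamming-based similarity behaves sensibly. A minimal sketch of that point, using made-up job names and outcomes rather than the real Rockset data:

```python
# Illustrative only: hypothetical 0/1 outcomes for three jobs across 8 commits.
import pandas as pd
from sklearn.metrics import pairwise_distances

df = pd.DataFrame({
    "macos-x86-64": [1, 1, 1, 1, 1, 1, 1, 1],  # always green
    "macos-arm64":  [1, 1, 1, 1, 1, 1, 1, 0],  # one flaky failure
    "often-red":    [1, 0, 1, 0, 1, 0, 1, 0],
})

# Pearson: the all-green column has zero variance, so its correlations come out NaN,
# even though it agrees with macos-arm64 on 7 of 8 commits.
print(df.corr())

# Hamming similarity, as used in the function above: the fraction of commits on which
# two jobs have the same outcome (0.875 for the two macOS jobs here).
hamming_similarity = pd.DataFrame(
    1 - pairwise_distances(df.transpose(), metric="hamming"),
    index=df.columns,
    columns=df.columns,
)
print(hamming_similarity)
```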
<|fim_prefix|>def <|fim_suffix|>(self, family):
""" Change the font family.
May be either an alias (generic name in CSS parlance), such as:
'serif', 'sans-serif', 'cursive', 'fantasy', or 'monospace', or
a real font name.
"""
if family is None:
self._family = None
else:
if isinstance(family, bytes):
family = [family.decode("utf8")]
elif isinstance(family, str):
family = [family]
self._family = family<|fim_middle|>set_family<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
(Required only by `gkehub.ScopeIamPolicy`) The policy data generated by
a `organizations_get_iam_policy` data source.
"""
return pulumi.get(self, "policy_data")<|fim_middle|>policy_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
is_cuda: bool,
cosine_similarity: bool = False,
float32: bool = False,
disallowed_operators: Set[DisallowedOperator] = None,
) -> TorchBenchmarkRunner:
"""
Load the benchmark runner from TorchDynamo.
"""
@dataclass
class RunnerArgs:
"""
This class simulates the parsed args required by the benchmark code from TorchDynamo.
"""
ci: bool = False # Whether it runs in CI mode. pylint: disable=invalid-name
training: bool = False # Whether it benchmarks training workload.
use_eval_mode: bool = True # Whether the model should be in eval mode.
dynamic_shapes: bool = False # Whether to run the model in dynamic shape mode.
float16: bool = False # Whether to cast model and inputs to float16
float32: bool = False # Whether to cast model and inputs to float32
accuracy: bool = False # Whether to perform an accuracy test
performance: bool = True # Whether to perform a performance test
cosine: bool = False # Whether to use cosine similarity to check if output is correct.
args = RunnerArgs(cosine=cosine_similarity, float32=float32)
runner = TorchBenchmarkRunner()
runner.args = args
runner.model_iter_fn = runner.forward_pass
if disallowed_operators:
_disallow_operators(disallowed_operators)
if is_cuda:
# pylint: disable=import-outside-toplevel
import benchmarks.common # type: ignore
# pylint: enable=import-outside-toplevel
benchmarks.common.synchronize = torch.cuda.synchronize
return runner<|fim_middle|>load_torchdynamo_benchmark_runner<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
The provisioning state of the resource.
"""
return pulumi.get(self, "provisioning_state")<|fim_middle|>provisioning_state<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument
"""Assert that an Affidavit can be created."""
user = factory_user_model()
token_info = TestJwtClaims.get_test_real_user(user.keycloak_guid, idp_userid=user.idp_userid)
patch_token_info(token_info, monkeypatch)
affidavit_info = TestAffidavit.get_test_affidavit_with_contact()
affidavit = AffidavitService.create_affidavit(affidavit_info=affidavit_info)
assert affidavit
assert affidavit.as_dict().get('status', None) == AffidavitStatus.PENDING.value<|fim_middle|>test_create_affidavit<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
tx = MessageTransaction.create(**self.params)
tx.sign(self.alice)
addresses_state = dict(self.addresses_state)
state_container = StateContainer(addresses_state=addresses_state,
tokens=Indexer(b'token', None),
slaves=Indexer(b'slave', None),
lattice_pk=Indexer(b'lattice_pk', None),
multi_sig_spend_txs=dict(),
votes_stats=dict(),
block_number=1,
total_coin_supply=100,
current_dev_config=config.dev,
write_access=True,
my_db=self.state._db,
batch=None)
tx.apply(self.state, state_container)
self.assertEqual(addresses_state[self.alice.address].balance, 99)
storage_key = state_container.paginated_tx_hash.generate_key(self.alice.address, 1)
self.assertIn(storage_key, state_container.paginated_tx_hash.key_value)
self.assertEqual([tx.txhash], state_container.paginated_tx_hash.key_value[storage_key])<|fim_middle|>test_apply_message_txn<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
# dimension coordinate with no default index (explicit)
coords = Coordinates(coords={"x": [1, 2]}, indexes={})
assert "x" not in coords.xindexes<|fim_middle|>test_init_no_default_index<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
ba = bytearray(b'ab')
ba.remove(ord('b'))
# The capacity of ba should now be different from its length.
# Also, the rest of the buffer is not filled with binary zeroes - this is important for
# test_mac_key_length because according to RFC 2104, the key is padded with zeroes anyway.
# So providing a buffer with capacity > length, but with zero padding as the key argument to HMAC
# would not trigger the bug.
return ba<|fim_middle|>get_buffer<|file_separator|> |
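The comment above explains that the returned bytearray must have spare, non-zeroed capacity so that using it as an HMAC key actually exercises the bug. A hedged sketch of that kind of use, relying only on the standard-library `hmac` module (the message text is made up):

```python
import hashlib
import hmac

key = get_buffer()  # bytearray(b'a') whose internal capacity exceeds its length
mac = hmac.new(key, b"message", hashlib.sha256)  # bytearray keys are accepted
print(mac.hexdigest())
```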
<|fim_prefix|>def <|fim_suffix|>(
srcloc, obsloc, freq, sigma, a, b, mu=(mu_0, mu_0, mu_0), eps=epsilon_0, moment=1.0<|fim_middle|>get_casing_hertz_mag_dipole_deriv_z<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, all=None, ticks=None, ticklabels=None, label=None):
if all:
_ticks, _ticklabels, _label = True, True, True
elif all is not None:
_ticks, _ticklabels, _label = False, False, False
else:
_ticks, _ticklabels, _label = None, None, None
if ticks is not None:
_ticks = ticks
if ticklabels is not None:
_ticklabels = ticklabels
if label is not None:
_label = label
tickOn = "tick%dOn" % self._axisnum
labelOn = "label%dOn" % self._axisnum
if _ticks is not None:
tickparam = {tickOn: _ticks}
self._axis.set_tick_params(**tickparam)
if _ticklabels is not None:
tickparam = {labelOn: _ticklabels}
self._axis.set_tick_params(**tickparam)
if _label is not None:
pos = self._axis.get_label_position()
if (pos == self._axis_direction) and not _label:
self._axis.label.set_visible(False)
elif _label:
self._axis.label.set_visible(True)
self._axis.set_label_position(self._axis_direction)<|fim_middle|>toggle<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return self._grant_types<|fim_middle|>grant_types<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(x):
return jax.scipy.special.digamma(x)<|fim_middle|>psi<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, get_mock):
plugin = UTorrentClientPlugin()
torrent = b'torrent'
self.assertFalse(plugin.add_torrent(torrent, None))<|fim_middle|>test_add_torrent_bad_settings<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# metrics_list_name = ["acc"]
# auc_metric = paddle.metric.Accuracy()
metrics_list_name = ["auc"]
auc_metric = paddle.metric.Auc()
metrics_list = [auc_metric]
return metrics_list, metrics_list_name<|fim_middle|>create_metrics<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, other):
"""Update the set, adding any elements from other which are not
already in the set.
"""
if not isinstance(other, Set):
raise ValueError('other must be a Set instance')
if self is other:
return
for item in other.items:
self.add(item)<|fim_middle|>union_update<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(addresses, executable):
symbol_table = {}
if sys.platform == 'darwin':
p = subprocess.Popen(['atos', '-o', executable], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
else:
p = subprocess.Popen(['addr2line', '-e', executable], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
str = ''
for s in addresses:
str += "0x%x\n" % s
stdout, stderr = p.communicate(str.encode())
text_symbols = stdout.decode().split('\n')
symbol_table = {}
for i,s in enumerate(addresses):
symbol_table[s] = text_symbols[i]
return symbol_table<|fim_middle|>load_symbol_table<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
try:
key = original.func.attr.value
kword_params = self.METHOD_TO_PARAMS[key]
except (AttributeError, KeyError):
# Either not a method from the API or too convoluted to be sure.
return updated
# If the existing code is valid, keyword args come after positional args.
# Therefore, all positional args must map to the first parameters.
args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
if any(k.keyword.value == "request" for k in kwargs):
# We've already fixed this file, don't fix it again.
return updated
kwargs, ctrl_kwargs = partition(
lambda a: a.keyword.value not in self.CTRL_PARAMS,
kwargs
)
args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
request_arg = cst.Arg(
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),<|fim_middle|>leave_call<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
buff = io.BytesIO(b"#ASDF 1.0.0\nFOO")
buff.seek(0)
with pytest.raises(ValueError, match=r"Invalid content between header and tree"), asdf.open(buff):
pass<|fim_middle|>test_junk_file<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(basic_xml):
xml = XML.from_string(basic_xml)
assert xml.root.tag == "Tests"<|fim_middle|>test_xml_from_string<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
# SETTING UP DEFENCE GAN TRAINED MODELS
# * Clone the defence gan gitrepo https://github.com/yogeshbalaji/InvGAN
# * Follow the setup instructions and copy the following:
# * data/ to adversarial-robustness-toolbox/defence_gan/data/
# * output/gans/mnist to adversarial-robustness-toolbox/defence_gan/output/gans/mnist
# * output/gans_inv_nottrain/mnist to adversarial-robustness-toolbox/defence_gan/output/gans_inv_nottrain/mnist
# STEP 0
logging.info("Loading a Dataset")
(_, _), (x_test_original, y_test_original), min_pixel_value, max_pixel_value = load_mnist()
# TODO remove before PR request
# batch_size = x_test_original.shape[0]
batch_size = 1000
(x_test, y_test) = (x_test_original[:batch_size], y_test_original[:batch_size])
# STEP 1
logging.info("Creating a TS1 Mnist Classifier")
classifier = create_ts1_art_mnist_classifier(min_pixel_value, max_pixel_value)
classifier.fit(x_test, y_test, batch_size=batch_size, nb_epochs=3)
# Code to load the original defense_gan paper mnist classifier to reproduce paper results
# classifier_paper = create_defense_gan_paper_mnist_art_classifier()
# STEP 2
logging.info("Evaluate the ART classifier on non adversarial examples")
predictions = classifier.predict(x_test)
accuracy_non_adv = get_accuracy(predictions, y_test)
# STEP 3
logging.info("Generate adversarial examples")
attack = FastGradientMethod(classifier, eps=0.2)
x_test_adv = attack.generate(x=x_test)
# STEP 4
logging.info("Evaluate the classifier on the adversarial examples")
predictions = classifier.predict(x_test_adv)
accuracy_adv = get_accuracy(predictions, y_test)
# STEP 5
logging.info("Create DefenceGAN")
encoder = create_ts1_encoder_model(batch_size)
generator = create_ts1_generator_model(batch_size)
inverse_gan = InverseGAN(sess=generator._sess, gan=generator, inverse_gan=encoder)
# defense_gan = DefenseGAN(sess=generator.sess,
# generator=generator)
logging.info("Generating Defended Samples")
x_test_defended = inverse_gan(x_test_adv, maxiter=1)
# STEP 6
logging.info("Evaluate the classifier on the defended examples")
predictions = classifier.predict(x_test_defended)
accuracy_defended = get_accuracy(predictions, y_test)
logger.info("Accuracy on non adversarial examples: {}%".format(accuracy_non_adv))
logger.info("Accuracy on adversarial examples: {}%".format(accuracy_adv))
logger.info("Accuracy on defended examples: {}%".format(accuracy_defended))<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request):
testgroups = []
for round in request.contest.round_set.all():
pis = ProblemInstance.objects.filter(round=round)
res = {'id': str(round.id), 'name': round.name, 'tasks': []}
for pi in pis:
task = {
'name': pi.problem.name,
'short_name': pi.short_name,
'testgroups': [],
}
for test in Test.objects.filter(problem_instance=pi):
if test.group not in task['testgroups']:
task['testgroups'].append(test.group)
res['tasks'].append(task)
testgroups.append(res)
return testgroups<|fim_middle|>testgroups<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(module: Type[nn.Module]):
return module in _leaf_modules<|fim_middle|>is_notrace_module<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(key_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListIotHubResourceKeysForKeyNameResult:
"""
Get a shared access policy by name from an IoT hub. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
:param str key_name: The name of the shared access policy.
:param str resource_group_name: The name of the resource group that contains the IoT hub.
:param str resource_name: The name of the IoT hub.
"""
__args__ = dict()
__args__['keyName'] = key_name
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:devices/v20221115preview:listIotHubResourceKeysForKeyName', __args__, opts=opts, typ=ListIotHubResourceKeysForKeyNameResult).value
return AwaitableListIotHubResourceKeysForKeyNameResult(
key_name=pulumi.get(__ret__, 'key_name'),
primary_key=pulumi.get(__ret__, 'primary_key'),
rights=pulumi.get(__ret__, 'rights'),
secondary_key=pulumi.get(__ret__, 'secondary_key'))<|fim_middle|>list_iot_hub_resource_keys_for_key<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, faces=None, color=None, join_faces=False):
"""Draw a selection of faces.
Parameters
----------
faces : list[list[int]], optional
A list of faces to draw.
The default is None, in which case all faces are drawn.
color : :class:`~compas.colors.Color` | dict[int, :class:`~compas.colors.Color`], optional
The color specification for the faces.
The default color is :attr:`VolMeshArtist.default_facecolor`.
join_faces : bool, optional
If True, join the faces into one mesh.
Returns
-------
list[:rhino:`Rhino.Geometry.Mesh`]
"""
self.face_color = color
faces = faces or self.faces
vertex_xyz = self.vertex_xyz
facets = []
for face in faces:
facets.append(
{
"points": [vertex_xyz[vertex] for vertex in self.volmesh.halfface_vertices(face)],
"name": "{}.face.{}".format(self.volmesh.name, face),
"color": self.face_color[face].rgb255,
}
)
return compas_ghpython.METHOD_NAME(facets)<|fim_middle|>draw_faces<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
generators: List[MagneticOperation],
) -> List[MagneticOperation]:
"""
Generate all coset operations
"""
coset = set()
que = Queue() # type: ignore
identity = MagneticOperation.identity()
que.put(identity)
while not que.empty():
g = que.get()
if g in coset:
continue
coset.add(g)
for h in generators:
# Take modulus by translation subgroup
gh = remainder1_symmetry_operation(g * h)
que.put(gh)
# Put identity in the first
coset.remove(identity)
ret = [identity] + list(coset) # type: ignore
return ret<|fim_middle|>traverse<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(func):
"""Map function to current loggers.
Args:
func (function): Function to call on every logger.
"""
for logger in LOGGERS.values():
func(logger)<|fim_middle|>map_logger<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""all users -> Start menu -> Programs -> Admin tools"""
return _get_path_buf(PathConstants.CSIDL_COMMON_ADMINTOOLS)<|fim_middle|>get_common_admin_tools<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
audit_info = AWS_Audit_Info(
session_config=None,
original_session=None,
audit_session=session.Session(
profile_name=None,
botocore_session=None,
),
audited_account=AWS_ACCOUNT_NUMBER,
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
audited_user_id=None,
audited_partition="aws",
audited_identity_arn=None,
profile=None,
profile_region=None,
credentials=None,
assumed_role_info=None,
audited_regions=["us-east-1", "eu-west-1"],
organizations_metadata=None,
audit_resources=None,
mfa_enabled=False,
audit_metadata=Audit_Metadata(
services_scanned=0,
expected_checks=[],
completed_checks=0,
audit_progress=0,
),
)
return audit_info<|fim_middle|>set_mocked_audit_info<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self, list_, i):<|fim_middle|>list_pop<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(tas_series, group, window, nvals):
tas = tas_series(np.ones(366), start="2000-01-01")
grouper = Grouper(group, window=window)
grpd = grouper.group(tas)
if window > 1:
assert "window" in grpd.dims
assert grpd.count().max() == nvals<|fim_middle|>test_grouper_group<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
tic = monotonic()
toc = None
try:
yield lambda: toc - tic
finally:
toc = monotonic()<|fim_middle|>timer<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(reserve, col):
size = 16
size += reserve * 16
col /= 8
size += col * (3 + 8 + 4 + 8 + 16 + 32 + 64 + 128)
size += col
return size<|fim_middle|>get_row_size<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(context, backend):
with raises(AssertionError, match=r"does not acc.*inner_max_num_threads"):
context(backend, inner_max_num_threads=1)<|fim_middle|>test_threadpool_limitation_in_child_context_error<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_aliases_of_yes_and_no<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
constraint=None, index_elements=None,
index_where=None, set_=None, where=None):
"""
Specifies a DO UPDATE SET action for ON CONFLICT clause.
Either the ``constraint`` or ``index_elements`` argument is
required, but only one of these can be specified.
:param constraint:
The name of a unique or exclusion constraint on the table,
or the constraint object itself if it has a .name attribute.
:param index_elements:
A sequence consisting of string column names, :class:`.Column`
objects, or other column expression objects that will be used
to infer a target index.
:param index_where:
Additional WHERE criterion that can be used to infer a
conditional target index.
:param set_:
Required argument. A dictionary or other mapping object
with column names as keys and expressions or literals as values,
specifying the ``SET`` actions to take.
If the target :class:`.Column` specifies a ".key" attribute distinct
from the column name, that key should be used.
.. warning:: This dictionary does **not** take into account
Python-specified default UPDATE values or generation functions,
e.g. those specified using :paramref:`.Column.onupdate`.
These values will not be exercised for an ON CONFLICT style of
UPDATE, unless they are manually specified in the
:paramref:`.Insert.on_conflict_do_update.set_` dictionary.
:param where:
Optional argument. If present, can be a literal SQL
string or an acceptable expression for a ``WHERE`` clause
that restricts the rows affected by ``DO UPDATE SET``. Rows
not meeting the ``WHERE`` condition will not be updated
(effectively a ``DO NOTHING`` for those rows).
.. versionadded:: 1.1
.. seealso::
:ref:`postgresql_insert_on_conflict`
"""
self._post_values_clause = OnConflictDoUpdate(
constraint, index_elements, index_where, set_, where)
return self<|fim_middle|>on_conflict_do_update<|file_separator|> |
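A brief usage sketch of the method documented above, assuming it is the PostgreSQL-dialect `insert()` construct (as the docstring's cross-references suggest) and using a hypothetical `users` table:

```python
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import insert

# Hypothetical table; only the unique "id" column matters for this example.
metadata = sa.MetaData()
users = sa.Table(
    "users", metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("name", sa.String),
)

stmt = insert(users).values(id=1, name="new name")
stmt = stmt.on_conflict_do_update(
    index_elements=["id"],              # infer the target index from the "id" column
    set_={"name": stmt.excluded.name},  # on conflict, SET name = excluded.name
)

# Render with the PostgreSQL dialect to see the ON CONFLICT ... DO UPDATE clause.
print(stmt.compile(dialect=postgresql.dialect()))
```

The `set_` mapping pulls the conflicting row's proposed value from the `excluded` pseudo-table, which is how PostgreSQL exposes the row that failed to insert.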
<|fim_prefix|>def <|fim_suffix|>(self, resource_version, timeout=5):
stream = self.k8s_api.get_host_definition_stream(resource_version, timeout)
for watch_event in stream:
watch_event = utils.munch(watch_event)
host_definition_info = self.resource_info_manager.generate_host_definition_info(watch_event.object)
if self.host_definition_manager.is_host_definition_in_pending_phase(host_definition_info.phase) and \
not utils.is_watch_object_type_is_delete(watch_event.type):
self._define_host_definition_after_pending_state(host_definition_info)<|fim_middle|>watch_host_definition_with_timeout<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
'''
Creates the connections needed for testing.
'''
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind((pycompletionserver.HOST, 0))
server.listen(1) #socket to receive messages.
from thread import start_new_thread
t = pycompletionserver.CompletionServer(server.getsockname()[1])
t.exit_process_on_kill = False
start_new_thread(t.run, ())
sock, _addr = server.accept()
return t, sock<|fim_middle|>create_connections<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
parameters = {
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters<|fim_middle|>url_parameters<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self, obj: typing.Any, name: typing.Optional[str] = ...
) -> None: ...<|fim_middle|>ensure_object_imported<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Called after Translator is set, prior to initializing pages.
"""
pass<|fim_middle|>init<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
pdt1 = PartialDateTime()
pdt2 = PartialDateTime(month=3, day=24)
self._test(pdt1, pdt2, "pdt_empty")<|fim_middle|>test_pdt_diff_no_fields<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls):
(x_train, y_train), (x_test, y_test), _, _ = load_dataset("mnist")
cls.mnist = (x_train, y_train), (x_test, y_test)<|fim_middle|>set_up_class<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(model_path):
"""Check whether there is a pyspark component_list stored in path"""
return 'stages' in os.listdir(model_path)<|fim_middle|>is_pipe<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None:
importlib.reload(cluster)
with patch.object(ClickhousePool, "execute") as execute:
execute.return_value = ClickhouseResult(
[("host_1", 9000, 1, 1), ("host_2", 9000, 2, 1)]
)
local_cluster = get_storage(StorageKey("errors")).get_cluster()
assert len(local_cluster.get_local_nodes()) == 1
assert local_cluster.get_local_nodes()[0].host_name == "host_1"
assert local_cluster.get_local_nodes()[0].port == 9000
assert local_cluster.get_local_nodes()[0].shard is None
assert local_cluster.get_local_nodes()[0].replica is None
distributed_cluster = get_storage(StorageKey("transactions")).get_cluster()
assert len(distributed_cluster.get_local_nodes()) == 2
assert distributed_cluster.get_local_nodes()[0].host_name == "host_1"
assert distributed_cluster.get_local_nodes()[1].host_name == "host_2"<|fim_middle|>test_get_local_nodes<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(key):
if key == "user":
return [
"value",
"id",
"email",
"username",
"ip_address",
"times_seen",
"last_seen",
"first_seen",
]
else:
return ["value", "times_seen", "last_seen", "first_seen"]<|fim_middle|>get_header_fields<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(client):
response = client.post(react_url("/"))
assert response.status_code == 400
assert response.content == b"No GraphQL query found"<|fim_middle|>test_invalid_post_returns_400<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, embeddings, label_dict, **kwargs):
model_args = dict(self.model_args)
for k in kwargs:
if k in model_args:
del model_args[k]
return self.model_cls(
embeddings=embeddings,
label_type=self.train_label_type,
**model_args,
**kwargs,
)<|fim_middle|>build_model<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(admin_client):
def move_down_client(theme, css):
url = reverse(
"misago:admin:themes:move-css-down",
kwargs={"pk": theme.pk, "css_pk": css.pk},
)
return admin_client.post(url)
return move_down_client<|fim_middle|>move_down<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.run_test([self.report_1], environment=[self.env_1.name])
self.run_test([self.report_2], environment=[self.env_2.name])<|fim_middle|>test_environment_filter<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
"""Test that the JWKS endpoint is empty when plain auth is used."""
channel = self.make_request("GET", "/_synapse/jwks")
self.assertEqual(200, channel.code, channel.result)
self.assertEqual({"keys": []}, channel.json_body)<|fim_middle|>test_empty_jwks_for_msc3861_client_secret<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, editor, option, index):
self.initStyleOption(option, index)
option.showDecorationSelected = editor.style().styleHint(
QtWidgets.QStyle.SH_ItemView_ShowDecorationSelected, None, editor
)
widget = option.widget
style = widget.style() if widget else QtWidgets.QApplication.style()
geo = style.subElementRect(
QtWidgets.QStyle.SE_ItemViewItemText, option, widget
)
delta = self._q_smart_min_size(editor).width() - geo.width()
if delta > 0:
if editor.layoutDirection() == QtCore.Qt.RightToLeft:
geo.adjust(-delta, 0, 0, 0)
else:
geo.adjust(0, 0, delta, 0)
editor.setGeometry(geo)<|fim_middle|>update_editor_geometry<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(actual, desired, decimal=7):
"""Check that arrays are almost equal, including units.
Wrapper around :func:`numpy.testing.assert_array_almost_equal`
"""
actual, desired = check_and_drop_units(actual, desired)
check_mask(actual, desired)
numpy.testing.METHOD_NAME(actual, desired, decimal)<|fim_middle|>assert_array_almost_equal<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(
client,
fake_project,
logged_user,
all_group,
tests_data_dir: Path,
osparc_product_name: str,
):
fake_project.update(
{
"accessRights": {
f"{all_group['gid']}": {"read": True, "write": False, "delete": False}
},
},
)
async with NewProject(
fake_project,
client.app,
user_id=logged_user["id"],
tests_data_dir=tests_data_dir,
product_name=osparc_product_name,
) as project:
print("-----> added project", project["name"])
yield project
print("<----- removed project", project["name"])<|fim_middle|>shared_project<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(s3_path):
try:
path_parts = s3_path.replace("s3://", "").split("/")
bucket = path_parts.pop(0)
key = "/".join(path_parts)
except Exception as exc:
raise mlrun.errors.MLRunInvalidArgumentError(
"failed to parse s3 bucket and key"
) from exc
return bucket, key<|fim_middle|>parse_s3_bucket_and_key<|file_separator|> |
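A quick usage sketch of the parser above, with a hypothetical S3 path:

```python
bucket, key = parse_s3_bucket_and_key("s3://my-bucket/models/model.pkl")
assert bucket == "my-bucket"
assert key == "models/model.pkl"
```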
<|fim_prefix|>def <|fim_suffix|>(self):
g = yield self.setup_generator()
buildrequest = yield self.insert_buildrequest_new()
build = yield g.partial_build_dict(self.master, buildrequest)
report = yield g.buildrequest_message(self.master, build)
g.formatter.format_message_for_build.assert_called_with(self.master, build,
is_buildset=True,
mode=self.all_messages,
users=[])
self.assertEqual(report, {
'body': 'start body',
'subject': 'start subject',
'type': 'plain',
'results': None,
'builds': [build],
'users': [],
'patches': [],
'logs': []
})<|fim_middle|>test_build_message_start_no_result<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> DeviceInfo:
return get_device(BB02BTC_BOOTLOADER)<|fim_middle|>get_bitbox02btc_bootloader<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
pass<|fim_middle|>pre_operations<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
field = MultilineField(allow_duplicates=True)
value_with_duplicates = '1,1'
self.assertEqual(field.clean(value_with_duplicates), ['1', '1'])<|fim_middle|>test_field_valid_when_duplicates_allowed<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
A = Square('A',bounds1=(0, 0.5), bounds2=(0, 1))
B = Square('B',bounds1=(0.5, 1.), bounds2=(0, 1))
connectivity = [((0,0,1),(1,0,-1))]
patches = [A,B]
domain = Domain.join(patches, connectivity, 'domain')
x,y = domain.coordinates
solution = sin(pi*x)*sin(pi*y)
f = 2*pi**2*solution
l2_error, h1_error = run_poisson_2d(solution, f, domain, ncells=[2**2,2**2], degree=[2,2])
expected_l2_error = 0.002035229666394183
expected_h1_error = 0.056796387991647795
assert ( abs(l2_error - expected_l2_error) < 1e-7 )
assert ( abs(h1_error - expected_h1_error) < 1e-7 )<|fim_middle|>test_poisson_2d_2_patch_dirichlet_2<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(learner: Learner, dataset: AttrDict, num_invalids: int) -> AttrDict:
# noinspection PyProtectedMember
(
action_distribution,
policy_loss,
exploration_loss,
kl_old,
kl_loss,
value_loss,
loss_locals,
) = learner._calculate_losses(dataset, num_invalids)
return AttrDict(
policy_loss=policy_loss,
exploration_loss=exploration_loss,
kl_old=kl_old,
kl_loss=kl_loss,
value_loss=value_loss,
)<|fim_middle|>learner_losses_res<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
return pulumi.get(self, "project")<|fim_middle|>project<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
text,
outname,
box,
bg_color=DEFAULT_CONFIG["thumb_bg_color"],
font=DEFAULT_CONFIG["thumb_font"],
font_color=DEFAULT_CONFIG["thumb_font_color"],
font_size=DEFAULT_CONFIG["thumb_font_size"],
options=None,
):
"""Create a thumbnail image."""
kwargs = {}
if font:
kwargs["font"] = ImageFont.truetype(font, font_size)
if font_color:
kwargs["fill"] = font_color
img = PILImage.new("RGB", box, bg_color)
anchor = (box[0] // 2, box[1] // 2)
d = ImageDraw.Draw(img)
logger.info(f"kwargs: {kwargs}")
d.text(anchor, text, anchor="mm", **kwargs)
outformat = "JPEG"
logger.info("Save thumbnail image: %s (%s)", outname, outformat)
save_image(img, outname, outformat, options=options, autoconvert=True)<|fim_middle|>generate_thumbnail<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, el, val):
self.driver.execute_script('''
var e=arguments[0]; var v=arguments[1]; e.value=v;''',
el, val)<|fim_middle|>change_val<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(func):
data = [
{timestamp_field: func(0), 'foo': 'bar', 'id': 1},
{timestamp_field: func(1), 'foo': 'bar', 'id': 2},
{timestamp_field: func(2), 'foo': 'bar', 'id': 3},
{timestamp_field: func(3), 'foo': 'bar', 'id': 4},
{timestamp_field: func(4), 'foo': 'bar', 'id': 5},
{timestamp_field: func(5), 'foo': 'bar', 'id': 6}
]
random.shuffle(data)
return data<|fim_middle|>get_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
fx = self.fx
assert set(
infer_constraints(
Instance(fx.gvi, [UnpackType(Instance(fx.std_tuplei, [fx.t]))]),
Instance(fx.gvi, [fx.a, fx.b]),
SUPERTYPE_OF,
)
) == {
Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a),
Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b),
}<|fim_middle|>test_unpack_homogenous_tuple<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(mock_db, mocker):
"""
tests the parsing and processing the ua found by the online query
"""
http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
# use a different profile for this unit test to make sure we don't already have info about
# it in the db
profileid = 'profile_192.168.99.99'
mock_db.get_user_agent_from_profile.return_value = None
# mock the function that gets info about the given ua from an online db
mock_requests = mocker.patch("requests.get")
mock_requests.return_value.status_code = 200
mock_requests.return_value.text = """{
"agent_name":"Safari",
"os_type":"Macintosh",
"os_name":"OS X"
}"""
# add os_type , os_name and agent_name to the db
ua_info = http_analyzer.get_user_agent_info(SAFARI_UA, profileid)
assert ua_info['os_type'] == 'Macintosh'
assert ua_info['browser'] == 'Safari'<|fim_middle|>test_parsing_online_ua_info<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, input_shape, **kwargs):
"""Construct a network and return its input and output layers.
Arguments
---------
input_shape : tuple of shape = (series_length (m), n_dimensions (d))
The shape of the data fed into the input layer.
Returns
-------
input_layer : keras.layers.Input
The input layer of the network.
output_layer : keras.layers.Layer
The output layer of the network.
"""
from tensorflow import keras
n_feature_maps = 64
input_layer = keras.layers.Input(input_shape)
# 1st residual block
conv_x = keras.layers.Conv1D(
filters=n_feature_maps, kernel_size=8, padding="same"
)(input_layer)
conv_x = keras.layers.BatchNormalization()(conv_x)
conv_x = keras.layers.Activation("relu")(conv_x)
conv_y = keras.layers.Conv1D(
filters=n_feature_maps, kernel_size=5, padding="same"
)(conv_x)
conv_y = keras.layers.BatchNormalization()(conv_y)
conv_y = keras.layers.Activation("relu")(conv_y)
conv_z = keras.layers.Conv1D(
filters=n_feature_maps, kernel_size=3, padding="same"
)(conv_y)
conv_z = keras.layers.BatchNormalization()(conv_z)
# expand channels for the sum
shortcut_y = keras.layers.Conv1D(
filters=n_feature_maps, kernel_size=1, padding="same"
)(input_layer)
shortcut_y = keras.layers.BatchNormalization()(shortcut_y)
output_block_1 = keras.layers.add([shortcut_y, conv_z])
output_block_1 = keras.layers.Activation("relu")(output_block_1)
# 2nd residual block
conv_x = keras.layers.Conv1D(
filters=n_feature_maps * 2, kernel_size=8, padding="same"
)(output_block_1)
conv_x = keras.layers.BatchNormalization()(conv_x)
conv_x = keras.layers.Activation("relu")(conv_x)
conv_y = keras.layers.Conv1D(
filters=n_feature_maps * 2, kernel_size=5, padding="same"
)(conv_x)
conv_y = keras.layers.BatchNormalization()(conv_y)
conv_y = keras.layers.Activation("relu")(conv_y)
conv_z = keras.layers.Conv1D(
filters=n_feature_maps * 2, kernel_size=3, padding="same"
)(conv_y)
conv_z = keras.layers.BatchNormalization()(conv_z)
# expand channels for the sum
shortcut_y = keras.layers.Conv1D(
filters=n_feature_maps * 2, kernel_size=1, padding="same"
)(output_block_1)
shortcut_y = keras.layers.BatchNormalization()(shortcut_y)
output_block_2 = keras.layers.add([shortcut_y, conv_z])
output_block_2 = keras.layers.Activation("relu")(output_block_2)
# 3rd residual block
conv_x = keras.layers.Conv1D(
filters=n_feature_maps * 2, kernel_size=8, padding="same"
)(output_block_2)
conv_x = keras.layers.BatchNormalization()(conv_x)
conv_x = keras.layers.Activation("relu")(conv_x)
conv_y = keras.layers.Conv1D(
filters=n_feature_maps * 2, kernel_size=5, padding="same"
)(conv_x)
conv_y = keras.layers.BatchNormalization()(conv_y)
conv_y = keras.layers.Activation("relu")(conv_y)
conv_z = keras.layers.Conv1D(
filters=n_feature_maps * 2, kernel_size=3, padding="same"
)(conv_y)
conv_z = keras.layers.BatchNormalization()(conv_z)
# no need to expand channels because they are equal
shortcut_y = keras.layers.BatchNormalization()(output_block_2)
output_block_3 = keras.layers.add([shortcut_y, conv_z])
output_block_3 = keras.layers.Activation("relu")(output_block_3)
# global average pooling
gap_layer = keras.layers.GlobalAveragePooling1D()(output_block_3)
return input_layer, gap_layer<|fim_middle|>build_network<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(app_context):
"""URL for creating a shortlink."""
return url_for('create_shortlink')<|fim_middle|>create_shortlink<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional[Sequence[Any]]:
"""
Gets a page of NetworkManagerEffectiveSecurityAdminRules
"""
return pulumi.get(self, "value")<|fim_middle|>value<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(qw, qx, qy, qz):
'''
Translates from Quaternion to Yaw.
@param qw,qx,qy,qz: Quaternion values
@type qw,qx,qy,qz: float
@return Yaw value translated from Quaternion
'''
rotateZa0=2.0*(qx*qy + qw*qz)
rotateZa1=qw*qw + qx*qx - qy*qy - qz*qz
rotateZ=0.0
if(rotateZa0 != 0.0 and rotateZa1 != 0.0):
rotateZ=atan2(rotateZa0,rotateZa1)
return rotateZ<|fim_middle|>quat2_yaw<|file_separator|> |
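A small worked check of the conversion above: a quaternion for a 60 degree rotation about the z axis (qw = cos 30°, qz = sin 30°, qx = qy = 0) should give a yaw of π/3:

```python
from math import cos, isclose, pi, sin

qw, qx, qy, qz = cos(pi / 6), 0.0, 0.0, sin(pi / 6)  # 60 degrees about z
yaw = quat2_yaw(qw, qx, qy, qz)  # atan2(2*qw*qz, qw**2 - qz**2) = atan2(sin 60, cos 60)
assert isclose(yaw, pi / 3, rel_tol=1e-9)
```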
<|fim_prefix|>def <|fim_suffix|>():
"""test_tar_gz"""
filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_parquet",
"parquet_cpp_example.parquet.tar.gz",
)
filename = "file://" + filename
(
format, # pylint: disable=redefined-builtin
entries,
) = archive_io.list_archive_entries(filename, ["gz", "tar.gz"])
assert format.numpy().decode() == "tar.gz"
assert entries.shape == [2]
assert entries[0].numpy().decode() == "parquet_cpp_example.parquet.1"
assert entries[1].numpy().decode() == "parquet_cpp_example.parquet.2"
elements = archive_io.read_archive(filename, format, entries)
assert elements.shape == [2]
expected_filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_parquet",
"parquet_cpp_example.parquet",
)
expected_filename = "file://" + expected_filename
assert elements[0].numpy() == tf.io.read_file(expected_filename).numpy()
assert elements[1].numpy() == tf.io.read_file(expected_filename).numpy()<|fim_middle|>test_tar_gz<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, logging_outputs) -> None:
super().METHOD_NAME(logging_outputs)
latency = sum(log.get("latency", 0) for log in logging_outputs)
delays_var = sum(log.get("delays_var", 0) for log in logging_outputs)
latency_loss = sum(log.get("latency_loss", 0) for log in logging_outputs)
nsentences = sum(log.get("nsentences", 0) for log in logging_outputs)
metrics.log_scalar("latency", latency.float() / nsentences, nsentences, round=3)
metrics.log_scalar("delays_var", delays_var / nsentences, nsentences, round=3)
metrics.log_scalar(
"latency_loss", latency_loss / nsentences, nsentences, round=3
)<|fim_middle|>reduce_metrics<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, pcliq, pcice, si):
sstar = self.wrm.saturation(pcliq)
tmp = (1.0 - si) * sstar
G1 = self.wrm.d_saturation( pcice + self.wrm.capillaryPressure( tmp + si))
G2 = self.wrm.d_capillaryPressure( tmp + si )
return -G1 / (sstar + G1*G2*(1-sstar))<|fim_middle|>dsi_dpcice_frozen_unsaturated<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, op: AssignMulti) -> GenAndKill:
return CLEAN<|fim_middle|>visit_assign_multi<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(a, n):
# Since numba slices can't be boxed at the moment
return a[build_full_slice_tuple(literally(n))]<|fim_middle|>full_slice_array<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, record: logging.LogRecord) -> None:
"""
Write a log record to the stream.
Parameters
----------
record: logging.LogRecord
The log record to write.
"""
color = self._get_color(record.levelno)
self._set_color(color)
logging.StreamHandler.METHOD_NAME(self, record)
self._set_color(self.FOREGROUND_WHITE)<|fim_middle|>emit<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self, r):
# normal for client, reverse for server
request = await r.json()
lightning_amount_sat = request['invoiceAmount']
their_pubkey = bytes.fromhex(request['refundPublicKey'])
assert len(their_pubkey) == 33
swap = self.sm.create_reverse_swap(
payment_hash=None,
lightning_amount_sat=lightning_amount_sat,
their_pubkey=their_pubkey
)
response = {
"id": swap.payment_hash.hex(),
'preimageHash': swap.payment_hash.hex(),
"acceptZeroConf": False,
"expectedAmount": swap.onchain_amount,
"timeoutBlockHeight": swap.locktime,
"address": swap.lockup_address,
"redeemScript": swap.redeem_script.hex(),
}
return web.json_response(response)<|fim_middle|>create_normal_swap<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, value):
if type(value) == date:
return "{:%Y-%m-%d}".format(value)
else:
return str(value)<|fim_middle|>to_url<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None: ...<|fim_middle|>python_exit<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
request: HttpRequest,
**kwargs: Any
) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)<|fim_middle|>send_request<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(n: int):
clear_worker_pool()
os.makedirs(SCRATCH_DIR)
# Reserve two cores so that bookkeeping does not interfere with runs.
cpu_count = multiprocessing.cpu_count() - 2
# Adjacent cores sometimes share cache, so we space out single core runs.
step = max(n, 2)
for i in range(0, cpu_count, step):
core_str = f"{i}" if n == 1 else f"{i},{i + n - 1}"
_, result_file = tempfile.mkstemp(suffix=".pkl", prefix=SCRATCH_DIR)
_WORKER_POOL.put((core_str, result_file, n))<|fim_middle|>fill_core_pool<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
iwork = self._lsoda_solver._integrator.iwork
rwork = self._lsoda_solver._integrator.rwork
order = iwork[14]
h = rwork[11]
yh = np.reshape(rwork[20:20 + (order + 1) * self.n],
(self.n, order + 1), order='F').copy()
return LsodaDenseOutput(self.t_old, self.t, h, order, yh)<|fim_middle|>dense_output_impl<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, slaveid):
"""
Callback function invoked by terminated slave threads
"""
self.lock.acquire()
try:
self.slavelist.remove("threadmanager-slave%s" % slaveid)
except:
pass
else:
self.activeThreadCount -= 1
self.lock.release()<|fim_middle|>slave_terminate_callback<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(attrname, old, new):
global X, y
dataset = dataset_select.value
algorithm = algorithm_select.value
n_clusters = int(clusters_slider.value)
n_samples = int(samples_slider.value)
X, y = get_dataset(dataset, n_samples)
X, y_pred = clustering(X, algorithm, n_clusters)
colors = [spectral[i] for i in y_pred]
source.data = dict(colors=colors, x=X[:, 0], y=X[:, 1])<|fim_middle|>update_samples_or_dataset<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(data):
try:
data.decode('cp1252')
except UnicodeDecodeError:
return False
else:
return True<|fim_middle|>is_c_p1252<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
with xargs._thread_mapper(10) as thread_map:
_self = thread_map.__self__ # type: ignore
assert isinstance(_self, concurrent.futures.ThreadPoolExecutor)<|fim_middle|>test_thread_mapper_concurrency_uses_threadpoolexecutor_map<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(store: str):
dataset = {
category
for category, in await models.Product.select("category").where(models.Product.store_id == store).gino.all()
if category
}
dataset.discard("all")
return ["all"] + sorted(dataset)<|fim_middle|>categories<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertFalse(self.carrier.prod_environment)
self.carrier.toggle_prod_environment()
self.carrier.onchange_prod_environment()
self.assertTrue(self.carrier.prod_environment)
self.carrier.toggle_prod_environment()
self.carrier.onchange_prod_environment()
self.assertFalse(self.carrier.prod_environment)
self.assertEqual(
self.carrier.get_tracking_link(self.picking),
"https://service.post.ch/EasyTrack/"
"submitParcelData.do?formattedParcelCodes=False",
)<|fim_middle|>test_misc<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
while(self.gui.client == None):
pass
while(True):
start_time = datetime.now()
self.gui.update_gui()
acknowledge_message = self.gui.get_acknowledge()
while(acknowledge_message == False):
acknowledge_message = self.gui.get_acknowledge()
self.gui.set_acknowledge(False)
finish_time = datetime.now()
self.iteration_counter = self.iteration_counter + 1
dt = finish_time - start_time
ms = (dt.days * 24 * 60 * 60 + dt.seconds) * 1000 + dt.microseconds / 1000.0
if(ms < self.ideal_cycle):
time.sleep((self.ideal_cycle-ms) / 1000.0)<|fim_middle|>run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Tests the form of piecewise constant constraints that
# should be used for a backward discretization, i.e.
# that each input at non-sampling points is set equal to
# the next value in the time set, rather than the previous.
# This is the default.
n_time_points = 5
sample_points = [0, 2, 4]
sample_points_set = set(sample_points)
m = self._make_model(n_time_points=n_time_points)
inputs = [pyo.Reference(m.var[:, "B"]), m.input]
m.input_set, m.pwc_con = get_piecewise_constant_constraints(
inputs, m.time, sample_points
)
pred_expr = {
# Here we rely on knowledge that delta t == 1
(i, t): inputs[i][t] - inputs[i][t + 1] == 0
for t in m.time
if t not in sample_points_set
for i in range(len(inputs))
}
self.assertEqual(list(m.input_set), list(range(len(inputs))))
for i in range(len(inputs)):
for t in m.time:
if t in sample_points_set:
self.assertNotIn((i, t), m.pwc_con)
else:
self.assertIn((i, t), m.pwc_con)
self.assertEqual(
pyo.value(pred_expr[i, t]), pyo.value(m.pwc_con[i, t].expr)
)
self.assertTrue(
compare_expressions(pred_expr[i, t], m.pwc_con[i, t].expr)
)<|fim_middle|>test_pwc_constraint_backward<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.addUsageLine('Preprocess all mics in directory')
## params
self.addParamsLine('-i <inputFile> : A file that contains the path of input micrograph and possibly CTFs')
self.addParamsLine('-s <samplingRate> : sampling rate of the micrographs Angstroms/pixel')
self.addParamsLine('-d <donwsampleFactor> <D=1> : Downsampling factor')
self.addParamsLine('-o <pathToProcessesMics> : A path to the directory where preprocessed micrographs will be saved')
self.addParamsLine('[--invert_contrast ] : Invert micrograph contrast')
self.addParamsLine('[ --phase_flip ] : Apply phase_flipping micrograph contrast')
self.addParamsLine('[ -t <numThreads> <N=1> ] : Number of threads')
## examples
self.addExampleLine(' xmipp_preprocess_mics -i path/to/inputs/file.txt -s 1.6 -d 4 -t 2 -o path/to/outDir')
self.addExampleLine(' path/to/inputs/file.txt:\n'
'#mic ctfparams\n'
'Runs/004986_XmippProtScreenDeepConsensus/extra/preProcMics/010_movie_aligned.mrc Runs/004986_XmippProtScreenDeepConsensus/tmp/010_movie_aligned.mrc.ctfParam\n'
'Runs/004986_XmippProtScreenDeepConsensus/extra/preProcMics/100_movie_aligned.mrc Runs/004986_XmippProtScreenDeepConsensus/tmp/100_movie_aligned.mrc.ctfParam\n'
'Runs/004986_XmippProtScreenDeepConsensus/extra/preProcMics/107_movie_aligned.mrc Runs/004986_XmippProtScreenDeepConsensus/tmp/107_movie_aligned.mrc.ctfParam\n'
)<|fim_middle|>define_params<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, **kwargs):
"""Pack all blocks in a stage into a ``ResLayer``"""
return ResLayer(
groups=self.groups,
base_width=self.base_width,
base_channels=self.base_channels,
**kwargs)<|fim_middle|>make_res_layer<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(*, challenge_set, client: Client, **kwargs):
tests = [
(200, None),
(200, challenge_set.non_participant),
(200, challenge_set.participant),
(200, challenge_set.participant1),
(200, challenge_set.creator),
(200, challenge_set.admin),
]
for test in tests:
assert_viewname_status(
code=test[0],
challenge=challenge_set.challenge,
client=client,
user=test[1],
**kwargs,
)<|fim_middle|>validate_open_view<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
qml.Rot(*angles_1, wires=0)
qml.Rot(*angles_2, wires=0)<|fim_middle|>original_ops<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, values):
ret = self._second(self._named_dense(values))
return ret<|fim_middle|>call<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
TestFactory = BasicRegistrationFactory()
with pytest.raises(AttributeError):
TestFactory.register(StandardWidget, validation_function="not_callable")<|fim_middle|>test_validation_fun_not_callable<|file_separator|> |