text (stringlengths 67–7.88k) |
---|
<|fim_prefix|>def <|fim_suffix|>(self):
"""Make sure permissions and authentication is required to display oauth applications.
This is "sensitive" information"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.client.force_authenticate(self.user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, status.HTTP_200_OK)<|fim_middle|>test_list_applications_no_auth<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, row: int, col: int, txt: str): ...<|fim_middle|>set_cell_value<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertEqual(
self.env["nfe.40.lacres"]._fields["nfe40_lacres_vol_id"].comodel_name,
"nfe.40.vol",
)<|fim_middle|>test_m2o_concrete_to_concrete_spec<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
if self.node.scope.is_checked and self._type.can_be_checked_for_overflow():
return "(c)" + str(self._type.value)
return str(self._type.value)<|fim_middle|>type_str<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Process namespace tokens
namespace = self.project_config.project__package__namespace
if "managed" in self.options:
managed = process_bool_arg(self.options["managed"])
else:
managed = (
bool(namespace) and namespace in self.org_config.installed_packages
)
if "namespaced" in self.options:
namespaced = process_bool_arg(self.options["namespaced"])
else:
namespaced = bool(namespace) and self.org_config.namespace == namespace
namespace_prefix = ""
if namespace and (managed or namespaced):
namespace_prefix = namespace + "__"
self.object_name = self.object_name.replace("%%%NAMESPACE%%%", namespace_prefix)
self.field_name = self.field_name.replace("%%%NAMESPACE%%%", namespace_prefix)<|fim_middle|>apply_namespace<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
'''Create basic 3 row x 3 col search (find) dialog.
Other dialogs override subsidiary create_x methods as needed.
Replace and Find-in-Files add another entry row.
'''
top = Toplevel(self.root)
top.bind("<Return>", self.default_command)
top.bind("<Escape>", self.close)
top.protocol("WM_DELETE_WINDOW", self.close)
top.wm_title(self.title)
top.wm_iconname(self.icon)
_setup_dialog(top)
self.top = top
self.frame = Frame(top, padding="5px")
self.frame.grid(sticky="nwes")
top.grid_columnconfigure(0, weight=100)
top.grid_rowconfigure(0, weight=100)
self.row = 0
self.frame.grid_columnconfigure(0, pad=2, weight=0)
self.frame.grid_columnconfigure(1, pad=2, minsize=100, weight=100)
self.create_entries() # row 0 (and maybe 1), cols 0, 1
self.create_option_buttons() # next row, cols 0, 1
self.create_other_buttons() # next row, cols 0, 1
self.create_command_buttons() # col 2, all rows<|fim_middle|>create_widgets<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, key):
assert key in self.data.keys(), f"[*** Key Error] {key}"
return self.data[key]<|fim_middle|>get_item<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, orm):
# Deleting field 'Team.email'
db.delete_column(u'account_team', 'email')<|fim_middle|>backwards<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(
stack: Stack, findstart: Literal[0, 1], base: str
) -> Union[int, Sequence[Mapping[str, Any]]]:
t0 = monotonic()
if findstart:
return -1
else:
s = state(commit_id=uuid4())
create_task(comp_func(stack=stack, manual=True, change=None, t0=t0, s=s))
return ()<|fim_middle|>omnifunc<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(worker_id: str) -> Generator[DockerClient, None, None]:
context = docker_client()
try:
client = context.__enter__()
except Exception as exc:
raise RuntimeError(
"Failed to create Docker client. Exclude tests that require Docker with "
"'--exclude-service docker'."
) from exc
try:
with cleanup_all_new_docker_objects(client, worker_id):
yield client
finally:
context.__exit__(*sys.exc_info())<|fim_middle|>docker<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(response):
"""Async handling of the server response"""
if thinking_defer and not thinking_defer.called:
# abort the thinking message if we were fast enough
thinking_defer.cancel()
if response:
# remember this response
self._add_to_memory(character, self, response)
else:
response = "... I'm sorry, I was distracted. Can you repeat?"
response = self.response_template.format(
name=self.get_display_name(character), response=response
)
# tell the character about it
if character.location:
character.location.msg_contents(
response,
mapping={"character": character},
from_obj=self,
)
else:
# fallback if character is not in a location
character.msg(f"{self.get_display_name(character)} says, {response}")<|fim_middle|>respond<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, versions): # pylint: disable=no-self-use
"""Custom JMESPath `sort_versions` function that sorts an array of strings as software versions."""
try:
return sorted(versions, key=version_to_tuple)
# if it wasn't sortable, return the input so the pipeline continues
except (TypeError, ValueError):
return versions<|fim_middle|>func_sort_versions<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>(self):<|fim_middle|>ok<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, inputs, training=True):
inputs = self.backend.cast(inputs, self.compute_dtype)
input_shape = self.backend.shape(inputs)
is_batched = len(input_shape) > 3
if not is_batched:
inputs = self.backend.numpy.expand_dims(inputs, axis=0)
h_diff = input_shape[self.height_axis] - self.height
w_diff = input_shape[self.width_axis] - self.width
def random_crop():
input_height, input_width = (
input_shape[self.height_axis],
input_shape[self.width_axis],
)
seed_generator = self._get_seed_generator(self.backend._backend)
h_start = self.backend.cast(
self.backend.random.uniform(
(),
0,
maxval=float(input_height - self.height + 1),
seed=seed_generator,
),
"int32",
)
w_start = self.backend.cast(
self.backend.random.uniform(
(),
0,
maxval=float(input_width - self.width + 1),
seed=seed_generator,
),
"int32",
)
if self.data_format == "channels_last":
return self.backend.core.slice(
inputs,
self.backend.numpy.stack([0, h_start, w_start, 0]),
[
self.backend.shape(inputs)[0],
self.height,
self.width,
self.backend.shape(inputs)[3],
],
)
else:
return self.backend.core.slice(
inputs,
self.backend.numpy.stack([0, 0, h_start, w_start]),
[
self.backend.shape(inputs)[0],
self.backend.shape(inputs)[1],
self.height,
self.width,
],
)
def resize():
outputs = image_utils.smart_resize(
inputs,
[self.height, self.width],
data_format=self.data_format,
backend_module=self.backend,
)
# smart_resize will always output float32, so we need to re-cast.
return self.backend.cast(outputs, self.compute_dtype)
if isinstance(h_diff, int) and isinstance(w_diff, int):
if training and h_diff >= 0 and w_diff >= 0:
outputs = random_crop()
else:
outputs = resize()
else:
predicate = self.backend.numpy.logical_and(
training,
self.backend.numpy.logical_and(h_diff >= 0, w_diff >= 0),
)
outputs = self.backend.cond(
predicate,
random_crop,
resize,
)
if not is_batched:
outputs = self.backend.numpy.squeeze(outputs, axis=0)
return outputs<|fim_middle|>call<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|> (manager, sources, prop_set, result):
used_libraries = []
deps = prop_set.dependency ()
sources.extend(d.value for d in deps)
sources = sequence.unique(sources)
for l in sources:
if l.type () and type.is_derived (l.type (), 'LIB'):
used_libraries.append (l)
created_libraries = []
for l in result:
if l.type () and type.is_derived (l.type (), 'LIB'):
created_libraries.append (l)
created_libraries = set.difference (created_libraries, used_libraries)
set_library_order_aux (created_libraries, used_libraries)<|fim_middle|>set_library_order<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
The type of the resource.
"""
return pulumi.get(self, "type")<|fim_middle|>type<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> 'outputs.BatchEndpointResponse':
"""
[Required] Additional attributes of the entity.
"""
return pulumi.get(self, "batch_endpoint_properties")<|fim_middle|>batch_endpoint_properties<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, data_pkl):
data = pickle.loads(data_pkl)
if '_id' in data.keys():
self._id = data['_id']
else:
self._id = None
self._parameters = NuRadioReco.framework.parameter_serialization.METHOD_NAME(
data['_parameters'],
parameters.particleParameters
)<|fim_middle|>deserialize<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, flatiter):
"""Rebuild a nested tuple structure.
"""
vals = deque(flatiter)
res = []
cur = res
stack = []
for op in self._code:
if op is _PUSH_LIST:
stack.append(cur)
cur.append([])
cur = cur[-1]
elif op is _APPEND_NEXT_VALUE:
cur.append(vals.popleft())
elif op is _APPEND_EMPTY_TUPLE:
cur.append(())
elif op is _POP:
cur = stack.pop()
assert not stack, stack
assert not vals, vals
return res<|fim_middle|>unflatten<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>() -> None:
logging.basicConfig(
level=os.environ.get("PYLOGGING_LEVEL", logging.INFO),
stream=sys.stderr,
datefmt="%Y-%m-%dT%H:%M:%S",
format=(
"%(asctime)s.%(msecs)03d %(process)d %(thread)d %(levelno)03d:%(levelname)-8s "
"%(name)-12s %(module)s:%(lineno)s:%(funcName)s %(message)s"
),
)
Application().run(sys.argv[1:])<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(Script):
code = 'import keyword; keyword.kwlist'
for seq in Script(code).infer():
assert seq.name == 'Sequence'
# This points towards the typeshed implementation
stub_seq, = seq.goto(only_stubs=True)
assert str(stub_seq.module_path).startswith(str(typeshed.TYPESHED_PATH))<|fim_middle|>test_keywords_variable<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
db: orm.Session,
user: users_models.DatabaseUser,
tool: tools_models.DatabaseTool,
token: dict[str, t.Any],
**kwargs,
) -> tuple[
JupyterConfigEnvironment,
list[operators_models.Volume],
list[core_models.Message],<|fim_middle|>configuration_hook<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(datamodel, tables, classes):
def map_class(tabledef):
cls = classes[ tabledef.name ]
table = tables[ tabledef.table ]
def make_relationship(reldef):
if not hasattr(reldef, 'column') or reldef.relatedModelName not in classes:
return
remote_class = classes[ reldef.relatedModelName ]
column = getattr(table.c, reldef.column)
relationship_args = {'foreign_keys': column}
if hasattr(reldef, 'otherSideName'):
backref_args = {'uselist': reldef.type != 'one-to-one'}
if remote_class is cls:
backref_args['remote_side'] = table.c[ tabledef.idColumn ]
relationship_args['backref'] = orm.backref(reldef.otherSideName, **backref_args)
return reldef.name, orm.relationship(remote_class, **relationship_args)
properties = { tabledef.idFieldName: table.c[tabledef.idColumn] }
properties.update({ flddef.name: table.c[flddef.column]
for flddef in tabledef.fields })
properties.update(relationship
for relationship in [ make_relationship(reldef)
for reldef in tabledef.relationships ]
if relationship is not None)
orm.mapper(cls, table, properties=properties)
for tabledef in datamodel.tables:
map_class(tabledef)<|fim_middle|>map_classes<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, simulation, period):
period = period.start.period(DateUnit.MONTH).offset("first-of")
salaire_imposable = simulation.calculate("salaire_imposable", period)
return (salaire_imposable < 500) * 100.0<|fim_middle|>formula_2010_01_01<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, datum):
text = str(datum)
inp = text
key = self.identifier.encode(inp)
result = {"key": key, "input": inp, "text": text}
return result<|fim_middle|>parse_text<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, state_dict: Mapping[str, Any]):
r"""Loads the ShardedOptimizer state.
Args:
state_dict (dict): ShardedOptimizer state. Should be an object returned
from a call to :meth:`state_dict`.
"""
# TODO: implement load_state_dict
raise NotImplementedError("ShardedOptimizer load_state_dict not implemented yet!")<|fim_middle|>load_state_dict<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""
all available profiles for API
Returns:
HTML content or available profiles
"""
res = ""
for profile in sorted(load_all_profiles().keys()):
label = "success" if (
profile == "scan"
) else "warning" if (
profile == "brute"
) else "danger" if (
profile == "vulnerability"
) else "default"
res += """<label><input id="{0}" type="checkbox" class="checkbox checkbox-{0}"><a class="label
label-{1}">{0}</a></label> """.format(profile, label)
return res<|fim_middle|>profiles<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
super().METHOD_NAME()
self.login_as(user=self.user)
self.monitor1 = self._create_monitor()
self.monitor2 = self._create_monitor()
self.env_prod = self._create_monitor_environment(monitor=self.monitor1)
self.env_debug = self._create_monitor_environment(monitor=self.monitor1, name="debug")
# Be sure to note the freeze time above
self.since = self.monitor1.date_added
self.until = self.monitor1.date_added + timedelta(hours=2)
self.add_checkin(self.monitor1, offset={"minutes": 1})
self.add_checkin(self.monitor1, offset={"minutes": 1}, status=CheckInStatus.IN_PROGRESS)
self.add_checkin(self.monitor1, offset={"minutes": 2}, env=self.env_debug)
self.add_checkin(
self.monitor1,
offset={"hours": 1, "minutes": 1},
status=CheckInStatus.MISSED,
)
self.add_checkin(
self.monitor1,
offset={"hours": 1, "minutes": 2},
env=self.env_debug,
status=CheckInStatus.ERROR,
)
self.add_checkin(
self.monitor1,
offset={"hours": 1, "minutes": 1},
status=CheckInStatus.TIMEOUT,
)
self.add_checkin(
self.monitor1,
offset={"hours": 1, "minutes": 2},
env=self.env_debug,
status=CheckInStatus.TIMEOUT,
)
self.add_checkin(self.monitor2, offset={"minutes": 1})
self.add_checkin(self.monitor2, offset={"minutes": 2})<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(self, payload: dict, in_edge=None) -> Result:
try:
dot = self._get_dot_accessor(payload)
ip = dot[self.config.ip]
location = await self.client.fetch(ip)
result = {
"city": location.city.name,
"country": {
"name": location.country.name,
"code": location.country.iso_code
},
"county": location.subdivisions.most_specific.name,
"postal": location.postal.code,
"latitude": location.location.latitude,
"longitude": location.location.longitude
}
return Result(port="location", value=result)
except Exception as e:
self.console.error(str(e))
return Result(port="error", value={"payload": payload, "error": str(e)})<|fim_middle|>run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, dataset: Dataset, is_train: bool) -> Dataset:
# self.validate_schema(dataset, is_train)
dataset = dataset.rename_columns(self._rename_fields)
dataset = dataset.map(self._rename_examples,
load_from_cache_file=self._load_from_cache_file,
num_proc=self._num_workers
)
dataset = dataset.map(self._convert_start_and_end_positions_from_bytes_to_chars,
load_from_cache_file=self._load_from_cache_file,
num_proc=self._num_workers
)
dataset = super().METHOD_NAME(dataset, is_train)
return dataset<|fim_middle|>adapt_dataset<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Path:
raise NotImplementedError<|fim_middle|>recover_loc<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""
Creates a MatrixStore SQLite database using data sourced from Postgres.
This provides an easy way of using existing test fixtures with the
MatrixStore.
"""
latest_date = ImportLog.objects.latest_in_category("prescribing").current_at
end_date = str(latest_date)[:7]
return matrixstore_from_data_factory(
_DatabaseFixtures(), end_date=end_date, months=60
)<|fim_middle|>matrixstore_from_postgres<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(question):
"""Convert user input from yes/no variations to True/False"""
while True:
reply = input(question + " [y/N] ").lower().strip()
if not reply or reply[0] == "n":
return False
if reply[0] == "y":
return True<|fim_middle|>yes_or_no<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.ensure_one()
return URL_SCSS_GEN_TEMPLATE % self.id<|fim_middle|>scss_get_url<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(plugin,
item_id,
video_id,
download_mode=False,
**kwargs):
return resolver_proxy.get_stream_dailymotion(plugin, video_id,
download_mode)<|fim_middle|>get_video_url<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return "get_flatsurface"<|fim_middle|>get_flatsurface<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(name):
"""Returns a (maybe empty) list of QKeySequences for the named snippet."""
ac = collection()
return ac and ac.METHOD_NAME(name) or []<|fim_middle|>shortcuts<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
with mock.patch("mslib.mscolab.mscolab.input", return_value="n"):
assert confirm_action("") is False
with mock.patch("mslib.mscolab.mscolab.input", return_value=""):
assert confirm_action("") is False
with mock.patch("mslib.mscolab.mscolab.input", return_value="y"):
assert confirm_action("") is True<|fim_middle|>test_confirm_action<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
The location where the email service stores its data at rest.
"""
return pulumi.get(self, "data_location")<|fim_middle|>data_location<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(unused_argv):
tf.logging.set_verbosity(FLAGS.log)
if not FLAGS.run_dir:
tf.logging.fatal('--run_dir required')
return
if not FLAGS.sequence_example_file:
tf.logging.fatal('--sequence_example_file required')
return
sequence_example_file_paths = tf.gfile.Glob(
os.path.expanduser(FLAGS.sequence_example_file))
run_dir = os.path.expanduser(FLAGS.run_dir)
config = melody_rnn_config_flags.config_from_flags()
mode = 'eval' if FLAGS.eval else 'train'
build_graph_fn = events_rnn_graph.get_build_graph_fn(
mode, config, sequence_example_file_paths)
train_dir = os.path.join(run_dir, 'train')
if not os.path.exists(train_dir):
tf.gfile.MakeDirs(train_dir)
tf.logging.info('Train dir: %s', train_dir)
if FLAGS.eval:
eval_dir = os.path.join(run_dir, 'eval')
if not os.path.exists(eval_dir):
tf.gfile.MakeDirs(eval_dir)
tf.logging.info('Eval dir: %s', eval_dir)
num_batches = (
(FLAGS.num_eval_examples or
magenta.common.count_records(sequence_example_file_paths)) //
config.hparams.batch_size)
events_rnn_train.run_eval(build_graph_fn, train_dir, eval_dir, num_batches)
else:
events_rnn_train.run_training(build_graph_fn, train_dir,
FLAGS.num_training_steps,
FLAGS.summary_frequency,
checkpoints_to_keep=FLAGS.num_checkpoints)<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, separation):
"""Change the distance between a port and its anchor particle.
separation : float, required
Distance to shift port along the orientation vector from the anchor
particle position. If no anchor is provided, the port will be
shifted from the origin.
"""
if self.used:
warn(
"This port is already being used and changing its separation "
"will have no effect on the distance between particles."
)
if self.anchor:
self.translate_to(self.anchor.pos)
else:
self.translate_to((0, 0, 0))
self.translate(separation * self.direction)<|fim_middle|>update_separation<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True
) -> IPv4Network | IPv6Network: ...<|fim_middle|>ip_network<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(grid):
"""
Test that grdfill fails without arguments for `mode` and `L`.
"""
with pytest.raises(GMTInvalidInput):
grdfill(grid=grid)<|fim_middle|>test_grdfill_required_args<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
*,
dump: Union[bool, str] = False,
outline: bool = False,
**__kwargs: Any,
) -> None:
"""Do nothing."""<|fim_middle|>pre_run<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return """\
color
Sets the color of the threshold line.
width
Sets the width (in px) of the threshold line.
"""<|fim_middle|>prop_descriptions<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Test that it works correctly inside of a column."""
col1, col2, col3 = st.columns([2.5, 1.5, 8.3])
with col1:
st.text_area("foo")
all_deltas = self.get_all_deltas_from_queue()
# 5 elements will be created: 1 horizontal block, 3 columns, 1 widget
self.assertEqual(len(all_deltas), 5)
text_area_proto = self.get_delta_from_queue().new_element.text_area
self.assertEqual(text_area_proto.label, "foo")<|fim_middle|>test_inside_column<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
dist = Distribution.from_name('distinfo-pkg')
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)<|fim_middle|>test_retrieves_version_of_self<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Test the name after inverting the QFT is IQFT and not QFT_dg."""
iqft = QFT(1).inverse()
i2qft = iqft.inverse()
with self.subTest(msg="inverted once"):
self.assertEqual(iqft.name, "IQFT")
with self.subTest(msg="inverted twice"):
self.assertEqual(i2qft.name, "QFT")
with self.subTest(msg="inverse as kwarg"):
self.assertEqual(QFT(1, inverse=True).name, "IQFT")<|fim_middle|>test_name_after_inverting<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(pars):
assert pars[1].name == "ham"
assert pars["ham"].name == "ham"
assert pars[pars[1]].name == "ham"
with pytest.raises(TypeError):
pars[42.3]
with pytest.raises(IndexError):
pars[3]
with pytest.raises(IndexError):
pars["lamb"]
with pytest.raises(ValueError):
pars[Parameter("bam!", 99)]<|fim_middle|>test_parameters_getitem<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, custom_model_id):
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer import FormRecognizerClient, RecognizedForm
endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]
model_id_fixed_rows_table = os.getenv("MODEL_ID_FIXED_ROW_TABLES", custom_model_id)
form_recognizer_client = FormRecognizerClient(
endpoint=endpoint, credential=AzureKeyCredential(key)
)
path_to_sample_forms = os.path.abspath(os.path.join(os.path.abspath(__file__),
"..", "..", "./sample_forms/forms/label_table_fixed_rows1.pdf"))
with open(path_to_sample_forms, "rb") as f:
form = f.read()
poller = form_recognizer_client.begin_recognize_custom_forms(
model_id=model_id_fixed_rows_table, form=form
)
result: list[RecognizedForm] = poller.result()
print("\n--------Recognizing labeled table with fixed rows--------\n")
for recognized_form in result:
for name, field in recognized_form.fields.items():
# substitute "table" for the label given to the table tag during training
# (if different than sample training docs)
if name == "table":
for row_name, column in field.value.items():
print("Row '{}' has columns:".format(row_name))
for column_name, column_value in column.value.items():
print("...Column '{}' with value '{}' and a confidence score of {}".format(
column_name, column_value.value, column_value.confidence
))
else: # non-table tagged FormField
print("...Field '{}' has value '{}' with a confidence score of {}".format(
name,
field.value,
field.confidence
))<|fim_middle|>test_recognize_tables_fixed_rows<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(BSP_ROOT, dist_dir):
import sys
cwd_path = os.getcwd()
sys.path.append(os.path.join(os.path.dirname(BSP_ROOT), 'tools'))
from sdk_dist import dist_do_building
dist_do_building(BSP_ROOT, dist_dir)<|fim_middle|>dist_handle<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return {
"14": {
"Visual Studio": "14",
"msvc": "190",
"gcc": "5",
"clang": "3.2",
"apple-clang": "4.3",
},
"17": {
"Visual Studio": "15" if Version(self.version) < "3.3.0" else "16",
"msvc": "191" if Version(self.version) < "3.3.0" else "192",
"gcc": "7",
"clang": "6",
"apple-clang": "10",
},
}.get(self._min_cppstd, {})<|fim_middle|>compilers_minimum_version<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(oauth2_settings, rf, settings, oidc_only_view, caplog):
assert oauth2_settings.OIDC_ENABLED is False
settings.DEBUG = False
with caplog.at_level(logging.WARNING, logger="oauth2_provider"):
rsp = oidc_only_view(rf.get("/"))
assert rsp.status_code == 404
assert len(caplog.records) == 1
assert "OIDC views are not enabled" in caplog.records[0].message<|fim_middle|>test_oidc_only_mixin_oidc_disabled_no<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# UCERF3 rupture
rup = [r for r in self.src.iter_ruptures()][0]
tors = rup.surface.tors
tors._set_coordinate_shift()
# Create the contexts
gmm = AbrahamsonEtAl2014()
param = dict(imtls={'PGA': []}, cache_distances=True)
cm = ContextMaker('*', [gmm], param)
ctxs = list(cm.get_ctx_iter(self.src, self.sitec))
# Get multiline
dcache = cm.dcache
ml = _get_multi_line(dcache, self.src.rupture_idxs[0])
# Test shift
aae(tors.shift, ml.shift, decimal=4)
# Test umax
tors.set_u_max()
aae(tors.u_max, ml.u_max, decimal=4)
# Test uupps
tupps, uupps, weis = get_tus(tors.lines, self.sitec.mesh)
aae(tupps, ml.tupps, decimal=3)
aae(uupps, ml.uupps, decimal=3)
aae(weis, ml.weis, decimal=3)
# Test T and U
ml.set_tu(self.sitec.mesh)
tors.set_tu(self.sitec.mesh)
aae(ml.uut, ml.uut, decimal=3)
aae(ml.tut, ml.tut, decimal=3)
# Test Rx
da = ml.get_rx_distance()
expected = tors.get_rx_distance()
aae(da, expected, decimal=3)
# Test Ry0
da = ml.get_ry0_distance()
expected = tors.get_ry0_distance()
aae(da, expected, decimal=3)
aae(ctxs[0].ry0, expected, decimal=3)<|fim_middle|>test_multi_cache_02<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
with pytest.raises(DagsterInvalidDefinitionError):
StaticPartitionsDefinition(["foo...bar"])
with pytest.raises(DagsterInvalidDefinitionError, match="n"):
StaticPartitionsDefinition(["foo\nfoo"])
with pytest.raises(DagsterInvalidDefinitionError, match="b"):
StaticPartitionsDefinition(["foo\bfoo"])<|fim_middle|>test_static_partitions_invalid_chars<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# win cuda 10.2 cannot pass
a = jt.random((100,))
b = a**3
b.sync()<|fim_middle|>test_pow<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Returns whether this field handles multiple addresses
"""
return len(self.get_address_types()) > 1<|fim_middle|>is_multi_address<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertEqual(
self.harvester_class.title(),
"Obsession-Worthy Peanut Butter Cookie Ice Cream",
)<|fim_middle|>test_title<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Return the maximum entry in the vector."""
with self.dat.vec_ro as v:
return v.METHOD_NAME()[1]<|fim_middle|>max<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(args):
args.embed_dim = getattr(args, "embed_dim", 1280)
args.num_attention_heads = getattr(args, "num_attention_heads", 20)
args.num_layers = getattr(args, "num_layers", 36)
default_architecture(args)<|fim_middle|>hf_gpt2_large<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
content = "Hello"
destination = Path(self.tmpdir) / "test_save_text_file"
save_text_file(content, destination)
with destination.open("r") as file:
assert content == file.read()<|fim_middle|>test_save_text_file<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
comments = self.hello_world_project.get_pr(72).get_comments(
filter_regex="comment updates"
)
assert len(comments) == 1
before_comment = comments[0].body
before_edited = comments[0].edited
comments[0].body = "see if updating works"
assert comments[0].body == "see if updating works"
assert comments[0].edited > before_edited
comments[0].body = before_comment
assert comments[0].body == before_comment<|fim_middle|>test_pr_comments_updates<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_nonpositive_num_lstm_layers_raises<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
super(RouteTaskChangeTestCase, self).METHOD_NAME()
self.generate_fixture_project_status()
self.generate_fixture_project()
self.generate_fixture_asset_type()
self.generate_fixture_asset()
self.generate_fixture_sequence()
self.generate_fixture_shot()
self.generate_fixture_department()
self.generate_fixture_task_type()
self.generate_fixture_task_status()
self.generate_fixture_task_status_wip()
self.generate_fixture_task_status_retake()
self.generate_fixture_task_status_done()
self.generate_fixture_task_status_todo()
self.generate_fixture_person()
self.generate_fixture_assigner()
self.generate_fixture_task()
self.open_status_id = str(self.task_status.id)
self.wip_status_id = str(self.task_status_wip.id)
self.retake_status_id = str(self.task_status_retake.id)
self.done_status_id = str(self.task_status_done.id)
self.is_event_fired = False
events.unregister_all()<|fim_middle|>set_up<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
total = self.list.get_count()
name = _(u"Authorized account %d") % (total+1)
self.list.insert_item(False, name)
if self.list.get_count() == 1:
self.list.select_item(0)<|fim_middle|>add_new_session_to_list<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(s):
"""For output which does not require specific line order"""
return sorted(_strip_and_dedent(s).splitlines())<|fim_middle|>split_and_sort<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(
self, resource_group_name: str, account_name: str, **kwargs: Any
) -> _models.PrivateLinkResourceListResult:
"""Gets the private link resources that need to be created for a storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateLinkResourceListResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2021_04_01.models.PrivateLinkResourceListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._api_version or "2021-04-01"))
cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None)
request = build_list_by_storage_account_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized<|fim_middle|>list_by_storage_account<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(dev):
return f"<{dev.id}: {dev.device_kind}>"<|fim_middle|>fmt_device<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(filename):
if not os.path.isfile(filename):
msg = 'No such file of "%s"' % filename
raise FileNotFoundError(msg)<|fim_middle|>infile_exist<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, attrs):
attrs = super(TransactionSerializer, self).METHOD_NAME(attrs)
if not attrs:
return attrs
if self.instance:
if self.instance.state != Transaction.States.Initial:
message = "The transaction cannot be modified once it is in {}"\
" state.".format(self.instance.state)
raise serializers.ValidationError(message)
return attrs<|fim_middle|>validate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
p = gevent.spawn(util.wrap_errors(AssertionError, gevent.sleep), 2)
gevent.sleep(0) # wait for p to start, because actual order of switching is reversed
switcher = gevent.spawn(p.switch, None)
result = p.get()
assert isinstance(result, AssertionError), result
assert 'Invalid switch' in str(result), repr(str(result))
switcher.kill()<|fim_middle|>test_sleep_invalid_switch<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, create_parameter, aws_client):
prefix = f"/prefix-{short_uid()}"
path = f"{prefix}/path"
value = "value"
param = create_parameter(Name=path, Value=value, Type="String")
aws_client.ssm.label_parameter_version(
Name=path, ParameterVersion=param["Version"], Labels=["latest"]
)
list_of_params = aws_client.ssm.get_parameters_by_path(
Path=prefix, ParameterFilters=[{"Key": "Label", "Values": ["latest"]}]
)
assert len(list_of_params["Parameters"]) == 1
found_param = list_of_params["Parameters"][0]
assert path == found_param["Name"]
assert found_param["ARN"]
assert found_param["Type"] == "String"
assert found_param["Value"] == "value"<|fim_middle|>test_get_parameters_by_path_and_filter<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Sequence[str]:
return pulumi.get(self, "connectors")<|fim_middle|>connectors<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(mock_alias_mult):
"""
Tests return of target as differing list to known alias
"""
with patch(
"salt.modules.aliases.__parse_aliases",
MagicMock(return_value=mock_alias_mult),
):
ret = aliases.has_target("hello", ["world@earth.com", "mars@space.com"])
assert not ret<|fim_middle|>test_has_target_list_mult_differs<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
font, _ = self.objectGenerator("font")
groups = font.groups
groups["public.kern1.X"] = ["A", "B", "C"]
groups["public.kern2.X"] = ["A", "B", "C"]
kerning = font.kerning
kerning.update({
("public.kern1.X", "public.kern2.X"): 100,
("B", "public.kern2.X"): 101,
("public.kern1.X", "B"): 102,
("A", "A"): 103,
})
return kerning<|fim_middle|>get_kerning_generic<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(xray_structure):
asu_mappings = xray_structure.asu_mappings(buffer_thickness=3.5)
pair_asu_table = crystal.pair_asu_table(asu_mappings=asu_mappings)
scattering_types = xray_structure.scatterers().extract_scattering_types()
pair_asu_table.add_covalent_pairs(
scattering_types, exclude_scattering_types=flex.std_string(("H","D")))
return pair_asu_table.extract_pair_sym_table()<|fim_middle|>get_pair_sym_table<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertEqual(
format_grammar._parse_segments('/* " */', False),
(['/* " */'], {}, []),
)<|fim_middle|>test_block_comment_quote<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Tests setting the language to go (does not exist)"""
lang = "go"
self._setup(lang)
langs = rhnTranslate.cat.getlangs()
if hasattr(sys, "version_info"):
# On python 1.5.2 we don't really get an idea what the language
# is, so it's ok to check for the first component
self.assertFalse(langs[0] == lang, "Language is %s" % langs[0])
else:
self.assertTrue(langs[0] == lang, "Language is %s" % langs[0])<|fim_middle|>test_setlangs_go<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_osx_launchd_plist_parser<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(test_id, random_tests):
pathFile = pathlib.Path(__file__).parent.resolve()
if "src" in os.listdir(pathFile):
mypath = os.path.join(pathFile, "../tests/src")
else:
import opengate.tests
mypath = os.path.join(
pathlib.Path(opengate.tests.__file__).resolve().parent, "../tests/src"
)
print("Look for tests in: " + mypath)
if not check_tests_data_folder():
return False
# Look if torch is installed
torch = True
torch_tests = [
"test034_gan_phsp_linac.py",
"test038_gan_phsp_spect_gan_MT.py",
"test038_gan_phsp_spect_gan_aa.py",
"test038_gan_phsp_spect_gan_se.py",
"test038_gan_phsp_spect_gan_ze.py",
"test040_gan_phsp_pet_gan.py",
"test043_garf.py",
"test045_speedup_all_WIP.py",
"test047_gan_vox_source_cond.py",
]
try:
import torch
except:
torch = False
windowsWrongTests = [
"test014_engine_2.py",
"test060_PhsSource_ParticleName_direct.py",
"test060_PhsSource_ParticleName_fromPHS_PDGCode.py",
"test060_PhsSource_ParticleName_fromPHS_ParticleName.py",
"test060_PhsSource_rotation.py",
"test060_PhsSource_translation.py",
"test043_garf5_region_MT_subproc.py",
"test043_garf2_region_subproc.py.py",
"test061_user_event_info.py",
]
onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
files = []
for f in onlyfiles:
if "WIP" in f:
print(f"Ignoring: {f:<40} (Work In Progress) ")
continue
if "visu" in f:
continue
if "OLD" in f:
continue
if "old" in f:
continue
if "test" not in f:
continue
if ".py" not in f:
continue
if ".log" in f:
continue
if "all_tes" in f:
continue
if "_base" in f:
continue
if "test045_speedup" in f:
continue
if "_helpers" in f:
continue
if os.name == "nt" and "_MT" in f:
continue
if os.name == "nt" and f in windowsWrongTests:
continue
if not torch and f in torch_tests:
print(f"Ignoring: {f:<40} (Torch is not available) ")
continue
files.append(f)
files = sorted(files)
if test_id != "all":
test_id = int(test_id)
files_new = []
for f in files:
id = int(f[4:7])
if id >= test_id:
files_new.append(f)
else:
print(f"Ignoring: {f:<40} (< {test_id}) ")
files = files_new
elif random_tests:
files_new = files[-10:]
prob = 0.25
files = files_new + random.sample(files[:-10], int(prob * (len(files) - 10)))
files = sorted(files)
print(f"Running {len(files)} tests")
print(f"-" * 70)
failure = False
for f in files:
start = time.time()
print(f"Running: {f:<46} ", end="")
cmd = "python " + os.path.join(mypath, f"{f}")
log = os.path.join(os.path.dirname(mypath), f"log/{f}.log")
r = os.system(f"{cmd} > {log}")
# subprocess.run(cmd, stdout=f, shell=True, check=True)
if r == 0:
print(colored.stylize(" OK", color_ok), end="")
else:
if r == 2:
# this is probably a Ctrl+C, so we stop
gate.fatal("Stopped by user")
else:
print(colored.stylize(" FAILED !", color_error), end="")
failure = True
os.system("cat " + log)
end = time.time()
print(f" {end - start:5.1f} s {log:<65}")
print(not failure)<|fim_middle|>go<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Set the IGNORE_PERF_VAR variable back to its original value.
if self._original_ignore_perf_value is not None:
os.environ[self.IGNORE_PERF_VAR] = self._original_ignore_perf_value
else:
del os.environ[self.IGNORE_PERF_VAR]
tf.compat.v1.mixed_precision.disable_mixed_precision_graph_rewrite()
super().METHOD_NAME()<|fim_middle|>tear_down<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, memobj, syms=None):
return VdbMemoryCanvas(memobj, syms=syms, parent=self)<|fim_middle|>init_memory_canvas<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
arr = np.array([-128, -128, -0.5, -0.5, -3, -3, 127, 127], dtype=np.int8)
self.assertEqual(arr[0], -128)
self.assertEqual(arr[1], -128)
self.assertEqual(arr[-1], 127)
received = arr.tostring()
self.assertEqual(len(received), len(arr))
self.assertEqual(np.int8(received[0]), -128)
self.assertEqual(np.int8(received[1]), -128)
unpacked = HackRF.bytes_to_iq(received, len(received) // 2)
self.assertEqual(unpacked[0], complex(-1, -1))
self.assertAlmostEqual(unpacked[1], complex(0, 0), places=1)
self.assertAlmostEqual(unpacked[2], complex(0, 0), places=1)
self.assertEqual(unpacked[3], complex(1, 1))
packed = HackRF.iq_to_bytes(unpacked)
self.assertEqual(received, packed)<|fim_middle|>test_hackrf_pack_unpack<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_xsslint encounters an error parsing the xsslint output log
"""
_mock_counts.return_value = {}
with pytest.raises(SystemExit):
call_task('pavelib.quality.run_xsslint')<|fim_middle|>test_xsslint_violation_number_not_found<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Unique identifier for the code signing configuration.
"""
return pulumi.get(self, "config_id")<|fim_middle|>config_id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(type_str, field=""):
try:
type_list = type_str.split(",")
        # safety check: force-cast each entry to int
type_list = list(map(int, type_list))
if field:
type_int_list = list(map(str, type_list))
type_int_str = ",".join(type_int_list)
return f" and {field} in ({type_int_str}) "
except Exception:
return ""
else:
return type_list<|fim_middle|>turn_int_list_of_str<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(capfd):
"""Test ``get_jobs`` stdout for results == 0.
When no results are found, a single line is printed.
"""
_mock_ipython(MockIPython(None))
get_jobs(filters={"circuitNumQubits": -1})
out, err = capfd.readouterr()
assert out == "No jobs found matching given criteria\n"
assert len(err) == 0<|fim_middle|>test_get_jobs_no_results<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, lower: int, upper: int, num_vals:int):
lbl = self.fuzzing_label
if lower > -1:
low = lower if lower < lbl.fuzz_maximum + num_vals else lbl.fuzz_maximum - num_vals
for i in range(low, low + num_vals):
lbl.add_decimal_fuzz_value(i)
if upper > -1:
up = upper if upper < lbl.fuzz_maximum + 1 else lbl.fuzz_maximum - 1
for i in range(up - num_vals + 1, up + 1):
lbl.add_decimal_fuzz_value(i)
self.update()<|fim_middle|>add_boundaries<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return (
"%s %s" % (self.first_name, self.last_name)
if self.first_name and self.last_name
else self.username
)<|fim_middle|>get_common_name<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(*args):
"""Compute the logic AND between expressions."""
filename, loc = get_src_loc()
loc = ast.Location(filename, loc)
# pylint: disable=redefined-variable-type
expr = ast.ConstantOp(1, UInt(1), loc)
for arg in args:
arg = ast.CastOp(arg, UInt(1), loc)
expr = ast.LogicalAnd(expr, arg, loc)
return expr<|fim_middle|>and<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return {<|fim_middle|>get_finished_subscription_by_task<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
if not self._logger.isEnabledFor(logging.DEBUG):
return
reclaimable_count = sum(
1 for cam in self._previously_open.values() if cam.end_time < INFINITY
)
self._logger.debug(
"existing=%d (reclaimable=%d) / " "found=%d (known=%d new=%d missing=%d)",
len(self._previously_open),
reclaimable_count,
len(self._now_open),
len(self._keepers),
len(self._new),
len(self._missing),
)<|fim_middle|>log_stats<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None, implicit_outputs=None, pool=None, dyndep=None):
outputs = as_list(outputs)
out_outputs = [escape_path(x) for x in outputs]
all_inputs = [escape_path(x) for x in as_list(inputs)]
if implicit:
implicit = [escape_path(x) for x in as_list(implicit)]
all_inputs.append('|')
all_inputs.extend(implicit)
if order_only:
order_only = [escape_path(x) for x in as_list(order_only)]
all_inputs.append('||')
all_inputs.extend(order_only)
if implicit_outputs:
implicit_outputs = [escape_path(x)
for x in as_list(implicit_outputs)]
out_outputs.append('|')
out_outputs.extend(implicit_outputs)
self._line('build %s: %s' % (' '.join(out_outputs),
' '.join([rule] + all_inputs)))
if pool is not None:
self._line(' pool = %s' % pool)
if dyndep is not None:
self._line(' dyndep = %s' % dyndep)
if variables:
if isinstance(variables, dict):
iterator = iter(variables.items())
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1)
return outputs<|fim_middle|>build<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(unused_argv):
releaseVal = ReleaseValidation(FLAGS.commit_sha)
releaseVal.ExtractTestInfoFromPath(FLAGS.path)
if FLAGS.detail:
releaseVal.PrintAllTests()
else:
releaseVal.PrintSummary()
sys.exit(releaseVal.ValidateRelease())<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
yield self._interface<|fim_middle|>interface<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, **kwargs):
response = self.rp.parse_authentication_response(cherrypy.session,
kwargs["url_fragment"])
html_page = self._load_HTML_page_from_file("htdocs/success_page.html")
# Support for hybrid flow
authz_code = None
try:
authz_code = response["code"]
except KeyError:
pass
access_token = None
try:
access_token = response["access_token"]
userinfo = self.rp.make_userinfo_request(cherrypy.session,
access_token)
except KeyError:
pass
return html_page.format(authz_code, access_token,
response["id_token"], userinfo)<|fim_middle|>repost_fragment<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(ctx: discord.AutocompleteContext):
"""get all files in the indexes folder"""
try:
return [
file
for file in os.listdir(
EnvService.find_shared_file(
f"indexes/{str(ctx.interaction.guild.id)}/"
)
)
if file.startswith(ctx.value.lower())
][
:25
] # returns the 25 first files from your current input
except Exception:
return ["No server indexes found, add an index"]<|fim_middle|>get_server_indexes<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(train_loader, val_loader, device, model):
# loss function, optimizer and scheduler
loss_fcn = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=0.01)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=50, gamma=0.5)
# training loop
for epoch in range(350):
model.METHOD_NAME()
total_loss = 0
for batch, (batched_graph, labels) in enumerate(train_loader):
batched_graph = batched_graph.to(device)
labels = labels.to(device)
feat = batched_graph.ndata.pop("attr")
logits = model(batched_graph, feat)
loss = loss_fcn(logits, labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()
total_loss += loss.item()
scheduler.step()
train_acc = evaluate(train_loader, device, model)
valid_acc = evaluate(val_loader, device, model)
print(
"Epoch {:05d} | Loss {:.4f} | Train Acc. {:.4f} | Validation Acc. {:.4f} ".format(
epoch, total_loss / (batch + 1), train_acc, valid_acc
)
)<|fim_middle|>train<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return len(self.data_)<|fim_middle|>size<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
db_token = Token(self.serial2, tokentype="motp")
db_token.save()
token = MotpTokenClass(db_token)
token.update({"genkey": "1",
"motppin": self.motppin,
"pin": self.otppin})
db_token = Token.query.filter(Token.serial == self.serial2).first()
token = MotpTokenClass(db_token)
# check that the userpin is set
self.assertTrue(token.token.user_pin, token.token.user_pin)
# check that the otp value is set
self.assertTrue(token.token.key_enc, token.token.key_enc)<|fim_middle|>test_03_enroll_genkey<|file_separator|> |