text (string, lengths 67–7.88k) |
---|
<|fim_prefix|>def <|fim_suffix|>(registry=None):
if registry is None:
registry = getUtility(IRegistry)
upload_fields = registry.get('castle.file_upload_fields', None)
if upload_fields is None:
# not updated yet, b/w compatible
required_upload_fields = registry.get(
'castle.required_file_upload_fields', []) or []
result = [{
'name': 'title',
'label': 'Title',
'widget': 'text',
'required': 'title' in required_upload_fields,
'for-file-types': '*'
}, {
'name': 'description',
'label': 'Summary',
'widget': 'textarea',
'required': 'description' in required_upload_fields,
'for-file-types': '*'
}, {
'name': 'tags',
'label': 'Tags',
'widget': 'tags',
'required': 'tags' in required_upload_fields,
'for-file-types': '*'
}, {
'name': 'youtube_url',
'label': 'Youtube URL',
'widget': 'text',
'required': 'youtube_url' in required_upload_fields,
'for-file-types': 'video'
}]
else:
result = []
for field in upload_fields:
if 'name' not in field:
continue
# need to make copy of data otherwise we're potentially
# modifying the record directly
data = {}
data.update(field)
# make sure all required fields are in place
if data.get('required'):
data['required'] = str(data['required']).lower() in ('true', 't', '1')
else:
data['required'] = False
if 'label' not in data:
data[u'label'] = data[u'name'].capitalize()
if 'widget' not in field:
data[u'widget'] = u'text'
if 'for-file-types' not in data:
data[u'for-file-types'] = u'*'
result.append(data)
return result<|fim_middle|>get_upload_fields<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(*args, **kwargs):
pass<|fim_middle|>wrapped<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, peer, buffer_info):
assert isinstance(self.plugin, GlobalPlugin)
assert len(buffer_info) == 1
self._initialize_buffers(peer, buffer_info)
first_view = buffer_info[0]["views"][0]
self.plugin.METHOD_NAME(self.views[first_view])<|fim_middle|>new_buffer<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(app_config, agent_config):
BaseWorld.clear_config()
BaseWorld.apply_config('main', app_config)
BaseWorld.apply_config('agents', agent_config)
yield BaseWorld
BaseWorld.clear_config()<|fim_middle|>base_world<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
# Issue#5900
#
# Ensure RUNPATH is added to extension modules with RPATH if
# GNU ld is used
# darwin
sys.platform = 'darwin'
self.assertEqual(self.cc.rpath_foo(), '-L/foo')
# hp-ux
sys.platform = 'hp-ux'
old_gcv = sysconfig.get_config_var
def gcv(v):
return 'xxx'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo'])
def gcv(v):
return 'gcc'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
def gcv(v):
return 'g++'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
sysconfig.get_config_var = old_gcv
# irix646
sys.platform = 'irix646'
self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])
# osf1V5
sys.platform = 'osf1V5'
self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])
# GCC GNULD
sys.platform = 'bar'
def gcv(v):
if v == 'CC':
return 'gcc'
elif v == 'GNULD':
return 'yes'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
# GCC non-GNULD
sys.platform = 'bar'
def gcv(v):
if v == 'CC':
return 'gcc'
elif v == 'GNULD':
return 'no'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo')
# GCC GNULD with fully qualified configuration prefix
# see #7617
sys.platform = 'bar'
def gcv(v):
if v == 'CC':
return 'x86_64-pc-linux-gnu-gcc-4.4.2'
elif v == 'GNULD':
return 'yes'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
# non-GCC GNULD
sys.platform = 'bar'
def gcv(v):
if v == 'CC':
return 'cc'
elif v == 'GNULD':
return 'yes'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), '-R/foo')
# non-GCC non-GNULD
sys.platform = 'bar'
def gcv(v):
if v == 'CC':
return 'cc'
elif v == 'GNULD':
return 'no'
sysconfig.get_config_var = gcv
self.assertEqual(self.cc.rpath_foo(), '-R/foo')<|fim_middle|>test_runtime_libdir_option<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
old: Optional[dict], new: Optional[dict]
) -> Tuple[str, str, Union[int, str], str]:
"""
Derive OTU information for a new change document
from the old and new joined OTU documents.
:param old: the old, joined OTU document
:param new: the new, joined OTU document
:return: the OTU ID, name, and version, and the parent reference ID
"""
try:
otu_id = old["_id"]
except TypeError:
otu_id = new["_id"]
try:
otu_name = old["name"]
except TypeError:
otu_name = new["name"]
try:
otu_version = int(new["version"])
except (TypeError, KeyError):
otu_version = "removed"
try:
ref_id = old["reference"]["id"]
except (TypeError, KeyError):
ref_id = new["reference"]["id"]
return otu_id, otu_name, otu_version, ref_id<|fim_middle|>derive_otu_information<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.nwbfile.add_scratch([1, 2, 3, 4], name='test', description='test data')
assert_array_equal(self.nwbfile.get_scratch('test'), np.array([1, 2, 3, 4]))<|fim_middle|>test_add_scratch_list<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, orm):
# Deleting field 'QueryHistory.server_name'
db.delete_column('beeswax_queryhistory', 'server_name')
# Deleting field 'QueryHistory.server_host'
db.delete_column('beeswax_queryhistory', 'server_host')
# Deleting field 'QueryHistory.server_port'
db.delete_column('beeswax_queryhistory', 'server_port')
# Changing field 'QueryHistory.query'
db.alter_column('beeswax_queryhistory', 'query', self.gf('django.db.models.fields.CharField')(max_length=1024))<|fim_middle|>backwards<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
read = Mock()
ss = TcpipSocketStream(None, read, 10.0, None)
ss.connect()
self.assertTrue(ss.connected)
ss.disconnect()
self.assertFalse(ss.connected)
read.close.assert_called_once()<|fim_middle|>test_closes_the_read_socket<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(stream, filename, framerate=7000, rescale=False, width=None,
**kwargs): # @UnusedVariable
"""
Writes an audio WAV file from given ObsPy Stream object. The seismogram is
squeezed to audible frequencies.
As a result, the generated WAV sound file is really short. The data
are written uncompressed as signed 4-byte integers.
.. warning::
This function should NOT be called directly, it registers via
the :meth:`~obspy.core.stream.Stream.write` method of an
ObsPy :class:`~obspy.core.stream.Stream` object, call this instead.
:type stream: :class:`~obspy.core.stream.Stream`
:param stream: The ObsPy Stream object to write.
:type filename: str
:param filename: Name of the audio WAV file to write.
:type framerate: int, optional
:param framerate: Sample rate of WAV file to use. This will squeeze
the seismogram (default is 7000).
:type rescale: bool, optional
:param rescale: If True, rescale the data maximum to the maximal representable number
:type width: int, optional
:param width: dtype to write, 1 for '<u1', 2 for '<i2' or 4 for '<i4'.
Tries to autodetect width from data, uses 4 otherwise.
"""
i = 0
file_path = Path(filename)
base = file_path.parent / file_path.stem
if width not in WIDTH2DTYPE.keys() and width is not None:
raise TypeError("Unsupported Format Type, word width %dbytes" % width)
for trace in stream:
# try to autodetect width from data, see #791
if width is None:
if trace.data.dtype.str[-2:] in ['u1', 'i2', 'i4']:
tr_width = int(trace.data.dtype.str[-1])
else:
tr_width = 4
else:
tr_width = width
# write WAV file
if len(stream) >= 2:
filename = "%s%03d%s" % (base, i, file_path.suffix)
w = wave.open(filename, 'wb')
try:
trace.stats.npts = len(trace.data)
# (nchannels, sampwidth, framerate, nframes, comptype, compname)
w.setparams((1, tr_width, framerate, trace.stats.npts, 'NONE',
'not compressed'))
data = trace.data
dtype = WIDTH2DTYPE[tr_width]
if rescale:
# optimal scale, account for +/- and the zero
maxint = 2 ** (tr_width * 8 - 1) - 1
# upcast for following rescaling
data = data.astype(np.float64)
data = data / abs(data).max() * maxint
data = np.require(data, dtype=dtype)
w.writeframes(data.tobytes())
finally:
w.close()
i += 1<|fim_middle|>write_wav<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
'''Play the file.'''
self.state = 'play'
self.dispatch('on_play')<|fim_middle|>play<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
staff_api_client, permission_manage_shipping, shipping_method
):
# given
shipping_method.store_value_in_metadata({PUBLIC_KEY: PUBLIC_VALUE})
shipping_method.save(update_fields=["metadata"])
shipping_method_id = graphene.Node.to_global_id(
"ShippingMethodType", shipping_method.pk
)
# when
response = execute_clear_public_metadata_for_item(
staff_api_client,
permission_manage_shipping,
shipping_method_id,
"ShippingMethodType",
)
# then
assert item_without_public_metadata(
response["data"]["deleteMetadata"]["item"],
shipping_method,
shipping_method_id,
)<|fim_middle|>test_delete_public_metadata_for_shipping_method<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(tmpdir, monkeypatch):
'''Tests that no error is raised if the build process worked.
This is done by using `true` as the build command.
'''
assert GOceanBuild._infrastructure_built is False
monkeypatch.setattr(Compile, "TEST_COMPILE", True)
monkeypatch.setattr(Compile, "TEST_COMPILE_OPENCL", True)
monkeypatch.setattr(GOceanBuild, "_make_command", "true")
GOceanBuild(tmpdir)._build_infrastructure()
assert GOceanBuild._infrastructure_built is True<|fim_middle|>test_make_works<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, __msg): ...<|fim_middle|>adderrorinfo<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, ctx: CommandContext, token: str):
pass<|fim_middle|>process_token<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(grad, l1_ratio=0.5):
"""Compute divergence + id of image gradient + id.
Parameters
----------
grad : ndarray, shape (4, nx, ny, nz, ...)
where (nx, ny, nz, ...) is `img_shape`, the shape of the brain bounding box, and
n_axes = len(img_shape).
l1_ratio : float in the interval [0, 1]; optional (default .5)
Constant that mixes L1 and spatial prior terms in the penalization.
Returns
-------
res : ndarray, shape (nx, ny, nz, ...)
The computed divergence + id operator.
Raises
------
RuntimeError
"""
if not (0.0 <= l1_ratio <= 1.0):
raise RuntimeError(
f"l1_ratio must be in the interval [0, 1]; got {l1_ratio}"
)
res = np.zeros(grad.shape[1:])
# the divergence part
for d in range(grad.shape[0] - 1):
this_grad = np.rollaxis(grad[d], d)
this_res = np.rollaxis(res, d)
this_res[:-1] += this_grad[:-1]
this_res[1:-1] -= this_grad[:-2]
if len(this_grad) > 1:
this_res[-1] -= this_grad[-2]
res *= 1.0 - l1_ratio
# the identity part
res -= l1_ratio * grad[-1]
return res<|fim_middle|>div_id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(convolution_pytorch):
"""Test a convolution neural network with more layers"""
_run_and_check_model(convolution_pytorch, torch.zeros((1, 1, 28, 28)))<|fim_middle|>test_convolution<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> ISA:
return ISA.NULL<|fim_middle|>get_isa<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
max_idx = -1
for possible_dir in os.listdir("."):
if os.path.isdir(possible_dir):
try:
idx = int(possible_dir.replace(BASE_PROJ_DIR, ""))
max_idx = max(idx, max_idx)
except:
continue
max_idx += 1
return BASE_PROJ_DIR + str(max_idx)<|fim_middle|>get_next_workdir<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
provider_name: str,
tokens: dict[str, str],
) -> None:
"""
Update the access/refresh tokens for a provider in the Airflow Variable store.
This update does not affect the tokens for any other existing providers.
"""
log.info(f"Updating tokens for provider: {provider_name}")
current_tokens = _var_get(OAUTH2_TOKEN_KEY)
current_tokens[provider_name] = {
"access_token": tokens["access_token"],
"refresh_token": tokens["refresh_token"],
}
Variable.set(OAUTH2_TOKEN_KEY, current_tokens, serialize_json=True)<|fim_middle|>update_tokens<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, mapper, table):
util = bulkdata.utils.SqlAlchemyMixin()
util.models = {}
util.metadata = mock.Mock()
util._create_record_type_table("Account_rt_mapping")
assert "Account_rt_mapping" in util.models<|fim_middle|>test_create_record_type_table<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>( self ) :<|fim_middle|>test_current_config_and_context<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
assert_raises_with_msg(
TypeError,
f"'{__name__}.Example' object attribute 'a' is 'tuple', got 'None'.",
Example(a=()).config, a=None
)<|fim_middle|>test_failure_converting_to_tuple<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, name, scheme):
for i in range(1, self.scheme.count()):
n = self.scheme.itemText(i)
if n.lower() > name.lower():
self.scheme.insertItem(i, name, scheme)
break
else:
self.scheme.addItem(name, scheme)<|fim_middle|>insert_scheme_item<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.folder.invokeFactory("Document", id="doc", text=RichTextValue("data"))
self.assertEqual(self.folder.doc.text.raw, "data")
self.assertEqual(self.folder.doc.Format(), "text/html")<|fim_middle|>test_document_create<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, ax):
return self.sps[ax].plots["all", "test"].figure<|fim_middle|>test_mesh_slice_hexaheadral_all_test<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Test that items get removed."""
self.queue.push(self.item_a, PriorityQueue.PRIORITY_LOW)
self.assertFalse(self.item_b in self.queue)
self.queue.push(self.item_b, PriorityQueue.PRIORITY_MEDIUM,
timestamp=make_datetime(10))
self.queue.push(self.item_c, PriorityQueue.PRIORITY_MEDIUM,
timestamp=make_datetime(5))
self.assertTrue(self.item_b in self.queue)
self.queue.remove(self.item_b)
self.assertFalse(self.item_b in self.queue)
self.queue._verify()<|fim_middle|>test_remove<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, name=None, disk_id=None):
data = {}
response = self.array.unmap(name=name, naa=disk_id)
data["unmap_local"] = response
response = self.remote_array.unmap(name=name, naa=disk_id)
data["unmap_remote"] = response
response = self.array.del_disk(name=name, naa=disk_id)
data["del_disk_local"] = response
response = self.remote_array.del_disk(name=name, naa=disk_id)
data["del_disk_remote"] = response
return data<|fim_middle|>delete_disk_hypermetro<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.matcher.builder = "foo"
ret = yield self.matcher.match(("forceschedulers", "sched1"), "force")
self.assertNotMatch(ret)<|fim_middle|>test_forcesched_builder_no_match<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, request):
pass<|fim_middle|>dump_request<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(session: dict, cookie_session_backend: ClientSideSessionBackend) -> None:
ciphertext = cookie_session_backend.dump_data(session)
assert isinstance(ciphertext, list)
for text in ciphertext:
assert len(text) <= CHUNK_SIZE
plain_text = cookie_session_backend.load_data(ciphertext)
assert plain_text == session<|fim_middle|>test_dump_and_load_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, t: types.UnionType) -> set[str]:
return self._visit(t.items)<|fim_middle|>visit_union_type<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
try:
blackout = Blackout.parse(request.json)
except Exception as e:
raise ApiError(str(e), 400)
if Scope.admin in g.scopes or Scope.admin_blackouts in g.scopes:
blackout.user = blackout.user or g.login
else:
blackout.user = g.login
blackout.customer = assign_customer(wanted=blackout.customer, permission=Scope.admin_blackouts)
try:
blackout = blackout.create()
except Exception as e:
raise ApiError(str(e), 500)
write_audit_trail.send(current_app._get_current_object(), event='blackout-created', message='', user=g.login,
customers=g.customers, scopes=g.scopes, resource_id=blackout.id, type='blackout', request=request)
if blackout:
return jsonify(status='ok', id=blackout.id, blackout=blackout.serialize), 201, {'Location': absolute_url('/blackout/' + blackout.id)}
else:
raise ApiError('insert blackout failed', 500)<|fim_middle|>create_blackout<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request, object, acton_flag, message):
return LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(type(object)).pk,
object_id=object.pk,
object_repr=str(object),
action_flag=acton_flag,
change_message=message
)<|fim_middle|>add_log_entry<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, obj, key, ignore):
''' succeed iff key in obj or key in ignore. '''
if key in ignore: return
if key not in obj:
print("***",key, file=sys.stderr)
self.assertIn(key, obj)<|fim_middle|>assert_haskey<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(rows, only_signed, prefix):
"""Parses the rows given by `git tag`. Removes the objectypes and signatures and returns the
refnames"""
# Removes any line that doesn't start with 'tag' and strips 'tag ' from those lines
rows = [x[4:] for x in rows if x.startswith("tag")]
# Strips "Y " or "N " and removes non-signed if requested
rows = [x[2:] for x in rows if x[0] == "Y" or not only_signed]
if prefix is None:
rows = [x for x in rows if "/" not in x]
return rows<|fim_middle|>parse_tags<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, value: Optional[float]) -> None:
"""When not draining we pass thru to the socket,
since when draining we control the timeout.
"""
if value is not None:
self._recv_timeout_sec = value
if self._drain_thread is None:
socket.socket.METHOD_NAME(self, value)<|fim_middle|>settimeout<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Returns the current assetstore. If none exists, this will raise a 500
exception.
"""
current = self.findOne({'current': True})
if current is None:
raise GirderException(
'No current assetstore is set.',
'girder.model.assetstore.no-current-assetstore')
return current<|fim_middle|>get_current<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> List[T]:
result: List[T] = []
if not self.is_acyclic():
raise CycleError
ready_nodes: Deque[NodeInfo] = deque(
[n for n in self._nodes.values() if n.num_predecessors == 0])
while ready_nodes:
current_node: NodeInfo = ready_nodes.popleft()
for v in current_node.successors:
v.num_predecessors -= 1
if v.num_predecessors == 0:
ready_nodes.append(v)
result.append(current_node.node)
return result<|fim_middle|>sort<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, items):
ITEM_MAP = {
ACCEPT: (_("Accept"), self.on_accept),
ASSESS: (_("Assess"), self.on_assess),
OBSERVE: (_("Observe"), self.on_observe),
FOLLOW: (_("Follow"), self.on_follow),
CHAT: (_("Chat"), self.on_chat),
CHALLENGE: (_("Challenge"), self.on_challenge),
FINGER: (_("Finger"), self.on_finger),
ARCHIVED: (_("Archived"), self.on_archived),
}
self.menu = Gtk.Menu()
for item in items:
if item == SEPARATOR:
menu_item = Gtk.SeparatorMenuItem()
else:
label, callback = ITEM_MAP[item]
menu_item = Gtk.MenuItem(label)
menu_item.connect("activate", callback)
self.menu.append(menu_item)
self.menu.attach_to_widget(self.tv, None)<|fim_middle|>create_local_menu<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(librepcb, helpers):
"""
Test an upgrade which copies from 'data' to 'v0.1'
"""
v01_path = os.path.join(librepcb.workspace_path, 'v0.1')
data_path = os.path.join(librepcb.workspace_path, 'data')
shutil.rmtree(v01_path)
os.remove(os.path.join(data_path, '.librepcb-data')) # Downgrade to v0.1
with librepcb.open() as app:
# Perform upgrade.
wizard = app.widget('initWorkspaceWizard')
src_label = app.widget('initWorkspaceWizardUpgradeSourceLabel')
assert data_path in src_label.properties()['text']
dst_label = app.widget('initWorkspaceWizardUpgradeDestinationLabel')
assert v01_path in dst_label.properties()['text']
app.widget('initWorkspaceWizardFinishButton').click()
# Verify that the v0.1 directory has been created and the data
# directory has been upgraded.
helpers.wait_until_widget_hidden(wizard)
assert os.path.exists(v01_path)
assert os.path.exists(os.path.join(data_path, '.librepcb-data'))
# Verify that the control panel is now opened.
assert app.widget('controlPanel').properties()['visible'] is True
# Open LibrePCB again to see if the workspace is automatically opened.
with librepcb.open() as app:
assert app.widget('controlPanel').properties()['visible'] is True<|fim_middle|>test_upgrade_data<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
run_fdtd_2d(dace.dtypes.DeviceType.GPU)<|fim_middle|>test_gpu<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> Optional[str]:
"""
Format of the policyContent.
"""
return pulumi.get(self, "format")<|fim_middle|>format<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(name):
for row in data["regiontypes"]:
if row["description"] == name:
return row["regiontypeid"]
raise Exception()<|fim_middle|>get_tier_ids<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(packet, dataId, prim, appId, data, crc):
# if dataId != 0xa1:
# return
print("packet: %s (%4d)" % (dump(packet), lineNumber), end=' ')
cells = (data & 0xF0) >> 4
battnumber = data & 0xF
voltage1 = ((data & 0x000FFF00) >> 8) // 5
voltage2 = ((data & 0xFFF00000) >> 20) // 5
print(" FLVSS: no cells: %d, cell: %d: voltages: %0.2f %0.2f" % (cells, battnumber, voltage1 / 100., voltage2 / 100.))<|fim_middle|>parse_fl_vss<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(attrs, args):
"""Get the parameters necessary to construct a compute TE
from an ethosu_conv2d Relay call."""
ifm = args[0]
weight = args[1]
scale_bias = args[2]
lut = args[3]
ifm_scale = attrs.ifm_scale
ifm_zero_point = attrs.ifm_zero_point
weight_zero_point = attrs.weight_zero_point
ofm_scale = attrs.ofm_scale
ofm_zero_point = attrs.ofm_zero_point
strides = attrs.strides
padding = attrs.padding
dilation = attrs.dilation
activation = attrs.activation
clip_min = attrs.clip_min
clip_max = attrs.clip_max
rounding_mode = attrs.rounding_mode
upscale = attrs.upscale
ifm_layout = attrs.ifm_layout
ofm_layout = attrs.ofm_layout
return (
ifm,
weight,
scale_bias,
lut,
ifm_scale,
ifm_zero_point,
weight_zero_point,
ofm_scale,
ofm_zero_point,
strides,
padding,
dilation,
activation,
clip_min,
clip_max,
rounding_mode,
upscale,
ifm_layout,
ofm_layout,
)<|fim_middle|>extract_ethosu_conv2d_params<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.do_test_connect_after_remove('hard')<|fim_middle|>test_connect_after_remove_force<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
connection_config: ConnectionConfig = Depends(_get_connection_config),
*,
db: Session = Depends(deps.get_db),<|fim_middle|>delete_access_manual_webhook<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, existingTransaction):
"""
_commitTransaction_
Commit a database transaction that was begun by self.beginTransaction().
"""
if not existingTransaction:
myThread = threading.currentThread()
myThread.transaction.commit()
return<|fim_middle|>commit_transaction<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
XG3 = nx.Graph()
edges = [(0, 1, 2), (1, 2, 12), (2, 3, 1), (3, 4, 5), (4, 5, 1), (5, 0, 10)]
XG3.add_weighted_edges_from(edges)
assert nx.astar_path(XG3, 0, 3) == [0, 1, 2, 3]
assert nx.astar_path_length(XG3, 0, 3) == 15<|fim_middle|>test_astar_undirected2<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(filename):
"""Load a yaml file into a dictionary."""
with open(filename, mode="r", encoding="utf8") as fh:
return yaml.safe_load(fh)<|fim_middle|>load_yaml<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Provisioning state of the resource.
"""
return pulumi.get(self, "provisioning_state")<|fim_middle|>provisioning_state<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
x,
y,
orientation="h",
color="red",
width=1.0,
fill_color="red",
marker="square",
marker_size=2,
value_mapper_class=LinearMapper,
padding=30,
):
assert len(x) == len(y)
# If you know it is monotonically increasing, sort_order can
# be set to 'ascending'
index = ArrayDataSource(x, sort_order="none")
value = ArrayDataSource(y, sort_order="none")
index_range = DataRange1D(tight_bounds=False)
index_range.add(index)
index_mapper = LinearMapper(range=index_range)
value_range = DataRange1D(tight_bounds=False)
value_range.add(value)
value_mapper = value_mapper_class(range=value_range)
plot = ScatterPlot(
index=index,
value=value,
index_mapper=index_mapper,
value_mapper=value_mapper,
orientation=orientation,
color=color,
fill_color=fill_color,
marker=marker,
marker_size=marker_size,
padding=[40, 15, 15, 20], # left, right, top, bottom
border_visible=True,
border_width=1,
bgcolor="white",
use_backbuffer=True,
backbuffer_padding=False,
unified_draw=True,
draw_layer="plot",
overlay_border=True,
)
vertical_grid = PlotGrid(
component=plot,
mapper=index_mapper,
orientation="vertical",
line_color="gray",
line_style="dot",
)
horizontal_grid = PlotGrid(
component=plot,
mapper=value_mapper,
orientation="horizontal",
line_color="gray",
line_style="dot",
)
vertical_axis = PlotAxis(orientation="left", mapper=plot.value_mapper)
horizontal_axis = PlotAxis(
orientation="bottom",
title="Time (s)",
mapper=plot.index_mapper,
)
plot.underlays.append(vertical_grid)
plot.underlays.append(horizontal_grid)
# Have to add axes to overlays because we are backbuffering the main plot,
# and only overlays get to render in addition to the backbuffer.
plot.overlays.append(vertical_axis)
plot.overlays.append(horizontal_axis)
return plot<|fim_middle|>create_gridded_scatter_plot<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
# Test saving a 16384 x 16384 image
OpenDatabase(silo_data_path("curv2d.silo"))
AddPlot("Pseudocolor", "d")
DrawPlots()
view2=GetView2D()
view2.fullFrameActivationMode=view2.Off
SetView2D(view2)
swa=SaveWindowAttributes()
# ensure the image is written to the run_dir by providing full path
imgOutName=pjoin(TestEnv.params["run_dir"], "image_16384x16384.png")
swa.width = 16384
swa.height = 16384
swa.fileName = imgOutName
swa.family = 0
SetSaveWindowAttributes(swa)
SaveWindow()
# Comparing md5 sum instead of image, since the image is large.
md5_hash = hashlib.md5()
with open(imgOutName, "rb") as f:
# Read and update hash in chunks of 4K
for byte_block in iter(lambda: f.read(4096),b""):
md5_hash.update(byte_block)
TestValueEQ("md5 hash for 16384x16384 image", md5_hash.hexdigest(), "9196b516c25ecbeac1fab4cd54ee0c59")<|fim_middle|>test_large_image<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(apiobj, table_factory):
table_factory('word',
definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB')
assert apiobj.api.search_category([], near_query='Berlin') == []<|fim_middle|>test_category_no_categories<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
class FakeResponse(DictObject):
def json(self):
return self._json
response = FakeResponse(
status_code=501,
headers={"content-type": "application/json;charset=utf-8"},
reason="SchlumpError",
text='{"reason":"it schlumped"}',
_json={"reason": "it schlumped"},
request=DictObject(
url="http://foo.com/bar/bat",
headers={"xyz": "pdq"},
method="PUT",
body="body",
),
)
pytest.raises(SynapseHTTPError, _raise_for_status, response, verbose=False)<|fim_middle|>test_raise_for_status<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(
self, resource_group_name: str, elastic_san_name: str, **kwargs: Any
) -> _models.PrivateLinkResourceListResult:
"""Gets the private link resources that need to be created for a elastic San.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param elastic_san_name: The name of the ElasticSan. Required.
:type elastic_san_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateLinkResourceListResult or the result of cls(response)
:rtype: ~azure.mgmt.elasticsan.models.PrivateLinkResourceListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None)
request = build_list_by_elastic_san_request(
resource_group_name=resource_group_name,
elastic_san_name=elastic_san_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized<|fim_middle|>list_by_elastic_san<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, factory, version, v3):
factory(UID, NAME, IMAGE, {"version": version}, TEAMS, TAGS, DEPLOYMENT_ID, NAMESPACE, None, None)
v3.assert_called_with(UID, NAME, IMAGE, TEAMS, TAGS, ANY, DEPLOYMENT_ID, NAMESPACE, None, None)<|fim_middle|>test_parsed_by_current_version<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(hrp, data):
"""Compute a Bech32 string given HRP and data values."""
combined = data + bech32_create_checksum(hrp, data)
return hrp + "1" + "".join([CHARSET[d] for d in combined])<|fim_middle|>bech32_encode<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(rspecifiers, in_filetype, percent):
assert percent <= 1.0
if not isinstance(rspecifiers, list):
rspecifiers = [rspecifiers]
feat = np.concatenate(
[
load_feature_shard(rspecifier, in_filetype, percent)
for rspecifier in rspecifiers
],
axis=0,
)
logging.info(f"loaded feature with dimension {feat.shape}")
return feat<|fim_middle|>load_feature<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
v: float,
divisor: int,
min_value: Optional[int] = None,
) -> int:
"""Find the smallest integer larger than v and divisible by divisor."""
if not min_value:
min_value = divisor
new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
# Make sure that round down does not go down by more than 10%.
if new_v < 0.9 * v:
new_v += divisor
return new_v<|fim_middle|>make_divisible<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(client: Client, prev_hash):
inp1 = messages.TxInputType(
address_n=tools.parse_path("m/44h/0h/0h/0/0"),
amount=123_456_789,
prev_hash=prev_hash,
prev_index=0,
script_type=messages.InputScriptType.SPENDP2SHWITNESS,
)
out1 = messages.TxOutputType(
address="mhRx1CeVfaayqRwq5zgRQmD7W5aWBfD5mC",
amount=12_300_000,
script_type=messages.OutputScriptType.PAYTOADDRESS,
)
with pytest.raises(TrezorFailure) as e:
btc.sign_tx(client, "Testnet", [inp1], [out1], prev_txes={})
_check_error_message(prev_hash, client.features.model, e.value.message)<|fim_middle|>test_invalid_prev_hash<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(next_link=None):
if not next_link:
request = build_get_request(
scope=scope,
filter=filter,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request<|fim_middle|>prepare_request<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, path_string : StrictStr, path_integer : StrictInt, **kwargs) -> ApiResponse: # noqa: E501
"""Test path parameter(s) # noqa: E501
Test path parameter(s) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tests_path_string_path_string_integer_path_integer_with_http_info(path_string, path_integer, async_req=True)
>>> result = thread.get()
:param path_string: (required)
:type path_string: str
:param path_integer: (required)
:type path_integer: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the ApiResponse.data will
be set to none and raw_data will store the
HTTP response body without reading/decoding.
Default is True.
:type _preload_content: bool, optional
:param _return_http_data_only: response data instead of ApiResponse
object with status code, headers, etc
:type _return_http_data_only: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:type _content_type: string, optional: force content-type for the request
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(str, status_code(int), headers(HTTPHeaderDict))
"""
_params = locals()
_all_params = [
'path_string',
'path_integer'
]
_all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'_content_type',
'_headers'
]
)
# validate the arguments
for _key, _val in _params['kwargs'].items():
if _key not in _all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method tests_path_string_path_string_integer_path_integer" % _key
)
_params[_key] = _val
del _params['kwargs']
_collection_formats = {}
# process the path parameters
_path_params = {}
if _params['path_string']:
_path_params['path_string'] = _params['path_string']
if _params['path_integer']:
_path_params['path_integer'] = _params['path_integer']
# process the query parameters
_query_params = []
# process the header parameters
_header_params = dict(_params.get('_headers', {}))
# process the form parameters
_form_params = []
_files = {}
# process the body parameter
_body_params = None
# set the HTTP header `Accept`
_header_params['Accept'] = self.api_client.select_header_accept(
['text/plain']) # noqa: E501
# authentication setting
_auth_settings = [] # noqa: E501
_response_types_map = {
'200': "str",
}
return self.api_client.call_api(
'/path/string/{path_string}/integer/{path_integer}', 'GET',
_path_params,
_query_params,
_header_params,
body=_body_params,
post_params=_form_params,
files=_files,
response_types_map=_response_types_map,
auth_settings=_auth_settings,
async_req=_params.get('async_req'),
_return_http_data_only=_params.get('_return_http_data_only'), # noqa: E501
_preload_content=_params.get('_preload_content', True),
_request_timeout=_params.get('_request_timeout'),
collection_formats=_collection_formats,
_request_auth=_params.get('_request_auth'))<|fim_middle|>tests_path_string_path_string_integer_path<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> None:
self.proto.lineReceived(b"echo test > test8; echo test >> test8; cat test8")
self.assertEqual(self.tr.value(), b"test\ntest\n" + PROMPT)<|fim_middle|>test_echo_command_008<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, s1, s2):
# Estimates the energy between s1 and s2 based on 2D projection
return 0.<|fim_middle|>estimate_energy_barrier<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
'''
Set status to 'off'
Input:
None
Output:
None
'''
self.set_status(False)<|fim_middle|>off<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(task: et.Element, ds: datastore) -> Optional[et.Element]:
"""Convert old task XML into the new format."""
tid = task.attrib['id']
real_task = ds.task_factory(tid)
if real_task is None:
return
# Get the old task properties
# TIDs were stored as UUID, but sometimes they were not present
tid = task.get('uuid') or real_task.get_uuid() or tid_cache[tid]
status = task.get('status')
title = task.find('title').text
content = task.find('content')
try:
done_date = task.find('donedate').text
except AttributeError:
done_date = None
try:
due_date = task.find('duedate').text
except AttributeError:
due_date = None
try:
modified = task.find('modified').text
except AttributeError:
modified = None
try:
added = task.find('added').text
except AttributeError:
added = None
try:
start = task.find('startdate').text
except AttributeError:
start = None
# Build the new task
new_task = et.Element('task')
new_task.set('status', status)
new_task.set('id', tid)
new_title = et.SubElement(new_task, 'title')
new_title.text = title
tags = et.SubElement(new_task, 'tags')
for tag_name in task.get('tags').split(','):
if tag_name:
tag_id = tags_cache[tag_name]
task_tag = et.SubElement(tags, 'tag')
task_tag.text = tag_id
dates = et.SubElement(new_task, 'dates')
new_added = et.SubElement(dates, 'added')
new_modified = et.SubElement(dates, 'modified')
if added:
added = str(Date(added))
else:
added = date.today().isoformat()
new_added.text = added
if modified:
modified = modified[:10]
modified = str(Date(modified))
else:
modified = date.today().isoformat()
new_modified.text = modified
if done_date:
new_done = et.SubElement(dates, 'done')
new_done.text = str(Date(done_date))
if start:
start = Date(start)
if start.is_fuzzy():
new_start = et.SubElement(dates, 'fuzzyStart')
else:
new_start = et.SubElement(dates, 'start')
new_start.text = str(start)
if due_date:
due_date = Date(due_date)
if due_date.is_fuzzy():
new_due = et.SubElement(dates, 'fuzzyDue')
else:
new_due = et.SubElement(dates, 'due')
new_due.text = str(due_date)
recurring = et.SubElement(new_task, 'recurring')
recurring.set('enabled', 'false')
subtasks = et.SubElement(new_task, 'subtasks')
for sub in task.findall('subtask'):
new_sub = et.SubElement(subtasks, 'sub')
new_sub.text = tid_cache[sub.text]
new_content = et.SubElement(new_task, 'content')
if content is not None:
new_content.text = et.CDATA(convert_content(content.text))
else:
new_content.text = et.CDATA('')
return new_task<|fim_middle|>convert_task<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(cls, values):
return validation.core.initialize_config(getattr(validators, 'initialize_instance', identity)(values))<|fim_middle|>initial_validation<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>( # type: ignore
scenario: Scenario,
*,
n_configs: int | None = None,
n_configs_per_hyperparamter: int = 10,
max_ratio: float = 0.25,
additional_configs: list[Configuration] = [],
) -> SobolInitialDesign:
"""Returns a Sobol design instance.
Parameters
----------
scenario : Scenario
n_configs : int | None, defaults to None
Number of initial configurations (disables the arguments ``n_configs_per_hyperparameter``).
n_configs_per_hyperparameter: int, defaults to 10
Number of initial configurations per hyperparameter. For example, if my configuration space covers five
hyperparameters and ``n_configs_per_hyperparameter`` is set to 10, then 50 initial configurations will be
sampled.
max_ratio: float, defaults to 0.25
Use at most ``scenario.n_trials`` * ``max_ratio`` number of configurations in the initial design.
Additional configurations are not affected by this parameter.
additional_configs: list[Configuration], defaults to []
Adds additional configurations to the initial design.
"""
return SobolInitialDesign(
scenario=scenario,
n_configs=n_configs,
n_configs_per_hyperparameter=n_configs_per_hyperparamter,
max_ratio=max_ratio,
additional_configs=additional_configs,
)<|fim_middle|>get_initial_design<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, args):
if hasattr(args, "enable_attack") and args.enable_attack:
logging.info("------init attack..." + args.attack_type.strip())
self.is_enabled = True
self.attack_type = args.attack_type.strip()
self.attacker = None
if self.attack_type == ATTACK_METHOD_BYZANTINE_ATTACK:
self.attacker = ByzantineAttack(args)
elif self.attack_type == ATTACK_LABEL_FLIPPING:
self.attacker = LabelFlippingAttack(args)
elif self.attack_type == BACKDOOR_ATTACK_MODEL_REPLACEMENT:
self.attacker = ModelReplacementBackdoorAttack(args)
elif self.attack_type == ATTACK_METHOD_DLG:
self.attacker = DLGAttack(args=args)
else:
self.is_enabled = False
if self.is_enabled:
if hasattr(args, MLEngineBackend.ml_engine_args_flag) and args.ml_engine in [
MLEngineBackend.ml_engine_backend_tf,
MLEngineBackend.ml_engine_backend_jax,
MLEngineBackend.ml_engine_backend_mxnet,
]:
logging.info(
"FedMLAttacker is not supported for the machine learning engine: %s. "
"We will support more engines in the future iteration."
% args.ml_engine
)
self.is_enabled = False<|fim_middle|>init<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
y = np.array([1.0, 2])
x = np.array([0.0, 0.0])
theta_E = 1.0
q = 0.9999
s = 0.00001
phi_G = 0
e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)
values = self.nie.function(x, y, theta_E, e1, e2, s_scale=s)
delta_pot = values[1] - values[0]
values_spemd = self.sis.function(x, y, theta_E)
delta_pot_spemd = values_spemd[1] - values_spemd[0]
npt.assert_almost_equal(delta_pot, delta_pot_spemd, decimal=4)
if bool_test is True:
q = 0.99
s = 0.000001
phi_G = 0
e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)
values = self.nie.function(x, y, theta_E, e1, e2, s_scale=s)
delta_pot = values[1] - values[0]
gamma = 2.0
values_spemd = self.spemd.function(x, y, theta_E, gamma, e1, e2, s_scale=s)
delta_pot_spemd = values_spemd[1] - values_spemd[0]
npt.assert_almost_equal(delta_pot, delta_pot_spemd, decimal=2)<|fim_middle|>test_function<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
cls, resource: AbstractExternalResource
) -> AbstractExternalResource:
resource = resource.update()
return resource<|fim_middle|>do_resource_update<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
inference_request: InferenceRequest,
expected: Any,
):
decoded = decode_inference_request(inference_request)
if isinstance(expected, pd.DataFrame):
pd.testing.assert_frame_equal(decoded, expected)
else:
assert decoded == expected<|fim_middle|>test_decode_inference_request<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, max_wait=30):
r = 0
prev_s = None
while True:
s = self.get_smf_status("network/routing-setup")
if s == "online":
break
if s != prev_s or prev_s is None:
self.log.info("waiting for network/routing-setup online state. current state: %s" % s)
prev_s = s
r += 1
if r > max_wait:
self.log.error("timeout waiting for network/routing-setup online state")
break
time.sleep(1)<|fim_middle|>wait_net_smf<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(from_node: str, to_node: str) -> Json:
return {"type": "edge", "from": from_node, "to": to_node, "edge_type": "delete"}<|fim_middle|>edge<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, account_key, first_subaddress_index=0, num_subaddresses=1):
return self.request(f"monitors", {
"account_key": account_key,
"first_subaddress": first_subaddress_index,
"num_subaddresses": num_subaddresses,
})<|fim_middle|>create_monitor<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(network):
r"""
Find any throats that point to a non-existent pore
"""
hits = np.any(network.conns > (network.Np - 1), axis=1)
return hits<|fim_middle|>headless_throats<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(op_upd, t, k, lst):
if lst:
if type(lst[next(iter(lst))]) == dict:
for i in lst:
METHOD_NAME(op_upd, t, k+(i,), lst[i])
return
if op_upd:
do_update(t, k, lst)
else:
do_delete(t, k, lst)<|fim_middle|>do_operate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(argv, stdout, environ):
progname = argv[0]
optlist, _ = getopt.getopt(argv[1:], "", ["help", "test",])
testflag = 0
for (field, _) in optlist:
if field == "--help":
usage(progname)
return
elif field == "--test":
testflag = 1
if testflag:
test()
return
loop()<|fim_middle|>main<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(ev):
Sequencer.read_sequence()<|fim_middle|>change_bpm<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return_code, result = get_result_and_return_code(['ipintutil', '-a', 'ipv6', '-d', 'all'])
assert return_code == 0
verify_output(result, show_multi_asic_ipv6_intf_all)<|fim_middle|>test_show_ip_intf_v6_all<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(fn):
cls._known_urns[urn] = parameter_type, fn
return fn<|fim_middle|>register<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
global c
global link
c.compiler = c.GccCompiler("/Projects/avr-tools/bin/avr-")
c.flags += ["-mmcu=atmega256rfr2", "-ffunction-sections", "-fdata-sections"]
link.linker = link.GccLinker("/Projects/avr-tools/bin/avr-")
link.flags += ["-mmcu=atmega256rfr2", "-mrelax", "-Wl,--gc-sections"]
link.strip = True<|fim_middle|>setup_avr<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, *args, **kwargs):
return<|fim_middle|>save_session<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, entry):
self.pattern = entry.get_text()
config.set('plugins', self.c_pattern, self.pattern)<|fim_middle|>pattern_changed<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
block_power = 10
block_size = 2 ** block_power
num_info_bits = 2 ** (block_power - 1)
num_frozen_bits = block_size - num_info_bits
frozen_bit_positions = cc.frozen_bit_positions(
block_size, num_info_bits, 0.0)
frozen_bit_values = np.array([0] * num_frozen_bits,)
bits, gr_data = self.generate_test_data(
block_size, num_info_bits, frozen_bit_positions, frozen_bit_values, 1, True)
polar_decoder = fec.polar_decoder_sc.make(
block_size, num_info_bits, frozen_bit_positions, frozen_bit_values)
src = blocks.vector_source_f(gr_data, False)
dec_block = extended_decoder(polar_decoder, None)
snk = blocks.vector_sink_b(1)
self.tb.connect(src, dec_block)
self.tb.connect(dec_block, snk)
self.tb.run()
res = np.array(snk.data()).astype(dtype=int)
self.assertTupleEqual(tuple(res), tuple(bits))<|fim_middle|>test_002_one_vector<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(text):
sys.exit(os.path.basename(sys.argv[0]) + " ERROR: " + text)<|fim_middle|>error<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
sc = SomeClass()
for model in models:
print("update-caffe2-models.py: generating", model)
caffe2_model_dir = sc._caffe2_model_dir(model)
onnx_model_dir, onnx_models_dir = sc._onnx_model_dir(model)
subprocess.check_call(["echo", model])
with open(os.path.join(caffe2_model_dir, "value_info.json"), "r") as f:
value_info = f.read()
subprocess.check_call(
[
"convert-caffe2-to-onnx",
"--caffe2-net-name",
model,
"--caffe2-init-net",
os.path.join(caffe2_model_dir, "init_net.pb"),
"--value-info",
value_info,
"-o",
os.path.join(onnx_model_dir, "model.pb"),
os.path.join(caffe2_model_dir, "predict_net.pb"),
]
)
subprocess.check_call(
["tar", "-czf", model + ".tar.gz", model], cwd=onnx_models_dir
)<|fim_middle|>generate_models<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, folder, f):
file = os.path.join(
os.path.dirname(__file__), '..', 'data', folder, f)
serializer = XmlSerializer()
with open(file) as fp:
xml = etree.parse(fp).getroot()
self.wf_spec = WorkflowSpec.deserialize(
serializer, xml, filename=file)
self.workflow = Workflow(self.wf_spec)<|fim_middle|>load_workflow_spec<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self, angle: float = 45, center: tuple[float, float] = (0, 0)<|fim_middle|>rotate<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
graph = graph_0()
assert set(graph.children_of('a')) == {'d', 'e'}
assert set(graph.children_of('b')) == {'d', 'e'}
assert set(graph.children_of('c')) == {'f'}
assert set(graph.children_of('d')) == {'f'}
assert set(graph.children_of('e')) == {'g'}
assert set(graph.children_of('f')) == set()
assert set(graph.children_of('g')) == set()<|fim_middle|>test_children_of<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
db, inflexible_devices, setup_sources
) -> dict[Sensor, list[int | float]]:
"""
Set up inflexible devices and forecasts.
"""
# 2 days of test data
time_slots = initialize_index(
start=pd.Timestamp("2015-01-01").tz_localize("Europe/Amsterdam"),
end=pd.Timestamp("2015-01-03").tz_localize("Europe/Amsterdam"),
resolution="15T",
)
# PV (8 hours at zero capacity, 8 hours at 90% capacity, and again 8 hours at zero capacity)
headroom = 0.1 # 90% of nominal capacity
pv_sensor = inflexible_devices["PV power sensor"]
capacity = pv_sensor.get_attribute("capacity_in_mw")
pv_values = (
[0] * (8 * 4) + [(1 - headroom) * capacity] * (8 * 4) + [0] * (8 * 4)
) * (len(time_slots) // (24 * 4))
add_as_beliefs(db, pv_sensor, pv_values, time_slots, setup_sources["Seita"])
# Residual demand (1 MW continuously)
residual_demand_sensor = inflexible_devices["residual demand power sensor"]
residual_demand_values = [-1] * len(time_slots)
add_as_beliefs(
db,
residual_demand_sensor,
residual_demand_values,
time_slots,
setup_sources["Seita"],
)
return {
pv_sensor: pv_values,
residual_demand_sensor: residual_demand_values,
}<|fim_middle|>add_inflexible_device_forecasts<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(dirpath):
"""
Creates ``verta_config.yaml`` containing an empty dictionary in `dirpath`.
Parameters
----------
dirpath : str
Path to the directory that will contain the config file.
Returns
-------
config_filepath : str
Absolute path to the newly-created config file
"""
config_filepath = os.path.join(dirpath, CONFIG_YAML_FILENAME)
config_filepath = os.path.expanduser(config_filepath)
config_filepath = os.path.abspath(config_filepath)
with open(config_filepath, "w") as f:
yaml.dump({}, f)
return config_filepath<|fim_middle|>create_empty_config_file<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(func):
@wraps(func)
def inner(*args, **kwargs):
if clear_output:
self.clear_output(*clear_args, **clear_kwargs)
with self:
return func(*args, **kwargs)
return inner<|fim_middle|>capture_decorator<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, form):
if form not in self.entries:
return False
return True<|fim_middle|>form_exists<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(request):
userid = d.get_int(request.params.get('userid', ''))
if request.userid != userid and userid in staff.ADMINS and request.userid not in staff.TECHNICAL:
raise WeasylError('InsufficientPermissions')
profile.do_manage(request.userid, userid,
username=request.params.get('username', '').strip() if 'ch_username' in request.params else None,
full_name=request.params.get('full_name', '').strip() if 'ch_full_name' in request.params else None,
catchphrase=request.params.get('catchphrase', '').strip() if 'ch_catchphrase' in request.params else None,
birthday=request.params.get('birthday', '') if 'ch_birthday' in request.params else None,
gender=request.params.get('gender', '') if 'ch_gender' in request.params else None,
country=request.params.get('country', '') if 'ch_country' in request.params else None,
remove_social=request.params.getall('remove_social'),
permission_tag='permission-tag' in request.params)
raise HTTPSeeOther(location="/admincontrol")<|fim_middle|>admincontrol_manageuser_post<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, adapter, nic0):
CUSTOM_OPTS1 = {
'bridge_opts': 'multicast_router=0 multicast_snooping=0'
}
CUSTOM_OPTS2 = {
'bridge_opts': 'multicast_router=0 multicast_snooping=1'
}
NETCREATE = {
NETWORK_NAME: {
'nic': nic0,
'switch': 'legacy',
'custom': CUSTOM_OPTS1,
}
}
NETEDIT = {
NETWORK_NAME: {
'nic': nic0,
'switch': 'legacy',
'custom': CUSTOM_OPTS2,
}
}
with adapter.reset_persistent_config():
with adapter.setupNetworks(NETCREATE, {}, NOCHK):
adapter.setSafeNetworkConfig()
adapter.setupNetworks(NETEDIT, {}, NOCHK)
adapter.assertBridgeOpts(NETWORK_NAME, NETEDIT[NETWORK_NAME])
adapter.restore_nets()
adapter.assertBridgeOpts(NETWORK_NAME, NETCREATE[NETWORK_NAME])<|fim_middle|>test_restore_bridge_with_custom_opts<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""
Function to handle the ok button being clicked. Closes this
window.
:return: True when the window has closed.
"""
self.close()<|fim_middle|>ok_button_clicked<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, data):
if isinstance(data, ProductListItem):
return data
queryset = self.get_queryset()
try:
return queryset.get(product__name=data)
except ObjectDoesNotExist:
self.fail(
"does_not_exist", slug_name=self.slug_field, value=smart_str(data)
)
except (TypeError, ValueError):
self.fail("invalid")
return None<|fim_middle|>to_internal_value<|file_separator|> |