Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
def getValidityConstr(self):
    for period in self._sections.keys():
        section = self._sections[period]
        hasClass = 0
        if section is not None and section.courseType != CourseType.OFF:
            hasClass = 1
        expr = [(var, 1) for var in self._lpVars[period]]
        yield (LpAffineExpression(expr) <= hasClass)
Yields constraint expressions enforcing that each period has either 0 or 1 class.
625941a26aa9bd52df03693e
def _get_axis_labels(self, in_dataset):
    labels = in_dataset.data_info.get('axis_labels')[0]
    volX, volY, volZ = self._get_volume_dimensions()
    labels = [str(volX) + '.voxel_x.voxels', str(volZ) + '.voxel_z.voxels']
    if volY:
        labels.append(str(volY) + '.voxel_y.voxels')
    labels = {in_dataset: labels}
    return labels
Get the new axis labels for the output dataset - this is now a volume.

Parameters
----------
in_dataset : :class:`savu.data.data_structures.data.Data`
    The input dataset to the plugin.

Returns
-------
labels : dict
    The axis labels for the dataset that is output from the plugin.
625941a26aa9bd52df036940
def bonferroni_correction(pvals):
    n = len(pvals)
    return [min(x * n, 1.0) for x in pvals]
Bonferroni correction. Reference: http://en.wikipedia.org/wiki/Bonferroni_correction
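A minimal usage sketch (hypothetical p-values): each value is multiplied by n and capped at 1.0.

    pvals = [0.01, 0.02, 0.5]
    print(bonferroni_correction(pvals))  # approximately [0.03, 0.06, 1.0]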
625941a26aa9bd52df036944
def main(self):
    sys.path.append(os.curdir)
    sys.path.append(os.path.join(os.curdir, "applications"))
    sys.path.append(os.path.join(os.curdir, "plugins"))
    sys.path.append(os.path.join(os.curdir, "framework"))
    if not self.settings.consolelog:
        if self.settings.production:
            cgitb.enable(display=False, logdir=os.path.join(os.curdir, 'logs'), format='text')
        else:
            cgitb.enable()
    from framework import router
    try:
        from applications import handlers
    except ImportError:  # no custom handlers available
        handlers = None
    router.Router(handlers, self.settings).route()
Set up paths and error reporting for the framework, then launch the application.
625941a36aa9bd52df036948
def unlisten_multicast_group(self, multicast_address, source=None, network_interface=None, handler=None):
    def converter(server):
        return self
    if network_interface is not None and source is not None:
        self.java_obj.unlistenMulticastGroup(multicast_address, network_interface, source,
                                             AsyncHandler(handler, converter))
    else:
        self.java_obj.unlistenMulticastGroup(multicast_address, AsyncHandler(handler, converter))
    return self
Leaves a multicast group and stops listening for packets sent to it on the given network interface. The handler is notified once the operation completes.

@param multicast_address: the address of the multicast group to leave
@param source: the address of the source for which we will stop listening for multicast packets
@param network_interface: the network interface on which to stop listening for packets
@param handler: the handler to notify once the operation completes
@return self: returns itself for method chaining
625941a36aa9bd52df036952
def __handle_response(self, msg, handler):
    packets = msg.packets
    if packets[0] == flags.response:
        self.__print__("Response received for request id %s" % packets[1], level=1)
        if self.requests.get(packets[1]):
            addr = packets[2]
            if addr:
                _msg = cast(Tuple[MsgPackable, ...], self.requests.get(packets[1]))
                self.requests.pop(packets[1])
                self.connect(addr[0][0], addr[0][1], addr[1])
                self.routing_table[addr[1]].send(*_msg)
        return True
    return None
This callback is used to deal with response signals. Its two primary jobs are:

- if it was your request, send the deferred message
- if it was someone else's request, relay the information

Args:
    msg: A :py:class:`~py2p.base.Message`
    handler: A :py:class:`~py2p.mesh.MeshConnection`

Returns:
    Either ``True`` or ``None``
625941a36aa9bd52df036959
def get_darksky_data(self, service, location, timestamp=None):
    service_json_name = ''
    if service in SERVICES_MAPPING:
        service_json_name = SERVICES_MAPPING[service]['json_name']
    if "lat" in location and 'long' in location:
        if timestamp:
            timestamp = int((timestamp.replace(tzinfo=None) -
                             datetime.datetime.utcfromtimestamp(0)).total_seconds())
            url = ("https://api.darksky.net/forecast/{key}/{lat},"
                   "{long},{timestamp}?units=us".format(
                       key=self._api_key, lat=location['lat'],
                       long=location['long'], timestamp=timestamp))
        else:
            url = ("https://api.darksky.net/forecast/{key}/{lat},"
                   "{long}?units=us".format(
                       key=self._api_key, lat=location['lat'],
                       long=location['long']))
        if self.performance_mode:
            services = ["currently", "hourly", "minutely", "daily"]
            if service_json_name and service_json_name in services:
                services.remove(service_json_name)
                url += "&exclude=" + ",".join(services)
            else:
                raise RuntimeError("Requested service {} is not provided"
                                   " by the Darksky API".format(service))
    else:
        raise ValueError('Invalid location. Expected format is: '
                         '"{"lat": "xxx.xxxx", "long": "xxx.xxxx"}"')
    _log.info("requesting url: {}".format(url))
    grequest = [grequests.get(url, verify=requests.certs.where(),
                              headers=self.headers, timeout=3)]
    gresponse = grequests.map(grequest)[0]
    self.add_api_call()
    if gresponse is None:
        raise RuntimeError("get request did not return any response")
    try:
        response = jsonapi.loads(gresponse.content)
        return response
    except ValueError:
        self.generate_response_error(url, gresponse.status_code)
Generic method called by the current and forecast service endpoint methods
to fetch a forecast request from the Darksky API. If performance mode is
set to True, the url adds exclusions for the services provided by the API
that were not requested.

:param service: requested service endpoint
:param location: location dictionary for building url
:param timestamp: timestamp of a record if this request is for the Time Machine end point
:return: Darksky forecast request response
625941a46aa9bd52df036973
def append(self, obj, **kw):
    obj = self.sanitize(obj, **kw)
    return super(MeshHookList, self).append(obj)
The input :py:class:`~.hook.MeshHook` *obj* will be appended into the list.
The *obj* argument is usually a :py:class:`~.hook.MeshHook` type or a
:py:class:`~.anchor.MeshAnchor` type, instead of an instance of them. The
method will automatically create the necessary Hook object when it detects
an acceptable type object passed as the first argument.

See :py:meth:`sanitize` for conditions of the input *obj* and *kw*.

>>> import solvcon as sc
>>> from solvcon.testing import create_trivial_2d_blk
>>> cse = MeshCase()  # No arguments because of demonstration.
>>> hook_list = MeshHookList(cse)
>>> len(hook_list)
0
>>> # Create two hooks of the same type and content.
>>> hook_list.append(sc.MeshHook, dummy="name")
>>> hook_list.append(sc.MeshHook, dummy="name")
>>> hook_list[0].kws['dummy'], hook_list[1].kws['dummy']
('name', 'name')
>>> # The two hooks aren't identical.
>>> hook_list[0] is hook_list[1]
False
625941a46aa9bd52df036975
def only_classmethods(cls):
    for k, v in cls.__dict__.items():
        if (not k.startswith('__') and callable(v)
                and not isinstance(v, (classmethod, staticmethod))):
            setattr(cls, k, classmethod(v))
    return cls
Class decorator that converts all normal methods to classmethods.
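A short usage sketch with a hypothetical Tools class: after decoration, methods receive the class as their first argument.

    @only_classmethods
    class Tools:
        def greet(cls):  # becomes a classmethod, so cls is bound automatically
            return 'hello from ' + cls.__name__

    print(Tools.greet())  # hello from Tools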
625941a56aa9bd52df03698d
def model_filter(func):
    def process_terms(terms):
        new_terms = []
        cand = ''
        for term in terms:
            if term.startswith(sign) or cand.endswith(sign):
                cand = cand + term
                continue
            if cand:
                new_terms.append(cand)
            cand = term
        if cand:
            new_terms.append(cand)
        return new_terms

    @functools.wraps(func)
    def wrapper(queryset, query, *args, **kwargs):
        if isinstance(query, basestring):
            query = query.split()
            query = process_terms(query)
        if query:
            try:
                return func(queryset, query, *args, **kwargs)
            except (FieldError, TypeError) as e:
                logging.error("%s", e.message)
                return queryset.none()
        else:
            return queryset
    return wrapper
Decorator to format a query before passing it to a filter function. The purpose of the decorator is to:

a) Split the queries into multiple keywords (space/tab separated).
b) Concatenate terms that have the ADMIN_FIELD_SIGN ("=") between them.
c) Ignore any empty queries.
625941a56aa9bd52df036993
def convertToTitle(self, n):
    result = ""
    stack = []
    while n // 26 != 0:
        stack.append(n % 26)
        n = n // 26
    stack.append(n % 26)
    i = 0
    while i < len(stack):
        if stack[i] == 0:
            if i + 1 < len(stack):
                stack[i] = 26
                stack[i + 1] -= 1
        i += 1
    if stack[-1] == 0:
        stack.pop()
    while len(stack) != 0:
        x = stack.pop()
        result += chr((x - 1) + ord("A"))
    return result
:type n: int
:rtype: str
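A usage sketch, assuming the method lives on a hypothetical Solution class as in typical LeetCode submissions:

    s = Solution()
    print(s.convertToTitle(1))    # A
    print(s.convertToTitle(28))   # AB
    print(s.convertToTitle(701))  # ZY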
625941a56aa9bd52df036997
def test_01(self):
    flag = True
    try:
        from xobox.utils.compat import EX_OK
    except ImportError:
        flag = False
    self.assertTrue(flag)
Test Case 01: Verify EX_OK is provided.

Test is passed if no exception is raised.
625941a56aa9bd52df0369a3
def execute(self, statement):
    self.engine.execute(statement)
Execute a SQL statement.

:param statement: the SQL statement to execute
:return:
625941a66aa9bd52df0369ad
@app.route('/group/<group_codename>/events/<int:event_id>/delete', methods=['GET', 'POST'])
def event_delete(group_codename, event_id):
    (infonav, this_group, this_event) = gbot.views.get_context(group_codename=group_codename, event_id=event_id)
    current_member = gbot.views.get_current_member(group_codename, current_user)
    if this_event.is_host(current_member):
        form = gbot.forms.DeleteForm(request.form)
        content = {'event_name': this_event.name}
        if form.validate_on_submit():
            if form.delete == True:
                try:
                    controllers.event.delete_event(request, event_id)
                    flash("You successfully cancelled the Event called " + str(this_event.name) + ".", "success")
                    return redirect(url_for(gbot.views.event.event_list, group_codename=group_codename))
                except Exception as e:
                    flash("Oof, something just went wrong. Sorry about that. Check this out: " + str(e))
                    return redirect(url_for(gbot.views.event.event_list, group_codename=group_codename))
            else:
                flash("Okay, " + str(this_event.name) + " is still happening!")
                return redirect(url_for(gbot.views.event.event_detail, group_codename=group_codename, event_id=event_id))
        else:
            return render_template('pages/events/delete.html', infonav=infonav, form=form, content=content)
This page essentially confirms whether or not the current_member (who must be a host) REALLY wants to delete the event.
625941a66aa9bd52df0369b1
def reduce(self, start, limit, reduce_op=operator.add):
    if limit is None:
        limit = self.capacity
    if limit < 0:
        limit += self.capacity
    if reduce_op == operator.add:
        result = 0.0
    elif reduce_op == min:
        result = float('inf')
    elif reduce_op == max:
        result = float('-inf')
    else:
        raise RLGraphError("Unsupported reduce OP. Supported ops are [add, min, max].")
    start += self.capacity
    limit += self.capacity
    while start < limit:
        if start & 1:
            result = reduce_op(result, self.values[start])
            start += 1
        if limit & 1:
            limit -= 1
            result = reduce_op(result, self.values[limit])
        start = start >> 1
        limit = limit >> 1
    return result
Applies an operation to the specified segment.

Args:
    start (int): Start index to apply reduction to.
    limit (int): End index to apply reduction to.
    reduce_op (Union(operator.add, min, max)): Reduce op to apply.

Returns:
    Number: Result of reduce operation
625941a66aa9bd52df0369b7
def plot_with_difference(original, fooled):
    fooled_v = fooled.clone().view(3, 224, 224).cpu().detach()
    fooled_back = inv_transform(fooled_v)
    original_back = inv_transform(original.clone())
    plt.figure()
    plt.subplot(131)
    plt.imshow(original_back)
    plt.title('original')
    boi_diff = ImChop.difference(fooled_back, original_back)
    plt.subplot(132)
    plt.imshow(boi_diff)
    plt.title('difference')
    plt.subplot(133)
    plt.imshow(fooled_back)
    plt.title('fooling')
    print("Total value difference:", np.array(boi_diff).sum(),
          "\nAverage value difference:", np.array(boi_diff).mean())
Takes the normalised tensors of two images and plots them side by side with their difference.
625941a66aa9bd52df0369bb
def circ_moment(alpha, w, p=1, cent=False, dim=0):
    if w.size == 0:
        w = np.ones(alpha.shape)
    else:
        checkShapesEqual(w, alpha)
    if cent:
        theta = np.array(circ_mean(alpha, w, dim))
        theta = theta[~np.isnan(theta)]
        theta_size = len(theta)
        # integer division: np.tile needs an int repetition count
        alpha = circ_dist(alpha, np.tile(theta, (alpha.shape[0] // theta_size,)))
    n = alpha.shape[dim]
    cbar = np.sum(np.cos(p * alpha) * w, axis=dim) * 1.0 / n
    sbar = np.sum(np.sin(p * alpha) * w, axis=dim) * 1.0 / n
    mp = cbar + 1j * sbar
    rho_p = np.abs(mp)
    mu_p = np.angle(mp)
    return (mp, rho_p, mu_p)
Calculates the complex p-th centred or non-centred moment of the angular data in alpha.

Input:
    alpha   sample of angles
    [w      weightings in case of binned angle data]
    [p      p-th moment to be computed, default is p=1]
    [cent   if true, central moments are computed, default = false]
    [dim    compute along this dimension, default is 1st axis]

    If dim argument is specified, all other optional arguments can be
    left empty: circ_moment(alpha, [], [], [], dim)

Output:
    mp      complex p-th moment
    rho_p   magnitude of the p-th moment
    mu_p    angle of the p-th moment

References: Statistical analysis of circular data, Fisher, p. 33/34
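A usage sketch, assuming numpy is imported as np. Passing an empty array for w falls back to uniform weights, so the circ_mean/circ_dist helpers are not exercised:

    import numpy as np

    angles = np.array([0.1, 0.2, 0.3])
    mp, rho_p, mu_p = circ_moment(angles, np.array([]))
    # rho_p is close to 1 (tightly clustered angles); mu_p is close to 0.2 (mean direction)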
625941a66aa9bd52df0369c5
def _get_prefix_limit(self):
    return self.__prefix_limit
Getter method for prefix_limit, mapped from YANG variable
/bgp/neighbors/neighbor/afi_safis/afi_safi/l3vpn_ipv4_multicast/prefix_limit (container)

YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
625941a76aa9bd52df0369d5
def setup_platform(hass, config, add_entities, discovery_info=None):
    import pywink
    for sensor in pywink.get_sensors():
        _id = sensor.object_id() + sensor.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            if sensor.capability() in SENSOR_TYPES:
                add_entities([WinkBinarySensorDevice(sensor, hass)])
    for key in pywink.get_keys():
        _id = key.object_id() + key.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            add_entities([WinkBinarySensorDevice(key, hass)])
    for sensor in pywink.get_smoke_and_co_detectors():
        _id = sensor.object_id() + sensor.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            add_entities([WinkSmokeDetector(sensor, hass)])
    for hub in pywink.get_hubs():
        _id = hub.object_id() + hub.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            add_entities([WinkHub(hub, hass)])
    for remote in pywink.get_remotes():
        _id = remote.object_id() + remote.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            add_entities([WinkRemote(remote, hass)])
    for button in pywink.get_buttons():
        _id = button.object_id() + button.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            add_entities([WinkButton(button, hass)])
    for gang in pywink.get_gangs():
        _id = gang.object_id() + gang.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            add_entities([WinkGang(gang, hass)])
    for door_bell_sensor in pywink.get_door_bells():
        _id = door_bell_sensor.object_id() + door_bell_sensor.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            add_entities([WinkBinarySensorDevice(door_bell_sensor, hass)])
    for camera_sensor in pywink.get_cameras():
        _id = camera_sensor.object_id() + camera_sensor.name()
        if _id not in hass.data[DOMAIN]['unique_ids']:
            try:
                if camera_sensor.capability() in SENSOR_TYPES:
                    add_entities([WinkBinarySensorDevice(camera_sensor, hass)])
            except AttributeError:
                _LOGGER.info("Device isn't a sensor, skipping")
Set up the Wink binary sensor platform.
625941a76aa9bd52df0369d9
def dumps_request(self, method, params=(), id=0):
    if not isinstance(method, (str, unicode)):
        raise TypeError('"method" must be a string (or unicode string).')
    if not isinstance(params, (tuple, list)):
        raise TypeError("params must be a tuple/list.")
    return '{"method": %s, "params": %s, "id": %s}' % (
        self.dumps(method), self.dumps(params), self.dumps(id))
serialize JSON-RPC-Request

:Parameters:
    - method: the method-name (str/unicode)
    - params: the parameters (list/tuple)
    - id: if id=None, this results in a Notification

:Returns:
    | {"method": "...", "params": ..., "id": ...}
    | "method", "params" and "id" are always in this order.

:Raises:
    TypeError if method/params is of wrong type or not JSON-serializable
625941a76aa9bd52df0369db
def p_consume_id_funcion(p):
    global variable_nombre_funcion
    if not func_table.existeFuncion(p[1]):
        raise NameError("La funcion no existe")
    variable_nombre_funcion = p[1]
consume_id_funcion : ID
625941a76aa9bd52df0369e3
def get_size(self):
    return self.size
Return the number of elements in the queue.

:return: the element count
625941a76aa9bd52df0369e7
def show_port(self, context, port_id):
    return dict(port=self._show_port(context, port_id))
Return the port for the client given the port id.

:param context - Request context.
:param port_id - The id of port to be queried.
:returns: A dict containing port data keyed by 'port'.
    e.g. {'port': {'port_id': 'abcd', 'fixed_ip_address': '1.2.3.4'}}
625941a86aa9bd52df0369ff
def p_TypeArgumentList(p):
    p[0] = mytuple(["TypeArgumentList"] + p[1:])
TypeArgumentList : TypeArgument COMMATypeArgumentS
                 | IDENT COMMATypeArgumentS
625941a86aa9bd52df036a03
def check_response_errors(response, session):
    if response['status'] != 'ok':
        from wi.utils.auth import logout
        error_code = response['status']
        error_msg = get_error(error_code)
        raise RestErrorException(error_msg)
    return response
Checks the status of the response and throws the appropriate error.
625941a96aa9bd52df036a0f
def construct_data(kills, pilots):
    return '\n'.join(kills + pilots)
Construct textarea content for resubmission.

Given a list of kills and a list of pilots, put them together in a form
that enables them to be fed back into this view.
625941a96aa9bd52df036a22
def process_tracklog_file_v1(point_file, track_file, protocol, database_path):
    print("building track logs")
    track_log_oids = {}
    mission_field_names, mission_field_types = extract_mission_attributes_from_protocol(protocol)
    mission_fields_count = len(mission_field_names)
    columns = (["SHAPE@"] + mission_field_names
               + protocol["csv"]["track_logs"]["field_names"])
    types = protocol["csv"]["track_logs"]["field_types"]
    table_name = protocol["csv"]["track_logs"]["name"]
    table = os.path.join(database_path, table_name)
    s_key = protocol["csv"]["track_logs"]["start_key_indexes"]
    e_key = protocol["csv"]["track_logs"]["end_key_indexes"]
    gps_keys = protocol["csv"]["gps_points"]["key_indexes"]
    last_point = None
    with arcpy.da.InsertCursor(table, columns) as cursor:
        for line in csv.reader(track_file):
            if sys.version_info[0] < 3:
                items = [item.decode("utf-8") for item in line]
            else:
                items = line
            protocol_items = items[:mission_fields_count]
            other_items = items[mission_fields_count:]
            start_time, end_time = other_items[s_key[T]], other_items[e_key[T]]
            track, last_point = build_track_geometry(point_file, last_point,
                                                     start_time, end_time, gps_keys)
            row = ([track]
                   + [cast(item, mission_field_types[i]) for i, item in enumerate(protocol_items)]
                   + [cast(item, types[i]) for i, item in enumerate(other_items)])
            track_log_oids[start_time] = cursor.insertRow(row)
    return track_log_oids
Build a track log feature class and return the object IDs of the new track logs.
625941aa6aa9bd52df036a28
def setSocialNetwork(self, newSocialNetwork):
    self._socialNetwork = newSocialNetwork
Set a new SocialNetwork.

Requires: newSocialNetwork SocialNetwork
Ensures: self.getSocialNetwork() == newSocialNetwork
625941aa6aa9bd52df036a2a
def matlab_style_gauss2D(shape=np.array([11, 11]), sigma=1.5):
    siz = (shape - np.array([1, 1])) / 2
    eps = 2.2204e-16
    x = np.arange(-siz[1], siz[1] + 1, 1)
    y = np.arange(-siz[0], siz[0] + 1, 1)  # was siz[1]; use siz[0] so non-square shapes work
    m, n = np.meshgrid(x, y)
    h = np.exp(-(m * m + n * n).astype(np.float32) / (2. * sigma * sigma))
    h[h < eps * h.max()] = 0
    sumh = h.sum()
    if sumh != 0:
        h = h.astype(np.float32) / sumh
    return h
2D gaussian mask - should give the same result as MATLAB's fspecial('gaussian',[shape],[sigma])
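A quick sanity check, assuming numpy is imported as np: a normalised Gaussian mask should sum to 1 and peak at the centre.

    import numpy as np

    h = matlab_style_gauss2D(np.array([5, 5]), sigma=1.0)
    print(h.shape)                    # (5, 5)
    print(round(float(h.sum()), 5))   # approximately 1.0
    print(h[2, 2] == h.max())         # True: peak at the centre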
625941aa6aa9bd52df036a2c
@home.route('/')
def homepage():
    return make_response(jsonify({
        'message': 'welcome to the hello-books app you can register in order to get the most out of the app'
    })), 200
Handle GET requests for the homepage.
625941aa6aa9bd52df036a38
def __init__(self, atoms, protcomplex, cclass, altconf):
    Mol.__init__(self, altconf)
    self.complex = cclass
    self.full_mol = protcomplex
    self.all_atoms = atoms
    self.rings = self.find_rings(self.full_mol, self.all_atoms)
    self.hydroph_atoms = self.hydrophobic_atoms(self.all_atoms)
    self.hbond_acc_atoms = self.find_hba(self.all_atoms)
    self.hbond_don_atom_pairs = self.find_hbd(self.all_atoms, self.hydroph_atoms)
    self.charged = self.find_charged(self.full_mol)
    self.halogenbond_acc = self.find_hal(self.all_atoms)
Find all relevant parts which could take part in interactions
625941aa6aa9bd52df036a3e
def hamming_weight(n: int) -> int:
    count = 0
    while n != 0:
        n &= n - 1
        count += 1
    return count
LeetCode 191: Write a function that takes an unsigned integer and returns the number of '1' bits it has (also known as the Hamming weight).
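A couple of spot checks:

    print(hamming_weight(11))   # 3, since 11 is 0b1011
    print(hamming_weight(128))  # 1, since 128 is 0b10000000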
625941aa6aa9bd52df036a42
def convert_obs_groups_binning_def_michi_to_default():
    altitude_edges = Angle([0, 20, 23, 27, 30, 33, 37, 40, 44, 49, 53, 58, 64, 72, 90], 'degree')
    azimuth_edges = Angle([-90, 90, 270], 'degree')
    list_obs_group_axis = [ObservationGroupAxis('ALT', altitude_edges, 'bin_edges'),
                           ObservationGroupAxis('AZ', azimuth_edges, 'bin_edges')]
    obs_groups_michi = ObservationGroups(list_obs_group_axis)
    print("Observation groups 'michi':")
    print(obs_groups_michi.obs_groups_table)
    outfile = 'bg_observation_groups_michi.ecsv'
    print('Writing {}'.format(outfile))
    obs_groups_michi.write(outfile)
    lookup_obs_groups_michi = Table()
    n_cols = 1 + len(list_obs_group_axis)
    n_rows = obs_groups_michi.n_groups
    lookup_obs_groups_michi['GROUP_ID'] = np.zeros(n_rows, dtype=np.int)
    lookup_obs_groups_michi['ALT_ID'] = np.zeros(n_rows, dtype=np.int)
    lookup_obs_groups_michi['AZ_ID'] = np.zeros(n_rows, dtype=np.int)
    count_groups = 0
    for alt_id in np.arange(len(altitude_edges) - 1):
        for az_id in np.arange(len(azimuth_edges) - 1):
            lookup_obs_groups_michi['GROUP_ID'][count_groups] = count_groups
            lookup_obs_groups_michi['ALT_ID'][count_groups] = alt_id
            lookup_obs_groups_michi['AZ_ID'][count_groups] = az_id
            count_groups += 1
    print("lookup table:")
    print(lookup_obs_groups_michi)
    outfile = 'lookup_obs_groups_michi.ecsv'
    print('Writing {}'.format(outfile))
    ascii.write(lookup_obs_groups_michi, outfile, format='ecsv', fast_writer=False)
Convert observation groups binning definition "michi" to "default".
625941aa6aa9bd52df036a48
def _file_wr(self, filename, data):
    with open(filename, 'w') as f:
        json.dump([data], f)
Helper function to write data to a file as JSON.
625941ab6aa9bd52df036a4c
def monsterate(match):
    if not match:
        return ''
    bits = match.groupdict('')
    body = match.group(0)
    if bits['count'] == '1':
        bits['count'] = ''
    if bits['name'] in names:
        sys.stderr.write(f'found {bits["name"]}\n')
        return body
    fail = list()
    for name in translate(bits):
        if name in MOBS:
            body = ('MV%(mv)s, %(hd)sHD, %(atk)s%(special)s; '
                    '%(ac)sAC, %(sv)s+, ML%(ml)s, %(cl)s/%(xp)s' % MOBS[name])
            break
        else:
            fail.append(name)
    else:
        foo = '\n '.join(fail)
        sys.stderr.write(f'missing {bits["name"]}\n{foo}')
    return '*%s*: %s' % (('%(count)s %(name)s' % bits).strip(), body)
Replace a matched monster reference with its stat block.
625941ab6aa9bd52df036a50
@api.route('/logout', methods=['GET'])
def logout():
    response = make_response(
        jsonify({'success': 'The user has been logged out.'}), 200
    )
    response.delete_cookie('public_id')
    unset_jwt_cookies(response)
    return response
Logs out the user and sends a response that clears the access and refresh
tokens and the public_id cookie, and returns a success message.

Returns {Object<json>} 200
    success: {string}
625941ac6aa9bd52df036a6e
def dir_is_empty(path):
    return isdir(path) and len(os.listdir(path)) == 0
Check if a directory is empty
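A usage sketch; the function body relies on os and on isdir from os.path being imported:

    import os
    import tempfile
    from os.path import isdir

    d = tempfile.mkdtemp()
    print(dir_is_empty(d))  # True
    open(os.path.join(d, 'f.txt'), 'w').close()
    print(dir_is_empty(d))  # False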
625941ac6aa9bd52df036a74
def play_episode(act, embedding, features):
    env = gym_environment.WSNEnvironment(
        problem_generator=lambda: (embedding, None),
        features=features,
        early_exit_factor=np.infty,
        additional_timeslot_reward=-1,
        restart_reward=0,
        success_reward=0,
        seedgen=None,
    )
    return _play_episode_in_env(act, env)
Play an entire episode and report the reward
625941ac6aa9bd52df036a7a
def resize(self, width=None, height=None, filter='triangle', blur=1):
    if width is None:
        width = self.width
    if height is None:
        height = self.height
    if not isinstance(width, numbers.Integral):
        raise TypeError('width must be a natural number, not ' + repr(width))
    elif not isinstance(height, numbers.Integral):
        raise TypeError('height must be a natural number, not ' + repr(height))
    elif width < 1:
        raise ValueError('width must be a natural number, not ' + repr(width))
    elif height < 1:
        raise ValueError('height must be a natural number, not ' + repr(height))
    elif not isinstance(blur, numbers.Real):
        raise TypeError('blur must be numbers.Real, not ' + repr(blur))
    elif not isinstance(filter, (basestring, numbers.Integral)):
        raise TypeError('filter must be one string defined in wand.image.'
                        'FILTER_TYPES or an integer, not ' + repr(filter))
    if isinstance(filter, basestring):
        try:
            filter = FILTER_TYPES.index(filter)
        except ValueError:  # list.index raises ValueError, not IndexError
            raise ValueError(repr(filter) + ' is an invalid filter type; '
                             'choose one in ' + repr(FILTER_TYPES))
    elif (isinstance(filter, numbers.Integral)
            and not (0 <= filter < len(FILTER_TYPES))):
        raise ValueError(repr(filter) + ' is an invalid filter type')
    blur = ctypes.c_double(float(blur))
    library.MagickResizeImage(self.wand, width, height, filter, blur)
Resizes the image.

:param width: the width in the scaled image. default is the original width
:type width: :class:`numbers.Integral`
:param height: the height in the scaled image. default is the original height
:type height: :class:`numbers.Integral`
:param filter: a filter type to use for resizing. choose one in
    :const:`FILTER_TYPES`. default is ``'triangle'``
:type filter: :class:`basestring`, :class:`numbers.Integral`
:param blur: the blur factor where > 1 is blurry, < 1 is sharp. default is 1
:type blur: :class:`numbers.Real`
625941ac6aa9bd52df036a88
def unload(self, key):
    assert isinstance(key, PGPKey)
    pkid = id(key)
    if pkid in self._keys:
        [kd.remove(pkid) for kd in [self._pubkeys, self._privkeys] if pkid in kd]
        self._keys.pop(pkid)
        for m, a in [(m, a) for m in self._aliases for a, p in m.items() if p == pkid]:
            m.pop(a)
            if a in self:
                self._sort_alias(a)
        if key.is_primary:
            [self.unload(sk) for sk in key.subkeys.values()]
Unload a loaded key and its subkeys.

The easiest way to do this is to select a key using
:py:meth:`PGPKeyring.key` first::

    with keyring.key("DSA von TestKey") as key:
        keyring.unload(key)

:param key: The key to unload.
:type key: :py:obj:`PGPKey`
625941ad6aa9bd52df036a9b
def _check_project_dir(project_dir, force=False):
    if os.path.exists(project_dir) and not os.path.isdir(project_dir):
        raise CommandError(f'The path {project_dir} already exists and is a file.')
    if not os.path.isdir(project_dir):
        return
    if force:
        logger.info('\'%s\' already exists - removing', os.path.basename(project_dir))
        shutil.rmtree(project_dir)
    else:
        raise CommandError(f'\'{os.path.basename(project_dir)}\' already exists. Either remove this '
                           'project or use the force option')
Check if a project directory with the given name already exists. If such a project exists, only continue if the ``force`` flag has been specified.
625941ad6aa9bd52df036a9f
def get_field_alias_grid_column_names_by_order(self):
    self.column_name_list = self.get_grid_column_names_by_order(self.field_alias_grid_div_id)
    return self.column_name_list
Get the field alias grid column names in display order.

:return: column_name_list
625941add7e4931a7ee9dc18
def StockTable(inputfilename, taxbracketfile):
    input = open(inputfilename)
    UniqTickers = UT.UniqueTickers(inputfilename)
    data_string = json.load(input)
    emailReportMsg = ""
    htmlOutput = " "
    jsonOutput = "{ \"date\":\"" + DTH.GetDate() + "\",\n \"portfolio\":["
    csvOutput = ""
    for portfolio in data_string["portfolio"]:
        if portfolio["display"] == "yes":
            jsonOutput += "{\"portfolioname\":\"" + portfolio["portfolioName"] + "\", \"portfolioStocks\":["
            print('==================----------', portfolio["portfolioName"], '----------==================')
            TCC.DefaultColorCoding()
            H.PrintHeader2()
            cumulative = Accumulator.Accumulator()
            emailReportMsg += portfolio["portfolioName"]
            for data in portfolio["portfolioStocks"]:
                data["taxBracketFile"] = taxbracketfile
                stock = S.Stock(data, UniqTickers.tickerDict[data["ticker"]])
                cumulative.Add(stock.totalpurchaseprice, stock.commission_to_buy,
                               stock.dollarGain, stock.dailyChange_func(), stock.currentWorth_func())
                stock.PrintColorized3()
                message = stock.PrintForTxtMessage()
                emailReportMsg += stock.JSON()
                jsonOutput += stock.JSON() + ","
                csvOutput += stock.CSV() + "\n"
            jsonOutput = jsonOutput.rstrip(',')
            jsonOutput += "],"
            jsonOutput += "\n"
            jsonOutput += "\"cumulative Result\":" + cumulative.JSONify() + "},"
            emailReportMsg += cumulative.JSONify() + ",\n"
            cumulative.Print()
            TCC.DefaultColorCoding()
    jsonOutput = jsonOutput.rstrip(',')
    jsonOutput += "] }"
    input.close()
    FileOutputHelper.WriteToDisk(FileOutputHelper.CreateOutputFilename(inputfilename, ".out.json"), jsonOutput, 'w')
    FileOutputHelper.WriteToDisk(FileOutputHelper.CreateOutputFilename(inputfilename, "csv"), csvOutput, 'a')
    return stock.getDictionary()
This method traverses a portfolio and outputs the performance of each stock in the portfolio as well as overall performance.
625941add7e4931a7ee9dc1c
def save(self):
    self.config.set_space_placement(self.choice.GetStringSelection())
Write all parameters to the config.
625941ad6aa9bd52df036aa6
def debug(self):
    print(str(self))
    print('<internal> %s: %s' % ('src', getattr(self, 'src', None)))
    print('<internal> %s: %s' % ('meta', getattr(self, 'meta', None)))
    for k, v in self._attrs.items():
        if k:
            print('%s: %s' % (k, v))
Print all attributes
625941ae6aa9bd52df036aaa
def __init__(self, name, title, description, datatype, gateway=None, device=None):
    if name in parameters and isinstance(parameters[name], Parameter):
        raise Exception('parameter with same name already exists')
    self.name = name
    self.title = title
    self.description = description
    self.datatype = datatype
    self.gateway = gateway
    self.device = device
    self.connection = None
    if name in parameters:
        self._referenced = parameters[name]
    parameters[name] = self
Init the parameter object.

:param gateway: optional, in case of a device or asset relative to a gateway
:param device: optional, in case of an asset relative to a device
:param name: name of the parameter. Should be unique within the
    application (checked); used to identify the value.
:param title: a human readable title
:param description: a user readable description
:param datatype: the datatype of the variable. Currently supported values:
    'asset', 'sensor', 'actuator', 'device', 'gateway'
625941ae6aa9bd52df036ab0
def get_recommendations(srs):
    assert len(srs) == 1 and srs[0].__class__ == Subreddit
    sr = srs[0]
    recs = _get_recommendations(sr._id36)
    if not recs:
        return []
    srs = Subreddit._byID36(recs, return_dict=True, data=True)
    return srs
Return the subreddits recommended if you like the given subreddit
625941ae6aa9bd52df036ab8
def MakePrediction(self, X, parameters, hidden_func, output_func):
    X = X.T
    AL, _ = self.ForwardProp(X, parameters, hidden_func, output_func)
    if output_func == 'softmax':
        y_pred = np.argmax(AL, axis=0)
    if output_func == 'sigmoid':
        y_pred = ((AL > 0.5) * 1).reshape(-1)
    return y_pred
Make a prediction for the data X.

Input
- X : Input data (m, n_x)
- parameters : parameters W, b of each layer of the NN model

Output
- y_pred : Predicted labels for X (m,)
625941ae293b9510aa2c2fb3
def on_disconnect(self):
    logger.error('Disconnected from server!')
Handler for disconnection events
625941ae6aa9bd52df036abc
def op_2(program, pc, opcode):
    multiplicand1, multiplicand2 = get_parameters(program, pc, opcode, 2)
    output_register = program[pc + 3]
    program[output_register] = multiplicand1 * multiplicand2
    return 4
Multiplication Operation
625941ae293b9510aa2c2fb5
def draw_random_bst(n):
    from random import randint
    nums = set()
    max_num = 10 * n
    if 0 < n < MAX_HEIGHT:
        while len(nums) != n:
            nums.add(randint(1, max_num))
    draw_bst(list(nums))
Draw random binary search tree of n nodes
625941ae3317a56b86939981
def reset(self):
    self._timestep = np.array([0])
See base class (Policy) for description. Subclasses are expected to feed self._timestep to their q_func graphs and increment self._timestep in `sample_action`.
625941aed7e4931a7ee9dc38
def test_cut(self):
    url = self.tpl + '/cut'
    res = requests.post(url, data=self.data)
    self.assertEqual(res.status_code, 200)
    result = res.json()
    self.assertEqual(result['text'], self.text)
    self.assertEqual(''.join(result['words']), self.text)
Test the default word segmentation mode.
625941ae6aa9bd52df036ac1
def execute(self, env, out):
    raise NotImplementedError('abstract')
Appends output to out, or returns the result if an expression.
625941ae3346ee7daa2b2a87
def Realize(self):
    status = True
    self._collapse_stack = []
    for child in self.GetChildren():
        if not isinstance(child, RibbonControl):
            continue
        if not child.Realize():
            status = False
        child.SetSize(wx.Size(*child.GetMinSize()))
    x, y = self.GetSize()
    if x > 0 and y > 0:
        status = self.Layout() and status
    return status
Perform a full re-layout of all panels on the page.

Should be called after panels are added to the page, or the sizing
behaviour of a panel on the page changes (i.e. due to children being added
to it). Usually called automatically when
:meth:`RibbonBar.Realize() <lib.agw.ribbon.bar.RibbonBar.Realize>` is called.
Will invoke :meth:`RibbonPanel.Realize() <lib.agw.ribbon.panel.RibbonPanel.Realize>`
for all child panels.

:note: Reimplemented from :class:`~wx.lib.agw.ribbon.control.RibbonControl`.
625941aed7e4931a7ee9dc3e
def __init__(self, data):
    question_url, question_title, question_desc, question_stats, answers, comments, dup_url = data
    self.dup_url = dup_url
    self.question_title = question_title
    self.question_desc = question_desc
    self.question_stats = question_stats
    self.url = question_url
    self.answer_text = AnswerText(answers, comments)
    answer_frame = self.make_frame()
    urwid.WidgetWrap.__init__(self, answer_frame)
Construct the Question Page.

:param data: tuple of (question_url, question_title, question_desc,
    question_stats, answers, comments, dup_url)
625941ae3317a56b86939989
def draw_hull(self):
    plt.plot(self._points[:, 0], self._points[:, 1], 'o')
    for simplex in self.hull.simplices:
        plt.plot(self._points[simplex, 0], self._points[simplex, 1], 'k-')
Draws, but does not display, all points along with an outlined convex hull. A call to plt.show() must be made to show this shape.
625941ae3346ee7daa2b2a8d
def resolve(self, max_rounds: int = 10) -> Set[InstallRequirement]:
    if self.clear_caches:
        self.dependency_cache.clear()
        self.repository.clear_caches()
    with update_env_context_manager(PIP_EXISTS_ACTION="i"):
        for current_round in count(start=1):
            if current_round > max_rounds:
                raise RuntimeError(
                    "No stable configuration of concrete packages "
                    "could be found for the given constraints after "
                    "{max_rounds} rounds of resolving.\n"
                    "This is likely a bug.".format(max_rounds=max_rounds)
                )
            log.debug("")
            log.debug(magenta(f"{f'ROUND {current_round}':^60}"))
            has_changed, best_matches = self._resolve_one_round()
            log.debug("-" * 60)
            log.debug(
                "Result of round {}: {}".format(
                    current_round,
                    "not stable" if has_changed else "stable, done",
                )
            )
            if not has_changed:
                break
    results = {req for req in best_matches if not req.constraint}
    self.unsafe_constraints = set()
    if not self.allow_unsafe:
        reverse_dependencies = self.reverse_dependencies(results)
        for req in results.copy():
            required_by = reverse_dependencies.get(req.name.lower(), set())
            if req.name in UNSAFE_PACKAGES or (
                required_by and all(name in UNSAFE_PACKAGES for name in required_by)
            ):
                self.unsafe_constraints.add(req)
                results.remove(req)
    return results
Finds concrete package versions for all the given InstallRequirements and
their recursive dependencies. The end result is a flat list of (name,
version) tuples. (Or an editable package.)

Resolves constraints one round at a time, until they don't change anymore.
Protects against infinite loops by breaking out after a max number of rounds.
625941af6aa9bd52df036acb
def clear_journal_entry_terms(self):
    raise errors.Unimplemented()
Clears all journal entry terms.

*compliance: mandatory -- This method must be implemented.*
625941af3317a56b8693998d
def stack_to_scaled(self, stack_coords, tgt_zoom, src_zoom=0):
    return {
        dim: self.stack_to_scaled_coord(dim, proj_coord, tgt_zoom, src_zoom)
        for dim, proj_coord in stack_coords.items()
    }
Convert a point in stack space into a point in stack space at a different
zoom level. Whether z coordinates are scaled is controlled by the `scale_z`
constructor argument/instance variable.

Parameters
----------
stack_coords : dict
    x, y, and/or z coordinates in stack / voxel space
tgt_zoom : float
    Desired zoom level of the output coordinates
src_zoom : float
    Zoom level of the given coordinates (default 0)

Returns
-------
dict
    Rescaled coordinates
625941af5166f23b2e1a4e82
def get_endpoint_profiles(self, kaauser, group_id):
    url = 'http://{}:{}/kaaAdmin/rest/api/endpointProfileBodyByGroupId?endpointGroupId={}'.format(
        self.host, self.port, str(group_id))
    req = requests.get(url, auth=(kaauser.name, kaauser.password))
    if req.status_code != requests.codes.ok:
        raise KaaNodeError('Unable to get endpoint profiles. '
                           'Return code: {}'.format(req.status_code))
    return req.json()
Returns a list of endpoint profiles by the endpoint group ID.

:param kaauser: The Kaa user whose credentials authenticate the request.
:type kaauser: KaaUser.
:param group_id: A unique endpoint group identifier.
625941af6aa9bd52df036acd
def noun_stem(s):
    if s in identical_plurals:
        return s
    elif s[-3:] == "man":
        return s[:-2] + "en"
    else:
        return verb_stem(s)
Extracts the stem from a plural noun, or returns the empty string.
625941af3317a56b8693998f
def delete(self):
    selectedItems = self.piirtoalusta.scene.selectedItems()
    command = CommandDelete(self.piirtoalusta.scene, selectedItems)
    self.piirtoalusta.undoStack.push(command)
Deletes the selected items and/or groups.
625941af293b9510aa2c2fc7
def testdx(self):
    gr = GaussianRestraint(*self.all)
    for i in range(100):
        map(self.change_value, self.all)
        map(self.change_sign, self.locations)
        gr.evaluate(True)
        self.assertAlmostEqual(Nuisance(self.x).get_nuisance_derivative(),
                               self.deriv_x(*self.all))
Test GaussianRestraint(13) x derivative
625941af293b9510aa2c2fc9
def setCalibParsDefault(self):
    for type in self.list_of_clib_types:
        self.cpars[type] = self.getCalibParsDefault(type)
        self.cpars_status[type] = 'DEFAULT'
Loads default calibration parameters from singleton object.
625941afadb09d7d5db6c4c5
def value_interval_cmp(value, interval):
    if interval[0] <= value <= interval[1]:
        return 0
    elif value < interval[0]:
        return -1
    else:
        return 1
Comparator that indicates whether a value is within (0), lower than (-1), or higher than (1) an interval.
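Spot checks:

    print(value_interval_cmp(5, (1, 10)))   # 0, within
    print(value_interval_cmp(0, (1, 10)))   # -1, below
    print(value_interval_cmp(42, (1, 10)))  # 1, above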
625941afd7e4931a7ee9dc4a
def download_file(url: str, local_file: Path) -> Path:
    # local_file was annotated str in the original, but the body calls
    # local_file.open(), so it must be a pathlib.Path
    log('Downloading {} to {}'.format(url, local_file))
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        with local_file.open(mode='wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
    return local_file
Download a file from an arbitrary URL to a local file
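A usage sketch with a hypothetical URL; note that local_file must be a pathlib.Path, since the body calls its .open() method:

    from pathlib import Path

    # hypothetical URL and destination
    download_file('https://example.com/data.csv', Path('data.csv'))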
625941afadb09d7d5db6c4c7
def __repr__(self):
    return 'TippyGameState({}, {}, False)'.format(repr(self.next_player), repr(self.board))
(TippyGameState) -> str

Return a string representation of TippyGameState self that evaluates
to an equivalent TippyGameState

>>> board = matrix(3)
>>> t = TippyGameState('p1', board)
>>> t
TippyGameState('p1', [['_', '_', '_'], ['_', '_', '_'], ['_', '_', '_']], False)
625941af293b9510aa2c2fcd
def phase_by_window_spec(self, source, windowSpec, trial_type, precision=None):
    # precision was referenced in the body but missing from the original signature
    selection = []
    for specix, spec in enumerate(windowSpec):
        ix = self._fetch_windowspec_indices([spec], trial_type, precision=precision)
        if len(ix) > 0:
            selection.append((self.windowsByPart[trial_type][ix[0]][0],
                              self.timeSplitTrials[trial_type][ix[0]],
                              self.templateSplitTrials[trial_type][ix[0]]))
            try:
                if source == 'subject' or source == 's':
                    data = pickle.load(open('%s/subject_phase_%s_%d.p' % (self.dr, trial_type, ix[0]), 'rb'))
                    phases, vs = data['phases'], data['vs']
                elif source == 'template' or source == 't':
                    data = pickle.load(open('%s/template_phase_%s_%d.p' % (self.dr, trial_type, ix[0]), 'rb'))
                    phases, vs = data['phases'], data['vs']
                phases = [np.vstack(p) for p in phases]
                selection.append((self.windowsByPart[trial_type][ix[0]][0], phases))
            except IOError:
                print("Trial %d in trial type %s not found." % (ix[0], trial_type))
        if trial_type.isalpha():
            selection += self.phase_by_window_spec(source,
                                                   [windowSpec[specix]],
                                                   trial_type + '0',
                                                   precision=precision)
    return selection
Parameters
----------
source : str
windowSpec : list
trial_type : str
625941af925a0f43d2549ba5
def get_tdar_items_by_site_keyword_objs(self, keyword_objs):
    output = False
    keyword_uris = []
    if isinstance(keyword_objs, list):
        for keyword_obj in keyword_objs:
            if isinstance(keyword_obj, dict):
                if 'id' in keyword_obj:
                    keyword_uris.append(keyword_obj['id'])
    if len(keyword_uris) > 0:
        output = self.search_by_site_keyword_uris(keyword_uris, True)
    return output
gets site information by tdar keyword objects
625941af3317a56b86939997
def display_start_menu(update, context):
    update.message.reply_text(
        lernen_menu_message(),
        reply_markup=lernen_menu(),
        parse_mode="HTML",
    )
Show learning journey articles
625941afadb09d7d5db6c4c9
def decideTheBestTree(test_set, prun_tree_list):
    correct_rate_max = predictCorrectRate(test_set, prun_tree_list[0])
    best_tree = prun_tree_list[0]
    for i in range(len(prun_tree_list)):
        correct_rate = predictCorrectRate(test_set, prun_tree_list[i])
        if correct_rate > correct_rate_max:
            correct_rate_max = correct_rate
            best_tree = prun_tree_list[i]
    print('The best pruned tree for this test set is:')
    drawTree(best_tree)
    return best_tree
Find the best decision tree.

Input: test_set: the test set; prun_tree_list: list of pruned trees
Output: the decision tree that performs best on this test set
625941af925a0f43d2549ba7
def test_empty_list(self):
    response = self.client.get(self.test_link)
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, "empty-message")
The view has no showstoppers when returning an empty list.
625941af6aa9bd52df036ad7
def testHistogram(self):
    self.testslicer.plotHistogram(self.metricdata, title='Mean of random test data',
                                  xlabel=None, ylabel='Area (1000s of square degrees)',
                                  fignum=None, legendLabel=None, addLegend=False,
                                  legendloc='upper left', bins=100, cumulative=False,
                                  histRange=None, logScale=False, flipXaxis=False,
                                  scale=None)
    plt.figure()
    plt.hist(self.metricdata.compressed(), bins=100)
    plt.title('Histogram straight from metric data')
    self.testslicer.plotHistogram(self.metricdata2, title='Random test data',
                                  xlabel=None, ylabel='Area (1000s of square degrees)',
                                  fignum=None, legendLabel=None, addLegend=False,
                                  legendloc='upper left', bins=100, cumulative=False,
                                  histRange=None, logScale=False, flipXaxis=False,
                                  scale=None)
Test plotting the histogram (mean of random data).
625941af3317a56b8693999a
def _get_query_string(self, params):
    pairs = [(self._unicode_safe(k), self._unicode_safe(v))
             for k, v in params.iteritems()]
    pairs.sort()
    query_string = '&'.join(['%s=%s' % pair for pair in pairs])
    return query_string
URL-encode parameters as a sorted query string.
625941af596a8972360897f8
def __init__(self, user, skip=None, *args, **kwargs):
    if skip is None:
        skip = []
    super(AgentFilterForm, self).__init__(*args, **kwargs)
    self.fields['related_user'].required = True
    self.fields['related_user'].label = _("Related user")
Sets allowed values
625941af046cf37aa974ca80
def test_ticket47553_moddn_staging_prod_1(topology):
    topology.master1.log.info("\n\n######################### MOVE staging -> Prod (1) ######################\n")
    _bind_normal(topology)
    old_rdn = "cn=%s1" % NEW_ACCOUNT
    old_dn = "%s,%s" % (old_rdn, STAGING_DN)
    new_rdn = old_rdn
    new_superior = PRODUCTION_DN
    try:
        topology.master1.log.info("Try to MODDN %s -> %s,%s" % (old_dn, new_rdn, new_superior))
        topology.master1.rename_s(old_dn, new_rdn, newsuperior=new_superior)
        assert 0
    except AssertionError:
        topology.master1.log.info("Exception (not really expected exception but that is fine as it fails to rename)")
    except Exception as e:
        topology.master1.log.info("Exception (expected): %s" % type(e).__name__)
        assert isinstance(e, ldap.INSUFFICIENT_ACCESS)
    topology.master1.log.info("\n\n######################### MOVE to substring/ from equality filter ######################\n")
    _bind_manager(topology)
    _moddn_aci_staging_to_production(topology, mod_type=ldap.MOD_ADD,
                                     target_from=STAGING_DN, target_to=PRODUCTION_PATTERN)
    _bind_normal(topology)
    topology.master1.log.info("Try to MODDN %s -> %s,%s" % (old_dn, new_rdn, new_superior))
    topology.master1.rename_s(old_dn, new_rdn, newsuperior=new_superior)
    _bind_manager(topology)
    _moddn_aci_staging_to_production(topology, mod_type=ldap.MOD_DELETE,
                                     target_from=STAGING_DN, target_to=PRODUCTION_PATTERN)
    _bind_normal(topology)
This test case MOVEs entry NEW_ACCOUNT1 from staging to prod.

target_to/target_from: substring/equality filter
625941af6aa9bd52df036ad9
def download_artifacts(job_name, local_fname):
    sm_client = boto3.Session().client('sagemaker')
    response = sm_client.describe_trial_component(TrialComponentName=job_name)
    model_artifacts_full_path = response['OutputArtifacts']['SageMaker.ModelArtifact']['Value']
    p = re.compile('(?<=s3://).*?/')
    s = p.search(model_artifacts_full_path)
    object_name_start = s.span()[1]
    object_name = model_artifacts_full_path[object_name_start:]
    bucket_name = s.group()[:-1]
    s3 = boto3.client('s3')
    s3.download_file(bucket_name, object_name, local_fname)
Given a trial component name in a SageMaker Experiment, extract the model artifact path and download the file locally.
625941afd164cc6175782a87
def recalc_face_normals(bm, faces):
    pass
Computes an “outside” normal for the specified input faces.

:param bm: The bmesh to operate on.
:type bm: bmesh.types.BMesh
:param faces: Undocumented.
:type faces: list of (bmesh.types.BMFace)
625941af3346ee7daa2b2aa1
def test_w_refseqs_file(self):
    inseqs = parse_fasta(self.inseqs1)
    actual = qiime_blast_seqs(inseqs, refseqs=self.refseqs1)
    self.assertEqual(len(actual), 5)
    self.assertEqual(actual['s2_like_seq'][0][0]['SUBJECT ID'], 's2')
    self.assertEqual(actual['s105'][0][2]['SUBJECT ID'], 's1')
qiime_blast_seqs functions with a refseqs file
625941af293b9510aa2c2fd3
def __stop(self): <NEW_LINE> <INDENT> self._running = False
Stop console loop
625941af3317a56b869399a0
def __fetch_and_update_regions(self): <NEW_LINE> <INDENT> regions = list( set([row['location'] for row in self.__rows])) <NEW_LINE> for region in regions: <NEW_LINE> <INDENT> get_or_create(db.session, Region, name=region)
Fetches and updates regions table
625941af6aa9bd52df036ae1
def dice_coeff(Q, D): <NEW_LINE> <INDENT> Q, D = set(Q), set(D) <NEW_LINE> try: <NEW_LINE> <INDENT> return 2.0 * len(Q & D) / (len(Q) + len(D)) <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return 0.0
Dice similarity coefficient of two sequences treated as sets: 2|Q & D| / (|Q| + |D|). Returns 0.0 when both inputs are empty.
625941af3317a56b869399a4
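A worked example of dice_coeff, restated without the token markers and with the narrowed exception handling; the token lists are illustrative.

def dice_coeff(Q, D):
    Q, D = set(Q), set(D)
    try:
        return 2.0 * len(Q & D) / (len(Q) + len(D))
    except ZeroDivisionError:
        return 0.0

# Overlap {"the", "cat"} gives 2 * 2 / (3 + 3) = 0.666...
print(dice_coeff(["the", "cat", "sat"], ["the", "cat", "ran"]))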
def parse_module(self, program_text: str, incremental_step: int = 0) -> List[Tuple[str, str, str]]: <NEW_LINE> <INDENT> m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE) <NEW_LINE> if incremental_step > 1: <NEW_LINE> <INDENT> alt_regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step) <NEW_LINE> alt_m = re.search(alt_regex, program_text, flags=re.MULTILINE) <NEW_LINE> if alt_m is not None: <NEW_LINE> <INDENT> m = alt_m <NEW_LINE> <DEDENT> <DEDENT> if m: <NEW_LINE> <INDENT> module_names = m.group(1) <NEW_LINE> out = [] <NEW_LINE> search_paths = SearchPaths((test_temp_dir,), (), (), ()) <NEW_LINE> cache = FindModuleCache(search_paths) <NEW_LINE> for module_name in module_names.split(' '): <NEW_LINE> <INDENT> path = cache.find_module(module_name) <NEW_LINE> assert isinstance(path, str), "Can't find ad hoc case file: %s" % module_name <NEW_LINE> with open(path, encoding='utf8') as f: <NEW_LINE> <INDENT> program_text = f.read() <NEW_LINE> <DEDENT> out.append((module_name, path, program_text)) <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [('__main__', 'main', program_text)]
Return the module and program names for a test case. Normally, the unit tests will parse the default ('__main__') module and follow all the imports listed there. You can override this behavior and instruct the tests to check multiple modules by using a comment like this in the test case input: # cmd: mypy -m foo.bar foo.baz You can also use `# cmdN:` to have a different cmd for incremental step N (2, 3, ...). Return a list of tuples (module name, file name, program text).
625941afd164cc6175782a8c
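A minimal sketch of the `# cmd:` extraction step in parse_module above; the test-case body is illustrative and only the regex part is reproduced.

import re

program_text = "x = 1\n# cmd: mypy -m foo.bar foo.baz"
m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
if m:
    # Prints ['foo.bar', 'foo.baz']
    print(m.group(1).split(' '))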
def thanks(year=None): <NEW_LINE> <INDENT> nov_first = datetime.date(_year, 11, 1) if not year else datetime.date(int(year), 11, 1) <NEW_LINE> weekday_seq = nov_first.weekday() <NEW_LINE> if weekday_seq > 3: <NEW_LINE> <INDENT> current_day = 32 - weekday_seq <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_day = 25 - weekday_seq <NEW_LINE> <DEDENT> return datetime.date(nov_first.year, 11, current_day)
4th Thursday in November :param year: int :return: Thanksgiving Day
625941af596a897236089804
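A sanity check of the date arithmetic in thanks above, restated without the token markers; 2023 is used because its Thanksgiving date (Nov 23) is easy to verify.

import datetime

def thanks(year):
    nov_first = datetime.date(year, 11, 1)
    weekday_seq = nov_first.weekday()  # Mon=0 ... Sun=6
    # If Nov 1 falls on Fri/Sat/Sun, the first Thursday slips to the next week.
    day = 32 - weekday_seq if weekday_seq > 3 else 25 - weekday_seq
    return datetime.date(year, 11, day)

print(thanks(2023))  # 2023-11-23, the 4th Thursday of November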
def test_restart_cf4(run_in_tmpdir): <NEW_LINE> <INDENT> op = dummy_operator.DummyOperator() <NEW_LINE> output_dir = "test_restart_cf4" <NEW_LINE> op.output_dir = output_dir <NEW_LINE> dt = [0.75] <NEW_LINE> power = 1.0 <NEW_LINE> openmc.deplete.cf4(op, dt, power, print_out=False) <NEW_LINE> prev_res = openmc.deplete.ResultsList(op.output_dir / "depletion_results.h5") <NEW_LINE> op = dummy_operator.DummyOperator(prev_res) <NEW_LINE> op.output_dir = output_dir <NEW_LINE> openmc.deplete.cf4(op, dt, power, print_out=False) <NEW_LINE> res = openmc.deplete.ResultsList(op.output_dir / "depletion_results.h5") <NEW_LINE> _, y1 = res.get_atoms("1", "1") <NEW_LINE> _, y2 = res.get_atoms("1", "2") <NEW_LINE> s1 = [2.06101629, 1.37783588] <NEW_LINE> s2 = [2.57241318, 2.63731630] <NEW_LINE> assert y1[1] == approx(s1[0]) <NEW_LINE> assert y2[1] == approx(s1[1]) <NEW_LINE> assert y1[3] == approx(s2[0]) <NEW_LINE> assert y2[3] == approx(s2[1])
Integral regression test of integrator algorithm using CF4.
625941af046cf37aa974ca8c
def pipeline(self,processing_col): <NEW_LINE> <INDENT> self.lowercase(processing_col,'lowercase') <NEW_LINE> self.url_removal('lowercase','urlremoval') <NEW_LINE> self.html_tag('urlremoval','html_tag') <NEW_LINE> self.rm_punc('html_tag','rmpunc') <NEW_LINE> self.remove_emoji('rmpunc','rmemoji') <NEW_LINE> self.word_token('rmemoji','tokens') <NEW_LINE> self.lemmatiz('tokens','lemma') <NEW_LINE> self.rm_stopwords('lemma','rmstopwords') <NEW_LINE> self.freq_words('rmstopwords') <NEW_LINE> self.rm_freq_rare_words_('rmstopwords')
This is the default pipeline, usable on most text data. It first lowercases the text, then removes URLs, HTML tags, and punctuation. Once punctuation is removed, emojis are stripped. The text is then tokenized into word tokens and lemmatized. From the lemmas, stop words are removed and a frequency distribution is computed. Based on that distribution, two columns are produced, rm_freq_word and rm_rare_word, with frequent and rare words removed respectively. Contraction expansion and spell checking can be added depending on the project.
625941af2c8b7c6e89b35506
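A minimal sketch of the first few pipeline stages described above (lowercasing, URL removal, punctuation removal); the column names follow the pipeline, but the regex and helper logic are assumptions, not the class's actual methods.

import re
import string

import pandas as pd

df = pd.DataFrame({"text": ["Check THIS out: https://example.com!!"]})
df["lowercase"] = df["text"].str.lower()
df["urlremoval"] = df["lowercase"].str.replace(r"https?://\S+", "", regex=True)
# Drop every punctuation character in one pass.
df["rmpunc"] = df["urlremoval"].apply(
    lambda s: s.translate(str.maketrans("", "", string.punctuation)))
print(df["rmpunc"].iloc[0])  # "check this out "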
def display(self): <NEW_LINE> <INDENT> if self.dim == 2: <NEW_LINE> <INDENT> self.image = plt.imshow(self.config) <NEW_LINE> plt.show()
Plot the current spin configuration, if `self.dim == 2`.
625941af4a966d76dd550d4c
def get_function_or_wrapper(self, func, before_change_func, wrapper, is_method): <NEW_LINE> <INDENT> if self.is_forbidden(func): <NEW_LINE> <INDENT> return func <NEW_LINE> <DEDENT> if self.is_decorator(before_change_func): <NEW_LINE> <INDENT> if self.is_method(before_change_func): <NEW_LINE> <INDENT> self.remove(before_change_func) <NEW_LINE> self.add(wrapper, func, is_method=is_method) <NEW_LINE> return wrapper <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not is_method: <NEW_LINE> <INDENT> self.remove(before_change_func) <NEW_LINE> self.add(wrapper, func, is_method=is_method) <NEW_LINE> return wrapper <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return before_change_func <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.remove(func) <NEW_LINE> self.add(wrapper, func, is_method=is_method) <NEW_LINE> return wrapper
Decides whether to decorate the function or keep the original. If decorating this function was previously forbidden, the original is returned. If the function was already decorated, priorities apply: a class decorator has lower priority than a function decorator. If the function was previously decorated via the class decorator, it is re-decorated in any case; if via the function decorator, it is re-decorated only when the current decoration does not come from the class decorator.
625941afd7e4931a7ee9dc5d
def get_nom(self): <NEW_LINE> <INDENT> return self.nom
:return: Returns the player's name
625941af5166f23b2e1a4e9b
def reset_energie(self): <NEW_LINE> <INDENT> self.energie_courante = self.energie_depart
Resets the character's current energy to its starting value.
625941af097d151d1a222b9f
def enable_voice(self): <NEW_LINE> <INDENT> self._jarvis.enable_voice = True
Use text-to-speech for every text passed to jarvis.say()
625941af7b180e01f3dc454c
def calcTempInEV(self, expFitCoeffWithVoltUnits): <NEW_LINE> <INDENT> eV=1.60218e-19; <NEW_LINE> q=1.6e-19 <NEW_LINE> return q*expFitCoeffWithVoltUnits/eV
Calculates temperature in eV from a Langmuir exponential fit. Parameters ---------- expFitCoeffWithVoltUnits : float Exponential fit coefficient, in volts. Returns ------- float Temperature in eV.
625941afd164cc6175782a8f
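A worked instance of the conversion in calcTempInEV above: because q (1.6e-19 C) and one eV (1.60218e-19 J) are numerically close, the temperature in eV is almost the fit coefficient in volts; the coefficient value is illustrative.

q = 1.6e-19        # elementary charge as rounded in the method, in C
eV = 1.60218e-19   # joules per electron-volt

fit_coeff_volts = 2.5               # illustrative Langmuir exp-fit coefficient
temp = q * fit_coeff_volts / eV     # about 2.4966 eV, since q / eV is 0.99864
print(temp)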
def __init__(self, data, input_dims=None, output_dims=None): <NEW_LINE> <INDENT> if isinstance(data, (list, np.ndarray)): <NEW_LINE> <INDENT> ptm = np.asarray(data, dtype=complex) <NEW_LINE> dout, din = ptm.shape <NEW_LINE> if input_dims: <NEW_LINE> <INDENT> input_dim = np.product(input_dims) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> input_dim = int(np.sqrt(din)) <NEW_LINE> <DEDENT> if output_dims: <NEW_LINE> <INDENT> output_dim = np.product(output_dims) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output_dim = int(np.sqrt(dout)) <NEW_LINE> <DEDENT> if output_dim**2 != dout or input_dim**2 != din or input_dim != output_dim: <NEW_LINE> <INDENT> raise QiskitError("Invalid shape for PTM matrix.") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(data, (QuantumCircuit, Instruction)): <NEW_LINE> <INDENT> data = SuperOp._init_instruction(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = self._init_transformer(data) <NEW_LINE> <DEDENT> input_dim, output_dim = data.dim <NEW_LINE> rep = getattr(data, '_channel_rep', 'Operator') <NEW_LINE> ptm = _to_ptm(rep, data._data, input_dim, output_dim) <NEW_LINE> if input_dims is None: <NEW_LINE> <INDENT> input_dims = data.input_dims() <NEW_LINE> <DEDENT> if output_dims is None: <NEW_LINE> <INDENT> output_dims = data.output_dims() <NEW_LINE> <DEDENT> <DEDENT> num_qubits = int(np.log2(input_dim)) <NEW_LINE> if 2**num_qubits != input_dim: <NEW_LINE> <INDENT> raise QiskitError("Input is not an n-qubit Pauli transfer matrix.") <NEW_LINE> <DEDENT> input_dims = self._automatic_dims(input_dims, input_dim) <NEW_LINE> output_dims = self._automatic_dims(output_dims, output_dim) <NEW_LINE> super().__init__(ptm, input_dims, output_dims, 'PTM')
Initialize a PTM quantum channel operator. Args: data (QuantumCircuit or Instruction or BaseOperator or matrix): data to initialize superoperator. input_dims (tuple): the input subsystem dimensions. [Default: None] output_dims (tuple): the output subsystem dimensions. [Default: None] Raises: QiskitError: if input data is not an N-qubit channel or cannot be initialized as a PTM. Additional Information: If the input or output dimensions are None, they will be automatically determined from the input data. The PTM representation is only valid for N-qubit channels.
625941af8a43f66fc4b53daf
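A small usage sketch for the PTM constructor above, assuming the class is exposed as qiskit.quantum_info.PTM; in the Pauli basis, the one-qubit identity channel is the 4x4 identity matrix.

import numpy as np
from qiskit.quantum_info import PTM

# Identity channel on one qubit: 4x4 identity in the Pauli transfer basis.
ident = PTM(np.eye(4))
print(ident.dim)  # (2, 2): input and output dimensions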
def set_app_lifetime(self, env, app_lifetime): <NEW_LINE> <INDENT> env.app_lifetime = app_lifetime
Sets the lifetime of the application. Input: env: AutomotiveEnvironmentSpec: specification of the Environment Output: -
625941af596a897236089808
def test_get_trajectory_lengths(self): <NEW_LINE> <INDENT> TF_TEST_MASK = tf.constant(NP_TEST_MASK) <NEW_LINE> TF_TEST_MASK_TF_F64 = tf.cast(TF_TEST_MASK, tf.float64) <NEW_LINE> NP_TEST_MASK_NP_F64 = NP_TEST_MASK.astype(np.float64) <NEW_LINE> ALL_MASKS = [ TF_TEST_MASK, NP_TEST_MASK, TF_TEST_MASK_TF_F64, NP_TEST_MASK_NP_F64 ] <NEW_LINE> for mask in ALL_MASKS: <NEW_LINE> <INDENT> computed_lengths = tbs.get_trajectory_lengths(mask) <NEW_LINE> self.assertTrue(np.allclose(computed_lengths, self.expected_lengths))
Checks if the length of each trajectory in the batch is correct.
625941afd7e4931a7ee9dc61
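One plausible reading of get_trajectory_lengths, assuming the mask holds 1 for valid timesteps and 0 for padding; this is a sketch, not tbs's actual implementation.

import numpy as np

def get_trajectory_lengths(mask):
    # Each trajectory's length is the number of valid (nonzero) timesteps.
    return np.asarray(mask).sum(axis=-1)

mask = np.array([[1, 1, 1, 0], [1, 1, 0, 0]])
print(get_trajectory_lengths(mask))  # [3 2]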
def set_undo_callback(self, callback): <NEW_LINE> <INDENT> raise NotImplementedError
Define the callback function that is called whenever an undo operation is executed. The callback function receives a single argument that is a text string that defines the operation.
625941af925a0f43d2549bb9
def get_ip_lb_services(ovh_client): <NEW_LINE> <INDENT> return ovh_client.get('/ip/loadBalancing')
Get all legacy load balancing services GET /ip/loadBalancing
625941af6aa9bd52df036ae9
@api.route('/comments/<int:id>/like/', methods=["GET", "POST", "DELETE"]) <NEW_LINE> @auth.login_required <NEW_LINE> def new_comments_id_like(id): <NEW_LINE> <INDENT> comment = Comments.query.get_or_404(id) <NEW_LINE> if request.method == "POST": <NEW_LINE> <INDENT> if comment.liked: <NEW_LINE> <INDENT> return jsonify({ 'error': '你已经点赞过该评论' }) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> comment.user.append(g.current_user) <NEW_LINE> db.session.add(comment) <NEW_LINE> db.session.commit() <NEW_LINE> comment.likes = len(comment.user.all()) <NEW_LINE> db.session.add(comment) <NEW_LINE> db.session.commit() <NEW_LINE> comment = Comments.query.get_or_404(id) <NEW_LINE> return jsonify({ 'likes': comment.likes }), 201 <NEW_LINE> <DEDENT> <DEDENT> elif request.method == "DELETE": <NEW_LINE> <INDENT> if comment.liked: <NEW_LINE> <INDENT> comment.user.remove(g.current_user) <NEW_LINE> db.session.add(comment) <NEW_LINE> db.session.commit() <NEW_LINE> comment.likes = len(comment.user.all()) <NEW_LINE> db.session.add(comment) <NEW_LINE> db.session.commit() <NEW_LINE> comment = Comments.query.get_or_404(id) <NEW_LINE> return jsonify( comment.to_json() ), 200 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return jsonify({ "error": "你还没有点赞这个评论哦!" }), 403
Like or unlike the comment with the given id. :param id: :return:
625941af3346ee7daa2b2aad
def timestamp_2_readable(time_stamp): <NEW_LINE> <INDENT> return datetime.fromtimestamp(time_stamp).strftime('%Y-%m-%d %H:%M:%S')
Convert a timestamp to a readable time string. :param time_stamp: timestamp, e.g. the current time time.time() :return: readable time string
625941b073bcbd0ca4b2bdbd
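A usage sketch for timestamp_2_readable above, restated without the token markers; the output shown is illustrative and depends on the local timezone.

import time
from datetime import datetime

def timestamp_2_readable(time_stamp):
    return datetime.fromtimestamp(time_stamp).strftime('%Y-%m-%d %H:%M:%S')

print(timestamp_2_readable(time.time()))  # e.g. "2021-06-01 12:34:56"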