code (string, lengths 4–4.48k) | docstring (string, lengths 1–6.45k) | _id (string, length 24)
---|---|---|
def set_boundaries(self, x, y, width, height): <NEW_LINE> <INDENT> _lib.caca_set_canvas_boundaries.argtypes = [ _Canvas, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int ] <NEW_LINE> _lib.caca_set_canvas_boundaries.restype = ctypes.c_int <NEW_LINE> try: <NEW_LINE> <INDENT> ret = _lib.caca_set_canvas_boundaries(self, x, y, width, height) <NEW_LINE> <DEDENT> except ctypes.ArgumentError: <NEW_LINE> <INDENT> raise CanvasError("Specified coordinate or size is invalid") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if ret == -1: <NEW_LINE> <INDENT> err = ctypes.c_int.in_dll(_lib, "errno") <NEW_LINE> if err.value == errno.EINVAL: <NEW_LINE> <INDENT> raise CanvasError("Specified width or height is invalid") <NEW_LINE> <DEDENT> elif err.value == errno.EBUSY: <NEW_LINE> <INDENT> raise CanvasError("The canvas is in use by a display driver" " and cannot be resized") <NEW_LINE> <DEDENT> elif err.value == errno.ENOMEM: <NEW_LINE> <INDENT> raise CanvasError("Not enough memory for the requested" " canvas size") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return ret | Set a canvas' new boundaries.
x -- X coordinate of the top-left corner
y -- Y coordinate of the top-left corner
width -- width of the box
height -- height of the box | 625941b0b5575c28eb68dd62 |
def detect_format(output_format, data): <NEW_LINE> <INDENT> from pycrossva.transform import SUPPORTED_INPUTS <NEW_LINE> from pycrossva.configuration import Configuration, CrossVA <NEW_LINE> config_file_path = os.path.join(os.path.split(__file__)[0], "resources/mapping_configuration_files/") <NEW_LINE> proportions = {} <NEW_LINE> for input_format in SUPPORTED_INPUTS: <NEW_LINE> <INDENT> translation_file = (f"{config_file_path}{input_format}_to_{output_format}.csv") <NEW_LINE> if os.path.isfile(translation_file): <NEW_LINE> <INDENT> mapping_data = pd.read_csv(translation_file) <NEW_LINE> mapping_config = Configuration(config_data=mapping_data, process_strings=False) <NEW_LINE> cross_va = CrossVA(data, mapping_config) <NEW_LINE> mapped_data_column_ids = cross_va.data.columns <NEW_LINE> data_column_ids = data.columns <NEW_LINE> proportions[input_format] = len(mapped_data_column_ids) / len(data_column_ids) <NEW_LINE> <DEDENT> <DEDENT> return max(proportions, key=proportions.get) | Detects the format of the input data, determining the closest match
Args:
output_format (string): The output format, needed for loading the configuration files used to test each supported input format
data (Pandas DataFrame): The data being processed where we wish to determine the most likely format
Returns:
str: the best matching format for the input data
Examples:
Can determine the format of a data file:
>>> detect_format("InsillicoVA", flexible_read("resources/sample_data/2016WHO_mock_data_1.csv"))
'2016WHOv141' | 625941b0925a0f43d2549bd9 |
def glGetClipPlane(*argv): <NEW_LINE> <INDENT> pass | no string
| 625941b097e22403b379ccfe |
def determine_ellipse(self, num_points=500, suspected_radius=None, interactive=False, plot=False): <NEW_LINE> <INDENT> if isinstance(suspected_radius, float): <NEW_LINE> <INDENT> suspected_radius = self.axes_manager.signal_axes[-3].value2index(suspected_radius) <NEW_LINE> <DEDENT> center, lengths, angle = solve_ellipse(self.sum().data, num_points=num_points, interactive=interactive, plot=plot) <NEW_LINE> self.metadata.set_item("Signal.Ellipticity.center", center) <NEW_LINE> self.metadata.set_item("Signal.Ellipticity.angle", angle) <NEW_LINE> self.metadata.set_item("Signal.Ellipticity.lengths", lengths) <NEW_LINE> self.metadata.set_item("Signal.Ellipticity.calibrated", True) <NEW_LINE> return center, lengths, angle | Determine the elliptical nature of the diffraction pattern.
Parameters
----------
interactive : Boolean
'interactive' nature means that points are chosen to create a ring
axis : int
'axis' to determine ellipse along
num_points : int
number of points to define ellipse by (only used if interactive = False)
plot : Boolean
Whether or not to plot the ellipse
Returns
-------
center : list of int
the center of the ellipse
lengths : list of int
the length in pixels of the major and minor axes
angle : float
the angle of the major axes | 625941b08a43f66fc4b53dd1 |
def _fire_bullet(self): <NEW_LINE> <INDENT> if len(self.bullets) < self.settings.bullets_allowed: <NEW_LINE> <INDENT> new_bullet = Bullet(self) <NEW_LINE> self.bullets.add(new_bullet) <NEW_LINE> self.bullet_sound() | Create bullet, add it to the bullets group and play sound effect. | 625941b0e64d504609d745a6 |
def create_bow_dataset(num_rows=None): <NEW_LINE> <INDENT> train = pd.read_csv('./data/interim/trimmed_train_train.csv', nrows=num_rows) <NEW_LINE> val = pd.read_csv('./data/interim/trimmed_train_validation.csv', nrows=num_rows) <NEW_LINE> train, val = replace_text_columns_with_bow(train, val) <NEW_LINE> train.to_csv("./data/processed/train_with_bow.csv", index=False) <NEW_LINE> val.to_csv("./data/processed/validation_with_bow.csv", index=False) | Creates a dataset using bag of words and saves it in processed.
Args:
min_word_freq: Minimum frequency of a word for it to be included in
the bag of words. | 625941b0293b9510aa2c2ff9 |
def addCategoryObject (self, category, local_name, named_object): <NEW_LINE> <INDENT> name_map = self.categoryMap(category) <NEW_LINE> old_object = name_map.get(local_name) <NEW_LINE> if (old_object is not None) and (old_object != named_object): <NEW_LINE> <INDENT> raise pyxb.NamespaceUniquenessError(self, '%s: name %s used for multiple values in %s' % (self, local_name, category)) <NEW_LINE> <DEDENT> name_map[local_name] = named_object <NEW_LINE> return named_object | Allow access to the named_object by looking up the local_name in
the given category.
Raises pyxb.NamespaceUniquenessError if an object with the same name
already exists in the category. | 625941b0627d3e7fe0d68bad |
def create_hashvalue(message): <NEW_LINE> <INDENT> hash_obj = hashlib.sha256() <NEW_LINE> hash_obj.update(message.encode("utf-8")) <NEW_LINE> return hash_obj.hexdigest() | Create hash value | 625941b0507cdc57c6306a30 |
def general_format(fig, axis): <NEW_LINE> <INDENT> fig.tight_layout() <NEW_LINE> scientific_notation(axis) | General function that will format a Matplotlib subplot to remove
some common annoyances that figures have. | 625941b021a7993f00bc7a4e |
def valid_password_first_star(digits: List[int]) -> bool: <NEW_LINE> <INDENT> found_double = False <NEW_LINE> for index in range(1, 6): <NEW_LINE> <INDENT> previous = digits[index - 1] <NEW_LINE> if digits[index] < previous: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if digits[index] == digits[index - 1]: <NEW_LINE> <INDENT> found_double = True <NEW_LINE> <DEDENT> <DEDENT> return found_double | For a given 6-digit number, validate that:
A) The digits do not decrease from left to right.
B) There exists at least one pair of adjacent equal digits. | 625941b0046cf37aa974cab2 |
def get_hash_savepoint(hashtag): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(get_hash_file_id(hashtag), "r") as file: <NEW_LINE> <INDENT> savepoint = file.read() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> savepoint = "" <NEW_LINE> logging.warning("No savepoint found. Bot is now searching for results") <NEW_LINE> <DEDENT> return savepoint | Gets the last retweet id of the passed hashtag if it exists | 625941b00a50d4780f666bf6 |
def includeme(config): <NEW_LINE> <INDENT> settings = config.registry.settings <NEW_LINE> config.set_request_property(get_is_authenticated, 'is_authenticated', reify=True) <NEW_LINE> config.set_request_property(get_authenticated_user, 'user', reify=True) <NEW_LINE> config.set_request_property(get_user_json, 'user_json', reify=True) <NEW_LINE> config.set_request_property(get_is_post_login, 'is_post_login', reify=True) <NEW_LINE> config.set_request_property(get_is_post_signup, 'is_post_signup', reify=True) <NEW_LINE> prefix = settings.get('simpleauth.url_prefix', 'auth') <NEW_LINE> path = '{0}/*traverse'.format(prefix) <NEW_LINE> config.add_route('simpleauth', path, factory=AuthRoot, use_global_views=True) <NEW_LINE> if asbool(settings.get('simpleauth.set_default_permission', True)): <NEW_LINE> <INDENT> permission = settings.get('simpleauth.default_permission', 'view') <NEW_LINE> config.set_default_permission(permission) <NEW_LINE> <DEDENT> if asbool(settings.get('simpleauth.set_auth_policies', True)): <NEW_LINE> <INDENT> authn_policy = SessionAuthenticationPolicy(callback=get_roles) <NEW_LINE> authz_policy = ACLAuthorizationPolicy() <NEW_LINE> config.set_authorization_policy(authz_policy) <NEW_LINE> config.set_authentication_policy(authn_policy) <NEW_LINE> <DEDENT> config.scan('pyramid_simpleauth', ignore='pyramid_simpleauth.tests') | Allow developers to use ``config.include('pyramid_simpleauth')``.
Setup::
>>> from mock import Mock
>>> mock_config = Mock()
>>> mock_config.registry.settings = {}
Adds properties to the request::
>>> includeme(mock_config)
>>> args = (get_is_authenticated, 'is_authenticated')
>>> mock_config.set_request_property.assert_any_call(*args,
... reify=True)
>>> args = (get_authenticated_user, 'user')
>>> mock_config.set_request_property.assert_any_call(*args,
... reify=True)
>>> args = (get_user_json, 'user_json')
>>> mock_config.set_request_property.assert_any_call(*args,
... reify=True)
>>> args = (get_is_post_login, 'is_post_login')
>>> mock_config.set_request_property.assert_any_call(*args,
... reify=True)
>>> args = (get_is_post_signup, 'is_post_signup')
>>> mock_config.set_request_property.assert_any_call(*args,
... reify=True)
Exposes the authentication views::
>>> args = ('simpleauth', 'auth/*traverse')
>>> kwargs = {
... 'factory': AuthRoot,
... 'use_global_views': True
... }
>>> mock_config.add_route.assert_called_with(*args, **kwargs)
Locks down everything by default::
>>> mock_config.set_default_permission.assert_called_with('view')
>>> mock_config = Mock()
>>> mock_config.registry.settings = {
... 'simpleauth.set_default_permission': False
... }
>>> includeme(mock_config)
>>> mock_config.set_default_permission.called
False
Sets up authentication and authorisation policies by default::
>>> mock_config.set_authentication_policy.called
True
>>> mock_config.set_authorization_policy.called
True
>>> mock_config = Mock()
>>> mock_config.registry.settings = {
... 'simpleauth.set_auth_policies': False
... }
>>> includeme(mock_config)
>>> mock_config.set_authentication_policy.called
False
>>> mock_config.set_authorization_policy.called
False | 625941b03317a56b869399cc |
def get_stock_basics(self, expire=60*6): <NEW_LINE> <INDENT> if not os.path.exists(CT.BASICS_DIR): <NEW_LINE> <INDENT> os.makedirs(CT.BASICS_DIR) <NEW_LINE> <DEDENT> basics_file_path = CT.BASICS_DIR + './basics.csv' <NEW_LINE> expired = date_time.check_file_expired(basics_file_path, expire) <NEW_LINE> if expired or not os.path.exists(basics_file_path): <NEW_LINE> <INDENT> d = ts.get_stock_basics() <NEW_LINE> d = d.sort_index() <NEW_LINE> d.to_csv(CT.BASICS_DIR + './basics.csv', sep='\t') <NEW_LINE> all_stock_symbol = open(CT.BASICS_DIR + './symbols.csv', 'w') <NEW_LINE> stock_symbol = [] <NEW_LINE> for symbol in d['name'].index: <NEW_LINE> <INDENT> stock_symbol.append(symbol + '\n') <NEW_LINE> <DEDENT> all_stock_symbol.writelines(stock_symbol) <NEW_LINE> all_stock_symbol.close() <NEW_LINE> <DEDENT> d = pd.read_csv(basics_file_path, sep='\t', index_col=0) <NEW_LINE> return d | Fetch basic information on companies listed in Shanghai and Shenzhen.
:param expire: local-data expiry time in minutes; once exceeded the local data is refreshed, pass 0 to force an update
@result:
code, stock code
name, company name
industry, industry
area, region
pe, price-to-earnings ratio
outstanding, floating share capital
totals, total share capital (10k shares)
totalAssets, total assets (10k)
liquidAssets, current assets
fixedAssets, fixed assets
reserved, capital reserves
reservedPerShare, capital reserves per share
eps, earnings per share
bvps, book value per share
pb, price-to-book ratio
timeToMarket, listing date | 625941b08a349b6b435e7edd |
def main(): <NEW_LINE> <INDENT> if save_as == 'no_output': <NEW_LINE> <INDENT> df = mine_and_mergeXUTs(pickle_df = False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> df = mine_and_mergeXUTs() | Main entry point of the script | 625941b0b57a9660fec335de |
def mm(density, u, step, x=vrend.Identity): <NEW_LINE> <INDENT> return (vrend.Sum, sl(density, u, step, x), error(density, u, step, x)) | Modified MacCormack advection. | 625941b0be7bc26dc91cd36e |
def get_emhidf(self) -> pd.DataFrame: <NEW_LINE> <INDENT> city_code = self._get_city_code("emhi") <NEW_LINE> query_base = r"http://www.ilmateenistus.ee/wp-content/themes/emhi2013/meteogram.php?locationId={}" <NEW_LINE> query_url = query_base.format(city_code) <NEW_LINE> emhi_data = requests.get(query_url).text.replace("callback(", "").replace(");", "") <NEW_LINE> emhi_json = json.loads(emhi_data)["forecast"]["tabular"]["time"] <NEW_LINE> data = [self._return_emhi_hour_data(hour) for hour in emhi_json] <NEW_LINE> df = pd.DataFrame(data) <NEW_LINE> df = df.rename(columns={'phenomen': 'symbol'}) <NEW_LINE> df = self.convert_df_dtypes(df) <NEW_LINE> df['symbol'] = df.apply(self._convert_emhi_symbol, axis=1) <NEW_LINE> return df | Returns emhi (ilmateenistus.ee) 2-day weather forecast as a dataframe.
:return: pd.DataFrame[["end", "precipitation", "pressure", "start", "symbol",
"temperature", "windDirection", "windSpeed"]] | 625941b02c8b7c6e89b3552c |
def _asset_to_fixture(asset: str) -> Path: <NEW_LINE> <INDENT> package_name, file_name = resolve_asset_spec(asset) <NEW_LINE> if package_name: <NEW_LINE> <INDENT> package = __import__(package_name) <NEW_LINE> path = Path(package_path(package), file_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = Path(file_name) <NEW_LINE> <DEDENT> if not path.is_dir(): <NEW_LINE> <INDENT> msg = 'This is not a directory {}'.format(asset) <NEW_LINE> raise ConfigurationError(details=msg) <NEW_LINE> <DEDENT> return path | Translate :term:`asset` to absolute fixture path. | 625941b016aa5153ce3621e0 |
def version_cmp(version1, version2): <NEW_LINE> <INDENT> def normalize(v): <NEW_LINE> <INDENT> return [int(x) for x in re.sub(r'\+','', v).split(".")] <NEW_LINE> <DEDENT> return normalize(version1) < normalize(version2) | Return True if version1 is less than version2 | 625941b08e71fb1e9831d516 |
def bubble_sort(numbers): <NEW_LINE> <INDENT> length = len(numbers) <NEW_LINE> for value in range(length - 1, 0, -1): <NEW_LINE> <INDENT> for i in range(0, value): <NEW_LINE> <INDENT> if numbers[i] > numbers[i + 1]: <NEW_LINE> <INDENT> numbers[i], numbers[i + 1] = numbers[i + 1], numbers[i] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return numbers | Puts a list of numbers in order.
Args:
numbers (list): The list of numbers to sort.
Returns:
list: returns a list in numerical order
Example:
>>> bubble_sort([3, 2, 1])
[1, 2, 3] | 625941b18a43f66fc4b53dd3 |
@click.command() <NEW_LINE> @click.option('--amount', type=click.FLOAT, help='Amount to convert.') <NEW_LINE> @click.option('--input_currency', help='The currency to convert from.') <NEW_LINE> @click.option('--output_currency', default=None, help='Optional. The currency to convert to.') <NEW_LINE> @click.option('--host', default='localhost', help='Optional. The host hosting the conversion API.') <NEW_LINE> def convert(amount, input_currency, output_currency, host): <NEW_LINE> <INDENT> if amount is None: <NEW_LINE> <INDENT> raise click.UsageError('Amount to convert argument required.') <NEW_LINE> <DEDENT> if input_currency is None: <NEW_LINE> <INDENT> raise click.UsageError('Input currency argument required.') <NEW_LINE> <DEDENT> params = { 'amount': amount, 'input_currency': input_currency } <NEW_LINE> if output_currency is not None: <NEW_LINE> <INDENT> params['output_currency'] = output_currency <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> resp = requests.get(URL_BASE.format(host), params=params, timeout=0.5) <NEW_LINE> resp.raise_for_status() <NEW_LINE> <DEDENT> except requests.HTTPError: <NEW_LINE> <INDENT> raise click.UsageError('Did you enter the right amount and ' 'correct currency codes?') <NEW_LINE> <DEDENT> except requests.exceptions.Timeout: <NEW_LINE> <INDENT> raise click.UsageError('Timeout. Did you provide a correct host?') <NEW_LINE> <DEDENT> ordered_resp = json.loads(resp.text, object_pairs_hook=OrderedDict) <NEW_LINE> print(json.dumps(ordered_resp, indent=4, separators=(',', ': '))) | A program for converting currencies.
def list_repo_issues(self, owner, repository, milestone=None, state='', assignee='', mentioned='', labels='', sort='', direction='', since=''): <NEW_LINE> <INDENT> issues = [] <NEW_LINE> if owner and repository: <NEW_LINE> <INDENT> repo = self.repository(owner, repository) <NEW_LINE> issues = repo.list_issues(milestone, state, assignee, mentioned, labels, sort, direction, since) <NEW_LINE> <DEDENT> return issues | List issues on owner/repository. Only owner and repository are
required.
:param str owner: login of the owner of the repository
:param str repository: name of the repository
:param int milestone: None, '*', or ID of milestone
:param str state: accepted values: ('open', 'closed')
api-default: 'open'
:param str assignee: '*' or login of the user
:param str mentioned: login of the user
:param str labels: comma-separated list of label names, e.g.,
'bug,ui,@high'
:param str sort: accepted values: ('created', 'updated', 'comments')
api-default: created
:param str direction: accepted values: ('asc', 'desc')
api-default: desc
:param str since: ISO 8601 formatted timestamp, e.g.,
2012-05-20T23:10:27Z
:returns: list of :class:`Issue <github3.issues.Issue>`\ s | 625941b1293b9510aa2c2ffb |
def p_expression(self, p): <NEW_LINE> <INDENT> if len(p) == 2 and type(p[1]) == AST.Const: <NEW_LINE> <INDENT> p[0] = p[1] <NEW_LINE> <DEDENT> elif len(p) == 2: <NEW_LINE> <INDENT> p[0] = AST.ID(p[1]) <NEW_LINE> <DEDENT> elif len(p) == 4 and p[1] == '(': <NEW_LINE> <INDENT> p[0] = p[2] <NEW_LINE> <DEDENT> elif len(p) == 4: <NEW_LINE> <INDENT> p[0] = AST.BinaryExpression(p[1], p[2], p[3]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> p[0] = AST.CallFunction(p[1], p[3]) | expression : const
| ID
| expression '+' expression
| expression '-' expression
| expression '*' expression
| expression '/' expression
| expression '%' expression
| expression '|' expression
| expression '&' expression
| expression '^' expression
| expression AND expression
| expression OR expression
| expression SHL expression
| expression SHR expression
| expression EQ expression
| expression NEQ expression
| expression '>' expression
| expression '<' expression
| expression LE expression
| expression GE expression
| '(' expression ')'
| '(' error ')'
| ID '(' expr_list_or_empty ')'
| ID '(' error ')' | 625941b182261d6c526ab205 |
def get_MetadataURL(self): <NEW_LINE> <INDENT> return super(IRemoteMetadataName, self).get_MetadataURL() | Method IRemoteMetadataName.get_MetadataURL
OUTPUT
pUrl : BSTR* | 625941b124f1403a926008d4 |
def copyDataFrom (self, other): <NEW_LINE> <INDENT> self.valueOpM1=other.valueOpM1 <NEW_LINE> self._myHasValueOpM1=other._myHasValueOpM1 | This method will copy to self only the leaves & descendants, and their "has" values, from ***other***.
It will leave "requested" fields unchanged | 625941b16aa9bd52df036b03 |
def get_modules_stats(self, server_info): <NEW_LINE> <INDENT> if not server_info.has_key("modules"): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> modules = server_info["modules"] <NEW_LINE> return { "loggers" : modules.get("loggers", None), "handlers" : modules.get("handlers", None), "encoders" : modules.get("encoders", None), "validators" : modules.get("validators", None), "generic" : modules.get("generic", None), "balancers" : modules.get("balancers", None), "rules" : modules.get("rules", None), "cryptors" : modules.get("cryptors", None), "vrules" : modules.get("vrules", None), "collectors" : modules.get("collectors", None), } | Title: Modules
loggers
handlers
encoders
validators
generic
balancers
rules
cryptors
vrules
collectors | 625941b121a7993f00bc7a50 |
def div(x, y, min_n = -1075, max_p = 53): <NEW_LINE> <INDENT> if x.is_zero(): <NEW_LINE> <INDENT> e = x.e - y.e <NEW_LINE> return Sink(c = 0, exp = e + 1, negative = x.negative != y.negative, inexact = not x.is_exactly_zero(), sided = False, full = False) <NEW_LINE> <DEDENT> elif y.is_zero(): <NEW_LINE> <INDENT> raise ZeroDivisionError('division by zero: {} / {}'.format(repr(x), repr(y))) <NEW_LINE> <DEDENT> result = withnprec(gmp.div, x.to_mpfr(), y.to_mpfr(), min_n=min_n, max_p=max_p) <NEW_LINE> inexact = x.inexact or y.inexact or result.inexact <NEW_LINE> return Sink(result, inexact=inexact, full=False, sided=False) | Divide two sinks x / y, rounding according to min_n and max_p.
TODO: rounding modes | 625941b16aa9bd52df036b04 |
def CreateSession( registration_info: SimulatorInterface, config_client: BonsaiClientConfig ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> print( "config: {}, {}".format(config_client.server, config_client.workspace) ) <NEW_LINE> registered_session: SimulatorSessionResponse = client.session.create( workspace_name=config_client.workspace, body=registration_info ) <NEW_LINE> print("Registered simulator. {}".format(registered_session.session_id)) <NEW_LINE> return registered_session, 1 <NEW_LINE> <DEDENT> except HttpResponseError as ex: <NEW_LINE> <INDENT> print( "HttpResponseError in Registering session: StatusCode: {}, Error: {}, Exception: {}".format( ex.status_code, ex.error.message, ex ) ) <NEW_LINE> raise ex <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> print( "UnExpected error: {}, Most likely, it's some network connectivity issue, make sure you are able to reach bonsai platform from your network.".format( ex ) ) <NEW_LINE> raise ex | Creates a new Simulator Session and returns new session, sequenceId
| 625941b1046cf37aa974cab4 |
def new_docs(bot, update, args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> amount = int(args[0]) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> amount = NEW_DOCS_DEFAULT <NEW_LINE> <DEDENT> if amount < 0 or amount > UP_BOARD_FOR_OUTPUT: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> res = DB.select_newest_docs(amount) <NEW_LINE> for d in res: <NEW_LINE> <INDENT> update.message.reply_text(str(d.time) + ' ' + d.name + '\n' + d.link) <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> update.message.reply_text("Неправильный аргумент") | Sends the newest documents
:param bot:
:param update:
:param args: number of documents
:return: void | 625941b176d4e153a657e899 |
def detach_port_from_bgpvpn(self, context, port_bgpvpn_info, host=None): <NEW_LINE> <INDENT> if not host or host == cfg.CONF.host: <NEW_LINE> <INDENT> self.bgpvpn_port_detach(context, port_bgpvpn_info) | Handle RPC cast from service plugin to detach port from BGP VPN | 625941b13317a56b869399ce |
def gameOver(SCREEN, BACKGROUND1, BACKGROUND2, ball_list): <NEW_LINE> <INDENT> pause = True <NEW_LINE> WHITE = pygame.Color(250, 250, 250) <NEW_LINE> SCREEN.fill(WHITE) <NEW_LINE> ball_list.clear(SCREEN, BACKGROUND2) <NEW_LINE> SCREEN.blit(BACKGROUND1,(0,0)) <NEW_LINE> SCREEN.blit(BACKGROUND2,(60,50)) <NEW_LINE> while pause: <NEW_LINE> <INDENT> for event in pygame.event.get(): <NEW_LINE> <INDENT> if event.type == pygame.QUIT: <NEW_LINE> <INDENT> pygame.quit() <NEW_LINE> quit() <NEW_LINE> <DEDENT> if event.type == KEYDOWN: <NEW_LINE> <INDENT> if event.key == K_SPACE: <NEW_LINE> <INDENT> main(2, 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> draw_text(SCREEN, "Game Over!", 320, 230) <NEW_LINE> draw_text(SCREEN, "Press Space Bar to Play again.", 240, 250) <NEW_LINE> pygame.display.update() | Function responsible for displaying the game over screen after a player loses
:param SCREEN: passes the pygame screen
:param BACKGROUND1: passes background 1 for redraw
:param BACKGROUND2: passes background 2 for redraw
:param ball_list: passes ball_list for redraw | 625941b18a349b6b435e7edf |
def err_div(x, y, ex, ey): <NEW_LINE> <INDENT> Q = x / y <NEW_LINE> dQ = np.abs(Q) * np.sqrt((ex / x) ** 2 + (ey / y) ** 2) <NEW_LINE> return Q, dQ | error propagation for division | 625941b1b57a9660fec335e0 |
def _getKeys (self, maapiList): <NEW_LINE> <INDENT> timeElapsor = _TimeElapsor(self._log) <NEW_LINE> timeElapsor.set() <NEW_LINE> rc = maapiList.readListKeys() <NEW_LINE> self._log("get-keys-elapsed-msec").debug2("reading the list keys took over %s msecs", timeElapsor.getElapsedMsecs()) <NEW_LINE> if rc != ReturnCodes.kOk: <NEW_LINE> <INDENT> self._log("get-keys-failed").error("failed reading keys for maapi list") <NEW_LINE> return None <NEW_LINE> <DEDENT> timeElapsor.set() <NEW_LINE> keys = maapiList.getListKeys() <NEW_LINE> self._log("get-keys-elapsed-msec").debug2("getting the list keys took over %s msecs", timeElapsor.getElapsedMsecs()) <NEW_LINE> self._log("get-keys").debug2("read keys from maapi list. keys = %s", keys) <NEW_LINE> return keys | Takes an uninitialized MAAPI List object (generated by Blinky), and tries to read its keys.
If the read is successful - returns a list of keys.
If it fails - returns None
To be used by extension classes for convenient MAAPI access. | 625941b17b180e01f3dc4572 |
def __init__(self, url, protected=True): <NEW_LINE> <INDENT> self._url = url <NEW_LINE> self.protected = protected | :param url: yarl.URL instance
:param protected: bool | 625941b1d8ef3951e32432a7 |
def __tree(self): <NEW_LINE> <INDENT> target = self.data['target'] <NEW_LINE> volume = self.get_volume(target) <NEW_LINE> self.response['tree'] = volume.tree(target) | Handles the 'tree' command.
Sets response['tree'] - a list of children of the specified
target Directory. | 625941b1925a0f43d2549bdd |
def messenger(): <NEW_LINE> <INDENT> return "Don't harm the messenger" | Send a message. | 625941b16fece00bbac2d4a0 |
def find_towns_affected(self, stations): <NEW_LINE> <INDENT> self.towns = [] <NEW_LINE> for station in self.stations_in_warning(stations): <NEW_LINE> <INDENT> self.towns.append(station.town) <NEW_LINE> <DEDENT> return self.towns | Find towns in the flood warning region with a monitoring station.
Parameters
----------
stations : list[MonitoringStations]
produced using stationdata.build_station_list().
Returns
-------
list[String]
list of names of the towns affected. | 625941b1d164cc6175782ab0 |
def get_current_job(): <NEW_LINE> <INDENT> return _job_stack.top | Get the Job that is currently executing | 625941b197e22403b379cd02 |
def test_delete_line_object_incorrect_id(self): <NEW_LINE> <INDENT> with self.client: <NEW_LINE> <INDENT> response = self.client.delete( '/api/v1/users/1/lines/99', content_type='application/json' ) <NEW_LINE> data = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> self.assertIn('99 does not exist.', data['message']) <NEW_LINE> self.assertIn('fail', data['status']) | Ensure error is thrown if the id does not exist. | 625941b1bf627c535bc12f3a |
@app.route("/software") <NEW_LINE> def software(): <NEW_LINE> <INDENT> return render_template("software.html") | Method for python flask, to set path for software page of the web frontend;
there one can download the one-click VNC applications for his/her os. | 625941b1627d3e7fe0d68bb1 |
def _get_location_from_entity(self, entity_id): <NEW_LINE> <INDENT> state = self.hass.states.get(entity_id) <NEW_LINE> if state is None: <NEW_LINE> <INDENT> _LOGGER.error("Unable to find entity %s", entity_id) <NEW_LINE> return None <NEW_LINE> <DEDENT> if location.has_location(state): <NEW_LINE> <INDENT> _LOGGER.debug("Getting %s location", entity_id) <NEW_LINE> return _get_location_from_attributes(state) <NEW_LINE> <DEDENT> zone_state = self.hass.states.get('zone.{}'.format(state.state)) <NEW_LINE> if location.has_location(zone_state): <NEW_LINE> <INDENT> _LOGGER.debug( "%s is in %s, getting zone location", entity_id, zone_state.entity_id ) <NEW_LINE> return _get_location_from_attributes(zone_state) <NEW_LINE> <DEDENT> if entity_id.startswith('sensor.'): <NEW_LINE> <INDENT> return state.state <NEW_LINE> <DEDENT> return None | Get the location from the entity_id. | 625941b1507cdc57c6306a34 |
def handle(self, *args, **kwargs): <NEW_LINE> <INDENT> fn = kwargs['filename'] <NEW_LINE> with open(fn) as f: <NEW_LINE> <INDENT> reader = csv.reader(f) <NEW_LINE> reader.next() <NEW_LINE> for i, row in enumerate(reader): <NEW_LINE> <INDENT> ref = reimport_reactions.convert_legacy_reference(row[0]) <NEW_LINE> self.stdout.write( '{}: Making reference {} public'.format(i, ref)) <NEW_LINE> ps = PerformedReaction.objects.filter(convertedLegacyRef=ref) <NEW_LINE> if ps.count() == 0: <NEW_LINE> <INDENT> if ref.startswith('xxx'): <NEW_LINE> <INDENT> unmunged_ref = ref + '0' <NEW_LINE> self.stdout.write( '{}: Making UNMUNGED reference {} public'.format(i, unmunged_ref)) <NEW_LINE> ps = PerformedReaction.objects.filter( convertedLegacyRef=unmunged_ref) <NEW_LINE> if ps.count() != 1: <NEW_LINE> <INDENT> ps.update(public=True) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError( 'Found {} reactions with reference {}'.format(ps.count(), ref)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ps.update(public=True) | Handle the call for this command. | 625941b121a7993f00bc7a52 |
def __init__(self, nodes_dict): <NEW_LINE> <INDENT> super().__init__(edges=self._get_edges_from_nodes(nodes_dict.values()), variables=list(nodes_dict.keys()), cpds=[node.cpd for node in nodes_dict.values()]) <NEW_LINE> self.nodes_dict = nodes_dict | Args
nodes_dict: dict
a dict key, value pair as {label_id: instance_of_node_class_or_subclass} | 625941b1627d3e7fe0d68bb2 |
def start(self): <NEW_LINE> <INDENT> Module.start(self) | all modules have been loaded and initialized by now. we can bend the rules here. | 625941b14f88993c3716bdd9 |
def __getattr__(self, attributeName): <NEW_LINE> <INDENT> self.refreshSelectors() <NEW_LINE> if attributeName in self.selectors: <NEW_LINE> <INDENT> attribute = self.getChild(relativePath=self.selectors[attributeName]) <NEW_LINE> <DEDENT> elif attributeName in ['hasFirstPage', 'hasPreviousPage', 'page', 'hasNextPage', 'hasLastPage', 'pages']: <NEW_LINE> <INDENT> attribute = self.getPageDetails()[attributeName] <NEW_LINE> <DEDENT> elif attributeName in ['firstRow', 'lastRow', 'rows']: <NEW_LINE> <INDENT> attribute = self.getRowDetails()[attributeName] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError('Invalid attribute:', attributeName) <NEW_LINE> <DEDENT> return attribute | Attempt to return Element instance by given element name (ex. 'btnNextPage') | 625941b10a50d4780f666bfa |
def __mapbase_iteritems__ ( self ) : <NEW_LINE> <INDENT> _size = len ( self ) <NEW_LINE> _index = 0 <NEW_LINE> while _index < _size : <NEW_LINE> <INDENT> _key = self.key_at ( _index ) <NEW_LINE> yield ( _key , self.at ( _key ) ) <NEW_LINE> _index +=1 | The iterator for MapBase-based containers
>>> m = ... ## the map
>>> for key,value in m.iteritems() : print key, value | 625941b176d4e153a657e89b |
def __init__(self, parent, coord_expression=None, name=None, latex_name=None, is_isomorphism=False, is_identity=False): <NEW_LINE> <INDENT> if coord_expression is None: <NEW_LINE> <INDENT> coord_functions = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(coord_expression, dict): <NEW_LINE> <INDENT> raise TypeError("{} is not a dictionary".format( coord_expression)) <NEW_LINE> <DEDENT> param_chart = parent.domain().canonical_chart() <NEW_LINE> coord_functions = {} <NEW_LINE> for chart, expr in coord_expression.items(): <NEW_LINE> <INDENT> if isinstance(chart, tuple): <NEW_LINE> <INDENT> coord_functions[chart] = expr <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> coord_functions[(param_chart, chart)] = expr <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> DiffMap.__init__(self, parent, coord_functions=coord_functions, name=name, latex_name=latex_name, is_isomorphism=is_isomorphism, is_identity=is_identity) | Construct a curve.
TESTS::
sage: M = Manifold(2, 'M')
sage: X.<x,y> = M.chart()
sage: R.<t> = manifolds.RealLine()
sage: I = R.open_interval(0, 2*pi)
sage: c = Hom(I,M)({X: (cos(t), sin(2*t))}, name='c') ; c
Curve c in the 2-dimensional differentiable manifold M
sage: TestSuite(c).run()
The identity of interval ``I``::
sage: c = Hom(I,I)({}, is_identity=True) ; c
Identity map Id_(0, 2*pi) of the Real interval (0, 2*pi)
sage: TestSuite(c).run() | 625941b10a366e3fb873e580 |
def __init__(self, network, participant=None): <NEW_LINE> <INDENT> if network.failed: <NEW_LINE> <INDENT> raise ValueError("Cannot create node in {} as it has failed" .format(network)) <NEW_LINE> <DEDENT> if participant is not None and participant.failed: <NEW_LINE> <INDENT> raise ValueError("{} cannot create a node as it has failed" .format(participant)) <NEW_LINE> <DEDENT> if participant is not None and participant.status != "working": <NEW_LINE> <INDENT> raise ValueError("{} cannot create a node as they are not working" .format(participant)) <NEW_LINE> <DEDENT> self.network = network <NEW_LINE> self.network_id = network.id <NEW_LINE> network.calculate_full() <NEW_LINE> if participant is not None: <NEW_LINE> <INDENT> self.participant = participant <NEW_LINE> self.participant_id = participant.id | Create a node. | 625941b12c8b7c6e89b35530 |
def resetAll(self): <NEW_LINE> <INDENT> for row, dev in enumerate(self.devices): <NEW_LINE> <INDENT> val = self.startValues[dev.eid] <NEW_LINE> state = self.ui.tableWidget.item(row, 5).checkState() <NEW_LINE> if state == 2: <NEW_LINE> <INDENT> dev.set_value(val) | Resets all PVs with a box checked.
Rewrote this function to only change selected rows, not all rows. | 625941b18e71fb1e9831d51a |
def __init__(self, message:str, delay:float=0.1) -> None: <NEW_LINE> <INDENT> self.spinner = itertools.cycle(['-', '/', '|', '\\']) <NEW_LINE> self.delay = delay <NEW_LINE> self.busy = False <NEW_LINE> self.spinnerVisible = False <NEW_LINE> sys.stdout.write(message) <NEW_LINE> return | Parameters
----------
message : str
The message to be displayed before the spinner
delay : float, optional
The delay in seconds between each step of the spinner's cycle (default = 0.1)
Returns
-------
None | 625941b197e22403b379cd04 |
def lookups(self, request, model_admin): <NEW_LINE> <INDENT> return [ (unicode(i), unicode(i)) for i in range(6, 13) ] + [(u'13', _(u'older'))] | Returns the list from 6 to 13.
| 625941b15166f23b2e1a4ec5 |
def get_tx_for_gene(self, gene): <NEW_LINE> <INDENT> return self._fetchall(self._queries['tx_for_gene'], [gene]) | return transcript info records for supplied gene, in order of decreasing length
:param gene: HGNC gene name
:type gene: str | 625941b18a43f66fc4b53dd7 |
def get(self, endpoint: str = None, code: int = 200, token: str = None, **kwargs): <NEW_LINE> <INDENT> return self.do_wrap( requests.get, endpoint=endpoint, code=code, token=token, **kwargs ) | Wrapper emulating the requests.get method with custom error handling.
Keyword Arguments:
endpoint {str} -- API endpoint. (default: {None})
code {int} -- Status code indicating success. (default: {200})
token {str} -- JWT access token. (default: {None})
Returns:
requests.Response -- HTTP Response. | 625941b13346ee7daa2b2ad2 |
def decode(self, shortUrl): <NEW_LINE> <INDENT> return self.dict[int(shortUrl)] | Decodes a shortened URL to its original URL.
:type shortUrl: str
:rtype: str | 625941b1e5267d203edcda0e |
@pytest.mark.parametrize('grid_size', [10, 25, 50]) <NEW_LINE> @pytest.mark.parametrize('initial_sample_size', [10, 250, 500]) <NEW_LINE> def test_sim_evaluate_returns_expected_results_from_data(initial_sample_size, data_input, grid_size, models_from_data): <NEW_LINE> <INDENT> grid = np.linspace(8, 25, grid_size) <NEW_LINE> cdfw_level1 = CDFWrapperModel(models_from_data[0], grid) <NEW_LINE> cdfw_level2 = CDFWrapperModel(models_from_data[1], grid) <NEW_LINE> cdfw_level3 = CDFWrapperModel(models_from_data[2], grid) <NEW_LINE> cdfws = [cdfw_level1, cdfw_level2, cdfw_level3] <NEW_LINE> mlmc_simulator = MLMCSimulator(data_input, cdfws) <NEW_LINE> mlmc_simulator._caching_enabled = False <NEW_LINE> cdf, sample_sizes, variances = mlmc_simulator.simulate(epsilon=2.5e-2, initial_sample_sizes=initial_sample_size) <NEW_LINE> assert np.all(cdf >= 0.) <NEW_LINE> assert np.count_nonzero(cdf[1: -1] >= cdf[0: -2]) > grid_size * .75 <NEW_LINE> cdf_sum = np.sum(cdf[1:-1] - cdf[:-2]) <NEW_LINE> assert np.isclose(cdf_sum, 1., atol=.05) | Tests MLMC Simulator with CDF Wrapper models and Spring Mass Data to
ensure outputs are valid. Uses a wide variety of initial sample sizes and
grid sizes. | 625941b176d4e153a657e89d |
def test_comment_dialog(self): <NEW_LINE> <INDENT> template_path = self.topdir.joinpath('./mustache_templates_comments') <NEW_LINE> for f in template_path.iterdir(): <NEW_LINE> <INDENT> if f.suffix == '.dialog': <NEW_LINE> <INDENT> self.stache.load_template_file(f.name, str(f.absolute())) <NEW_LINE> results = [line.strip() for line in f.with_suffix('.result').open('r')] <NEW_LINE> for index, line in enumerate(results): <NEW_LINE> <INDENT> self.assertEqual(self.stache.render(f.name, index=index), line.strip()) | Test the loading and filling of valid mustache dialogs
where a dialog file contains multiple text versions | 625941b16aa9bd52df036b08 |
def _clip_color(c): <NEW_LINE> <INDENT> r, g, b = c <NEW_LINE> L = lum(c) <NEW_LINE> n = _min3(c) <NEW_LINE> x = _max3(c) <NEW_LINE> def fn(c): <NEW_LINE> <INDENT> return (n < 0) * ((L * (c - n)) / (L - n)) + (n >= 0) * c <NEW_LINE> <DEDENT> def fx(c): <NEW_LINE> <INDENT> return (x > 255) * (L + ((c - L) * (255 - L)) / (x - L)) + (x <= 255) * c <NEW_LINE> <DEDENT> r = fx(fn(r)) <NEW_LINE> g = fx(fn(g)) <NEW_LINE> b = fx(fn(b)) <NEW_LINE> return (r, g, b) | Returns clipped color as ImageMath operands.
The formula is defined as:
ClipColor(C)
L = Lum(C)
n = min(Cred, Cgreen, Cblue)
x = max(Cred, Cgreen, Cblue)
if(n < 0)
C = L + (((C - L) * L) / (L - n))
if(x > 1)
C = L + (((C - L) * (1 - L)) / (x - L))
return C
See: https://www.w3.org/TR/compositing-1/#blendingnonseparable
Arguments:
c: A tuple/list of 3 ImageMath operands. The color.
Returns:
A tuple/list of 3 ImageMath operands. The clipped color. | 625941b15e10d32532c5ec97 |
def subsample(inputs, factor, scope=None): <NEW_LINE> <INDENT> if factor == 1: <NEW_LINE> <INDENT> return inputs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return layers.max_pool2d(inputs, [1, 1], stride=factor, scope=scope) | Subsamples the input along the spatial dimensions.
Args:
inputs: A `Tensor` of size [batch, height_in, width_in, channels].
factor: The subsampling factor.
scope: Optional variable_scope.
Returns:
output: A `Tensor` of size [batch, height_out, width_out, channels] with the
input, either intact (if factor == 1) or subsampled (if factor > 1). | 625941b1046cf37aa974cab8 |
def add(self, comment, line, start, old, new, error=None): <NEW_LINE> <INDENT> self._line_to_edit[line].append( _FileEditTuple(comment, line, start, old, new)) <NEW_LINE> if error: <NEW_LINE> <INDENT> self._errors.append("%s:%d: %s" % (self._filename, line, error)) | Add a new change that is needed.
Args:
comment: A description of what was changed
line: Line number (1 indexed)
start: Column offset (0 indexed)
old: old text
new: new text
error: this "edit" is something that cannot be fixed automatically
Returns:
None | 625941b18a349b6b435e7ee3 |
def read_files(inputs): <NEW_LINE> <INDENT> input_text = '' <NEW_LINE> for input_ in inputs: <NEW_LINE> <INDENT> with open(input_, 'r') as input_file: <NEW_LINE> <INDENT> input_text += input_file.read().replace('\n', ' ').replace('"', '').replace('-', ' ') + ' ' <NEW_LINE> <DEDENT> <DEDENT> return input_text.lower() | Parses the input files and returns a single large input string for later processing | 625941b13539df3088e2e0b6 |
def lmap(fn, *args): <NEW_LINE> <INDENT> return list(map(fn, args)) | Basically a wrapper for `map` that returns a list rather than a
generator. This is such a common pattern that I think it deserves its own
function (think of it as a concise alternative to a list comprehension).
One slight difference is that we use *args instead of passing in an
iterable. This adds a slight convenience for the intended use case (fast
prototyping). See the `Examples` for more on this.
Parameters
----------
args: any
Returns
-------
list
Examples
--------
Consider these three equivalent syntax options:
lmap(fn, x, y)
[fn(obj) for obj in (x, y)]
list(map(fn, (x, y))
When quickly iterating, option 1 saves a bit of typing. The extra
parentheses that options 2 and 3 require to put x and y in a temporary
data structure can get messy as we add more complex logic. | 625941b163b5f9789fde6e54 |
def absent(name, onlyif=None, unless=None): <NEW_LINE> <INDENT> ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} <NEW_LINE> retcode = __salt__['cmd.retcode'] <NEW_LINE> instance = __salt__['cloud.action'](fun='show_instance', names=[name]) <NEW_LINE> if not instance or ('Not Actioned/Not Running' in ret and name in ret['Not Actioned/Not Running']): <NEW_LINE> <INDENT> ret['result'] = True <NEW_LINE> ret['comment'] = 'Instance {0} already absent'.format(name) <NEW_LINE> return ret <NEW_LINE> <DEDENT> if __opts__['test']: <NEW_LINE> <INDENT> ret['comment'] = 'Instance {0} needs to be destroyed'.format(name) <NEW_LINE> return ret <NEW_LINE> <DEDENT> if onlyif is not None: <NEW_LINE> <INDENT> if not isinstance(onlyif, string_types): <NEW_LINE> <INDENT> if not onlyif: <NEW_LINE> <INDENT> return _valid(name, comment='onlyif execution failed') <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(onlyif, string_types): <NEW_LINE> <INDENT> if retcode(onlyif) != 0: <NEW_LINE> <INDENT> return _valid(name, comment='onlyif execution failed') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if unless is not None: <NEW_LINE> <INDENT> if not isinstance(unless, string_types): <NEW_LINE> <INDENT> if unless: <NEW_LINE> <INDENT> return _valid(name, comment='unless execution succeeded') <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(unless, string_types): <NEW_LINE> <INDENT> if retcode(unless) == 0: <NEW_LINE> <INDENT> return _valid(name, comment='unless execution succeeded') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> info = __salt__['cloud.destroy'](name) <NEW_LINE> if info and not 'Error' in info: <NEW_LINE> <INDENT> ret['changes'] = info <NEW_LINE> ret['result'] = True <NEW_LINE> ret['comment'] = ('Destroyed instance {0}').format( name, ) <NEW_LINE> <DEDENT> elif 'Error' in info: <NEW_LINE> <INDENT> ret['result'] = False <NEW_LINE> ret['comment'] = ('Failed to destroy instance {0}: {1}').format( name, info['Error'], ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret['result'] = False <NEW_LINE> ret['comment'] = 'Failed to destroy instance {0}'.format(name) <NEW_LINE> <DEDENT> return ret | Ensure that no instances with the specified names exist.
CAUTION: This is a destructive state, which will search all
configured cloud providers for the named instance,
and destroy it.
name
The name of the instance to destroy
onlyif
Run the state only if the given command returns a zero exit code
unless
Do not run the state if the given command returns a zero exit code | 625941b18c3a873295158124 |
def create_userstyle(self): <NEW_LINE> <INDENT> return self.get_or_create_element('sld', 'UserStyle') | Create a L{UserStyle} for this named layer.
@rtype: L{UserStyle}
@return: A newly created user style, attached to this node. | 625941b1d164cc6175782ab4 |
def printer(self, count): <NEW_LINE> <INDENT> print("\n[[ Top %i longest word files ]]" % (count)) <NEW_LINE> if (count > len(self.results)): count = len(self.results) <NEW_LINE> for x in range(count): <NEW_LINE> <INDENT> print(' {0:>7} {1}'.format(self.results[x]["value"], self.results[x]["filename"])) <NEW_LINE> <DEDENT> return | Print the top signature count match files for a given search | 625941b1d268445f265b4bdb |
def standard_env(): <NEW_LINE> <INDENT> import math, operator as op <NEW_LINE> env = Env() <NEW_LINE> env.update(vars(math)) <NEW_LINE> env.update({ '+': lambda *x: reduce(lambda z, y: z + y, x), '-':lambda *x: reduce(lambda z, y: z - y, x), '*':lambda *x: reduce(lambda z, y: z * y, x), '/':lambda *x: reduce(lambda z, y: z / y, x), '>':op.gt, '<':op.lt, '>=':op.ge, '<=':op.le, '=':op.eq, 'abs': abs, 'append': op.add, 'apply': apply, 'begin': lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:], 'cons': lambda x,y: [x] + y, 'eq?': op.is_, 'equal?': op.eq, 'length': len, 'list': lambda *x: list(x), 'list?': lambda x: isinstance(x,list), 'exec': lambda x: eval(compile(x,'None','single')), 'map': map, 'max': max, 'min': min, 'not': op.not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x, Number), 'procedure?': callable, 'round': round, 'symbol?': lambda x: isinstance(x, Symbol), }) <NEW_LINE> return env | An environment with some Scheme standard procedures. | 625941b13c8af77a43ae350e |
def main(self, *args): <NEW_LINE> <INDENT> Launcher.main(self) <NEW_LINE> s = SimServer(self.build_opt().copy()) <NEW_LINE> s.start() | Launch a SimServer simulation
:param args: see cli.Application | 625941b10a366e3fb873e583 |
def createDisplayBoard(self, xOffset, yOffset): <NEW_LINE> <INDENT> BLOCK_SIZE = 40 <NEW_LINE> grid = [] <NEW_LINE> for y in range(9): <NEW_LINE> <INDENT> row = [] <NEW_LINE> for x in range(9): <NEW_LINE> <INDENT> rect = pygame.Rect(xOffset + y*BLOCK_SIZE, x*BLOCK_SIZE + yOffset, BLOCK_SIZE, BLOCK_SIZE ) <NEW_LINE> pygame.draw.rect(self.screen,blue,rect,1) <NEW_LINE> row.append([rect, blue]) <NEW_LINE> <DEDENT> grid.append(row) <NEW_LINE> <DEDENT> return grid | createDisplayBoard Method
Parameters: xOffset, yOffset (distance from top left corner in pixels)
Returns: empty water grid that is created
Preconditions: N/A
Postconditions: N/A | 625941b14a966d76dd550d79 |
def test_BaseModel(self): <NEW_LINE> <INDENT> self.my_model.name = "Holbie" <NEW_LINE> self.my_model.my_number = 100 <NEW_LINE> self.my_model.save() <NEW_LINE> my_model_json = self.my_model.to_dict() <NEW_LINE> self.assertEqual(self.my_model.name, my_model_json['name']) <NEW_LINE> self.assertEqual(self.my_model.my_number, my_model_json['my_number']) <NEW_LINE> self.assertEqual('BaseModel', my_model_json['__class__']) <NEW_LINE> self.assertEqual(self.my_model.id, my_model_json['id']) | check attributes values in a BaseModel | 625941b16aa9bd52df036b09 |
def usage(self): <NEW_LINE> <INDENT> self.log.output( "usage: %s {%s}\n" % ( self.name, "|".join(self.handlers.actions) ) ) | Prints the agent's usage information including all implemented handlers | 625941b126238365f5f0ebd8 |
def test_mv_already_moved_file(self): <NEW_LINE> <INDENT> self.build_tree(['a']) <NEW_LINE> tree = self.make_branch_and_tree('.') <NEW_LINE> tree.add(['a']) <NEW_LINE> osutils.rename('a', 'b') <NEW_LINE> self.run_bzr('mv a b') <NEW_LINE> self.assertMoved('a', 'b') | Test brz mv original_file to moved_file.
Tests if a file which has allready been moved by an external tool,
is handled correctly by brz mv.
Setup: a is in the working tree, b does not exist.
User does: mv a b; brz mv a b | 625941b1d486a94d0b98deb8 |
def __delete_tail(self): <NEW_LINE> <INDENT> if self.size == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> node = self.header <NEW_LINE> prev = None <NEW_LINE> while node.next is not None: <NEW_LINE> <INDENT> prev = node <NEW_LINE> node = node.next <NEW_LINE> <DEDENT> prev.next = None <NEW_LINE> self.size -= 1 | Delete the tail node
| 625941b1507cdc57c6306a39 |
def verifieSymetrique(listePaires): <NEW_LINE> <INDENT> for x, y in listePaires: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> listePaires.index((y, x)) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return (x, y) <NEW_LINE> <DEDENT> <DEDENT> return None | Checks whether the pairs provided in the list passed as a parameter satisfy the symmetry property
:param listePaires: list of pairs
:return: the first pair that does not satisfy the property, or None | 625941b1b545ff76a8913b8a |
def __iter__(self): <NEW_LINE> <INDENT> return self.results.__iter__() | Return iterator. | 625941b14f88993c3716bdde |
def Polymorphisms(self, *args): <NEW_LINE> <INDENT> return _egglib_binding.NucleotideDiversity_Polymorphisms(self, *args) | Polymorphisms(NucleotideDiversity self, unsigned int pop) -> unsigned int | 625941b116aa5153ce3621e8 |
def testFlagConfigApprovalRequestResponse(self): <NEW_LINE> <INDENT> pass | Test FlagConfigApprovalRequestResponse | 625941b1e8904600ed9f1c98 |
def failUnlessEval(self, xmlfile, evalString, msg=None): <NEW_LINE> <INDENT> env = feedparser.parse(xmlfile) <NEW_LINE> if not eval(evalString, globals(), env): <NEW_LINE> <INDENT> failure=(msg or 'not eval(%s) \nWITH env(%s)' % (evalString, pprint.pformat(env))) <NEW_LINE> raise self.failureException(failure) <NEW_LINE> <DEDENT> if not everythingIsUnicode(env): <NEW_LINE> <INDENT> raise self.failureException("not everything is unicode \nWITH env(%s)" % (pprint.pformat(env), )) | Fail unless eval(evalString, env) | 625941b17b180e01f3dc4578 |
def test_is_boolean_valid(self): <NEW_LINE> <INDENT> phrase = '' <NEW_LINE> self.assertFalse(is_boolean(phrase)) <NEW_LINE> phrase = 'terre' <NEW_LINE> self.assertFalse(is_boolean(phrase)) <NEW_LINE> phrase = 'False' <NEW_LINE> self.assertTrue(is_boolean(phrase)) <NEW_LINE> phrase = 'True' <NEW_LINE> self.assertTrue(is_boolean(phrase)) <NEW_LINE> phrase = 'false' <NEW_LINE> self.assertTrue(is_boolean(phrase)) <NEW_LINE> phrase = 'true' <NEW_LINE> self.assertTrue(is_boolean(phrase)) | Test that is_boolean parses correctly. | 625941b1d7e4931a7ee9dc8b |
def prepare(self, exchange=_marker, topic=_marker, namespace=_marker, version=_marker, server=_marker, fanout=_marker, timeout=_marker, version_cap=_marker, retry=_marker, call_monitor_timeout=_marker): <NEW_LINE> <INDENT> return _CallContext._prepare(self, exchange, topic, namespace, version, server, fanout, timeout, version_cap, retry, call_monitor_timeout) | Prepare a method invocation context.
Use this method to override client properties for an individual method
invocation. For example::
def test(self, ctxt, arg):
cctxt = self.prepare(version='2.5')
return cctxt.call(ctxt, 'test', arg=arg)
:param exchange: see Target.exchange
:type exchange: str
:param topic: see Target.topic
:type topic: str
:param namespace: see Target.namespace
:type namespace: str
:param version: requirement the server must support, see Target.version
:type version: str
:param server: send to a specific server, see Target.server
:type server: str
:param fanout: send to all servers on topic, see Target.fanout
:type fanout: bool
:param timeout: an optional default timeout (in seconds) for call()s
:type timeout: int or float
:param version_cap: raise a RPCVersionCapError version exceeds this cap
:type version_cap: str
:param retry: an optional connection retries configuration:
None or -1 means to retry forever.
0 means no retry is attempted.
N means attempt at most N retries.
:type retry: int
:param call_monitor_timeout: an optional timeout (in seconds) for
active call heartbeating. If specified,
requires the server to heartbeat
long-running calls at this interval
(less than the overall timeout
parameter).
:type call_monitor_timeout: int | 625941b163b5f9789fde6e56 |
@app.route("/load-data") <NEW_LINE> def load_data(): <NEW_LINE> <INDENT> register_name = subdomain(request) <NEW_LINE> register = registers.get(register_name) <NEW_LINE> try: <NEW_LINE> <INDENT> if not register: <NEW_LINE> <INDENT> register = Register(register_name.capitalize(), current_app.config['MONGO_URI']) <NEW_LINE> registers[register_name] = register <NEW_LINE> <DEDENT> zip_url = '%s/%s.register/archive/master.zip' % (current_app.config['GITHUB_ORG'], register_name) <NEW_LINE> register.load_remote(zip_url) <NEW_LINE> flash('Loaded data into register') <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> log_traceback(current_app.logger, ex) <NEW_LINE> flash('Problem loading data into register', 'error') <NEW_LINE> <DEDENT> return redirect(url_for('entries', _external=True)) | This loads data for a register repository
e.g. https://github.com/openregister/registername.register.
It will then load the data contained in the repository and
load it into the register. Currently that means loading the data
into the mongodb for the register. | 625941b173bcbd0ca4b2bde5 |
def getGLObjectType(overlayType): <NEW_LINE> <INDENT> from . import glvolume <NEW_LINE> from . import glrgbvolume <NEW_LINE> from . import glcomplex <NEW_LINE> from . import glmask <NEW_LINE> from . import glrgbvector <NEW_LINE> from . import gllinevector <NEW_LINE> from . import glmesh <NEW_LINE> from . import gllabel <NEW_LINE> from . import gltensor <NEW_LINE> from . import glsh <NEW_LINE> from . import glmip <NEW_LINE> from . import gltractogram <NEW_LINE> typeMap = { 'volume' : glvolume .GLVolume, 'mask' : glmask .GLMask, 'rgbvector' : glrgbvector .GLRGBVector, 'linevector' : gllinevector.GLLineVector, 'mesh' : glmesh .GLMesh, 'label' : gllabel .GLLabel, 'tensor' : gltensor .GLTensor, 'sh' : glsh .GLSH, 'mip' : glmip .GLMIP, 'rgb' : glrgbvolume .GLRGBVolume, 'complex' : glcomplex .GLComplex, 'tractogram' : gltractogram.GLTractogram } <NEW_LINE> return typeMap.get(overlayType, None) | This function returns an appropriate :class:`GLObject` type for the
given :attr:`.Display.overlayType` value. | 625941b1b5575c28eb68dd6c |
def guessNumber(self, n): <NEW_LINE> <INDENT> mid = n // 2 <NEW_LINE> while guess(mid) != 0: <NEW_LINE> <INDENT> if guess(mid) == 1: <NEW_LINE> <INDENT> if n - mid == 1: <NEW_LINE> <INDENT> mid += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mid = (n - mid) // 2 + mid <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> n = mid <NEW_LINE> mid = mid // 2 <NEW_LINE> <DEDENT> <DEDENT> return mid | :type n: int
:rtype: int
24ms beats 99.21% | 625941b197e22403b379cd09 |
def _init_threads(self): <NEW_LINE> <INDENT> readTh = ReadThread(self.serialCom,self.historyFile) <NEW_LINE> self.threads.append(readTh) <NEW_LINE> writeTh = WriteThread(self.inPs[0], self.serialCom, self.historyFile) <NEW_LINE> self.threads.append(writeTh) | Initializes the read and the write thread.
| 625941b1293b9510aa2c3003 |
def draw_arrowhead(ctx, angle, p, scale, l=8, w=4): <NEW_LINE> <INDENT> ctx.save() <NEW_LINE> l /= scale <NEW_LINE> w /= scale <NEW_LINE> h = sqrt((l ** 2) + ((w / 2.0) ** 2)) <NEW_LINE> a_offset = atan((w / 2.0) / l) <NEW_LINE> ctx.set_source_rgb(0, 0, 0) <NEW_LINE> ctx.move_to(*p.t()) <NEW_LINE> for offset in [a_offset, -a_offset]: <NEW_LINE> <INDENT> ctx.line_to(*ptoc(angle + pi + offset, h, p).t()) <NEW_LINE> <DEDENT> ctx.close_path() <NEW_LINE> ctx.fill() <NEW_LINE> ctx.restore() | Draws an arrowhead at a specified point and angle.
Arguments:
ctx -- a Pycairo context
p -- Point instance for the tip of the arrowhead
angle -- angle in radians in which the arrowhead is to point, with
0 being right along the x-axis, angles increasing counter-clockwise.
scale -- scale factor, to ensure arrowheads remain same size
l -- default length of the arrowhead, along the notional line
w -- default width of the arrowhead at the base. | 625941b1e64d504609d745b0 |
def datetime_of_ampm(ampm): <NEW_LINE> <INDENT> if " " in ampm: <NEW_LINE> <INDENT> return datetime.datetime.strptime(ampm, "%I %M%p") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return datetime.datetime.strptime(ampm, "%I%p") | Parse a string like '7pm' to a datetime | 625941b1e5267d203edcda12 |
def get_user_uk(cookie, tokens): <NEW_LINE> <INDENT> url = 'http://yun.baidu.com' <NEW_LINE> req = net.urlopen(url, headers={'Cookie': cookie.header_output()}) <NEW_LINE> if req: <NEW_LINE> <INDENT> content = req.data.decode() <NEW_LINE> match = re.findall('/share/home\?uk=(\d+)" target=', content) <NEW_LINE> if len(match) == 1: <NEW_LINE> <INDENT> return match[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warn('pcs.get_user_uk(), failed to parse uk, %s' % url) <NEW_LINE> <DEDENT> <DEDENT> return None | Get the user's uk | 625941b14a966d76dd550d7b |
def createNode(self, node_class, pos): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> node_module = None <NEW_LINE> for module in MODULES: <NEW_LINE> <INDENT> instance = module.instance() <NEW_LINE> if node_class in instance.nodes(): <NEW_LINE> <INDENT> node_module = instance <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not node_module: <NEW_LINE> <INDENT> raise ModuleError("Could not find any module for {}".format(node_class)) <NEW_LINE> <DEDENT> server = node_module.allocateServer(node_class) <NEW_LINE> if not server.connected(): <NEW_LINE> <INDENT> self._thread = WaitForConnectionThread(server.host, server.port) <NEW_LINE> progress_dialog = ProgressDialog(self._thread, "Server", "Connecting to server {} on port {}...".format(server.host, server.port), "Cancel", busy=True, parent=self) <NEW_LINE> progress_dialog.show() <NEW_LINE> if progress_dialog.exec_() is False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> node = node_module.createNode(node_class, server) <NEW_LINE> node.error_signal.connect(self._main_window.uiConsoleTextEdit.writeError) <NEW_LINE> node.warning_signal.connect(self._main_window.uiConsoleTextEdit.writeWarning) <NEW_LINE> node.server_error_signal.connect(self._main_window.uiConsoleTextEdit.writeServerError) <NEW_LINE> node_item = NodeItem(node) <NEW_LINE> node_module.setupNode(node) <NEW_LINE> <DEDENT> except ModuleError as e: <NEW_LINE> <INDENT> QtGui.QMessageBox.critical(self, "Node creation", "{}".format(e)) <NEW_LINE> return <NEW_LINE> <DEDENT> node_item.setPos(self.mapToScene(pos)) <NEW_LINE> self.scene().addItem(node_item) <NEW_LINE> x = node_item.pos().x() - (node_item.boundingRect().width() / 2) <NEW_LINE> y = node_item.pos().y() - (node_item.boundingRect().height() / 2) <NEW_LINE> node_item.setPos(x, y) <NEW_LINE> self._topology.addNode(node) <NEW_LINE> self._main_window.uiTopologySummaryTreeWidget.addNode(node) | Creates a new node on the scene.
:param node_class: node class to be instantiated
:param pos: position of the drop event | 625941b10383005118ecf354 |
def normalize(prediction): <NEW_LINE> <INDENT> x = int(prediction) <NEW_LINE> y = prediction - x <NEW_LINE> if y > 0.7: <NEW_LINE> <INDENT> result = x + 1 <NEW_LINE> <DEDENT> elif y < 0.3: <NEW_LINE> <INDENT> result = x <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = x + 0.5 <NEW_LINE> <DEDENT> return result <NEW_LINE> def diskCacheIt(self, what, did, data): <NEW_LINE> <INDENT> f = self.cacheDir+"/"+what+"."+str(did)+".pk" <NEW_LINE> out = open(f, 'wb') <NEW_LINE> cPickle.dump(data, out, -1) <NEW_LINE> out.close() <NEW_LINE> <DEDENT> def diskGetCached(self, what, did): <NEW_LINE> <INDENT> f = self.cacheDir+"/"+what+"."+str(did)+".pk" <NEW_LINE> try: <NEW_LINE> <INDENT> inp = open(f, 'rb') <NEW_LINE> try: <NEW_LINE> <INDENT> data = cPickle.load(inp) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> inp.close() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> return data | Rounds predictions to an integer value or to .5 | 625941b126238365f5f0ebda |
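A readable sketch of just the rounding rule described by the docstring (the row above also carries unrelated disk-cache helpers); the behaviour shown assumes non-negative predictions:

    def normalize(prediction):
        # keep the integer part, then snap the fractional part to 0, 0.5 or 1
        whole = int(prediction)
        frac = prediction - whole
        if frac > 0.7:
            return whole + 1
        if frac < 0.3:
            return whole
        return whole + 0.5

    assert normalize(3.8) == 4
    assert normalize(3.2) == 3
    assert normalize(3.5) == 3.5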
def add_arguments(parser): <NEW_LINE> <INDENT> usage() <NEW_LINE> exit(0) | adds arguments for the help command | 625941b182261d6c526ab20d |
def test_serial_lock_file(port, brutal = False): <NEW_LINE> <INDENT> devicename = port.split('/')[-1] <NEW_LINE> filename = '/var/lock/LCK..'+devicename <NEW_LINE> print('serial_lock_file(): filename:' + str(filename)) <NEW_LINE> try: <NEW_LINE> <INDENT> flock = open(filename,'r') <NEW_LINE> pid_str = flock.readline() <NEW_LINE> flock.close() <NEW_LINE> print('test_serial_lock_file(): PID:' + pid_str) <NEW_LINE> PID_EXIST=None <NEW_LINE> try: <NEW_LINE> <INDENT> pid = int(pid_str) <NEW_LINE> PID_EXIST = psutil.pid_exists(pid) <NEW_LINE> pid_ex = ' does not exist.' <NEW_LINE> if(PID_EXIST): <NEW_LINE> <INDENT> pid_ex = ' exists.' <NEW_LINE> <DEDENT> print('Process with PID:' + pid_str[:-1] + pid_ex) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print('No valid PID value' + str(e)) <NEW_LINE> <DEDENT> if(PID_EXIST == True): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif(PID_EXIST == False): <NEW_LINE> <INDENT> if(brutal == False): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Removing lock file, as it has a not existing PID') <NEW_LINE> os.remove(filename) <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> elif(PID_EXIST == None): <NEW_LINE> <INDENT> if(brutal): <NEW_LINE> <INDENT> print('Removing lock file, as it no valid PID') <NEW_LINE> os.remove(filename) <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print('serial_lock_file():' + str(e)) <NEW_LINE> return False | Creates or removes a lock file for a serial port in linux
Args:
port: Device string
brutal: Remove the lock file if it contains the PID of a non-existent process, or no valid PID at all
Return:
True if port is already in use, False otherwise | 625941b1097d151d1a222bcd |
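A hedged usage sketch, assuming test_serial_lock_file from the row above is importable and psutil is installed; the device path is only an example:

    port = '/dev/ttyUSB0'   # example device path
    busy = test_serial_lock_file(port)                             # only inspect the lock file
    busy_after_cleanup = test_serial_lock_file(port, brutal=True)  # also drop stale lock files
    print(busy, busy_after_cleanup)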
def get_for_user(self, user_obj): <NEW_LINE> <INDENT> user_ctype = ContentType.objects.get(app_label="auth", model="user") <NEW_LINE> return self.filter( content_type = user_ctype, object_id = user_obj.pk ) | Filter the ``QuerySet`` for a specific ``User``.
``user_obj`` should be ``django.contrib.auth.models.User`` | 625941b12c8b7c6e89b35535 |
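A hedged sketch of how such a manager method is typically wired up in Django; NoteManager and Note below are hypothetical names, not part of the original row:

    from django.contrib.auth.models import User
    from django.contrib.contenttypes.fields import GenericForeignKey
    from django.contrib.contenttypes.models import ContentType
    from django.db import models

    class NoteManager(models.Manager):
        # filter rows whose generic relation points at the given User
        def get_for_user(self, user_obj):
            user_ctype = ContentType.objects.get(app_label="auth", model="user")
            return self.filter(content_type=user_ctype, object_id=user_obj.pk)

    class Note(models.Model):
        content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
        object_id = models.PositiveIntegerField()
        target = GenericForeignKey("content_type", "object_id")
        objects = NoteManager()

    # usage: Note.objects.get_for_user(some_user) yields only that user's notes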
def add_question(self, question_label): <NEW_LINE> <INDENT> self.questions.append((question_label)) | Add a question to the exam type's questions list. | 625941b1b545ff76a8913b8c
def loglikelihood(self, x): <NEW_LINE> <INDENT> return sum(geom_logpdf(self.p, x)) | The log-likelihood of the distribution w.r.t. all
samples contained in array x. | 625941b14527f215b584c1ce |
def skip(error, filename): <NEW_LINE> <INDENT> print("Cannot read file: {}".format(error)) <NEW_LINE> print("Skipping file '{}'".format(filename)) | Skip a file in case of an error
:error: exception
:filename: name of file to skip | 625941b1596a897236089835 |
def test_returns_renderer(self): <NEW_LINE> <INDENT> self.assertIs(SDL_GetRenderer(self.win), self.rdr) | Returns the correct SDL_Renderer | 625941b1adb09d7d5db6c506 |
def change_value(self, value): <NEW_LINE> <INDENT> if value == 0: <NEW_LINE> <INDENT> self.label.setPixmap(QPixmap('audio.ico')) <NEW_LINE> <DEDENT> elif 0 < value <= 30: <NEW_LINE> <INDENT> self.label.setPixmap(QPixmap('min.ico')) <NEW_LINE> <DEDENT> elif 30 < value < 80: <NEW_LINE> <INDENT> self.label.setPixmap(QPixmap('med.ico')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.label.setPixmap(QPixmap('max.ico')) | Simulates a volume control: dragging the slider changes the image shown on the label | 625941b16aa9bd52df036b0c
def GetMeasurementVector(self, *args): <NEW_LINE> <INDENT> return _itkSamplePython.itkSampleFASL1_GetMeasurementVector(self, *args) | GetMeasurementVector(self, size_t id) -> itkFixedArraySL1 | 625941b176d4e153a657e8a0 |
def GetLowerBoundaryCropSize(self): <NEW_LINE> <INDENT> return _itkCropImageFilterPython.itkCropImageFilterIUS2IUS2_GetLowerBoundaryCropSize(self) | GetLowerBoundaryCropSize(self) -> itkSize2 | 625941b150812a4eaa59c099 |
def post(self, **params): <NEW_LINE> <INDENT> all_args = ['status', 'force_status_check', 'sync_all_folders', 'expunge_on_deleted_flag', 'password', 'provider_refresh_token', 'provider_consumer_key', 'status_callback_url' ] <NEW_LINE> params = Resource.sanitize_params(params, all_args) <NEW_LINE> status = self._request_uri('', method='POST', params=params) <NEW_LINE> if 'force_status_check' in params: <NEW_LINE> <INDENT> return bool(status['status']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return bool(status['success']) | Update a data source for an account.
Documentation: http://context.io/docs/2.0/accounts/sources#id-post
Optional Arguments:
status: integer - If the status of the source is TEMP_DISABLED or
DISABLED, you can do a POST/PUT with status set to 1 to reset
it.
force_status_check: integer - Creates an IMAP connection and
resets the source status to to one reported by the IMAP
backend. Don't combine this with other parameters.
sync_all_folders: integer - By default, we filter out some folders
like 'Deleted Items' and 'Drafts'. Set this parameter to 1 to
turn off this filtering and show every single folder.
expunge_on_deleted_flag: integer - By default, we don't filter out
messages flagged as deleted. Set this parameter to 1 to turn on
this filtering.
password: string - New password for this source. Ignored if any of
the provider_* parameters are set below.
provider_refresh_token: An OAuth2 refresh token obtained from the IMAP
account provider to authenticate this email account.
provider_consumer_key: string - The OAuth consumer key used to
obtain the the token and token secret above for that account.
That consumer key and secret must be configured in your
Context.IO account
status_callback_url: string - If specified, we'll make a POST request
to this URL if the connection status of the source changes.
Returns:
Bool | 625941b173bcbd0ca4b2bde7 |
def set_score(self, data, update=True): <NEW_LINE> <INDENT> if self._resource_type == ResourceType.EMAILS: <NEW_LINE> <INDENT> self._score = self._uni(data) <NEW_LINE> if update and self._phase == 0: <NEW_LINE> <INDENT> self._phase = 2 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError(ErrorCodes.e10290.value) | Read-Write group metadata | 625941b1d8ef3951e32432ad |
def arggroupby(values: Iterable) -> Iterator[tuple]: <NEW_LINE> <INDENT> values = np.asarray(values) <NEW_LINE> keys, counts = np.unique(values, return_counts=True) <NEW_LINE> return zip(keys, np.split(np.argsort(values), np.cumsum(counts))) | Generate unique keys with corresponding index arrays. | 625941b1d164cc6175782ab8 |
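A self-contained sketch showing what the generator yields (numpy assumed, as in the row above):

    import numpy as np
    from typing import Iterable, Iterator

    def arggroupby(values: Iterable) -> Iterator[tuple]:
        # unique keys plus, for each key, the indices where it occurs
        values = np.asarray(values)
        keys, counts = np.unique(values, return_counts=True)
        return zip(keys, np.split(np.argsort(values), np.cumsum(counts)))

    for key, idx in arggroupby(['b', 'a', 'b', 'a']):
        print(key, np.sort(idx))   # a [1 3]  then  b [0 2]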
def ferret_compute(id, result, resbdf, inputs, inpbdfs): <NEW_LINE> <INDENT> pyferret.stats.assignresultsarray(DISTRIB_NAME, FUNC_NAME, result, resbdf, inputs, inpbdfs) | Result array assignment for the stats_exponweib_sf Ferret PyEF | 625941b1187af65679ca4e91 |