Unnamed: 0 (int64, 0-10k) | repository_name (string, length 7-54) | func_path_in_repository (string, length 5-223) | func_name (string, length 1-134) | whole_func_string (string, length 100-30.3k) | language (1 class) | func_code_string (string, length 100-30.3k) | func_code_tokens (string, length 138-33.2k) | func_documentation_string (string, length 1-15k) | func_documentation_tokens (string, length 5-5.14k) | split_name (1 class) | func_code_url (string, length 91-315) |
---|---|---|---|---|---|---|---|---|---|---|---|
600 | honeybadger-io/honeybadger-python | honeybadger/contrib/flask.py | FlaskHoneybadger.init_app | def init_app(self, app, report_exceptions=False, reset_context_after_request=False):
"""
Initialize honeybadger and listen for errors.
:param Flask app: the Flask application object.
:param bool report_exceptions: whether to automatically report exceptions raised by Flask on requests
(i.e. by calling abort) or not.
:param bool reset_context_after_request: whether to reset honeybadger context after each request.
"""
from flask import request_tearing_down, got_request_exception
self.app = app
self.app.logger.info('Initializing Honeybadger')
self.report_exceptions = report_exceptions
self.reset_context_after_request = reset_context_after_request
self._initialize_honeybadger(app.config)
# Add hooks
if self.report_exceptions:
self._register_signal_handler('auto-reporting exceptions',
got_request_exception,
self._handle_exception)
if self.reset_context_after_request:
self._register_signal_handler('auto clear context on request end',
request_tearing_down,
self._reset_context)
logger.info('Honeybadger helper installed') | python | def init_app(self, app, report_exceptions=False, reset_context_after_request=False):
"""
Initialize honeybadger and listen for errors.
:param Flask app: the Flask application object.
:param bool report_exceptions: whether to automatically report exceptions raised by Flask on requests
(i.e. by calling abort) or not.
:param bool reset_context_after_request: whether to reset honeybadger context after each request.
"""
from flask import request_tearing_down, got_request_exception
self.app = app
self.app.logger.info('Initializing Honeybadger')
self.report_exceptions = report_exceptions
self.reset_context_after_request = reset_context_after_request
self._initialize_honeybadger(app.config)
# Add hooks
if self.report_exceptions:
self._register_signal_handler('auto-reporting exceptions',
got_request_exception,
self._handle_exception)
if self.reset_context_after_request:
self._register_signal_handler('auto clear context on request end',
request_tearing_down,
self._reset_context)
logger.info('Honeybadger helper installed') | ['def', 'init_app', '(', 'self', ',', 'app', ',', 'report_exceptions', '=', 'False', ',', 'reset_context_after_request', '=', 'False', ')', ':', 'from', 'flask', 'import', 'request_tearing_down', ',', 'got_request_exception', 'self', '.', 'app', '=', 'app', 'self', '.', 'app', '.', 'logger', '.', 'info', '(', "'Initializing Honeybadger'", ')', 'self', '.', 'report_exceptions', '=', 'report_exceptions', 'self', '.', 'reset_context_after_request', '=', 'reset_context_after_request', 'self', '.', '_initialize_honeybadger', '(', 'app', '.', 'config', ')', '# Add hooks', 'if', 'self', '.', 'report_exceptions', ':', 'self', '.', '_register_signal_handler', '(', "'auto-reporting exceptions'", ',', 'got_request_exception', ',', 'self', '.', '_handle_exception', ')', 'if', 'self', '.', 'reset_context_after_request', ':', 'self', '.', '_register_signal_handler', '(', "'auto clear context on request end'", ',', 'request_tearing_down', ',', 'self', '.', '_reset_context', ')', 'logger', '.', 'info', '(', "'Honeybadger helper installed'", ')'] | Initialize honeybadger and listen for errors.
:param Flask app: the Flask application object.
:param bool report_exceptions: whether to automatically report exceptions raised by Flask on requests
(i.e. by calling abort) or not.
:param bool reset_context_after_request: whether to reset honeybadger context after each request. | ['Initialize', 'honeybadger', 'and', 'listen', 'for', 'errors', '.', ':', 'param', 'Flask', 'app', ':', 'the', 'Flask', 'application', 'object', '.', ':', 'param', 'bool', 'report_exceptions', ':', 'whether', 'to', 'automatically', 'report', 'exceptions', 'raised', 'by', 'Flask', 'on', 'requests', '(', 'i', '.', 'e', '.', 'by', 'calling', 'abort', ')', 'or', 'not', '.', ':', 'param', 'bool', 'reset_context_after_request', ':', 'whether', 'to', 'reset', 'honeybadger', 'context', 'after', 'each', 'request', '.'] | train | https://github.com/honeybadger-io/honeybadger-python/blob/81519b40d3e446b62035f64e34900e08ff91938c/honeybadger/contrib/flask.py#L98-L127 |
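
Row 600's `init_app` follows the standard two-step Flask extension pattern (construct, then bind to an app). A minimal wiring sketch; the `HONEYBADGER_API_KEY` config key and the no-argument constructor are assumptions about the honeybadger package, not shown in the row above:

```python
from flask import Flask
from honeybadger.contrib.flask import FlaskHoneybadger

app = Flask(__name__)
app.config['HONEYBADGER_API_KEY'] = '<your-key>'  # assumed config key

hb = FlaskHoneybadger()  # assumed no-arg constructor
# Connects got_request_exception and request_tearing_down, as in the row above.
hb.init_app(app, report_exceptions=True, reset_context_after_request=True)
```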
601 | bwohlberg/sporco | sporco/prox/_lp.py | norm_2l2 | def norm_2l2(x, axis=None):
r"""Compute the squared :math:`\ell_2` norm
.. math::
\| \mathbf{x} \|_2^2 = \sum_i x_i^2
where :math:`x_i` is element :math:`i` of vector :math:`\mathbf{x}`.
Parameters
----------
x : array_like
Input array :math:`\mathbf{x}`
axis : `None` or int or tuple of ints, optional (default None)
Axes of `x` over which to compute the :math:`\ell_2` norm. If
`None`, an entire multi-dimensional array is treated as a
vector. If axes are specified, then distinct values are computed
over the indices of the remaining axes of input array `x`.
Returns
-------
nl2 : float or ndarray
Norm of `x`, or array of norms treating specified axes of `x`
as a vector.
"""
nl2 = np.sum(x**2, axis=axis, keepdims=True)
# If the result has a single element, convert it to a scalar
if nl2.size == 1:
nl2 = nl2.ravel()[0]
return nl2 | python | def norm_2l2(x, axis=None):
r"""Compute the squared :math:`\ell_2` norm
.. math::
\| \mathbf{x} \|_2^2 = \sum_i x_i^2
where :math:`x_i` is element :math:`i` of vector :math:`\mathbf{x}`.
Parameters
----------
x : array_like
Input array :math:`\mathbf{x}`
axis : `None` or int or tuple of ints, optional (default None)
Axes of `x` over which to compute the :math:`\ell_2` norm. If
`None`, an entire multi-dimensional array is treated as a
vector. If axes are specified, then distinct values are computed
over the indices of the remaining axes of input array `x`.
Returns
-------
nl2 : float or ndarray
Norm of `x`, or array of norms treating specified axes of `x`
as a vector.
"""
nl2 = np.sum(x**2, axis=axis, keepdims=True)
# If the result has a single element, convert it to a scalar
if nl2.size == 1:
nl2 = nl2.ravel()[0]
return nl2 | ['def', 'norm_2l2', '(', 'x', ',', 'axis', '=', 'None', ')', ':', 'nl2', '=', 'np', '.', 'sum', '(', 'x', '**', '2', ',', 'axis', '=', 'axis', ',', 'keepdims', '=', 'True', ')', '# If the result has a single element, convert it to a scalar', 'if', 'nl2', '.', 'size', '==', '1', ':', 'nl2', '=', 'nl2', '.', 'ravel', '(', ')', '[', '0', ']', 'return', 'nl2'] | r"""Compute the squared :math:`\ell_2` norm
.. math::
\| \mathbf{x} \|_2^2 = \sum_i x_i^2
where :math:`x_i` is element :math:`i` of vector :math:`\mathbf{x}`.
Parameters
----------
x : array_like
Input array :math:`\mathbf{x}`
axis : `None` or int or tuple of ints, optional (default None)
Axes of `x` over which to compute the :math:`\ell_2` norm. If
`None`, an entire multi-dimensional array is treated as a
vector. If axes are specified, then distinct values are computed
over the indices of the remaining axes of input array `x`.
Returns
-------
nl2 : float or ndarray
Norm of `x`, or array of norms treating specified axes of `x`
as a vector. | ['r', 'Compute', 'the', 'squared', ':', 'math', ':', '\\', 'ell_2', 'norm'] | train | https://github.com/bwohlberg/sporco/blob/8946a04331106f4e39904fbdf2dc7351900baa04/sporco/prox/_lp.py#L183-L212 |
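
Because `norm_2l2` depends only on numpy, its scalar-versus-array behavior is easy to verify directly; the expected outputs below follow from the `keepdims` logic in the body (the import path is taken from the row's `func_path_in_repository`):

```python
import numpy as np
from sporco.prox._lp import norm_2l2

x = np.array([[3.0, 4.0],
              [1.0, 2.0]])
print(norm_2l2(x))          # 30.0 -- a single-element result collapses to a scalar
print(norm_2l2(x, axis=1))  # [[25.], [5.]] -- per-row squared norms, keepdims shape (2, 1)
```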
602 | caffeinehit/django-oauth2-provider | provider/oauth2/forms.py | ScopeChoiceField.validate | def validate(self, value):
"""
Validates that the input is a list or tuple.
"""
if self.required and not value:
raise OAuthValidationError({'error': 'invalid_request'})
# Validate that each value in the value list is in self.choices.
for val in value:
if not self.valid_value(val):
raise OAuthValidationError({
'error': 'invalid_request',
'error_description': _("'%s' is not a valid scope.") % \
val}) | python | def validate(self, value):
"""
Validates that the input is a list or tuple.
"""
if self.required and not value:
raise OAuthValidationError({'error': 'invalid_request'})
# Validate that each value in the value list is in self.choices.
for val in value:
if not self.valid_value(val):
raise OAuthValidationError({
'error': 'invalid_request',
'error_description': _("'%s' is not a valid scope.") % \
val}) | ['def', 'validate', '(', 'self', ',', 'value', ')', ':', 'if', 'self', '.', 'required', 'and', 'not', 'value', ':', 'raise', 'OAuthValidationError', '(', '{', "'error'", ':', "'invalid_request'", '}', ')', '# Validate that each value in the value list is in self.choices.', 'for', 'val', 'in', 'value', ':', 'if', 'not', 'self', '.', 'valid_value', '(', 'val', ')', ':', 'raise', 'OAuthValidationError', '(', '{', "'error'", ':', "'invalid_request'", ',', "'error_description'", ':', '_', '(', '"\'%s\' is not a valid scope."', ')', '%', 'val', '}', ')'] | Validates that the input is a list or tuple. | ['Validates', 'that', 'the', 'input', 'is', 'a', 'list', 'or', 'tuple', '.'] | train | https://github.com/caffeinehit/django-oauth2-provider/blob/6b5bc0d3ad706d2aaa47fa476f38406cddd01236/provider/oauth2/forms.py#L70-L83 |
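
A failing scope raises `OAuthValidationError` with the payload built above. An illustrative sketch only: running it requires configured Django settings, and both the constructor arguments and the `OAuthValidationError` import location are assumptions:

```python
from provider.oauth2.forms import ScopeChoiceField
from provider.forms import OAuthValidationError  # assumed import location

field = ScopeChoiceField(choices=[('read', 'read'), ('write', 'write')])
try:
    field.validate(['read', 'delete'])  # 'delete' is not a declared scope
except OAuthValidationError as exc:
    print(exc)  # {'error': 'invalid_request', 'error_description': "'delete' is not a valid scope."}
```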
603 | sarenji/pyrc | example.py | GangstaBot.bling | def bling(self, target, sender):
"will print yo"
if target.startswith("#"):
self.message(target, "%s: yo" % sender)
else:
self.message(sender, "yo") | python | def bling(self, target, sender):
"will print yo"
if target.startswith("#"):
self.message(target, "%s: yo" % sender)
else:
self.message(sender, "yo") | ['def', 'bling', '(', 'self', ',', 'target', ',', 'sender', ')', ':', 'if', 'target', '.', 'startswith', '(', '"#"', ')', ':', 'self', '.', 'message', '(', 'target', ',', '"%s: yo"', '%', 'sender', ')', 'else', ':', 'self', '.', 'message', '(', 'sender', ',', '"yo"', ')'] | will print yo | ['will', 'print', 'yo'] | train | https://github.com/sarenji/pyrc/blob/5e8377ddcda6e0ef4ba7d66cf400e243b1fb8f68/example.py#L11-L16 |
604 | collectiveacuity/jsonModel | jsonmodel/validators.py | jsonModel._validate_number | def _validate_number(self, input_number, path_to_root, object_title=''):
'''
a helper method for validating properties of a number
:return: input_number
'''
rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root)
input_criteria = self.keyMap[rules_path_to_root]
error_dict = {
'object_title': object_title,
'model_schema': self.schema,
'input_criteria': input_criteria,
'failed_test': 'value_datatype',
'input_path': path_to_root,
'error_value': input_number,
'error_code': 4001
}
if 'integer_data' in input_criteria.keys():
if input_criteria['integer_data'] and not isinstance(input_number, int):
error_dict['failed_test'] = 'integer_data'
error_dict['error_code'] = 4021
raise InputValidationError(error_dict)
if 'min_value' in input_criteria.keys():
if input_number < input_criteria['min_value']:
error_dict['failed_test'] = 'min_value'
error_dict['error_code'] = 4022
raise InputValidationError(error_dict)
if 'max_value' in input_criteria.keys():
if input_number > input_criteria['max_value']:
error_dict['failed_test'] = 'max_value'
error_dict['error_code'] = 4023
raise InputValidationError(error_dict)
if 'greater_than' in input_criteria.keys():
if input_number <= input_criteria['greater_than']:
error_dict['failed_test'] = 'greater_than'
error_dict['error_code'] = 4024
raise InputValidationError(error_dict)
if 'less_than' in input_criteria.keys():
if input_number >= input_criteria['less_than']:
error_dict['failed_test'] = 'less_than'
error_dict['error_code'] = 4025
raise InputValidationError(error_dict)
if 'equal_to' in input_criteria.keys():
if input_number != input_criteria['equal_to']:
error_dict['failed_test'] = 'equal_to'
error_dict['error_code'] = 4026
raise InputValidationError(error_dict)
if 'discrete_values' in input_criteria.keys():
if input_number not in input_criteria['discrete_values']:
error_dict['failed_test'] = 'discrete_values'
error_dict['error_code'] = 4041
raise InputValidationError(error_dict)
if 'excluded_values' in input_criteria.keys():
if input_number in input_criteria['excluded_values']:
error_dict['failed_test'] = 'excluded_values'
error_dict['error_code'] = 4042
raise InputValidationError(error_dict)
# TODO: validate number against identical to reference
# TODO: run lambda function and call validation url
return input_number | python | def _validate_number(self, input_number, path_to_root, object_title=''):
'''
a helper method for validating properties of a number
:return: input_number
'''
rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root)
input_criteria = self.keyMap[rules_path_to_root]
error_dict = {
'object_title': object_title,
'model_schema': self.schema,
'input_criteria': input_criteria,
'failed_test': 'value_datatype',
'input_path': path_to_root,
'error_value': input_number,
'error_code': 4001
}
if 'integer_data' in input_criteria.keys():
if input_criteria['integer_data'] and not isinstance(input_number, int):
error_dict['failed_test'] = 'integer_data'
error_dict['error_code'] = 4021
raise InputValidationError(error_dict)
if 'min_value' in input_criteria.keys():
if input_number < input_criteria['min_value']:
error_dict['failed_test'] = 'min_value'
error_dict['error_code'] = 4022
raise InputValidationError(error_dict)
if 'max_value' in input_criteria.keys():
if input_number > input_criteria['max_value']:
error_dict['failed_test'] = 'max_value'
error_dict['error_code'] = 4023
raise InputValidationError(error_dict)
if 'greater_than' in input_criteria.keys():
if input_number <= input_criteria['greater_than']:
error_dict['failed_test'] = 'greater_than'
error_dict['error_code'] = 4024
raise InputValidationError(error_dict)
if 'less_than' in input_criteria.keys():
if input_number >= input_criteria['less_than']:
error_dict['failed_test'] = 'less_than'
error_dict['error_code'] = 4025
raise InputValidationError(error_dict)
if 'equal_to' in input_criteria.keys():
if input_number != input_criteria['equal_to']:
error_dict['failed_test'] = 'equal_to'
error_dict['error_code'] = 4026
raise InputValidationError(error_dict)
if 'discrete_values' in input_criteria.keys():
if input_number not in input_criteria['discrete_values']:
error_dict['failed_test'] = 'discrete_values'
error_dict['error_code'] = 4041
raise InputValidationError(error_dict)
if 'excluded_values' in input_criteria.keys():
if input_number in input_criteria['excluded_values']:
error_dict['failed_test'] = 'excluded_values'
error_dict['error_code'] = 4042
raise InputValidationError(error_dict)
# TODO: validate number against identical to reference
# TODO: run lambda function and call validation url
return input_number | ['def', '_validate_number', '(', 'self', ',', 'input_number', ',', 'path_to_root', ',', 'object_title', '=', "''", ')', ':', 'rules_path_to_root', '=', 're', '.', 'sub', '(', "'\\[\\d+\\]'", ',', "'[0]'", ',', 'path_to_root', ')', 'input_criteria', '=', 'self', '.', 'keyMap', '[', 'rules_path_to_root', ']', 'error_dict', '=', '{', "'object_title'", ':', 'object_title', ',', "'model_schema'", ':', 'self', '.', 'schema', ',', "'input_criteria'", ':', 'input_criteria', ',', "'failed_test'", ':', "'value_datatype'", ',', "'input_path'", ':', 'path_to_root', ',', "'error_value'", ':', 'input_number', ',', "'error_code'", ':', '4001', '}', 'if', "'integer_data'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_criteria', '[', "'integer_data'", ']', 'and', 'not', 'isinstance', '(', 'input_number', ',', 'int', ')', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'integer_data'", 'error_dict', '[', "'error_code'", ']', '=', '4021', 'raise', 'InputValidationError', '(', 'error_dict', ')', 'if', "'min_value'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_number', '<', 'input_criteria', '[', "'min_value'", ']', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'min_value'", 'error_dict', '[', "'error_code'", ']', '=', '4022', 'raise', 'InputValidationError', '(', 'error_dict', ')', 'if', "'max_value'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_number', '>', 'input_criteria', '[', "'max_value'", ']', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'max_value'", 'error_dict', '[', "'error_code'", ']', '=', '4023', 'raise', 'InputValidationError', '(', 'error_dict', ')', 'if', "'greater_than'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_number', '<=', 'input_criteria', '[', "'greater_than'", ']', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'greater_than'", 'error_dict', '[', "'error_code'", ']', '=', '4024', 'raise', 'InputValidationError', '(', 'error_dict', ')', 'if', "'less_than'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_number', '>=', 'input_criteria', '[', "'less_than'", ']', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'less_than'", 'error_dict', '[', "'error_code'", ']', '=', '4025', 'raise', 'InputValidationError', '(', 'error_dict', ')', 'if', "'equal_to'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_number', '!=', 'input_criteria', '[', "'equal_to'", ']', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'equal_to'", 'error_dict', '[', "'error_code'", ']', '=', '4026', 'raise', 'InputValidationError', '(', 'error_dict', ')', 'if', "'discrete_values'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_number', 'not', 'in', 'input_criteria', '[', "'discrete_values'", ']', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'discrete_values'", 'error_dict', '[', "'error_code'", ']', '=', '4041', 'raise', 'InputValidationError', '(', 'error_dict', ')', 'if', "'excluded_values'", 'in', 'input_criteria', '.', 'keys', '(', ')', ':', 'if', 'input_number', 'in', 'input_criteria', '[', "'excluded_values'", ']', ':', 'error_dict', '[', "'failed_test'", ']', '=', "'excluded_values'", 'error_dict', '[', "'error_code'", ']', '=', '4042', 'raise', 'InputValidationError', '(', 'error_dict', ')', '# TODO: validate number against identical to reference', '# TODO: run lambda function and call validation url', 'return', 'input_number'] | a helper method for validating properties of a number
:return: input_number | ['a', 'helper', 'method', 'for', 'validating', 'properties', 'of', 'a', 'number'] | train | https://github.com/collectiveacuity/jsonModel/blob/1ea64c36d78add3faa7b85ff82c5ec685458c940/jsonmodel/validators.py#L1000-L1064 |
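
Although `_validate_number` is private, the qualifier keys it checks (`integer_data`, `min_value`, `max_value`, `discrete_values`, `excluded_values`, ...) are the same ones declared in a model's components map. A hedged end-to-end sketch through the public `validate` method; the dotted component-path syntax (`'.count'`) is assumed from the library's conventions:

```python
from jsonmodel.validators import jsonModel

model = jsonModel({
    'schema': {'count': 0},
    'components': {
        '.count': {'integer_data': True, 'min_value': 0, 'max_value': 100}
    }
})
model.validate({'count': 42})   # passes, returns the validated input
model.validate({'count': 101})  # raises InputValidationError with failed_test 'max_value'
```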
605 | IDSIA/sacred | sacred/ingredient.py | Ingredient.gather_named_configs | def gather_named_configs(self, ingredient):
"""Collect all named configs from this ingredient and its
sub-ingredients.
Yields
------
config_name: str
The full (dotted) name of the named config.
config: ConfigScope or ConfigDict or basestring
The corresponding named config.
"""
for config_name, config in ingredient.named_configs.items():
yield join_paths(ingredient.path, config_name), config | python | def gather_named_configs(self, ingredient):
"""Collect all named configs from this ingredient and its
sub-ingredients.
Yields
------
config_name: str
The full (dotted) name of the named config.
config: ConfigScope or ConfigDict or basestring
The corresponding named config.
"""
for config_name, config in ingredient.named_configs.items():
yield join_paths(ingredient.path, config_name), config | ['def', 'gather_named_configs', '(', 'self', ',', 'ingredient', ')', ':', 'for', 'config_name', ',', 'config', 'in', 'ingredient', '.', 'named_configs', '.', 'items', '(', ')', ':', 'yield', 'join_paths', '(', 'ingredient', '.', 'path', ',', 'config_name', ')', ',', 'config'] | Collect all named configs from this ingredient and its
sub-ingredients.
Yields
------
config_name: str
The full (dotted) name of the named config.
config: ConfigScope or ConfigDict or basestring
The corresponding named config. | ['Collect', 'all', 'named', 'configs', 'from', 'this', 'ingredient', 'and', 'its', 'sub', '-', 'ingredients', '.'] | train | https://github.com/IDSIA/sacred/blob/72633776bed9b5bddf93ae7d215188e61970973a/sacred/ingredient.py#L314-L326 |
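
The dotted names this generator yields come from `join_paths(ingredient.path, config_name)`. A short sketch using sacred's public API; only the experiment that would consume the ingredient is elided:

```python
from sacred import Ingredient

data = Ingredient('data')

@data.named_config
def small():
    batch_size = 16  # captured by the config scope

# For this ingredient, gather_named_configs yields the single pair
# ('data.small', <ConfigScope of small>).
```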
606 | apple/turicreate | src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/text_format.py | MessageToString | def MessageToString(message,
as_utf8=False,
as_one_line=False,
pointy_brackets=False,
use_index_order=False,
float_format=None,
use_field_number=False,
descriptor_pool=None,
indent=0):
"""Convert protobuf message to text format.
Floating point values can be formatted compactly with 15 digits of
precision (which is the most that IEEE 754 "double" can guarantee)
using float_format='.15g'. To ensure that converting to text and back to a
proto will result in an identical value, float_format='.17g' should be used.
Args:
message: The protocol buffers message.
as_utf8: Produce text output in UTF8 format.
as_one_line: Don't introduce newlines between fields.
pointy_brackets: If True, use angle brackets instead of curly braces for
nesting.
use_index_order: If True, print fields of a proto message using the order
defined in source code instead of the field number. By default, use the
field number order.
float_format: If set, use this to specify floating point number formatting
(per the "Format Specification Mini-Language"); otherwise, str() is used.
use_field_number: If True, print field numbers instead of names.
descriptor_pool: A DescriptorPool used to resolve Any types.
indent: The indent level, in terms of spaces, for pretty print.
Returns:
A string of the text formatted protocol buffer message.
"""
out = TextWriter(as_utf8)
printer = _Printer(out, indent, as_utf8, as_one_line, pointy_brackets,
use_index_order, float_format, use_field_number,
descriptor_pool)
printer.PrintMessage(message)
result = out.getvalue()
out.close()
if as_one_line:
return result.rstrip()
return result | python | def MessageToString(message,
as_utf8=False,
as_one_line=False,
pointy_brackets=False,
use_index_order=False,
float_format=None,
use_field_number=False,
descriptor_pool=None,
indent=0):
"""Convert protobuf message to text format.
Floating point values can be formatted compactly with 15 digits of
precision (which is the most that IEEE 754 "double" can guarantee)
using float_format='.15g'. To ensure that converting to text and back to a
proto will result in an identical value, float_format='.17g' should be used.
Args:
message: The protocol buffers message.
as_utf8: Produce text output in UTF8 format.
as_one_line: Don't introduce newlines between fields.
pointy_brackets: If True, use angle brackets instead of curly braces for
nesting.
use_index_order: If True, print fields of a proto message using the order
defined in source code instead of the field number. By default, use the
field number order.
float_format: If set, use this to specify floating point number formatting
(per the "Format Specification Mini-Language"); otherwise, str() is used.
use_field_number: If True, print field numbers instead of names.
descriptor_pool: A DescriptorPool used to resolve Any types.
indent: The indent level, in terms of spaces, for pretty print.
Returns:
A string of the text formatted protocol buffer message.
"""
out = TextWriter(as_utf8)
printer = _Printer(out, indent, as_utf8, as_one_line, pointy_brackets,
use_index_order, float_format, use_field_number,
descriptor_pool)
printer.PrintMessage(message)
result = out.getvalue()
out.close()
if as_one_line:
return result.rstrip()
return result | ['def', 'MessageToString', '(', 'message', ',', 'as_utf8', '=', 'False', ',', 'as_one_line', '=', 'False', ',', 'pointy_brackets', '=', 'False', ',', 'use_index_order', '=', 'False', ',', 'float_format', '=', 'None', ',', 'use_field_number', '=', 'False', ',', 'descriptor_pool', '=', 'None', ',', 'indent', '=', '0', ')', ':', 'out', '=', 'TextWriter', '(', 'as_utf8', ')', 'printer', '=', '_Printer', '(', 'out', ',', 'indent', ',', 'as_utf8', ',', 'as_one_line', ',', 'pointy_brackets', ',', 'use_index_order', ',', 'float_format', ',', 'use_field_number', ',', 'descriptor_pool', ')', 'printer', '.', 'PrintMessage', '(', 'message', ')', 'result', '=', 'out', '.', 'getvalue', '(', ')', 'out', '.', 'close', '(', ')', 'if', 'as_one_line', ':', 'return', 'result', '.', 'rstrip', '(', ')', 'return', 'result'] | Convert protobuf message to text format.
Floating point values can be formatted compactly with 15 digits of
precision (which is the most that IEEE 754 "double" can guarantee)
using float_format='.15g'. To ensure that converting to text and back to a
proto will result in an identical value, float_format='.17g' should be used.
Args:
message: The protocol buffers message.
as_utf8: Produce text output in UTF8 format.
as_one_line: Don't introduce newlines between fields.
pointy_brackets: If True, use angle brackets instead of curly braces for
nesting.
use_index_order: If True, print fields of a proto message using the order
defined in source code instead of the field number. By default, use the
field number order.
float_format: If set, use this to specify floating point number formatting
(per the "Format Specification Mini-Language"); otherwise, str() is used.
use_field_number: If True, print field numbers instead of names.
descriptor_pool: A DescriptorPool used to resolve Any types.
indent: The indent level, in terms of spaces, for pretty print.
Returns:
A string of the text formatted protocol buffer message. | ['Convert', 'protobuf', 'message', 'to', 'text', 'format', '.'] | train | https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/text_format.py#L121-L164 |
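
The effect of `as_one_line` (and similarly `float_format`) is clearest on a concrete message. A sketch assuming a protoc-generated module; `addressbook_pb2` and the `Person` fields follow the usual protobuf tutorial and are not defined in the row above:

```python
from google.protobuf import text_format
from addressbook_pb2 import Person  # hypothetical generated module

msg = Person(name='Ada', id=1)
print(text_format.MessageToString(msg))
# name: "Ada"
# id: 1
print(text_format.MessageToString(msg, as_one_line=True))
# name: "Ada" id: 1
```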
607 | ratt-ru/PyMORESANE | pymoresane/iuwt.py | iuwt_recomposition | def iuwt_recomposition(in1, scale_adjust=0, mode='ser', core_count=1, store_on_gpu=False, smoothed_array=None):
"""
This function serves as a handler for the different implementations of the IUWT recomposition. It allows the
different methods to be used almost interchangeably.
INPUTS:
in1 (no default): Array on which the decomposition is to be performed.
scale_adjust (no default): Number of omitted scales.
mode (default='ser') Implementation of the IUWT to be used - 'ser', 'mp' or 'gpu'.
core_count (default=1) Additional option for multiprocessing - specifies core count.
store_on_gpu (default=False): Boolean specifier for whether the decomposition is stored on the gpu or not.
OUTPUTS:
Returns the recomposition.
"""
if mode=='ser':
return ser_iuwt_recomposition(in1, scale_adjust, smoothed_array)
elif mode=='mp':
return mp_iuwt_recomposition(in1, scale_adjust, core_count, smoothed_array)
elif mode=='gpu':
return gpu_iuwt_recomposition(in1, scale_adjust, store_on_gpu, smoothed_array) | python | def iuwt_recomposition(in1, scale_adjust=0, mode='ser', core_count=1, store_on_gpu=False, smoothed_array=None):
"""
This function serves as a handler for the different implementations of the IUWT recomposition. It allows the
different methods to be used almost interchangeably.
INPUTS:
in1 (no default): Array on which the decomposition is to be performed.
scale_adjust (no default): Number of omitted scales.
mode (default='ser') Implementation of the IUWT to be used - 'ser', 'mp' or 'gpu'.
core_count (default=1) Additional option for multiprocessing - specifies core count.
store_on_gpu (default=False): Boolean specifier for whether the decomposition is stored on the gpu or not.
OUTPUTS:
Returns the recomposition.
"""
if mode=='ser':
return ser_iuwt_recomposition(in1, scale_adjust, smoothed_array)
elif mode=='mp':
return mp_iuwt_recomposition(in1, scale_adjust, core_count, smoothed_array)
elif mode=='gpu':
return gpu_iuwt_recomposition(in1, scale_adjust, store_on_gpu, smoothed_array) | ['def', 'iuwt_recomposition', '(', 'in1', ',', 'scale_adjust', '=', '0', ',', 'mode', '=', "'ser'", ',', 'core_count', '=', '1', ',', 'store_on_gpu', '=', 'False', ',', 'smoothed_array', '=', 'None', ')', ':', 'if', 'mode', '==', "'ser'", ':', 'return', 'ser_iuwt_recomposition', '(', 'in1', ',', 'scale_adjust', ',', 'smoothed_array', ')', 'elif', 'mode', '==', "'mp'", ':', 'return', 'mp_iuwt_recomposition', '(', 'in1', ',', 'scale_adjust', ',', 'core_count', ',', 'smoothed_array', ')', 'elif', 'mode', '==', "'gpu'", ':', 'return', 'gpu_iuwt_recomposition', '(', 'in1', ',', 'scale_adjust', ',', 'store_on_gpu', ',', 'smoothed_array', ')'] | This function serves as a handler for the different implementations of the IUWT recomposition. It allows the
different methods to be used almost interchangeably.
INPUTS:
in1 (no default): Array on which the decomposition is to be performed.
scale_adjust (no default): Number of omitted scales.
mode (default='ser') Implementation of the IUWT to be used - 'ser', 'mp' or 'gpu'.
core_count (default=1) Additional option for multiprocessing - specifies core count.
store_on_gpu (default=False): Boolean specifier for whether the decomposition is stored on the gpu or not.
OUTPUTS:
Returns the recomposition. | ['This', 'function', 'serves', 'as', 'a', 'handler', 'for', 'the', 'different', 'implementations', 'of', 'the', 'IUWT', 'recomposition', '.', 'It', 'allows', 'the', 'different', 'methods', 'to', 'be', 'used', 'almost', 'interchangeably', '.'] | train | https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L43-L64 |
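
The handler mirrors a decomposition handler in the same module. A round-trip sketch, assuming `pymoresane.iuwt` also exports `iuwt_decomposition` with matching mode/scale conventions; note that exact reconstruction additionally needs the final smoothed plane passed back via `smoothed_array`:

```python
import numpy as np
from pymoresane.iuwt import iuwt_decomposition, iuwt_recomposition  # decomposition assumed

img = np.random.randn(256, 256)
coeffs = iuwt_decomposition(img, 4, mode='ser')                  # detail coefficients, 4 scales
approx = iuwt_recomposition(coeffs, scale_adjust=0, mode='ser')  # sums the detail planes
```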
608 | tanghaibao/jcvi | jcvi/utils/aws.py | start | def start(args):
"""
%prog start
Launch ec2 instance through command line.
"""
p = OptionParser(start.__doc__)
p.add_option("--ondemand", default=False, action="store_true",
help="Do we want a more expensive on-demand instance")
p.add_option("--profile", default="mvrad-datasci-role", help="Profile name")
p.add_option("--price", default=4.0, type=float, help="Spot price")
opts, args = p.parse_args(args)
if len(args) != 0:
sys.exit(not p.print_help())
role(["htang"])
session = boto3.Session(profile_name=opts.profile)
client = session.client('ec2')
s = InstanceSkeleton()
# Make sure the instance id is empty
instance_id = s.instance_id
if instance_id != "":
logging.error("Instance exists {}".format(instance_id))
sys.exit(1)
launch_spec = s.launch_spec
instance_id = ""
if opts.ondemand:
# Launch on-demand instance
response = client.run_instances(
BlockDeviceMappings=s.block_device_mappings,
MaxCount=1, MinCount=1,
ImageId=s.image_id,
InstanceType=s.instance_type,
KeyName=s.key_name,
Placement={"AvailabilityZone": s.availability_zone},
SecurityGroupIds=s.security_group_ids,
SubnetId=s.subnet_id,
EbsOptimized=s.ebs_optimized,
IamInstanceProfile=s.iam_instance_profile,
)
instance_id = response["Instances"][0]["InstanceId"]
else:
# Launch spot instance
response = client.request_spot_instances(
SpotPrice=str(opts.price),
InstanceCount=1,
Type="one-time",
AvailabilityZoneGroup=s.availability_zone,
LaunchSpecification=launch_spec
)
request_id = response["SpotInstanceRequests"][0]["SpotInstanceRequestId"]
print("Request id {}".format(request_id), file=sys.stderr)
while not instance_id:
response = client.describe_spot_instance_requests(
SpotInstanceRequestIds=[request_id]
)
if "InstanceId" in response["SpotInstanceRequests"][0]:
instance_id = response["SpotInstanceRequests"][0]["InstanceId"]
else:
logging.debug("Waiting to be fulfilled ...")
time.sleep(10)
# Check if the instance is running
print("Instance id {}".format(instance_id), file=sys.stderr)
status = ""
while status != "running":
logging.debug("Waiting instance to run ...")
time.sleep(3)
response = client.describe_instance_status(InstanceIds=[instance_id])
if len(response["InstanceStatuses"]) > 0:
status = response["InstanceStatuses"][0]["InstanceState"]["Name"]
# Tagging
name = "htang-lx-ondemand" if opts.ondemand else "htang-lx-spot"
response = client.create_tags(
Resources=[instance_id],
Tags=[{"Key": k, "Value": v} for k, v in { \
"Name": name,
"owner": "htang",
"project": "mv-bioinformatics"
}.items()]
)
# Attach working volumes
volumes = s.volumes
for volume in volumes:
response = client.attach_volume(
VolumeId=volume["VolumeId"],
InstanceId=instance_id,
Device=volume["Device"]
)
# Save instance id and ip
response = client.describe_instances(InstanceIds=[instance_id])
ip_address = response["Reservations"][0]["Instances"][0]["PrivateIpAddress"]
print("IP address {}".format(ip_address), file=sys.stderr)
s.save_instance_id(instance_id, ip_address) | python | def start(args):
"""
%prog start
Launch ec2 instance through command line.
"""
p = OptionParser(start.__doc__)
p.add_option("--ondemand", default=False, action="store_true",
help="Do we want a more expensive on-demand instance")
p.add_option("--profile", default="mvrad-datasci-role", help="Profile name")
p.add_option("--price", default=4.0, type=float, help="Spot price")
opts, args = p.parse_args(args)
if len(args) != 0:
sys.exit(not p.print_help())
role(["htang"])
session = boto3.Session(profile_name=opts.profile)
client = session.client('ec2')
s = InstanceSkeleton()
# Make sure the instance id is empty
instance_id = s.instance_id
if instance_id != "":
logging.error("Instance exists {}".format(instance_id))
sys.exit(1)
launch_spec = s.launch_spec
instance_id = ""
if opts.ondemand:
# Launch on-demand instance
response = client.run_instances(
BlockDeviceMappings=s.block_device_mappings,
MaxCount=1, MinCount=1,
ImageId=s.image_id,
InstanceType=s.instance_type,
KeyName=s.key_name,
Placement={"AvailabilityZone": s.availability_zone},
SecurityGroupIds=s.security_group_ids,
SubnetId=s.subnet_id,
EbsOptimized=s.ebs_optimized,
IamInstanceProfile=s.iam_instance_profile,
)
instance_id = response["Instances"][0]["InstanceId"]
else:
# Launch spot instance
response = client.request_spot_instances(
SpotPrice=str(opts.price),
InstanceCount=1,
Type="one-time",
AvailabilityZoneGroup=s.availability_zone,
LaunchSpecification=launch_spec
)
request_id = response["SpotInstanceRequests"][0]["SpotInstanceRequestId"]
print("Request id {}".format(request_id), file=sys.stderr)
while not instance_id:
response = client.describe_spot_instance_requests(
SpotInstanceRequestIds=[request_id]
)
if "InstanceId" in response["SpotInstanceRequests"][0]:
instance_id = response["SpotInstanceRequests"][0]["InstanceId"]
else:
logging.debug("Waiting to be fulfilled ...")
time.sleep(10)
# Check if the instance is running
print("Instance id {}".format(instance_id), file=sys.stderr)
status = ""
while status != "running":
logging.debug("Waiting instance to run ...")
time.sleep(3)
response = client.describe_instance_status(InstanceIds=[instance_id])
if len(response["InstanceStatuses"]) > 0:
status = response["InstanceStatuses"][0]["InstanceState"]["Name"]
# Tagging
name = "htang-lx-ondemand" if opts.ondemand else "htang-lx-spot"
response = client.create_tags(
Resources=[instance_id],
Tags=[{"Key": k, "Value": v} for k, v in { \
"Name": name,
"owner": "htang",
"project": "mv-bioinformatics"
}.items()]
)
# Attach working volumes
volumes = s.volumes
for volume in volumes:
response = client.attach_volume(
VolumeId=volume["VolumeId"],
InstanceId=instance_id,
Device=volume["Device"]
)
# Save instance id and ip
response = client.describe_instances(InstanceIds=[instance_id])
ip_address = response["Reservations"][0]["Instances"][0]["PrivateIpAddress"]
print("IP address {}".format(ip_address), file=sys.stderr)
s.save_instance_id(instance_id, ip_address) | ['def', 'start', '(', 'args', ')', ':', 'p', '=', 'OptionParser', '(', 'start', '.', '__doc__', ')', 'p', '.', 'add_option', '(', '"--ondemand"', ',', 'default', '=', 'False', ',', 'action', '=', '"store_true"', ',', 'help', '=', '"Do we want a more expensive on-demand instance"', ')', 'p', '.', 'add_option', '(', '"--profile"', ',', 'default', '=', '"mvrad-datasci-role"', ',', 'help', '=', '"Profile name"', ')', 'p', '.', 'add_option', '(', '"--price"', ',', 'default', '=', '4.0', ',', 'type', '=', 'float', ',', 'help', '=', '"Spot price"', ')', 'opts', ',', 'args', '=', 'p', '.', 'parse_args', '(', 'args', ')', 'if', 'len', '(', 'args', ')', '!=', '0', ':', 'sys', '.', 'exit', '(', 'not', 'p', '.', 'print_help', '(', ')', ')', 'role', '(', '[', '"htang"', ']', ')', 'session', '=', 'boto3', '.', 'Session', '(', 'profile_name', '=', 'opts', '.', 'profile', ')', 'client', '=', 'session', '.', 'client', '(', "'ec2'", ')', 's', '=', 'InstanceSkeleton', '(', ')', '# Make sure the instance id is empty', 'instance_id', '=', 's', '.', 'instance_id', 'if', 'instance_id', '!=', '""', ':', 'logging', '.', 'error', '(', '"Instance exists {}"', '.', 'format', '(', 'instance_id', ')', ')', 'sys', '.', 'exit', '(', '1', ')', 'launch_spec', '=', 's', '.', 'launch_spec', 'instance_id', '=', '""', 'if', 'opts', '.', 'ondemand', ':', '# Launch on-demand instance', 'response', '=', 'client', '.', 'run_instances', '(', 'BlockDeviceMappings', '=', 's', '.', 'block_device_mappings', ',', 'MaxCount', '=', '1', ',', 'MinCount', '=', '1', ',', 'ImageId', '=', 's', '.', 'image_id', ',', 'InstanceType', '=', 's', '.', 'instance_type', ',', 'KeyName', '=', 's', '.', 'key_name', ',', 'Placement', '=', '{', '"AvailabilityZone"', ':', 's', '.', 'availability_zone', '}', ',', 'SecurityGroupIds', '=', 's', '.', 'security_group_ids', ',', 'SubnetId', '=', 's', '.', 'subnet_id', ',', 'EbsOptimized', '=', 's', '.', 'ebs_optimized', ',', 'IamInstanceProfile', '=', 's', '.', 'iam_instance_profile', ',', ')', 'instance_id', '=', 'response', '[', '"Instances"', ']', '[', '0', ']', '[', '"InstanceId"', ']', 'else', ':', '# Launch spot instance', 'response', '=', 'client', '.', 'request_spot_instances', '(', 'SpotPrice', '=', 'str', '(', 'opts', '.', 'price', ')', ',', 'InstanceCount', '=', '1', ',', 'Type', '=', '"one-time"', ',', 'AvailabilityZoneGroup', '=', 's', '.', 'availability_zone', ',', 'LaunchSpecification', '=', 'launch_spec', ')', 'request_id', '=', 'response', '[', '"SpotInstanceRequests"', ']', '[', '0', ']', '[', '"SpotInstanceRequestId"', ']', 'print', '(', '"Request id {}"', '.', 'format', '(', 'request_id', ')', ',', 'file', '=', 'sys', '.', 'stderr', ')', 'while', 'not', 'instance_id', ':', 'response', '=', 'client', '.', 'describe_spot_instance_requests', '(', 'SpotInstanceRequestIds', '=', '[', 'request_id', ']', ')', 'if', '"InstanceId"', 'in', 'response', '[', '"SpotInstanceRequests"', ']', '[', '0', ']', ':', 'instance_id', '=', 'response', '[', '"SpotInstanceRequests"', ']', '[', '0', ']', '[', '"InstanceId"', ']', 'else', ':', 'logging', '.', 'debug', '(', '"Waiting to be fulfilled ..."', ')', 'time', '.', 'sleep', '(', '10', ')', '# Check if the instance is running', 'print', '(', '"Instance id {}"', '.', 'format', '(', 'instance_id', ')', ',', 'file', '=', 'sys', '.', 'stderr', ')', 'status', '=', '""', 'while', 'status', '!=', '"running"', ':', 'logging', '.', 'debug', '(', '"Waiting instance to run ..."', ')', 'time', '.', 'sleep', '(', '3', ')', 
'response', '=', 'client', '.', 'describe_instance_status', '(', 'InstanceIds', '=', '[', 'instance_id', ']', ')', 'if', 'len', '(', 'response', '[', '"InstanceStatuses"', ']', ')', '>', '0', ':', 'status', '=', 'response', '[', '"InstanceStatuses"', ']', '[', '0', ']', '[', '"InstanceState"', ']', '[', '"Name"', ']', '# Tagging', 'name', '=', '"htang-lx-ondemand"', 'if', 'opts', '.', 'ondemand', 'else', '"htang-lx-spot"', 'response', '=', 'client', '.', 'create_tags', '(', 'Resources', '=', '[', 'instance_id', ']', ',', 'Tags', '=', '[', '{', '"Key"', ':', 'k', ',', '"Value"', ':', 'v', '}', 'for', 'k', ',', 'v', 'in', '{', '"Name"', ':', 'name', ',', '"owner"', ':', '"htang"', ',', '"project"', ':', '"mv-bioinformatics"', '}', '.', 'items', '(', ')', ']', ')', '# Attach working volumes', 'volumes', '=', 's', '.', 'volumes', 'for', 'volume', 'in', 'volumes', ':', 'response', '=', 'client', '.', 'attach_volume', '(', 'VolumeId', '=', 'volume', '[', '"VolumeId"', ']', ',', 'InstanceId', '=', 'instance_id', ',', 'Device', '=', 'volume', '[', '"Device"', ']', ')', '# Save instance id and ip', 'response', '=', 'client', '.', 'describe_instances', '(', 'InstanceIds', '=', '[', 'instance_id', ']', ')', 'ip_address', '=', 'response', '[', '"Reservations"', ']', '[', '0', ']', '[', '"Instances"', ']', '[', '0', ']', '[', '"PrivateIpAddress"', ']', 'print', '(', '"IP address {}"', '.', 'format', '(', 'ip_address', ')', ',', 'file', '=', 'sys', '.', 'stderr', ')', 's', '.', 'save_instance_id', '(', 'instance_id', ',', 'ip_address', ')'] | %prog start
Launch ec2 instance through command line. | ['%prog', 'start'] | train | https://github.com/tanghaibao/jcvi/blob/d2e31a77b6ade7f41f3b321febc2b4744d1cdeca/jcvi/utils/aws.py#L135-L239 |
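
Since `start(args)` parses its own argv-style list with `OptionParser`, it can be driven directly; real execution also requires AWS credentials, the `mvrad-datasci-role` profile, and the instance-skeleton file the code reads:

```python
from jcvi.utils.aws import start

# Spot request with a $2.50 price ceiling (flags mirror the OptionParser above).
start(['--price', '2.5'])
# Or a pricier on-demand instance:
# start(['--ondemand'])
```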
609 | FNNDSC/chrisapp | chrisapp/base.py | ChrisApp.add_argument | def add_argument(self, *args, **kwargs):
"""
Add a parameter to this app.
"""
if not (('action' in kwargs) and (kwargs['action'] == 'help')):
# make sure required parameter options were defined
try:
name = kwargs['dest']
param_type = kwargs['type']
optional = kwargs['optional']
except KeyError as e:
detail = "%s option required. " % e
raise KeyError(detail)
if optional and ('default' not in kwargs):
detail = "A default value is required for optional parameters %s." % name
raise KeyError(detail)
# grab the default and help values
default = None
if 'default' in kwargs:
default = kwargs['default']
param_help = ""
if 'help' in kwargs:
param_help = kwargs['help']
# set the ArgumentParser's action
if param_type not in (str, int, float, bool, ChrisApp.path):
detail = "unsupported type: '%s'" % param_type
raise ValueError(detail)
action = 'store'
if param_type == bool:
action = 'store_false' if default else 'store_true'
del kwargs['default'] # 'default' and 'type' not allowed for boolean actions
del kwargs['type']
kwargs['action'] = action
# store the parameters internally (param_type.__name__ to enable json serialization)
param = {'name': name, 'type': param_type.__name__, 'optional': optional,
'flag': args[0], 'action': action, 'help': param_help, 'default': default}
self._parameters.append(param)
# add the parameter to the parser
del kwargs['optional']
ArgumentParser.add_argument(self, *args, **kwargs) | python | def add_argument(self, *args, **kwargs):
"""
Add a parameter to this app.
"""
if not (('action' in kwargs) and (kwargs['action'] == 'help')):
# make sure required parameter options were defined
try:
name = kwargs['dest']
param_type = kwargs['type']
optional = kwargs['optional']
except KeyError as e:
detail = "%s option required. " % e
raise KeyError(detail)
if optional and ('default' not in kwargs):
detail = "A default value is required for optional parameters %s." % name
raise KeyError(detail)
# grab the default and help values
default = None
if 'default' in kwargs:
default = kwargs['default']
param_help = ""
if 'help' in kwargs:
param_help = kwargs['help']
# set the ArgumentParser's action
if param_type not in (str, int, float, bool, ChrisApp.path):
detail = "unsupported type: '%s'" % param_type
raise ValueError(detail)
action = 'store'
if param_type == bool:
action = 'store_false' if default else 'store_true'
del kwargs['default'] # 'default' and 'type' not allowed for boolean actions
del kwargs['type']
kwargs['action'] = action
# store the parameters internally (param_type.__name__ to enable json serialization)
param = {'name': name, 'type': param_type.__name__, 'optional': optional,
'flag': args[0], 'action': action, 'help': param_help, 'default': default}
self._parameters.append(param)
# add the parameter to the parser
del kwargs['optional']
ArgumentParser.add_argument(self, *args, **kwargs) | ['def', 'add_argument', '(', 'self', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'if', 'not', '(', '(', "'action'", 'in', 'kwargs', ')', 'and', '(', 'kwargs', '[', "'action'", ']', '==', "'help'", ')', ')', ':', '# make sure required parameter options were defined', 'try', ':', 'name', '=', 'kwargs', '[', "'dest'", ']', 'param_type', '=', 'kwargs', '[', "'type'", ']', 'optional', '=', 'kwargs', '[', "'optional'", ']', 'except', 'KeyError', 'as', 'e', ':', 'detail', '=', '"%s option required. "', '%', 'e', 'raise', 'KeyError', '(', 'detail', ')', 'if', 'optional', 'and', '(', "'default'", 'not', 'in', 'kwargs', ')', ':', 'detail', '=', '"A default value is required for optional parameters %s."', '%', 'name', 'raise', 'KeyError', '(', 'detail', ')', '# grab the default and help values', 'default', '=', 'None', 'if', "'default'", 'in', 'kwargs', ':', 'default', '=', 'kwargs', '[', "'default'", ']', 'param_help', '=', '""', 'if', "'help'", 'in', 'kwargs', ':', 'param_help', '=', 'kwargs', '[', "'help'", ']', "# set the ArgumentParser's action", 'if', 'param_type', 'not', 'in', '(', 'str', ',', 'int', ',', 'float', ',', 'bool', ',', 'ChrisApp', '.', 'path', ')', ':', 'detail', '=', '"unsupported type: \'%s\'"', '%', 'param_type', 'raise', 'ValueError', '(', 'detail', ')', 'action', '=', "'store'", 'if', 'param_type', '==', 'bool', ':', 'action', '=', "'store_false'", 'if', 'default', 'else', "'store_true'", 'del', 'kwargs', '[', "'default'", ']', "# 'default' and 'type' not allowed for boolean actions", 'del', 'kwargs', '[', "'type'", ']', 'kwargs', '[', "'action'", ']', '=', 'action', '# store the parameters internally (param_type.__name__ to enable json serialization)', 'param', '=', '{', "'name'", ':', 'name', ',', "'type'", ':', 'param_type', '.', '__name__', ',', "'optional'", ':', 'optional', ',', "'flag'", ':', 'args', '[', '0', ']', ',', "'action'", ':', 'action', ',', "'help'", ':', 'param_help', ',', "'default'", ':', 'default', '}', 'self', '.', '_parameters', '.', 'append', '(', 'param', ')', '# add the parameter to the parser', 'del', 'kwargs', '[', "'optional'", ']', 'ArgumentParser', '.', 'add_argument', '(', 'self', ',', '*', 'args', ',', '*', '*', 'kwargs', ')'] | Add a parameter to this app. | ['Add', 'a', 'parameter', 'to', 'this', 'app', '.'] | train | https://github.com/FNNDSC/chrisapp/blob/b176655f97206240fe173dfe86736f82f0d85bc4/chrisapp/base.py#L210-L253 |
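
In a plugin these arguments are declared in the app's parameter-definition hook; `define_parameters` is the conventional ChrisApp override name, and the flag/dest values below are invented for illustration:

```python
from chrisapp.base import ChrisApp

class MyApp(ChrisApp):
    def define_parameters(self):
        # optional=True therefore requires a default, per the checks above
        self.add_argument('--prefix', dest='prefix', type=str,
                          optional=True, default='out',
                          help='prefix applied to output file names')
```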
610 | mardiros/pyshop | pyshop/models.py | User.by_ldap_credentials | def by_ldap_credentials(cls, session, login, password, settings):
"""if possible try to contact the LDAP for authentification if success
and login don't exist localy create one and return it
:param session: SQLAlchemy session
:type session: :class:`sqlalchemy.Session`
:param login: username
:type login: unicode
:param password: user password
:type password: unicode
:param settings: settings from self.request.registry.settings in views
:type settings: dict
:return: associated user
:rtype: :class:`pyshop.models.User`
"""
if not asbool(settings.get('pyshop.ldap.use_for_auth', 'False')):
return None
if ldap is None:
raise ImportError(
"no module name ldap. Install python-ldap package")
try:
if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'):
ldap.set_option(
ldap.OPT_X_TLS_CACERTDIR, '/etc/openldap/cacerts')
ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF)
ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON)
ldap.set_option(ldap.OPT_TIMEOUT, 20)
ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10)
ldap.set_option(ldap.OPT_TIMELIMIT, 15)
ldap_server_type = settings.get('pyshop.ldap.type', 'ldap')
host = settings['pyshop.ldap.host'].strip()
port = settings.get('pyshop.ldap.port', None).strip()
if ldap_server_type in ["ldaps", "start_tls"]:
port = port or 689
ldap_type = "ldaps"
certreq = settings.get('pyshop.ldap.certreq', 'DEMAND').strip()
if certreq not in ['DEMAND', 'ALLOW', 'HARD', 'TRY', 'NEVER']:
certreq = 'DEMAND'
tls_cert = getattr(ldap, 'OPT_X_TLS_%s' % certreq)
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_cert)
else:
port = port or 389
ldap_type = 'ldap'
server_url = "{ldap_type}://{host}:{port}".format(
ldap_type=ldap_type, host=host, port=port)
server = ldap.initialize(server_url)
if ldap_server_type == "start_tls":
server.start_tls_s()
server.protocol = ldap.VERSION3
# bind the account if needed
if settings['pyshop.ldap.account'] and \
settings['pyshop.ldap.password']:
server.simple_bind_s(settings['pyshop.ldap.account'],
settings['pyshop.ldap.password'])
filter_ = settings['pyshop.ldap.search_filter'].format(
username=login)
results = server.search_ext_s(
settings['pyshop.ldap.bind_dn'],
getattr(ldap,
'SCOPE_%s' % settings['pyshop.ldap.search_scope']),
filter_)
if results is None:
log.debug("LDAP rejected password for user %s", (login))
return None
for (dn, _attrs) in results:
if dn is None:
continue
log.debug('Trying simple bind with %s', dn)
server.simple_bind_s(dn, password)
attrs = server.search_ext_s(
dn, ldap.SCOPE_BASE, '(objectClass=*)')[0][1]
break
else:
log.debug("No matching LDAP objects for authentication of "
"'%s'", login)
return None
log.debug('LDAP authentication OK')
# we may create a new user if it don't exist
user_ldap = User.by_login(session, login)
if user_ldap is None:
log.debug('create user %s', login)
user_ldap = User()
user_ldap.login = login
# when creating a User, do not copy the ldap password
user_ldap.password = ''
user_ldap.local = True
user_ldap.firstname = attrs[
settings['pyshop.ldap.first_name_attr']][0]
user_ldap.lastname = attrs[
settings['pyshop.ldap.last_name_attr']][0]
user_ldap.email = attrs[
settings['pyshop.ldap.email_attr']][0]
for groupname in ["developer", "installer"]:
user_ldap.groups.append(Group.by_name(session, groupname))
if user_ldap.validate(session):
session.add(user_ldap)
log.debug('User "%s" added', login)
transaction.commit()
# its OK
return user_ldap
except ldap.NO_SUCH_OBJECT:
log.debug("LDAP says no such user '%s'", login)
except ldap.SERVER_DOWN:
log.error("LDAP can't access authentication server")
except ldap.LDAPError:
log.error('ERROR while using LDAP connection')
except Exception as exc:
log.error('Unmanaged exception %s', exc, exc_info=True)
return None | python | def by_ldap_credentials(cls, session, login, password, settings):
"""if possible try to contact the LDAP for authentification if success
and login don't exist localy create one and return it
:param session: SQLAlchemy session
:type session: :class:`sqlalchemy.Session`
:param login: username
:type login: unicode
:param password: user password
:type password: unicode
:param settings: settings from self.request.registry.settings in views
:type settings: dict
:return: associated user
:rtype: :class:`pyshop.models.User`
"""
if not asbool(settings.get('pyshop.ldap.use_for_auth', 'False')):
return None
if ldap is None:
raise ImportError(
"no module name ldap. Install python-ldap package")
try:
if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'):
ldap.set_option(
ldap.OPT_X_TLS_CACERTDIR, '/etc/openldap/cacerts')
ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF)
ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON)
ldap.set_option(ldap.OPT_TIMEOUT, 20)
ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10)
ldap.set_option(ldap.OPT_TIMELIMIT, 15)
ldap_server_type = settings.get('pyshop.ldap.type', 'ldap')
host = settings['pyshop.ldap.host'].strip()
port = settings.get('pyshop.ldap.port', None).strip()
if ldap_server_type in ["ldaps", "start_tls"]:
port = port or 689
ldap_type = "ldaps"
certreq = settings.get('pyshop.ldap.certreq', 'DEMAND').strip()
if certreq not in ['DEMAND', 'ALLOW', 'HARD', 'TRY', 'NEVER']:
certreq = 'DEMAND'
tls_cert = getattr(ldap, 'OPT_X_TLS_%s' % certreq)
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_cert)
else:
port = port or 389
ldap_type = 'ldap'
server_url = "{ldap_type}://{host}:{port}".format(
ldap_type=ldap_type, host=host, port=port)
server = ldap.initialize(server_url)
if ldap_server_type == "start_tls":
server.start_tls_s()
server.protocol = ldap.VERSION3
# bind the account if needed
if settings['pyshop.ldap.account'] and \
settings['pyshop.ldap.password']:
server.simple_bind_s(settings['pyshop.ldap.account'],
settings['pyshop.ldap.password'])
filter_ = settings['pyshop.ldap.search_filter'].format(
username=login)
results = server.search_ext_s(
settings['pyshop.ldap.bind_dn'],
getattr(ldap,
'SCOPE_%s' % settings['pyshop.ldap.search_scope']),
filter_)
if results is None:
log.debug("LDAP rejected password for user %s", (login))
return None
for (dn, _attrs) in results:
if dn is None:
continue
log.debug('Trying simple bind with %s', dn)
server.simple_bind_s(dn, password)
attrs = server.search_ext_s(
dn, ldap.SCOPE_BASE, '(objectClass=*)')[0][1]
break
else:
log.debug("No matching LDAP objects for authentication of "
"'%s'", login)
return None
log.debug('LDAP authentication OK')
# we may create a new user if it don't exist
user_ldap = User.by_login(session, login)
if user_ldap is None:
log.debug('create user %s', login)
user_ldap = User()
user_ldap.login = login
# when creating a User, do not copy the ldap password
user_ldap.password = ''
user_ldap.local = True
user_ldap.firstname = attrs[
settings['pyshop.ldap.first_name_attr']][0]
user_ldap.lastname = attrs[
settings['pyshop.ldap.last_name_attr']][0]
user_ldap.email = attrs[
settings['pyshop.ldap.email_attr']][0]
for groupname in ["developer", "installer"]:
user_ldap.groups.append(Group.by_name(session, groupname))
if user_ldap.validate(session):
session.add(user_ldap)
log.debug('User "%s" added', login)
transaction.commit()
# its OK
return user_ldap
except ldap.NO_SUCH_OBJECT:
log.debug("LDAP says no such user '%s'", login)
except ldap.SERVER_DOWN:
log.error("LDAP can't access authentication server")
except ldap.LDAPError:
log.error('ERROR while using LDAP connection')
except Exception as exc:
log.error('Unmanaged exception %s', exc, exc_info=True)
return None | ['def', 'by_ldap_credentials', '(', 'cls', ',', 'session', ',', 'login', ',', 'password', ',', 'settings', ')', ':', 'if', 'not', 'asbool', '(', 'settings', '.', 'get', '(', "'pyshop.ldap.use_for_auth'", ',', "'False'", ')', ')', ':', 'return', 'None', 'if', 'ldap', 'is', 'None', ':', 'raise', 'ImportError', '(', '"no module name ldap. Install python-ldap package"', ')', 'try', ':', 'if', 'hasattr', '(', 'ldap', ',', "'OPT_X_TLS_CACERTDIR'", ')', ':', 'ldap', '.', 'set_option', '(', 'ldap', '.', 'OPT_X_TLS_CACERTDIR', ',', "'/etc/openldap/cacerts'", ')', 'ldap', '.', 'set_option', '(', 'ldap', '.', 'OPT_REFERRALS', ',', 'ldap', '.', 'OPT_OFF', ')', 'ldap', '.', 'set_option', '(', 'ldap', '.', 'OPT_RESTART', ',', 'ldap', '.', 'OPT_ON', ')', 'ldap', '.', 'set_option', '(', 'ldap', '.', 'OPT_TIMEOUT', ',', '20', ')', 'ldap', '.', 'set_option', '(', 'ldap', '.', 'OPT_NETWORK_TIMEOUT', ',', '10', ')', 'ldap', '.', 'set_option', '(', 'ldap', '.', 'OPT_TIMELIMIT', ',', '15', ')', 'ldap_server_type', '=', 'settings', '.', 'get', '(', "'pyshop.ldap.type'", ',', "'ldap'", ')', 'host', '=', 'settings', '[', "'pyshop.ldap.host'", ']', '.', 'strip', '(', ')', 'port', '=', 'settings', '.', 'get', '(', "'pyshop.ldap.port'", ',', 'None', ')', '.', 'strip', '(', ')', 'if', 'ldap_server_type', 'in', '[', '"ldaps"', ',', '"start_tls"', ']', ':', 'port', '=', 'port', 'or', '689', 'ldap_type', '=', '"ldaps"', 'certreq', '=', 'settings', '.', 'get', '(', "'pyshop.ldap.certreq'", ',', "'DEMAND'", ')', '.', 'strip', '(', ')', 'if', 'certreq', 'not', 'in', '[', "'DEMAND'", ',', "'ALLOW'", ',', "'HARD'", ',', "'TRY'", ',', "'NEVER'", ']', ':', 'certreq', '=', "'DEMAND'", 'tls_cert', '=', 'getattr', '(', 'ldap', ',', "'OPT_X_TLS_%s'", '%', 'certreq', ')', 'ldap', '.', 'set_option', '(', 'ldap', '.', 'OPT_X_TLS_REQUIRE_CERT', ',', 'tls_cert', ')', 'else', ':', 'port', '=', 'port', 'or', '389', 'ldap_type', '=', "'ldap'", 'server_url', '=', '"{ldap_type}://{host}:{port}"', '.', 'format', '(', 'ldap_type', '=', 'ldap_type', ',', 'host', '=', 'host', ',', 'port', '=', 'port', ')', 'server', '=', 'ldap', '.', 'initialize', '(', 'server_url', ')', 'if', 'ldap_server_type', '==', '"start_tls"', ':', 'server', '.', 'start_tls_s', '(', ')', 'server', '.', 'protocol', '=', 'ldap', '.', 'VERSION3', '# bind the account if needed', 'if', 'settings', '[', "'pyshop.ldap.account'", ']', 'and', 'settings', '[', "'pyshop.ldap.password'", ']', ':', 'server', '.', 'simple_bind_s', '(', 'settings', '[', "'pyshop.ldap.account'", ']', ',', 'settings', '[', "'pyshop.ldap.password'", ']', ')', 'filter_', '=', 'settings', '[', "'pyshop.ldap.search_filter'", ']', '.', 'format', '(', 'username', '=', 'login', ')', 'results', '=', 'server', '.', 'search_ext_s', '(', 'settings', '[', "'pyshop.ldap.bind_dn'", ']', ',', 'getattr', '(', 'ldap', ',', "'SCOPE_%s'", '%', 'settings', '[', "'pyshop.ldap.search_scope'", ']', ')', ',', 'filter_', ')', 'if', 'results', 'is', 'None', ':', 'log', '.', 'debug', '(', '"LDAP rejected password for user %s"', ',', '(', 'login', ')', ')', 'return', 'None', 'for', '(', 'dn', ',', '_attrs', ')', 'in', 'results', ':', 'if', 'dn', 'is', 'None', ':', 'continue', 'log', '.', 'debug', '(', "'Trying simple bind with %s'", ',', 'dn', ')', 'server', '.', 'simple_bind_s', '(', 'dn', ',', 'password', ')', 'attrs', '=', 'server', '.', 'search_ext_s', '(', 'dn', ',', 'ldap', '.', 'SCOPE_BASE', ',', "'(objectClass=*)'", ')', '[', '0', ']', '[', '1', ']', 'break', 'else', ':', 'log', '.', 'debug', '(', '"No matching 
LDAP objects for authentication of "', '"\'%s\'"', ',', 'login', ')', 'return', 'None', 'log', '.', 'debug', '(', "'LDAP authentication OK'", ')', "# we may create a new user if it don't exist", 'user_ldap', '=', 'User', '.', 'by_login', '(', 'session', ',', 'login', ')', 'if', 'user_ldap', 'is', 'None', ':', 'log', '.', 'debug', '(', "'create user %s'", ',', 'login', ')', 'user_ldap', '=', 'User', '(', ')', 'user_ldap', '.', 'login', '=', 'login', '# when creating a User, do not copy the ldap password', 'user_ldap', '.', 'password', '=', "''", 'user_ldap', '.', 'local', '=', 'True', 'user_ldap', '.', 'firstname', '=', 'attrs', '[', 'settings', '[', "'pyshop.ldap.first_name_attr'", ']', ']', '[', '0', ']', 'user_ldap', '.', 'lastname', '=', 'attrs', '[', 'settings', '[', "'pyshop.ldap.last_name_attr'", ']', ']', '[', '0', ']', 'user_ldap', '.', 'email', '=', 'attrs', '[', 'settings', '[', "'pyshop.ldap.email_attr'", ']', ']', '[', '0', ']', 'for', 'groupname', 'in', '[', '"developer"', ',', '"installer"', ']', ':', 'user_ldap', '.', 'groups', '.', 'append', '(', 'Group', '.', 'by_name', '(', 'session', ',', 'groupname', ')', ')', 'if', 'user_ldap', '.', 'validate', '(', 'session', ')', ':', 'session', '.', 'add', '(', 'user_ldap', ')', 'log', '.', 'debug', '(', '\'User "%s" added\'', ',', 'login', ')', 'transaction', '.', 'commit', '(', ')', '# its OK', 'return', 'user_ldap', 'except', 'ldap', '.', 'NO_SUCH_OBJECT', ':', 'log', '.', 'debug', '(', '"LDAP says no such user \'%s\'"', ',', 'login', ')', 'except', 'ldap', '.', 'SERVER_DOWN', ':', 'log', '.', 'error', '(', '"LDAP can\'t access authentication server"', ')', 'except', 'ldap', '.', 'LDAPError', ':', 'log', '.', 'error', '(', "'ERROR while using LDAP connection'", ')', 'except', 'Exception', 'as', 'exc', ':', 'log', '.', 'error', '(', "'Unmanaged exception %s'", ',', 'exc', ',', 'exc_info', '=', 'True', ')', 'return', 'None'] | if possible try to contact the LDAP for authentification if success
and the login doesn't exist locally, create one and return it
:param session: SQLAlchemy session
:type session: :class:`sqlalchemy.Session`
:param login: username
:type login: unicode
:param password: user password
:type password: unicode
:param settings: settings from self.request.registry.settings in views
:type settings: dict
:return: associated user
:rtype: :class:`pyshop.models.User` | ['if', 'possible', 'try', 'to', 'contact', 'the', 'LDAP', 'for', 'authentification', 'if', 'success', 'and', 'login', 'don', 't', 'exist', 'localy', 'create', 'one', 'and', 'return', 'it'] | train | https://github.com/mardiros/pyshop/blob/b42510b9c3fa16e0e5710457401ac38fea5bf7a0/pyshop/models.py#L228-L347 |
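
The function above wraps the classic LDAP search-then-bind flow: locate the user's DN with a service account, then attempt a bind as that DN with the supplied password. A minimal sketch of that flow with python-ldap; the host, base DN, and filter below are illustrative placeholders, not pyshop's settings:

import ldap  # python-ldap

def ldap_authenticate(login, password):
    # Placeholder server URL and base DN -- assumptions for this sketch.
    server = ldap.initialize('ldap://ldap.example.com:389')
    server.protocol_version = ldap.VERSION3
    results = server.search_ext_s('ou=people,dc=example,dc=com',
                                  ldap.SCOPE_SUBTREE,
                                  '(uid=%s)' % login)
    for dn, attrs in results:
        if dn is None:  # referral entries come back with dn == None
            continue
        # Raises ldap.INVALID_CREDENTIALS if the password is wrong.
        server.simple_bind_s(dn, password)
        return dn, attrs
    return None
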
611 | tk0miya/tk.phpautodoc | src/phply/phpparse.py | p_common_scalar_magic_method | def p_common_scalar_magic_method(p):
'common_scalar : METHOD_C'
p[0] = ast.MagicConstant(p[1].upper(), None, lineno=p.lineno(1)) | python | def p_common_scalar_magic_method(p):
'common_scalar : METHOD_C'
p[0] = ast.MagicConstant(p[1].upper(), None, lineno=p.lineno(1)) | ['def', 'p_common_scalar_magic_method', '(', 'p', ')', ':', 'p', '[', '0', ']', '=', 'ast', '.', 'MagicConstant', '(', 'p', '[', '1', ']', '.', 'upper', '(', ')', ',', 'None', ',', 'lineno', '=', 'p', '.', 'lineno', '(', '1', ')', ')'] | common_scalar : METHOD_C | ['common_scalar', ':', 'METHOD_C'] | train | https://github.com/tk0miya/tk.phpautodoc/blob/cf789f64abaf76351485cee231a075227e665fb6/src/phply/phpparse.py#L1218-L1220 |
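
PLY derives the grammar rule from the handler's docstring, and p.lineno(1) recovers the source line of the first matched symbol. A minimal sketch of the same convention with an invented rule (not phply's actual grammar); registering it with ply.yacc.yacc() alongside a matching lexer makes it active:

def p_scalar_number(p):
    'scalar : NUMBER'
    # p[1] holds the matched token's value; assigning p[0] sets the rule's result.
    p[0] = ('scalar', p[1], p.lineno(1))
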
612 | codelv/enaml-native | src/enamlnative/android/android_grid_layout.py | AndroidGridLayout.create_widget | def create_widget(self):
""" Create the underlying widget.
"""
d = self.declaration
self.widget = GridLayout(self.get_context(), None, d.style) | python | def create_widget(self):
""" Create the underlying widget.
"""
d = self.declaration
self.widget = GridLayout(self.get_context(), None, d.style) | ['def', 'create_widget', '(', 'self', ')', ':', 'd', '=', 'self', '.', 'declaration', 'self', '.', 'widget', '=', 'GridLayout', '(', 'self', '.', 'get_context', '(', ')', ',', 'None', ',', 'd', '.', 'style', ')'] | Create the underlying widget. | ['Create', 'the', 'underlying', 'widget', '.'] | train | https://github.com/codelv/enaml-native/blob/c33986e9eda468c508806e0a3e73c771401e5718/src/enamlnative/android/android_grid_layout.py#L48-L53 |
613 | tamasgal/km3pipe | km3pipe/stats.py | param_describe | def param_describe(params, quant=95, axis=0):
"""Get mean + quantile range from bootstrapped params."""
par = np.mean(params, axis=axis)
lo, up = perc(quant)
p_up = np.percentile(params, up, axis=axis)
p_lo = np.percentile(params, lo, axis=axis)
return par, p_lo, p_up | python | def param_describe(params, quant=95, axis=0):
"""Get mean + quantile range from bootstrapped params."""
par = np.mean(params, axis=axis)
lo, up = perc(quant)
p_up = np.percentile(params, up, axis=axis)
p_lo = np.percentile(params, lo, axis=axis)
return par, p_lo, p_up | ['def', 'param_describe', '(', 'params', ',', 'quant', '=', '95', ',', 'axis', '=', '0', ')', ':', 'par', '=', 'np', '.', 'mean', '(', 'params', ',', 'axis', '=', 'axis', ')', 'lo', ',', 'up', '=', 'perc', '(', 'quant', ')', 'p_up', '=', 'np', '.', 'percentile', '(', 'params', ',', 'up', ',', 'axis', '=', 'axis', ')', 'p_lo', '=', 'np', '.', 'percentile', '(', 'params', ',', 'lo', ',', 'axis', '=', 'axis', ')', 'return', 'par', ',', 'p_lo', ',', 'p_up'] | Get mean + quantile range from bootstrapped params. | ['Get', 'mean', '+', 'quantile', 'range', 'from', 'bootstrapped', 'params', '.'] | train | https://github.com/tamasgal/km3pipe/blob/7a9b59ac899a28775b5bdc5d391d9a5340d08040/km3pipe/stats.py#L190-L196 |
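
The perc() helper used above lives elsewhere in km3pipe; a self-contained sketch of the same mean-plus-quantile summary, assuming perc(95) denotes the central 95% band, i.e. the 2.5th and 97.5th percentiles:

import numpy as np

def describe_bootstrap(params, quant=95, axis=0):
    lo = (100.0 - quant) / 2.0   # 2.5 for quant=95
    up = 100.0 - lo              # 97.5 for quant=95
    mean = np.mean(params, axis=axis)
    return (mean,
            np.percentile(params, lo, axis=axis),
            np.percentile(params, up, axis=axis))

# e.g. 1000 bootstrap replicates of 3 fitted parameters:
boot = np.random.normal(size=(1000, 3))
par, p_lo, p_up = describe_bootstrap(boot)
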
614 | omaraboumrad/mastool | mastool/practices.py | find_poor_default_arg | def find_poor_default_arg(node):
"""Finds poor default args"""
poor_defaults = [
ast.Call,
ast.Dict,
ast.DictComp,
ast.GeneratorExp,
ast.List,
ast.ListComp,
ast.Set,
ast.SetComp,
]
# pylint: disable=unidiomatic-typecheck
return (
isinstance(node, ast.FunctionDef)
and any((n for n in node.args.defaults if type(n) in poor_defaults))
) | python | def find_poor_default_arg(node):
"""Finds poor default args"""
poor_defaults = [
ast.Call,
ast.Dict,
ast.DictComp,
ast.GeneratorExp,
ast.List,
ast.ListComp,
ast.Set,
ast.SetComp,
]
# pylint: disable=unidiomatic-typecheck
return (
isinstance(node, ast.FunctionDef)
and any((n for n in node.args.defaults if type(n) in poor_defaults))
) | ['def', 'find_poor_default_arg', '(', 'node', ')', ':', 'poor_defaults', '=', '[', 'ast', '.', 'Call', ',', 'ast', '.', 'Dict', ',', 'ast', '.', 'DictComp', ',', 'ast', '.', 'GeneratorExp', ',', 'ast', '.', 'List', ',', 'ast', '.', 'ListComp', ',', 'ast', '.', 'Set', ',', 'ast', '.', 'SetComp', ',', ']', '# pylint: disable=unidiomatic-typecheck', 'return', '(', 'isinstance', '(', 'node', ',', 'ast', '.', 'FunctionDef', ')', 'and', 'any', '(', '(', 'n', 'for', 'n', 'in', 'node', '.', 'args', '.', 'defaults', 'if', 'type', '(', 'n', ')', 'in', 'poor_defaults', ')', ')', ')'] | Finds poor default args | ['Finds', 'poor', 'default', 'args'] | train | https://github.com/omaraboumrad/mastool/blob/0ec566de6717d03c6ec61affe5d1e9ff8d7e6ebd/mastool/practices.py#L154-L171 |
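
Because the checker is a plain predicate over AST nodes, it can be driven with ast.walk; the loop below is illustrative glue (not mastool's CLI), reusing find_poor_default_arg from above:

import ast

source = "def f(x, cache={}, items=[]):\n    return x\n"
for node in ast.walk(ast.parse(source)):
    if find_poor_default_arg(node):
        print('mutable/callable default argument at line', node.lineno)
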
615 | symphonyoss/python-symphony | symphony/Pod/connections.py | Connections.sessioninfo | def sessioninfo(self):
''' session info '''
response, status_code = self.__pod__.Session.get_v2_sessioninfo(
sessionToken=self.__session__
).result()
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | python | def sessioninfo(self):
''' session info '''
response, status_code = self.__pod__.Session.get_v2_sessioninfo(
sessionToken=self.__session__
).result()
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | ['def', 'sessioninfo', '(', 'self', ')', ':', 'response', ',', 'status_code', '=', 'self', '.', '__pod__', '.', 'Session', '.', 'get_v2_sessioninfo', '(', 'sessionToken', '=', 'self', '.', '__session__', ')', '.', 'result', '(', ')', 'self', '.', 'logger', '.', 'debug', '(', "'%s: %s'", '%', '(', 'status_code', ',', 'response', ')', ')', 'return', 'status_code', ',', 'response'] | session info | ['session', 'info'] | train | https://github.com/symphonyoss/python-symphony/blob/b939f35fbda461183ec0c01790c754f89a295be0/symphony/Pod/connections.py#L19-L25 |
616 | StackStorm/pybind | pybind/slxos/v17r_1_01a/routing_system/route_map/content/set_/__init__.py | set_._set_weight | def _set_weight(self, v, load=False):
"""
Setter method for weight, mapped from YANG variable /routing_system/route_map/content/set/weight (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_weight is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_weight() directly.
YANG Description: BGP weight for routing table
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=weight.weight, is_container='container', presence=False, yang_name="weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP weight for routing table', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """weight must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=weight.weight, is_container='container', presence=False, yang_name="weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP weight for routing table', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)""",
})
self.__weight = t
if hasattr(self, '_set'):
self._set() | python | def _set_weight(self, v, load=False):
"""
Setter method for weight, mapped from YANG variable /routing_system/route_map/content/set/weight (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_weight is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_weight() directly.
YANG Description: BGP weight for routing table
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=weight.weight, is_container='container', presence=False, yang_name="weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP weight for routing table', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """weight must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=weight.weight, is_container='container', presence=False, yang_name="weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP weight for routing table', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)""",
})
self.__weight = t
if hasattr(self, '_set'):
self._set() | ['def', '_set_weight', '(', 'self', ',', 'v', ',', 'load', '=', 'False', ')', ':', 'if', 'hasattr', '(', 'v', ',', '"_utype"', ')', ':', 'v', '=', 'v', '.', '_utype', '(', 'v', ')', 'try', ':', 't', '=', 'YANGDynClass', '(', 'v', ',', 'base', '=', 'weight', '.', 'weight', ',', 'is_container', '=', "'container'", ',', 'presence', '=', 'False', ',', 'yang_name', '=', '"weight"', ',', 'rest_name', '=', '"weight"', ',', 'parent', '=', 'self', ',', 'path_helper', '=', 'self', '.', '_path_helper', ',', 'extmethods', '=', 'self', '.', '_extmethods', ',', 'register_paths', '=', 'True', ',', 'extensions', '=', '{', "u'tailf-common'", ':', '{', "u'info'", ':', "u'BGP weight for routing table'", ',', "u'cli-full-no'", ':', 'None', '}', '}', ',', 'namespace', '=', "'urn:brocade.com:mgmt:brocade-ip-policy'", ',', 'defining_module', '=', "'brocade-ip-policy'", ',', 'yang_type', '=', "'container'", ',', 'is_config', '=', 'True', ')', 'except', '(', 'TypeError', ',', 'ValueError', ')', ':', 'raise', 'ValueError', '(', '{', "'error-string'", ':', '"""weight must be of a type compatible with container"""', ',', "'defined-type'", ':', '"container"', ',', "'generated-type'", ':', '"""YANGDynClass(base=weight.weight, is_container=\'container\', presence=False, yang_name="weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'BGP weight for routing table\', u\'cli-full-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-ip-policy\', defining_module=\'brocade-ip-policy\', yang_type=\'container\', is_config=True)"""', ',', '}', ')', 'self', '.', '__weight', '=', 't', 'if', 'hasattr', '(', 'self', ',', "'_set'", ')', ':', 'self', '.', '_set', '(', ')'] | Setter method for weight, mapped from YANG variable /routing_system/route_map/content/set/weight (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_weight is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_weight() directly.
YANG Description: BGP weight for routing table | ['Setter', 'method', 'for', 'weight', 'mapped', 'from', 'YANG', 'variable', '/', 'routing_system', '/', 'route_map', '/', 'content', '/', 'set', '/', 'weight', '(', 'container', ')', 'If', 'this', 'variable', 'is', 'read', '-', 'only', '(', 'config', ':', 'false', ')', 'in', 'the', 'source', 'YANG', 'file', 'then', '_set_weight', 'is', 'considered', 'as', 'a', 'private', 'method', '.', 'Backends', 'looking', 'to', 'populate', 'this', 'variable', 'should', 'do', 'so', 'via', 'calling', 'thisObj', '.', '_set_weight', '()', 'directly', '.'] | train | https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/slxos/v17r_1_01a/routing_system/route_map/content/set_/__init__.py#L453-L476 |
617 | Azure/azure-cosmos-python | azure/cosmos/cosmos_client.py | CosmosClient.DeleteConflict | def DeleteConflict(self, conflict_link, options=None):
"""Deletes a conflict.
:param str conflict_link:
The link to the conflict.
:param dict options:
The request options for the request.
:return:
The deleted Conflict.
:rtype:
dict
"""
if options is None:
options = {}
path = base.GetPathFromLink(conflict_link)
conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link)
return self.DeleteResource(path,
'conflicts',
conflict_id,
None,
options) | python | def DeleteConflict(self, conflict_link, options=None):
"""Deletes a conflict.
:param str conflict_link:
The link to the conflict.
:param dict options:
The request options for the request.
:return:
The deleted Conflict.
:rtype:
dict
"""
if options is None:
options = {}
path = base.GetPathFromLink(conflict_link)
conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link)
return self.DeleteResource(path,
'conflicts',
conflict_id,
None,
options) | ['def', 'DeleteConflict', '(', 'self', ',', 'conflict_link', ',', 'options', '=', 'None', ')', ':', 'if', 'options', 'is', 'None', ':', 'options', '=', '{', '}', 'path', '=', 'base', '.', 'GetPathFromLink', '(', 'conflict_link', ')', 'conflict_id', '=', 'base', '.', 'GetResourceIdOrFullNameFromLink', '(', 'conflict_link', ')', 'return', 'self', '.', 'DeleteResource', '(', 'path', ',', "'conflicts'", ',', 'conflict_id', ',', 'None', ',', 'options', ')'] | Deletes a conflict.
:param str conflict_link:
The link to the conflict.
:param dict options:
The request options for the request.
:return:
The deleted Conflict.
:rtype:
dict | ['Deletes', 'a', 'conflict', '.'] | train | https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L2259-L2282 |
618 | Samreay/ChainConsumer | chainconsumer/diagnostic.py | Diagnostic.gelman_rubin | def gelman_rubin(self, chain=None, threshold=0.05):
r""" Runs the Gelman Rubin diagnostic on the supplied chains.
Parameters
----------
chain : int|str, optional
Which chain to run the diagnostic on. By default, this is `None`,
which will run the diagnostic on all chains. You can also
supply an integer (the chain index) or a string, for the chain
name (if you set one).
threshold : float, optional
The maximum deviation permitted from 1 for the final value
:math:`\hat{R}`
Returns
-------
bool
whether or not the chains pass the test
Notes
-----
I follow PyMC in calculating the Gelman-Rubin statistic, where,
having :math:`m` chains of length :math:`n`, we compute
.. math::
B = \frac{n}{m-1} \sum_{j=1}^{m} \left(\bar{\theta}_{.j} - \bar{\theta}_{..}\right)^2
W = \frac{1}{m} \sum_{j=1}^{m} \left[ \frac{1}{n-1} \sum_{i=1}^{n} \left( \theta_{ij} - \bar{\theta_{.j}}\right)^2 \right]
where :math:`\theta` represents each model parameter. We then compute
:math:`\hat{V} = \frac{n-1}{n}W + \frac{1}{n}B`, and have our convergence ratio
:math:`\hat{R} = \sqrt{\frac{\hat{V}}{W}}`. We check that for all parameters,
this ratio deviates from unity by less than the supplied threshold.
"""
if chain is None:
return np.all([self.gelman_rubin(k, threshold=threshold) for k in range(len(self.parent.chains))])
index = self.parent._get_chain(chain)
assert len(index) == 1, "Please specify only one chain, have %d chains" % len(index)
chain = self.parent.chains[index[0]]
num_walkers = chain.walkers
parameters = chain.parameters
name = chain.name
data = chain.chain
chains = np.split(data, num_walkers)
assert num_walkers > 1, "Cannot run Gelman-Rubin statistic with only one walker"
m = 1.0 * len(chains)
n = 1.0 * chains[0].shape[0]
all_mean = np.mean(data, axis=0)
chain_means = np.array([np.mean(c, axis=0) for c in chains])
chain_var = np.array([np.var(c, axis=0, ddof=1) for c in chains])
b = n / (m - 1) * ((chain_means - all_mean)**2).sum(axis=0)
w = (1 / m) * chain_var.sum(axis=0)
var = (n - 1) * w / n + b / n
v = var + b / (n * m)
R = np.sqrt(v / w)
passed = np.abs(R - 1) < threshold
print("Gelman-Rubin Statistic values for chain %s" % name)
for p, v, pas in zip(parameters, R, passed):
param = "Param %d" % p if isinstance(p, int) else p
print("%s: %7.5f (%s)" % (param, v, "Passed" if pas else "Failed"))
return np.all(passed) | python | def gelman_rubin(self, chain=None, threshold=0.05):
r""" Runs the Gelman Rubin diagnostic on the supplied chains.
Parameters
----------
chain : int|str, optional
Which chain to run the diagnostic on. By default, this is `None`,
which will run the diagnostic on all chains. You can also
supply an integer (the chain index) or a string, for the chain
name (if you set one).
threshold : float, optional
The maximum deviation permitted from 1 for the final value
:math:`\hat{R}`
Returns
-------
bool
whether or not the chains pass the test
Notes
-----
I follow PyMC in calculating the Gelman-Rubin statistic, where,
having :math:`m` chains of length :math:`n`, we compute
.. math::
B = \frac{n}{m-1} \sum_{j=1}^{m} \left(\bar{\theta}_{.j} - \bar{\theta}_{..}\right)^2
W = \frac{1}{m} \sum_{j=1}^{m} \left[ \frac{1}{n-1} \sum_{i=1}^{n} \left( \theta_{ij} - \bar{\theta_{.j}}\right)^2 \right]
where :math:`\theta` represents each model parameter. We then compute
:math:`\hat{V} = \frac{n-1}{n}W + \frac{1}{n}B`, and have our convergence ratio
:math:`\hat{R} = \sqrt{\frac{\hat{V}}{W}}`. We check that for all parameters,
this ratio deviates from unity by less than the supplied threshold.
"""
if chain is None:
return np.all([self.gelman_rubin(k, threshold=threshold) for k in range(len(self.parent.chains))])
index = self.parent._get_chain(chain)
assert len(index) == 1, "Please specify only one chain, have %d chains" % len(index)
chain = self.parent.chains[index[0]]
num_walkers = chain.walkers
parameters = chain.parameters
name = chain.name
data = chain.chain
chains = np.split(data, num_walkers)
assert num_walkers > 1, "Cannot run Gelman-Rubin statistic with only one walker"
m = 1.0 * len(chains)
n = 1.0 * chains[0].shape[0]
all_mean = np.mean(data, axis=0)
chain_means = np.array([np.mean(c, axis=0) for c in chains])
chain_var = np.array([np.var(c, axis=0, ddof=1) for c in chains])
b = n / (m - 1) * ((chain_means - all_mean)**2).sum(axis=0)
w = (1 / m) * chain_var.sum(axis=0)
var = (n - 1) * w / n + b / n
v = var + b / (n * m)
R = np.sqrt(v / w)
passed = np.abs(R - 1) < threshold
print("Gelman-Rubin Statistic values for chain %s" % name)
for p, v, pas in zip(parameters, R, passed):
param = "Param %d" % p if isinstance(p, int) else p
print("%s: %7.5f (%s)" % (param, v, "Passed" if pas else "Failed"))
return np.all(passed) | ['def', 'gelman_rubin', '(', 'self', ',', 'chain', '=', 'None', ',', 'threshold', '=', '0.05', ')', ':', 'if', 'chain', 'is', 'None', ':', 'return', 'np', '.', 'all', '(', '[', 'self', '.', 'gelman_rubin', '(', 'k', ',', 'threshold', '=', 'threshold', ')', 'for', 'k', 'in', 'range', '(', 'len', '(', 'self', '.', 'parent', '.', 'chains', ')', ')', ']', ')', 'index', '=', 'self', '.', 'parent', '.', '_get_chain', '(', 'chain', ')', 'assert', 'len', '(', 'index', ')', '==', '1', ',', '"Please specify only one chain, have %d chains"', '%', 'len', '(', 'index', ')', 'chain', '=', 'self', '.', 'parent', '.', 'chains', '[', 'index', '[', '0', ']', ']', 'num_walkers', '=', 'chain', '.', 'walkers', 'parameters', '=', 'chain', '.', 'parameters', 'name', '=', 'chain', '.', 'name', 'data', '=', 'chain', '.', 'chain', 'chains', '=', 'np', '.', 'split', '(', 'data', ',', 'num_walkers', ')', 'assert', 'num_walkers', '>', '1', ',', '"Cannot run Gelman-Rubin statistic with only one walker"', 'm', '=', '1.0', '*', 'len', '(', 'chains', ')', 'n', '=', '1.0', '*', 'chains', '[', '0', ']', '.', 'shape', '[', '0', ']', 'all_mean', '=', 'np', '.', 'mean', '(', 'data', ',', 'axis', '=', '0', ')', 'chain_means', '=', 'np', '.', 'array', '(', '[', 'np', '.', 'mean', '(', 'c', ',', 'axis', '=', '0', ')', 'for', 'c', 'in', 'chains', ']', ')', 'chain_var', '=', 'np', '.', 'array', '(', '[', 'np', '.', 'var', '(', 'c', ',', 'axis', '=', '0', ',', 'ddof', '=', '1', ')', 'for', 'c', 'in', 'chains', ']', ')', 'b', '=', 'n', '/', '(', 'm', '-', '1', ')', '*', '(', '(', 'chain_means', '-', 'all_mean', ')', '**', '2', ')', '.', 'sum', '(', 'axis', '=', '0', ')', 'w', '=', '(', '1', '/', 'm', ')', '*', 'chain_var', '.', 'sum', '(', 'axis', '=', '0', ')', 'var', '=', '(', 'n', '-', '1', ')', '*', 'w', '/', 'n', '+', 'b', '/', 'n', 'v', '=', 'var', '+', 'b', '/', '(', 'n', '*', 'm', ')', 'R', '=', 'np', '.', 'sqrt', '(', 'v', '/', 'w', ')', 'passed', '=', 'np', '.', 'abs', '(', 'R', '-', '1', ')', '<', 'threshold', 'print', '(', '"Gelman-Rubin Statistic values for chain %s"', '%', 'name', ')', 'for', 'p', ',', 'v', ',', 'pas', 'in', 'zip', '(', 'parameters', ',', 'R', ',', 'passed', ')', ':', 'param', '=', '"Param %d"', '%', 'p', 'if', 'isinstance', '(', 'p', ',', 'int', ')', 'else', 'p', 'print', '(', '"%s: %7.5f (%s)"', '%', '(', 'param', ',', 'v', ',', '"Passed"', 'if', 'pas', 'else', '"Failed"', ')', ')', 'return', 'np', '.', 'all', '(', 'passed', ')'] | r""" Runs the Gelman Rubin diagnostic on the supplied chains.
Parameters
----------
chain : int|str, optional
Which chain to run the diagnostic on. By default, this is `None`,
which will run the diagnostic on all chains. You can also
supply an integer (the chain index) or a string, for the chain
name (if you set one).
threshold : float, optional
The maximum deviation permitted from 1 for the final value
:math:`\hat{R}`
Returns
-------
bool
whether or not the chains pass the test
Notes
-----
I follow PyMC in calculating the Gelman-Rubin statistic, where,
having :math:`m` chains of length :math:`n`, we compute
.. math::
B = \frac{n}{m-1} \sum_{j=1}^{m} \left(\bar{\theta}_{.j} - \bar{\theta}_{..}\right)^2
W = \frac{1}{m} \sum_{j=1}^{m} \left[ \frac{1}{n-1} \sum_{i=1}^{n} \left( \theta_{ij} - \bar{\theta_{.j}}\right)^2 \right]
where :math:`\theta` represents each model parameter. We then compute
:math:`\hat{V} = \frac{n-1}{n}W + \frac{1}{n}B`, and have our convergence ratio
:math:`\hat{R} = \sqrt{\frac{\hat{V}}{W}}`. We check that for all parameters,
this ratio deviates from unity by less than the supplied threshold. | ['r', 'Runs', 'the', 'Gelman', 'Rubin', 'diagnostic', 'on', 'the', 'supplied', 'chains', '.'] | train | https://github.com/Samreay/ChainConsumer/blob/902288e4d85c2677a9051a2172e03128a6169ad7/chainconsumer/diagnostic.py#L11-L76 |
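
The Notes above fully determine the statistic, so it can be reproduced outside ChainConsumer. A self-contained numpy sketch computing :math:`\hat{R}` per parameter from an (m chains, n steps, parameters) array:

import numpy as np

def gelman_rubin_rhat(chains):
    chains = np.asarray(chains, dtype=float)
    m, n = chains.shape[0], chains.shape[1]
    chain_means = chains.mean(axis=1)
    all_mean = chain_means.mean(axis=0)
    b = n / (m - 1) * ((chain_means - all_mean) ** 2).sum(axis=0)
    w = chains.var(axis=1, ddof=1).mean(axis=0)
    v_hat = (n - 1) / n * w + b / n + b / (n * m)   # matches v = var + b/(n*m) above
    return np.sqrt(v_hat / w)

# Four well-mixed Gaussian walkers should give R-hat very close to 1:
rng = np.random.default_rng(0)
print(gelman_rubin_rhat(rng.normal(size=(4, 5000, 2))))
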
619 | KelSolaar/Umbra | umbra/ui/widgets/codeEditor_QPlainTextEdit.py | LinesNumbers_QWidget.separator_width | def separator_width(self, value):
"""
Setter for **self.__separator_width** attribute.
:param value: Attribute value.
:type value: int
"""
if value is not None:
assert type(value) is int, "'{0}' attribute: '{1}' type is not 'int'!".format("separator_width", value)
assert value > 0, "'{0}' attribute: '{1}' need to be exactly positive!".format("separator_width", value)
self.__separator_width = value | python | def separator_width(self, value):
"""
Setter for **self.__separator_width** attribute.
:param value: Attribute value.
:type value: int
"""
if value is not None:
assert type(value) is int, "'{0}' attribute: '{1}' type is not 'int'!".format("separator_width", value)
assert value > 0, "'{0}' attribute: '{1}' need to be exactly positive!".format("separator_width", value)
self.__separator_width = value | ['def', 'separator_width', '(', 'self', ',', 'value', ')', ':', 'if', 'value', 'is', 'not', 'None', ':', 'assert', 'type', '(', 'value', ')', 'is', 'int', ',', '"\'{0}\' attribute: \'{1}\' type is not \'int\'!"', '.', 'format', '(', '"separator_width"', ',', 'value', ')', 'assert', 'value', '>', '0', ',', '"\'{0}\' attribute: \'{1}\' need to be exactly positive!"', '.', 'format', '(', '"separator_width"', ',', 'value', ')', 'self', '.', '__separator_width', '=', 'value'] | Setter for **self.__separator_width** attribute.
:param value: Attribute value.
:type value: int | ['Setter', 'for', '**', 'self', '.', '__separator_width', '**', 'attribute', '.'] | train | https://github.com/KelSolaar/Umbra/blob/66f45f08d9d723787f1191989f8b0dda84b412ce/umbra/ui/widgets/codeEditor_QPlainTextEdit.py#L171-L182 |
620 | SKA-ScienceDataProcessor/integration-prototype | sip/execution_control/configuration_db/sip_config_db/_events/pubsub.py | _get_event_id | def _get_event_id(object_type: str) -> str:
"""Return an event key for the event on the object type.
This must be a unique event id for the object.
Args:
object_type (str): Type of object
Returns:
str, event id
"""
key = _keys.event_counter(object_type)
DB.watch(key, pipeline=True)
count = DB.get_value(key)
DB.increment(key)
DB.execute()
if count is None:
count = 0
return '{}_event_{:08d}'.format(object_type, int(count)) | python | def _get_event_id(object_type: str) -> str:
"""Return an event key for the event on the object type.
This must be a unique event id for the object.
Args:
object_type (str): Type of object
Returns:
str, event id
"""
key = _keys.event_counter(object_type)
DB.watch(key, pipeline=True)
count = DB.get_value(key)
DB.increment(key)
DB.execute()
if count is None:
count = 0
return '{}_event_{:08d}'.format(object_type, int(count)) | ['def', '_get_event_id', '(', 'object_type', ':', 'str', ')', '->', 'str', ':', 'key', '=', '_keys', '.', 'event_counter', '(', 'object_type', ')', 'DB', '.', 'watch', '(', 'key', ',', 'pipeline', '=', 'True', ')', 'count', '=', 'DB', '.', 'get_value', '(', 'key', ')', 'DB', '.', 'increment', '(', 'key', ')', 'DB', '.', 'execute', '(', ')', 'if', 'count', 'is', 'None', ':', 'count', '=', '0', 'return', "'{}_event_{:08d}'", '.', 'format', '(', 'object_type', ',', 'int', '(', 'count', ')', ')'] | Return an event key for the event on the object type.
This must be a unique event id for the object.
Args:
object_type (str): Type of object
Returns:
str, event id | ['Return', 'an', 'event', 'key', 'for', 'the', 'event', 'on', 'the', 'object', 'type', '.'] | train | https://github.com/SKA-ScienceDataProcessor/integration-prototype/blob/8c8006de6ad71dcd44114b0338780738079c87d4/sip/execution_control/configuration_db/sip_config_db/_events/pubsub.py#L169-L188 |
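
The WATCH/increment/execute sequence above is Redis optimistic locking through the module's DB wrapper. With redis-py directly, a bare counter does not need it, since INCR is already atomic; a sketch with a hypothetical key name:

import redis

r = redis.Redis()

def next_event_id(object_type):
    # INCR is atomic and returns the new value; subtract 1 so the first
    # id is ..._event_00000000, matching the read-then-increment above.
    count = r.incr('events:%s:count' % object_type)
    return '{}_event_{:08d}'.format(object_type, count - 1)
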
621 | dmlc/xgboost | python-package/xgboost/core.py | DMatrix.feature_names | def feature_names(self, feature_names):
"""Set feature names (column labels).
Parameters
----------
feature_names : list or None
Labels for features. None will reset existing feature names
"""
if feature_names is not None:
# validate feature name
try:
if not isinstance(feature_names, str):
feature_names = [n for n in iter(feature_names)]
else:
feature_names = [feature_names]
except TypeError:
feature_names = [feature_names]
if len(feature_names) != len(set(feature_names)):
raise ValueError('feature_names must be unique')
if len(feature_names) != self.num_col():
msg = 'feature_names must have the same length as data'
raise ValueError(msg)
# prohibit symbols that may affect parsing, e.g. [ ] <
if not all(isinstance(f, STRING_TYPES) and
not any(x in f for x in set(('[', ']', '<')))
for f in feature_names):
raise ValueError('feature_names may not contain [, ] or <')
else:
# reset feature_types also
self.feature_types = None
self._feature_names = feature_names | python | def feature_names(self, feature_names):
"""Set feature names (column labels).
Parameters
----------
feature_names : list or None
Labels for features. None will reset existing feature names
"""
if feature_names is not None:
# validate feature name
try:
if not isinstance(feature_names, str):
feature_names = [n for n in iter(feature_names)]
else:
feature_names = [feature_names]
except TypeError:
feature_names = [feature_names]
if len(feature_names) != len(set(feature_names)):
raise ValueError('feature_names must be unique')
if len(feature_names) != self.num_col():
msg = 'feature_names must have the same length as data'
raise ValueError(msg)
# prohibit symbols that may affect parsing, e.g. [ ] <
if not all(isinstance(f, STRING_TYPES) and
not any(x in f for x in set(('[', ']', '<')))
for f in feature_names):
raise ValueError('feature_names may not contain [, ] or <')
else:
# reset feature_types also
self.feature_types = None
self._feature_names = feature_names | ['def', 'feature_names', '(', 'self', ',', 'feature_names', ')', ':', 'if', 'feature_names', 'is', 'not', 'None', ':', '# validate feature name', 'try', ':', 'if', 'not', 'isinstance', '(', 'feature_names', ',', 'str', ')', ':', 'feature_names', '=', '[', 'n', 'for', 'n', 'in', 'iter', '(', 'feature_names', ')', ']', 'else', ':', 'feature_names', '=', '[', 'feature_names', ']', 'except', 'TypeError', ':', 'feature_names', '=', '[', 'feature_names', ']', 'if', 'len', '(', 'feature_names', ')', '!=', 'len', '(', 'set', '(', 'feature_names', ')', ')', ':', 'raise', 'ValueError', '(', "'feature_names must be unique'", ')', 'if', 'len', '(', 'feature_names', ')', '!=', 'self', '.', 'num_col', '(', ')', ':', 'msg', '=', "'feature_names must have the same length as data'", 'raise', 'ValueError', '(', 'msg', ')', '# prohibit to use symbols may affect to parse. e.g. []<', 'if', 'not', 'all', '(', 'isinstance', '(', 'f', ',', 'STRING_TYPES', ')', 'and', 'not', 'any', '(', 'x', 'in', 'f', 'for', 'x', 'in', 'set', '(', '(', "'['", ',', "']'", ',', "'<'", ')', ')', ')', 'for', 'f', 'in', 'feature_names', ')', ':', 'raise', 'ValueError', '(', "'feature_names may not contain [, ] or <'", ')', 'else', ':', '# reset feature_types also', 'self', '.', 'feature_types', '=', 'None', 'self', '.', '_feature_names', '=', 'feature_names'] | Set feature names (column labels).
Parameters
----------
feature_names : list or None
Labels for features. None will reset existing feature names | ['Set', 'feature', 'names', '(', 'column', 'labels', ')', '.'] | train | https://github.com/dmlc/xgboost/blob/253fdd8a42d5ec6b819788199584d27bf9ea6253/python-package/xgboost/core.py#L843-L874 |
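
A short usage sketch of the validation rules encoded in the setter above, on toy data:

import numpy as np
import xgboost as xgb

dtrain = xgb.DMatrix(np.random.rand(5, 3), label=np.zeros(5))
dtrain.feature_names = ['f0', 'f1', 'f2']    # accepted: unique, correct length
try:
    dtrain.feature_names = ['a', 'a', 'b']   # duplicates are rejected
except ValueError as exc:
    print(exc)                               # feature_names must be unique
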
622 | nion-software/nionswift-io | nionswift_plugin/TIFF_IO/tifffile.py | validate_jhove | def validate_jhove(filename, jhove=None, ignore=None):
"""Validate TIFF file using jhove -m TIFF-hul.
Raise ValueError if jhove outputs an error message unless the message
contains one of the strings in 'ignore'.
JHOVE does not support bigtiff or more than 50 IFDs.
See `JHOVE TIFF-hul Module <http://jhove.sourceforge.net/tiff-hul.html>`_
"""
import subprocess # noqa: delayed import
if ignore is None:
ignore = ['More than 50 IFDs']
if jhove is None:
jhove = 'jhove'
out = subprocess.check_output([jhove, filename, '-m', 'TIFF-hul'])
if b'ErrorMessage: ' in out:
for line in out.splitlines():
line = line.strip()
if line.startswith(b'ErrorMessage: '):
error = line[14:].decode('utf8')
for i in ignore:
if i in error:
break
else:
raise ValueError(error)
break | python | def validate_jhove(filename, jhove=None, ignore=None):
"""Validate TIFF file using jhove -m TIFF-hul.
Raise ValueError if jhove outputs an error message unless the message
contains one of the strings in 'ignore'.
JHOVE does not support bigtiff or more than 50 IFDs.
See `JHOVE TIFF-hul Module <http://jhove.sourceforge.net/tiff-hul.html>`_
"""
import subprocess # noqa: delayed import
if ignore is None:
ignore = ['More than 50 IFDs']
if jhove is None:
jhove = 'jhove'
out = subprocess.check_output([jhove, filename, '-m', 'TIFF-hul'])
if b'ErrorMessage: ' in out:
for line in out.splitlines():
line = line.strip()
if line.startswith(b'ErrorMessage: '):
error = line[14:].decode('utf8')
for i in ignore:
if i in error:
break
else:
raise ValueError(error)
break | ['def', 'validate_jhove', '(', 'filename', ',', 'jhove', '=', 'None', ',', 'ignore', '=', 'None', ')', ':', 'import', 'subprocess', '# noqa: delayed import', 'if', 'ignore', 'is', 'None', ':', 'ignore', '=', '[', "'More than 50 IFDs'", ']', 'if', 'jhove', 'is', 'None', ':', 'jhove', '=', "'jhove'", 'out', '=', 'subprocess', '.', 'check_output', '(', '[', 'jhove', ',', 'filename', ',', "'-m'", ',', "'TIFF-hul'", ']', ')', 'if', "b'ErrorMessage: '", 'in', 'out', ':', 'for', 'line', 'in', 'out', '.', 'splitlines', '(', ')', ':', 'line', '=', 'line', '.', 'strip', '(', ')', 'if', 'line', '.', 'startswith', '(', "b'ErrorMessage: '", ')', ':', 'error', '=', 'line', '[', '14', ':', ']', '.', 'decode', '(', "'utf8'", ')', 'for', 'i', 'in', 'ignore', ':', 'if', 'i', 'in', 'error', ':', 'break', 'else', ':', 'raise', 'ValueError', '(', 'error', ')', 'break'] | Validate TIFF file using jhove -m TIFF-hul.
Raise ValueError if jhove outputs an error message unless the message
contains one of the strings in 'ignore'.
JHOVE does not support bigtiff or more than 50 IFDs.
See `JHOVE TIFF-hul Module <http://jhove.sourceforge.net/tiff-hul.html>`_ | ['Validate', 'TIFF', 'file', 'using', 'jhove', '-', 'm', 'TIFF', '-', 'hul', '.'] | train | https://github.com/nion-software/nionswift-io/blob/e9ae37f01faa9332c48b647f93afd5ef2166b155/nionswift_plugin/TIFF_IO/tifffile.py#L10684-L10711 |
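
The function scans JHOVE's plain-text report for ErrorMessage lines. The same scan applied to a captured byte string rather than a live subprocess (the sample report below is illustrative, not verbatim JHOVE output):

out = b'''Jhove (Rel. 1.20)
 ReportingModule: TIFF-hul
 ErrorMessage: More than 50 IFDs
 Status: Not well-formed
'''

ignore = ['More than 50 IFDs']
for line in out.splitlines():
    line = line.strip()
    if line.startswith(b'ErrorMessage: '):
        error = line[14:].decode('utf8')
        if not any(i in error for i in ignore):
            raise ValueError(error)
        break   # this sample error is on the ignore list, so nothing is raised
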
623 | jbarlow83/OCRmyPDF | src/ocrmypdf/leptonica.py | Pix.scale | def scale(self, scale_xy):
"Returns the pix object rescaled according to the proportions given."
with _LeptonicaErrorTrap():
return Pix(lept.pixScale(self._cdata, scale_xy[0], scale_xy[1])) | python | def scale(self, scale_xy):
"Returns the pix object rescaled according to the proportions given."
with _LeptonicaErrorTrap():
return Pix(lept.pixScale(self._cdata, scale_xy[0], scale_xy[1])) | ['def', 'scale', '(', 'self', ',', 'scale_xy', ')', ':', 'with', '_LeptonicaErrorTrap', '(', ')', ':', 'return', 'Pix', '(', 'lept', '.', 'pixScale', '(', 'self', '.', '_cdata', ',', 'scale_xy', '[', '0', ']', ',', 'scale_xy', '[', '1', ']', ')', ')'] | Returns the pix object rescaled according to the proportions given. | ['Returns', 'the', 'pix', 'object', 'rescaled', 'according', 'to', 'the', 'proportions', 'given', '.'] | train | https://github.com/jbarlow83/OCRmyPDF/blob/79c84eefa353632a3d7ccddbd398c6678c1c1777/src/ocrmypdf/leptonica.py#L367-L370 |
624 | idlesign/uwsgiconf | uwsgiconf/options/applications.py | Applications.switch_into_lazy_mode | def switch_into_lazy_mode(self, affect_master=None):
"""Load apps in workers instead of master.
This option may have memory usage implications
as Copy-on-Write semantics can not be used.
.. note:: Consider using ``touch_chain_reload`` option in ``workers`` basic params
for lazy apps reloading.
:param bool affect_master: If **True** only workers will be
reloaded by uWSGI's reload signals; the master will remain alive.
.. warning:: uWSGI configuration changes are not picked up on reload by the master.
"""
self._set('lazy' if affect_master else 'lazy-apps', True, cast=bool)
return self._section | python | def switch_into_lazy_mode(self, affect_master=None):
"""Load apps in workers instead of master.
This option may have memory usage implications
as Copy-on-Write semantics can not be used.
.. note:: Consider using ``touch_chain_reload`` option in ``workers`` basic params
for lazy apps reloading.
:param bool affect_master: If **True** only workers will be
reloaded by uWSGI's reload signals; the master will remain alive.
.. warning:: uWSGI configuration changes are not picked up on reload by the master.
"""
self._set('lazy' if affect_master else 'lazy-apps', True, cast=bool)
return self._section | ['def', 'switch_into_lazy_mode', '(', 'self', ',', 'affect_master', '=', 'None', ')', ':', 'self', '.', '_set', '(', "'lazy'", 'if', 'affect_master', 'else', "'lazy-apps'", ',', 'True', ',', 'cast', '=', 'bool', ')', 'return', 'self', '.', '_section'] | Load apps in workers instead of master.
This option may have memory usage implications
as Copy-on-Write semantics can not be used.
.. note:: Consider using ``touch_chain_reload`` option in ``workers`` basic params
for lazy apps reloading.
:param bool affect_master: If **True** only workers will be
reloaded by uWSGI's reload signals; the master will remain alive.
.. warning:: uWSGI configuration changes are not picked up on reload by the master. | ['Load', 'apps', 'in', 'workers', 'instead', 'of', 'master', '.'] | train | https://github.com/idlesign/uwsgiconf/blob/475407acb44199edbf7e0a66261bfeb51de1afae/uwsgiconf/options/applications.py#L70-L88 |
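
In the generated uWSGI configuration these two branches correspond to the documented lazy-apps and lazy options. A usage sketch; the attribute name applications is assumed from the module path uwsgiconf/options/applications.py:

from uwsgiconf.config import Section

section = Section()
section.applications.switch_into_lazy_mode()                    # emits lazy-apps = true
section.applications.switch_into_lazy_mode(affect_master=True)  # emits lazy = true
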
625 | sanger-pathogens/circlator | circlator/merge.py | Merger._write_act_files | def _write_act_files(self, ref_fasta, qry_fasta, coords_file, outprefix):
'''Writes crunch file and shell script to start up ACT, showing comparison of ref and qry'''
if self.verbose:
print('Making ACT files from', ref_fasta, qry_fasta, coords_file)
ref_fasta = os.path.relpath(ref_fasta)
qry_fasta = os.path.relpath(qry_fasta)
coords_file = os.path.relpath(coords_file)
outprefix = os.path.relpath(outprefix)
self._index_fasta(ref_fasta)
self._index_fasta(qry_fasta)
crunch_file = outprefix + '.crunch'
pymummer.coords_file.convert_to_msp_crunch(
coords_file,
crunch_file,
ref_fai=ref_fasta + '.fai',
qry_fai=qry_fasta + '.fai'
)
bash_script = outprefix + '.start_act.sh'
with open(bash_script, 'w') as f:
print('#!/usr/bin/env bash', file=f)
print('act', ref_fasta, crunch_file, qry_fasta, file=f)
pyfastaq.utils.syscall('chmod +x ' + bash_script) | python | def _write_act_files(self, ref_fasta, qry_fasta, coords_file, outprefix):
'''Writes crunch file and shell script to start up ACT, showing comparison of ref and qry'''
if self.verbose:
print('Making ACT files from', ref_fasta, qry_fasta, coords_file)
ref_fasta = os.path.relpath(ref_fasta)
qry_fasta = os.path.relpath(qry_fasta)
coords_file = os.path.relpath(coords_file)
outprefix = os.path.relpath(outprefix)
self._index_fasta(ref_fasta)
self._index_fasta(qry_fasta)
crunch_file = outprefix + '.crunch'
pymummer.coords_file.convert_to_msp_crunch(
coords_file,
crunch_file,
ref_fai=ref_fasta + '.fai',
qry_fai=qry_fasta + '.fai'
)
bash_script = outprefix + '.start_act.sh'
with open(bash_script, 'w') as f:
print('#!/usr/bin/env bash', file=f)
print('act', ref_fasta, crunch_file, qry_fasta, file=f)
pyfastaq.utils.syscall('chmod +x ' + bash_script) | ['def', '_write_act_files', '(', 'self', ',', 'ref_fasta', ',', 'qry_fasta', ',', 'coords_file', ',', 'outprefix', ')', ':', 'if', 'self', '.', 'verbose', ':', 'print', '(', "'Making ACT files from'", ',', 'ref_fasta', ',', 'qry_fasta', ',', 'coords_file', ')', 'ref_fasta', '=', 'os', '.', 'path', '.', 'relpath', '(', 'ref_fasta', ')', 'qry_fasta', '=', 'os', '.', 'path', '.', 'relpath', '(', 'qry_fasta', ')', 'coords_file', '=', 'os', '.', 'path', '.', 'relpath', '(', 'coords_file', ')', 'outprefix', '=', 'os', '.', 'path', '.', 'relpath', '(', 'outprefix', ')', 'self', '.', '_index_fasta', '(', 'ref_fasta', ')', 'self', '.', '_index_fasta', '(', 'qry_fasta', ')', 'crunch_file', '=', 'outprefix', '+', "'.crunch'", 'pymummer', '.', 'coords_file', '.', 'convert_to_msp_crunch', '(', 'coords_file', ',', 'crunch_file', ',', 'ref_fai', '=', 'ref_fasta', '+', "'.fai'", ',', 'qry_fai', '=', 'qry_fasta', '+', "'.fai'", ')', 'bash_script', '=', 'outprefix', '+', "'.start_act.sh'", 'with', 'open', '(', 'bash_script', ',', "'w'", ')', 'as', 'f', ':', 'print', '(', "'#!/usr/bin/env bash'", ',', 'file', '=', 'f', ')', 'print', '(', "'act'", ',', 'ref_fasta', ',', 'crunch_file', ',', 'qry_fasta', ',', 'file', '=', 'f', ')', 'pyfastaq', '.', 'utils', '.', 'syscall', '(', "'chmod +x '", '+', 'bash_script', ')'] | Writes crunch file and shell script to start up ACT, showing comparison of ref and qry | ['Writes', 'crunch', 'file', 'and', 'shell', 'script', 'to', 'start', 'up', 'ACT', 'showing', 'comparison', 'of', 'ref', 'and', 'qry'] | train | https://github.com/sanger-pathogens/circlator/blob/a4befb8c9dbbcd4b3ad1899a95aa3e689d58b638/circlator/merge.py#L636-L659 |
626 | jreese/dotlink | dotlink/dotlink.py | Dotlink.scp | def scp(self, local_file, remote_path=''):
"""Copy a local file to the given remote path."""
if self.args.user:
upload_spec = '{0}@{1}:{2}'.format(self.args.user,
self.args.server,
remote_path)
else:
upload_spec = '{0}:{1}'.format(self.args.server, remote_path)
return self.sh('scp', local_file, upload_spec) | python | def scp(self, local_file, remote_path=''):
"""Copy a local file to the given remote path."""
if self.args.user:
upload_spec = '{0}@{1}:{2}'.format(self.args.user,
self.args.server,
remote_path)
else:
upload_spec = '{0}:{1}'.format(self.args.server, remote_path)
return self.sh('scp', local_file, upload_spec) | ['def', 'scp', '(', 'self', ',', 'local_file', ',', 'remote_path', '=', "''", ')', ':', 'if', 'self', '.', 'args', '.', 'user', ':', 'upload_spec', '=', "'{0}@{1}:{2}'", '.', 'format', '(', 'self', '.', 'args', '.', 'user', ',', 'self', '.', 'args', '.', 'server', ',', 'remote_path', ')', 'else', ':', 'upload_spec', '=', "'{0}:{1}'", '.', 'format', '(', 'self', '.', 'args', '.', 'server', ',', 'remote_path', ')', 'return', 'self', '.', 'sh', '(', "'scp'", ',', 'local_file', ',', 'upload_spec', ')'] | Copy a local file to the given remote path. | ['Copy', 'a', 'local', 'file', 'to', 'the', 'given', 'remote', 'path', '.'] | train | https://github.com/jreese/dotlink/blob/5e48c1493c20fc6df4ad0144e80563915ce339b6/dotlink/dotlink.py#L231-L240 |
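
The upload spec it builds is plain scp syntax, user@host:path or host:path. A dependency-free sketch of the same construction:

def scp_spec(server, remote_path='', user=None):
    if user:
        return '{0}@{1}:{2}'.format(user, server, remote_path)
    return '{0}:{1}'.format(server, remote_path)

assert scp_spec('example.com', '~/dotfiles', 'alice') == 'alice@example.com:~/dotfiles'
assert scp_spec('example.com', '~/dotfiles') == 'example.com:~/dotfiles'
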
627 | SavinaRoja/PyUserInput | pykeyboard/windows.py | PyKeyboard.release_key | def release_key(self, character=''):
"""
Release a given character key.
"""
try:
shifted = self.is_char_shifted(character)
except AttributeError:
win32api.keybd_event(character, 0, KEYEVENTF_KEYUP, 0)
else:
if shifted:
win32api.keybd_event(self.shift_key, 0, KEYEVENTF_KEYUP, 0)
char_vk = win32api.VkKeyScan(character)
win32api.keybd_event(char_vk, 0, KEYEVENTF_KEYUP, 0) | python | def release_key(self, character=''):
"""
Release a given character key.
"""
try:
shifted = self.is_char_shifted(character)
except AttributeError:
win32api.keybd_event(character, 0, KEYEVENTF_KEYUP, 0)
else:
if shifted:
win32api.keybd_event(self.shift_key, 0, KEYEVENTF_KEYUP, 0)
char_vk = win32api.VkKeyScan(character)
win32api.keybd_event(char_vk, 0, KEYEVENTF_KEYUP, 0) | ['def', 'release_key', '(', 'self', ',', 'character', '=', "''", ')', ':', 'try', ':', 'shifted', '=', 'self', '.', 'is_char_shifted', '(', 'character', ')', 'except', 'AttributeError', ':', 'win32api', '.', 'keybd_event', '(', 'character', ',', '0', ',', 'KEYEVENTF_KEYUP', ',', '0', ')', 'else', ':', 'if', 'shifted', ':', 'win32api', '.', 'keybd_event', '(', 'self', '.', 'shift_key', ',', '0', ',', 'KEYEVENTF_KEYUP', ',', '0', ')', 'char_vk', '=', 'win32api', '.', 'VkKeyScan', '(', 'character', ')', 'win32api', '.', 'keybd_event', '(', 'char_vk', ',', '0', ',', 'KEYEVENTF_KEYUP', ',', '0', ')'] | Release a given character key. | ['Release', 'a', 'given', 'character', 'key', '.'] | train | https://github.com/SavinaRoja/PyUserInput/blob/153c1d39b1a41b467b235fd182392d6dcbf07947/pykeyboard/windows.py#L58-L70 |
628 | maartenbreddels/ipyvolume | ipyvolume/astro.py | _randomSO3 | def _randomSO3():
"""Return random rotatation matrix, algo by James Arvo."""
u1 = np.random.random()
u2 = np.random.random()
u3 = np.random.random()
R = np.array(
[
[np.cos(2 * np.pi * u1), np.sin(2 * np.pi * u1), 0],
[-np.sin(2 * np.pi * u1), np.cos(2 * np.pi * u1), 0],
[0, 0, 1],
]
)
v = np.array([np.cos(2 * np.pi * u2) * np.sqrt(u3), np.sin(2 * np.pi * u2) * np.sqrt(u3), np.sqrt(1 - u3)])
H = np.identity(3) - 2 * v * np.transpose([v])
return -np.dot(H, R) | python | def _randomSO3():
"""Return random rotatation matrix, algo by James Arvo."""
u1 = np.random.random()
u2 = np.random.random()
u3 = np.random.random()
R = np.array(
[
[np.cos(2 * np.pi * u1), np.sin(2 * np.pi * u1), 0],
[-np.sin(2 * np.pi * u1), np.cos(2 * np.pi * u1), 0],
[0, 0, 1],
]
)
v = np.array([np.cos(2 * np.pi * u2) * np.sqrt(u3), np.sin(2 * np.pi * u2) * np.sqrt(u3), np.sqrt(1 - u3)])
H = np.identity(3) - 2 * v * np.transpose([v])
return -np.dot(H, R) | ['def', '_randomSO3', '(', ')', ':', 'u1', '=', 'np', '.', 'random', '.', 'random', '(', ')', 'u2', '=', 'np', '.', 'random', '.', 'random', '(', ')', 'u3', '=', 'np', '.', 'random', '.', 'random', '(', ')', 'R', '=', 'np', '.', 'array', '(', '[', '[', 'np', '.', 'cos', '(', '2', '*', 'np', '.', 'pi', '*', 'u1', ')', ',', 'np', '.', 'sin', '(', '2', '*', 'np', '.', 'pi', '*', 'u1', ')', ',', '0', ']', ',', '[', '-', 'np', '.', 'sin', '(', '2', '*', 'np', '.', 'pi', '*', 'u1', ')', ',', 'np', '.', 'cos', '(', '2', '*', 'np', '.', 'pi', '*', 'u1', ')', ',', '0', ']', ',', '[', '0', ',', '0', ',', '1', ']', ',', ']', ')', 'v', '=', 'np', '.', 'array', '(', '[', 'np', '.', 'cos', '(', '2', '*', 'np', '.', 'pi', '*', 'u2', ')', '*', 'np', '.', 'sqrt', '(', 'u3', ')', ',', 'np', '.', 'sin', '(', '2', '*', 'np', '.', 'pi', '*', 'u2', ')', '*', 'np', '.', 'sqrt', '(', 'u3', ')', ',', 'np', '.', 'sqrt', '(', '1', '-', 'u3', ')', ']', ')', 'H', '=', 'np', '.', 'identity', '(', '3', ')', '-', '2', '*', 'v', '*', 'np', '.', 'transpose', '(', '[', 'v', ']', ')', 'return', '-', 'np', '.', 'dot', '(', 'H', ',', 'R', ')'] | Return random rotatation matrix, algo by James Arvo. | ['Return', 'random', 'rotatation', 'matrix', 'algo', 'by', 'James', 'Arvo', '.'] | train | https://github.com/maartenbreddels/ipyvolume/blob/e68b72852b61276f8e6793bc8811f5b2432a155f/ipyvolume/astro.py#L11-L25 |
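
H is a Householder reflection (determinant -1) and R a rotation about z (determinant +1), so negating their 3x3 product restores determinant +1, and the result stays orthogonal. A quick numerical check, reusing _randomSO3 from above:

import numpy as np

np.random.seed(1)
M = _randomSO3()
assert np.allclose(M.T @ M, np.eye(3))      # orthogonal
assert np.isclose(np.linalg.det(M), 1.0)    # proper rotation, det = +1
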
629 | tgalal/yowsup | yowsup/config/transforms/config_dict.py | ConfigDictTransform.transform | def transform(self, config):
"""
:param config:
:type config: yowsup.config.config.Config
:return:
:rtype: dict
"""
out = {}
for prop in vars(config):
out[prop] = getattr(config, prop)
return out | python | def transform(self, config):
"""
:param config:
:type config: yowsup.config.config.Config
:return:
:rtype: dict
"""
out = {}
for prop in vars(config):
out[prop] = getattr(config, prop)
return out | ['def', 'transform', '(', 'self', ',', 'config', ')', ':', 'out', '=', '{', '}', 'for', 'prop', 'in', 'vars', '(', 'config', ')', ':', 'out', '[', 'prop', ']', '=', 'getattr', '(', 'config', ',', 'prop', ')', 'return', 'out'] | :param config:
:type config: yowsup.config.config.Config
:return:
:rtype: dict | [':', 'param', 'config', ':', ':', 'type', 'config', ':', 'dict', ':', 'return', ':', ':', 'rtype', ':', 'yowsup', '.', 'config', '.', 'config', '.', 'Config'] | train | https://github.com/tgalal/yowsup/blob/b0739461ba962bf221fc76047d9d60d8ce61bc3e/yowsup/config/transforms/config_dict.py#L8-L18 |
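
The vars()/getattr() round-trip works on any plain object whose state lives in instance attributes. A self-contained sketch with a stand-in config class (the attribute names are made up):

class DummyConfig(object):
    def __init__(self):
        self.phone = '1234567890'
        self.pushname = 'yowsup'

def to_dict(config):
    # vars() exposes the instance __dict__; copy each attribute out.
    return {prop: getattr(config, prop) for prop in vars(config)}

print(to_dict(DummyConfig()))   # {'phone': '1234567890', 'pushname': 'yowsup'}
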
630 | Apitax/Apitax | apitax/api/controllers/scriptax_controller.py | delete_driver_script | def delete_driver_script(driver, script_delete=None): # noqa: E501
"""Delete a script
Delete a script # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:param script_delete: The data needed to delete this script
:type script_delete: dict | bytes
:rtype: Response
"""
if connexion.request.is_json:
script_delete = ScriptDelete.from_dict(connexion.request.get_json()) # noqa: E501
response = errorIfUnauthorized(role='developer')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
driver.deleteDriverScript(script_delete.script.name)
return Response(status=200, body=response.getResponseBody()) | python | def delete_driver_script(driver, script_delete=None): # noqa: E501
"""Delete a script
Delete a script # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:param script_delete: The data needed to delete this script
:type script_delete: dict | bytes
:rtype: Response
"""
if connexion.request.is_json:
script_delete = ScriptDelete.from_dict(connexion.request.get_json()) # noqa: E501
response = errorIfUnauthorized(role='developer')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
driver.deleteDriverScript(script_delete.script.name)
return Response(status=200, body=response.getResponseBody()) | ['def', 'delete_driver_script', '(', 'driver', ',', 'script_delete', '=', 'None', ')', ':', '# noqa: E501', 'if', 'connexion', '.', 'request', '.', 'is_json', ':', 'script_delete', '=', 'ScriptDelete', '.', 'from_dict', '(', 'connexion', '.', 'request', '.', 'get_json', '(', ')', ')', '# noqa: E501', 'response', '=', 'errorIfUnauthorized', '(', 'role', '=', "'developer'", ')', 'if', 'response', ':', 'return', 'response', 'else', ':', 'response', '=', 'ApitaxResponse', '(', ')', 'driver', ':', 'Driver', '=', 'LoadedDrivers', '.', 'getDriver', '(', 'driver', ')', 'driver', '.', 'deleteDriverScript', '(', 'script_delete', '.', 'script', '.', 'name', ')', 'return', 'Response', '(', 'status', '=', '200', ',', 'body', '=', 'response', '.', 'getResponseBody', '(', ')', ')'] | Delete a script
Delete a script # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:param script_delete: The data needed to delete this script
:type script_delete: dict | bytes
:rtype: Response | ['Delete', 'a', 'script'] | train | https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/scriptax_controller.py#L47-L71 |
631 | PyCQA/astroid | astroid/manager.py | AstroidManager.ast_from_file | def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
"""given a module name, return the astroid object"""
try:
filepath = modutils.get_source_file(filepath, include_no_ext=True)
source = True
except modutils.NoSourceFile:
pass
if modname is None:
try:
modname = ".".join(modutils.modpath_from_file(filepath))
except ImportError:
modname = filepath
if (
modname in self.astroid_cache
and self.astroid_cache[modname].file == filepath
):
return self.astroid_cache[modname]
if source:
from astroid.builder import AstroidBuilder
return AstroidBuilder(self).file_build(filepath, modname)
if fallback and modname:
return self.ast_from_module_name(modname)
raise exceptions.AstroidBuildingError(
"Unable to build an AST for {path}.", path=filepath
) | python | def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
"""given a module name, return the astroid object"""
try:
filepath = modutils.get_source_file(filepath, include_no_ext=True)
source = True
except modutils.NoSourceFile:
pass
if modname is None:
try:
modname = ".".join(modutils.modpath_from_file(filepath))
except ImportError:
modname = filepath
if (
modname in self.astroid_cache
and self.astroid_cache[modname].file == filepath
):
return self.astroid_cache[modname]
if source:
from astroid.builder import AstroidBuilder
return AstroidBuilder(self).file_build(filepath, modname)
if fallback and modname:
return self.ast_from_module_name(modname)
raise exceptions.AstroidBuildingError(
"Unable to build an AST for {path}.", path=filepath
) | ['def', 'ast_from_file', '(', 'self', ',', 'filepath', ',', 'modname', '=', 'None', ',', 'fallback', '=', 'True', ',', 'source', '=', 'False', ')', ':', 'try', ':', 'filepath', '=', 'modutils', '.', 'get_source_file', '(', 'filepath', ',', 'include_no_ext', '=', 'True', ')', 'source', '=', 'True', 'except', 'modutils', '.', 'NoSourceFile', ':', 'pass', 'if', 'modname', 'is', 'None', ':', 'try', ':', 'modname', '=', '"."', '.', 'join', '(', 'modutils', '.', 'modpath_from_file', '(', 'filepath', ')', ')', 'except', 'ImportError', ':', 'modname', '=', 'filepath', 'if', '(', 'modname', 'in', 'self', '.', 'astroid_cache', 'and', 'self', '.', 'astroid_cache', '[', 'modname', ']', '.', 'file', '==', 'filepath', ')', ':', 'return', 'self', '.', 'astroid_cache', '[', 'modname', ']', 'if', 'source', ':', 'from', 'astroid', '.', 'builder', 'import', 'AstroidBuilder', 'return', 'AstroidBuilder', '(', 'self', ')', '.', 'file_build', '(', 'filepath', ',', 'modname', ')', 'if', 'fallback', 'and', 'modname', ':', 'return', 'self', '.', 'ast_from_module_name', '(', 'modname', ')', 'raise', 'exceptions', '.', 'AstroidBuildingError', '(', '"Unable to build an AST for {path}."', ',', 'path', '=', 'filepath', ')'] | given a module name, return the astroid object | ['given', 'a', 'module', 'name', 'return', 'the', 'astroid', 'object'] | train | https://github.com/PyCQA/astroid/blob/e0a298df55b15abcb77c2a93253f5ab7be52d0fb/astroid/manager.py#L71-L96 |
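
A usage sketch through astroid's MANAGER singleton (an AstroidManager instance); a repeated call with the same path is answered from astroid_cache instead of re-parsing:

import astroid

module = astroid.MANAGER.ast_from_file(astroid.__file__)
print(module.name)   # dotted module name derived from the file path
same = astroid.MANAGER.ast_from_file(astroid.__file__)
assert same is module   # second lookup hits the cache
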
632 | tanghaibao/jcvi | jcvi/assembly/postprocess.py | overlap | def overlap(args):
"""
%prog overlap ctgfasta poolfasta
Fish out the sequences in `poolfasta` that overlap with `ctgfasta`.
Mix and combine using `minimus2`.
"""
p = OptionParser(overlap.__doc__)
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
ctgfasta, poolfasta = args
prefix = ctgfasta.split(".")[0]
rid = list(Fasta(ctgfasta).iterkeys())
assert len(rid) == 1, "Use overlapbatch() to improve multi-FASTA file"
rid = rid[0]
splitctgfasta = ctgfasta.rsplit(".", 1)[0] + ".split.fasta"
ctgfasta = run_gapsplit(infile=ctgfasta, outfile=splitctgfasta)
# Run BLAST
blastfile = ctgfasta + ".blast"
run_megablast(infile=ctgfasta, outfile=blastfile, db=poolfasta)
# Extract contigs and merge using minimus2
closuredir = prefix + ".closure"
closure = False
if need_update(blastfile, closuredir):
mkdir(closuredir, overwrite=True)
closure = True
if closure:
idsfile = op.join(closuredir, prefix + ".ids")
cmd = "cut -f2 {0} | sort -u".format(blastfile)
sh(cmd, outfile=idsfile)
idsfastafile = op.join(closuredir, prefix + ".ids.fasta")
cmd = "faSomeRecords {0} {1} {2}".format(poolfasta, idsfile, idsfastafile)
sh(cmd)
# This step is a hack to weight the bases from original sequences more
# than the pulled sequences, by literally adding another copy to be used
# in consensus calls.
redundantfastafile = op.join(closuredir, prefix + ".redundant.fasta")
format([ctgfasta, redundantfastafile, "--prefix=RED."])
mergedfastafile = op.join(closuredir, prefix + ".merged.fasta")
cmd = "cat {0} {1} {2}".format(ctgfasta, redundantfastafile, idsfastafile)
sh(cmd, outfile=mergedfastafile)
afgfile = op.join(closuredir, prefix + ".afg")
cmd = "toAmos -s {0} -o {1}".format(mergedfastafile, afgfile)
sh(cmd)
cwd = os.getcwd()
os.chdir(closuredir)
cmd = "minimus2 {0} -D REFCOUNT=0".format(prefix)
cmd += " -D OVERLAP=100 -D MINID=98"
sh(cmd)
os.chdir(cwd)
# Analyze output, make sure that:
# + Get the singletons of the original set back
# + Drop any contig that is comprised entirely of pulled set
originalIDs = set(Fasta(ctgfasta).iterkeys())
minimuscontig = op.join(closuredir, prefix + ".contig")
c = ContigFile(minimuscontig)
excludecontigs = set()
for rec in c.iter_records():
reads = set(x.id for x in rec.reads)
if reads.isdisjoint(originalIDs):
excludecontigs.add(rec.id)
logging.debug("Exclude contigs: {0}".\
format(", ".join(sorted(excludecontigs))))
finalfasta = prefix + ".improved.fasta_"
fw = open(finalfasta, "w")
minimusfasta = op.join(closuredir, prefix + ".fasta")
f = Fasta(minimusfasta)
for id, rec in f.iteritems_ordered():
if id in excludecontigs:
continue
SeqIO.write([rec], fw, "fasta")
singletonfile = op.join(closuredir, prefix + ".singletons")
singletons = set(x.strip() for x in open(singletonfile))
leftovers = singletons & originalIDs
logging.debug("Pull leftover singletons: {0}".\
format(", ".join(sorted(leftovers))))
f = Fasta(ctgfasta)
for id, rec in f.iteritems_ordered():
if id not in leftovers:
continue
SeqIO.write([rec], fw, "fasta")
fw.close()
fastafile = finalfasta
finalfasta = fastafile.rstrip("_")
format([fastafile, finalfasta, "--sequential", "--pad0=3",
"--prefix={0}_".format(rid)])
logging.debug("Improved FASTA written to `{0}`.".format(finalfasta))
n50([ctgfasta])
n50([finalfasta])
errlog = "error.log"
for f in (fastafile, blastfile, errlog):
if op.exists(f):
os.remove(f) | python | def overlap(args):
"""
%prog overlap ctgfasta poolfasta
Fish out the sequences in `poolfasta` that overlap with `ctgfasta`.
Mix and combine using `minimus2`.
"""
p = OptionParser(overlap.__doc__)
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
ctgfasta, poolfasta = args
prefix = ctgfasta.split(".")[0]
rid = list(Fasta(ctgfasta).iterkeys())
assert len(rid) == 1, "Use overlapbatch() to improve multi-FASTA file"
rid = rid[0]
splitctgfasta = ctgfasta.rsplit(".", 1)[0] + ".split.fasta"
ctgfasta = run_gapsplit(infile=ctgfasta, outfile=splitctgfasta)
# Run BLAST
blastfile = ctgfasta + ".blast"
run_megablast(infile=ctgfasta, outfile=blastfile, db=poolfasta)
# Extract contigs and merge using minimus2
closuredir = prefix + ".closure"
closure = False
if need_update(blastfile, closuredir):
mkdir(closuredir, overwrite=True)
closure = True
if closure:
idsfile = op.join(closuredir, prefix + ".ids")
cmd = "cut -f2 {0} | sort -u".format(blastfile)
sh(cmd, outfile=idsfile)
idsfastafile = op.join(closuredir, prefix + ".ids.fasta")
cmd = "faSomeRecords {0} {1} {2}".format(poolfasta, idsfile, idsfastafile)
sh(cmd)
# This step is a hack to weight the bases from original sequences more
# than the pulled sequences, by literally adding another copy to be used
# in consensus calls.
redundantfastafile = op.join(closuredir, prefix + ".redundant.fasta")
format([ctgfasta, redundantfastafile, "--prefix=RED."])
mergedfastafile = op.join(closuredir, prefix + ".merged.fasta")
cmd = "cat {0} {1} {2}".format(ctgfasta, redundantfastafile, idsfastafile)
sh(cmd, outfile=mergedfastafile)
afgfile = op.join(closuredir, prefix + ".afg")
cmd = "toAmos -s {0} -o {1}".format(mergedfastafile, afgfile)
sh(cmd)
cwd = os.getcwd()
os.chdir(closuredir)
cmd = "minimus2 {0} -D REFCOUNT=0".format(prefix)
cmd += " -D OVERLAP=100 -D MINID=98"
sh(cmd)
os.chdir(cwd)
# Analyze output, make sure that:
# + Get the singletons of the original set back
# + Drop any contig that is comprised entirely of pulled set
originalIDs = set(Fasta(ctgfasta).iterkeys())
minimuscontig = op.join(closuredir, prefix + ".contig")
c = ContigFile(minimuscontig)
excludecontigs = set()
for rec in c.iter_records():
reads = set(x.id for x in rec.reads)
if reads.isdisjoint(originalIDs):
excludecontigs.add(rec.id)
logging.debug("Exclude contigs: {0}".\
format(", ".join(sorted(excludecontigs))))
finalfasta = prefix + ".improved.fasta_"
fw = open(finalfasta, "w")
minimusfasta = op.join(closuredir, prefix + ".fasta")
f = Fasta(minimusfasta)
for id, rec in f.iteritems_ordered():
if id in excludecontigs:
continue
SeqIO.write([rec], fw, "fasta")
singletonfile = op.join(closuredir, prefix + ".singletons")
singletons = set(x.strip() for x in open(singletonfile))
leftovers = singletons & originalIDs
logging.debug("Pull leftover singletons: {0}".\
format(", ".join(sorted(leftovers))))
f = Fasta(ctgfasta)
for id, rec in f.iteritems_ordered():
if id not in leftovers:
continue
SeqIO.write([rec], fw, "fasta")
fw.close()
fastafile = finalfasta
finalfasta = fastafile.rstrip("_")
format([fastafile, finalfasta, "--sequential", "--pad0=3",
"--prefix={0}_".format(rid)])
logging.debug("Improved FASTA written to `{0}`.".format(finalfasta))
n50([ctgfasta])
n50([finalfasta])
errlog = "error.log"
for f in (fastafile, blastfile, errlog):
if op.exists(f):
os.remove(f) | ['def', 'overlap', '(', 'args', ')', ':', 'p', '=', 'OptionParser', '(', 'overlap', '.', '__doc__', ')', 'opts', ',', 'args', '=', 'p', '.', 'parse_args', '(', 'args', ')', 'if', 'len', '(', 'args', ')', '!=', '2', ':', 'sys', '.', 'exit', '(', 'not', 'p', '.', 'print_help', '(', ')', ')', 'ctgfasta', ',', 'poolfasta', '=', 'args', 'prefix', '=', 'ctgfasta', '.', 'split', '(', '"."', ')', '[', '0', ']', 'rid', '=', 'list', '(', 'Fasta', '(', 'ctgfasta', ')', '.', 'iterkeys', '(', ')', ')', 'assert', 'len', '(', 'rid', ')', '==', '1', ',', '"Use overlapbatch() to improve multi-FASTA file"', 'rid', '=', 'rid', '[', '0', ']', 'splitctgfasta', '=', 'ctgfasta', '.', 'rsplit', '(', '"."', ',', '1', ')', '[', '0', ']', '+', '".split.fasta"', 'ctgfasta', '=', 'run_gapsplit', '(', 'infile', '=', 'ctgfasta', ',', 'outfile', '=', 'splitctgfasta', ')', '# Run BLAST', 'blastfile', '=', 'ctgfasta', '+', '".blast"', 'run_megablast', '(', 'infile', '=', 'ctgfasta', ',', 'outfile', '=', 'blastfile', ',', 'db', '=', 'poolfasta', ')', '# Extract contigs and merge using minimus2', 'closuredir', '=', 'prefix', '+', '".closure"', 'closure', '=', 'False', 'if', 'need_update', '(', 'blastfile', ',', 'closuredir', ')', ':', 'mkdir', '(', 'closuredir', ',', 'overwrite', '=', 'True', ')', 'closure', '=', 'True', 'if', 'closure', ':', 'idsfile', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".ids"', ')', 'cmd', '=', '"cut -f2 {0} | sort -u"', '.', 'format', '(', 'blastfile', ')', 'sh', '(', 'cmd', ',', 'outfile', '=', 'idsfile', ')', 'idsfastafile', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".ids.fasta"', ')', 'cmd', '=', '"faSomeRecords {0} {1} {2}"', '.', 'format', '(', 'poolfasta', ',', 'idsfile', ',', 'idsfastafile', ')', 'sh', '(', 'cmd', ')', '# This step is a hack to weight the bases from original sequences more', '# than the pulled sequences, by literally adding another copy to be used', '# in consensus calls.', 'redundantfastafile', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".redundant.fasta"', ')', 'format', '(', '[', 'ctgfasta', ',', 'redundantfastafile', ',', '"--prefix=RED."', ']', ')', 'mergedfastafile', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".merged.fasta"', ')', 'cmd', '=', '"cat {0} {1} {2}"', '.', 'format', '(', 'ctgfasta', ',', 'redundantfastafile', ',', 'idsfastafile', ')', 'sh', '(', 'cmd', ',', 'outfile', '=', 'mergedfastafile', ')', 'afgfile', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".afg"', ')', 'cmd', '=', '"toAmos -s {0} -o {1}"', '.', 'format', '(', 'mergedfastafile', ',', 'afgfile', ')', 'sh', '(', 'cmd', ')', 'cwd', '=', 'os', '.', 'getcwd', '(', ')', 'os', '.', 'chdir', '(', 'closuredir', ')', 'cmd', '=', '"minimus2 {0} -D REFCOUNT=0"', '.', 'format', '(', 'prefix', ')', 'cmd', '+=', '" -D OVERLAP=100 -D MINID=98"', 'sh', '(', 'cmd', ')', 'os', '.', 'chdir', '(', 'cwd', ')', '# Analyze output, make sure that:', '# + Get the singletons of the original set back', '# + Drop any contig that is comprised entirely of pulled set', 'originalIDs', '=', 'set', '(', 'Fasta', '(', 'ctgfasta', ')', '.', 'iterkeys', '(', ')', ')', 'minimuscontig', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".contig"', ')', 'c', '=', 'ContigFile', '(', 'minimuscontig', ')', 'excludecontigs', '=', 'set', '(', ')', 'for', 'rec', 'in', 'c', '.', 'iter_records', '(', ')', ':', 'reads', '=', 'set', '(', 'x', '.', 'id', 'for', 'x', 'in', 'rec', '.', 'reads', ')', 'if', 'reads', 
'.', 'isdisjoint', '(', 'originalIDs', ')', ':', 'excludecontigs', '.', 'add', '(', 'rec', '.', 'id', ')', 'logging', '.', 'debug', '(', '"Exclude contigs: {0}"', '.', 'format', '(', '", "', '.', 'join', '(', 'sorted', '(', 'excludecontigs', ')', ')', ')', ')', 'finalfasta', '=', 'prefix', '+', '".improved.fasta_"', 'fw', '=', 'open', '(', 'finalfasta', ',', '"w"', ')', 'minimusfasta', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".fasta"', ')', 'f', '=', 'Fasta', '(', 'minimusfasta', ')', 'for', 'id', ',', 'rec', 'in', 'f', '.', 'iteritems_ordered', '(', ')', ':', 'if', 'id', 'in', 'excludecontigs', ':', 'continue', 'SeqIO', '.', 'write', '(', '[', 'rec', ']', ',', 'fw', ',', '"fasta"', ')', 'singletonfile', '=', 'op', '.', 'join', '(', 'closuredir', ',', 'prefix', '+', '".singletons"', ')', 'singletons', '=', 'set', '(', 'x', '.', 'strip', '(', ')', 'for', 'x', 'in', 'open', '(', 'singletonfile', ')', ')', 'leftovers', '=', 'singletons', '&', 'originalIDs', 'logging', '.', 'debug', '(', '"Pull leftover singletons: {0}"', '.', 'format', '(', '", "', '.', 'join', '(', 'sorted', '(', 'leftovers', ')', ')', ')', ')', 'f', '=', 'Fasta', '(', 'ctgfasta', ')', 'for', 'id', ',', 'rec', 'in', 'f', '.', 'iteritems_ordered', '(', ')', ':', 'if', 'id', 'not', 'in', 'leftovers', ':', 'continue', 'SeqIO', '.', 'write', '(', '[', 'rec', ']', ',', 'fw', ',', '"fasta"', ')', 'fw', '.', 'close', '(', ')', 'fastafile', '=', 'finalfasta', 'finalfasta', '=', 'fastafile', '.', 'rstrip', '(', '"_"', ')', 'format', '(', '[', 'fastafile', ',', 'finalfasta', ',', '"--sequential"', ',', '"--pad0=3"', ',', '"--prefix={0}_"', '.', 'format', '(', 'rid', ')', ']', ')', 'logging', '.', 'debug', '(', '"Improved FASTA written to `{0}`."', '.', 'format', '(', 'finalfasta', ')', ')', 'n50', '(', '[', 'ctgfasta', ']', ')', 'n50', '(', '[', 'finalfasta', ']', ')', 'errlog', '=', '"error.log"', 'for', 'f', 'in', '(', 'fastafile', ',', 'blastfile', ',', 'errlog', ')', ':', 'if', 'op', '.', 'exists', '(', 'f', ')', ':', 'os', '.', 'remove', '(', 'f', ')'] | %prog overlap ctgfasta poolfasta
Fish out the sequences in `poolfasta` that overlap with `ctgfasta`.
Mix and combine using `minimus2`. | ['%prog', 'overlap', 'ctgfasta', 'poolfasta'] | train | https://github.com/tanghaibao/jcvi/blob/d2e31a77b6ade7f41f3b321febc2b4744d1cdeca/jcvi/assembly/postprocess.py#L388-L503 |
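Functions in this repo follow jcvi's `%prog` convention: each action takes an argv-style list, so the module doubles as a CLI. A minimal driver sketch (the filenames are hypothetical, and the external tools the pipeline shells out to, megablast, `faSomeRecords`, `toAmos`, `minimus2`, must be on PATH):

```python
from jcvi.assembly.postprocess import overlap

# Equivalent to: python -m jcvi.assembly.postprocess overlap ctg.fasta pool.fasta
overlap(["ctg.fasta", "pool.fasta"])
```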
633 | StackStorm/pybind | pybind/nos/v6_0_2f/rbridge_id/qos/__init__.py | qos._set_rcv_queue | def _set_rcv_queue(self, v, load=False):
"""
Setter method for rcv_queue, mapped from YANG variable /rbridge_id/qos/rcv_queue (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_rcv_queue is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rcv_queue() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=rcv_queue.rcv_queue, is_container='container', presence=False, yang_name="rcv-queue", rest_name="rcv-queue", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure qos ingress queueing', u'callpoint': u'qos_ingress_queue', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """rcv_queue must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=rcv_queue.rcv_queue, is_container='container', presence=False, yang_name="rcv-queue", rest_name="rcv-queue", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure qos ingress queueing', u'callpoint': u'qos_ingress_queue', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)""",
})
self.__rcv_queue = t
if hasattr(self, '_set'):
self._set() | python | def _set_rcv_queue(self, v, load=False):
"""
Setter method for rcv_queue, mapped from YANG variable /rbridge_id/qos/rcv_queue (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_rcv_queue is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rcv_queue() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=rcv_queue.rcv_queue, is_container='container', presence=False, yang_name="rcv-queue", rest_name="rcv-queue", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure qos ingress queueing', u'callpoint': u'qos_ingress_queue', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """rcv_queue must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=rcv_queue.rcv_queue, is_container='container', presence=False, yang_name="rcv-queue", rest_name="rcv-queue", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure qos ingress queueing', u'callpoint': u'qos_ingress_queue', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)""",
})
self.__rcv_queue = t
if hasattr(self, '_set'):
self._set() | ['def', '_set_rcv_queue', '(', 'self', ',', 'v', ',', 'load', '=', 'False', ')', ':', 'if', 'hasattr', '(', 'v', ',', '"_utype"', ')', ':', 'v', '=', 'v', '.', '_utype', '(', 'v', ')', 'try', ':', 't', '=', 'YANGDynClass', '(', 'v', ',', 'base', '=', 'rcv_queue', '.', 'rcv_queue', ',', 'is_container', '=', "'container'", ',', 'presence', '=', 'False', ',', 'yang_name', '=', '"rcv-queue"', ',', 'rest_name', '=', '"rcv-queue"', ',', 'parent', '=', 'self', ',', 'path_helper', '=', 'self', '.', '_path_helper', ',', 'extmethods', '=', 'self', '.', '_extmethods', ',', 'register_paths', '=', 'True', ',', 'extensions', '=', '{', "u'tailf-common'", ':', '{', "u'info'", ':', "u'Configure qos ingress queueing'", ',', "u'callpoint'", ':', "u'qos_ingress_queue'", ',', "u'cli-incomplete-no'", ':', 'None', '}', '}', ',', 'namespace', '=', "'urn:brocade.com:mgmt:brocade-qos'", ',', 'defining_module', '=', "'brocade-qos'", ',', 'yang_type', '=', "'container'", ',', 'is_config', '=', 'True', ')', 'except', '(', 'TypeError', ',', 'ValueError', ')', ':', 'raise', 'ValueError', '(', '{', "'error-string'", ':', '"""rcv_queue must be of a type compatible with container"""', ',', "'defined-type'", ':', '"container"', ',', "'generated-type'", ':', '"""YANGDynClass(base=rcv_queue.rcv_queue, is_container=\'container\', presence=False, yang_name="rcv-queue", rest_name="rcv-queue", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure qos ingress queueing\', u\'callpoint\': u\'qos_ingress_queue\', u\'cli-incomplete-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-qos\', defining_module=\'brocade-qos\', yang_type=\'container\', is_config=True)"""', ',', '}', ')', 'self', '.', '__rcv_queue', '=', 't', 'if', 'hasattr', '(', 'self', ',', "'_set'", ')', ':', 'self', '.', '_set', '(', ')'] | Setter method for rcv_queue, mapped from YANG variable /rbridge_id/qos/rcv_queue (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_rcv_queue is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rcv_queue() directly. | ['Setter', 'method', 'for', 'rcv_queue', 'mapped', 'from', 'YANG', 'variable', '/', 'rbridge_id', '/', 'qos', '/', 'rcv_queue', '(', 'container', ')', 'If', 'this', 'variable', 'is', 'read', '-', 'only', '(', 'config', ':', 'false', ')', 'in', 'the', 'source', 'YANG', 'file', 'then', '_set_rcv_queue', 'is', 'considered', 'as', 'a', 'private', 'method', '.', 'Backends', 'looking', 'to', 'populate', 'this', 'variable', 'should', 'do', 'so', 'via', 'calling', 'thisObj', '.', '_set_rcv_queue', '()', 'directly', '.'] | train | https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/nos/v6_0_2f/rbridge_id/qos/__init__.py#L127-L148 |
634 | BernardFW/bernard | src/bernard/middleware/_builtins.py | AutoSleep.clean_stacks | def clean_stacks(self, stacks: List[List[BaseLayer]]) \
-> List[List[BaseLayer]]:
"""
Two cases: if a stack finishes by a sleep then let's keep it (it means
that there was nothing after the text). However if the stack finishes
with something else (like a quick reply) then we don't split it: the
sleeps are dropped and the rest of the stack is preserved.
"""
ns: List[List[BaseLayer]] = []
for stack in stacks:
if isinstance(stack[-1], lyr.Sleep):
ns.extend([x] for x in stack)
else:
ns.append([x for x in stack if not isinstance(x, lyr.Sleep)])
if not ns:
return ns
last = ns[-1]
if len(last) == 1 and isinstance(last[0], lyr.Sleep):
return ns[:-1]
else:
return ns | python | def clean_stacks(self, stacks: List[List[BaseLayer]]) \
-> List[List[BaseLayer]]:
"""
Two cases: if a stack finishes by a sleep then let's keep it (it means
that there was nothing after the text). However if the stack finishes
with something else (like a quick reply) then we don't split it: the
sleeps are dropped and the rest of the stack is preserved.
"""
ns: List[List[BaseLayer]] = []
for stack in stacks:
if isinstance(stack[-1], lyr.Sleep):
ns.extend([x] for x in stack)
else:
ns.append([x for x in stack if not isinstance(x, lyr.Sleep)])
if not ns:
return ns
last = ns[-1]
if len(last) == 1 and isinstance(last[0], lyr.Sleep):
return ns[:-1]
else:
return ns | ['def', 'clean_stacks', '(', 'self', ',', 'stacks', ':', 'List', '[', 'List', '[', 'BaseLayer', ']', ']', ')', '->', 'List', '[', 'List', '[', 'BaseLayer', ']', ']', ':', 'ns', ':', 'List', '[', 'List', '[', 'BaseLayer', ']', ']', '=', '[', ']', 'for', 'stack', 'in', 'stacks', ':', 'if', 'isinstance', '(', 'stack', '[', '-', '1', ']', ',', 'lyr', '.', 'Sleep', ')', ':', 'ns', '.', 'extend', '(', '[', 'x', ']', 'for', 'x', 'in', 'stack', ')', 'else', ':', 'ns', '.', 'append', '(', '[', 'x', 'for', 'x', 'in', 'stack', 'if', 'not', 'isinstance', '(', 'x', ',', 'lyr', '.', 'Sleep', ')', ']', ')', 'if', 'not', 'ns', ':', 'return', 'ns', 'last', '=', 'ns', '[', '-', '1', ']', 'if', 'len', '(', 'last', ')', '==', '1', 'and', 'isinstance', '(', 'last', '[', '0', ']', ',', 'lyr', '.', 'Sleep', ')', ':', 'return', 'ns', '[', ':', '-', '1', ']', 'else', ':', 'return', 'ns'] | Two cases: if a stack finishes by a sleep then let's keep it (it means
that there was nothing after the text). However if the stack finishes
with something else (like a quick reply) then we don't split it: the
sleeps are dropped and the rest of the stack is preserved. | ['Two', 'cases', ':', 'if', 'a', 'stack', 'finishes', 'by', 'a', 'sleep', 'then', 'let', 's', 'keep', 'it', '(', 'it', 'means', 'that', 'there', 'was', 'nothing', 'after', 'the', 'text', ')', '.', 'However', 'if', 'the', 'stack', 'finishes', 'with', 'something', 'else', '(', 'like', 'a', 'quick', 'reply', ')', 'then', 'we', 'don', 't', 'split', 'it', ':', 'the', 'sleeps', 'are', 'dropped', 'and', 'the', 'rest', 'of', 'the', 'stack', 'is', 'preserved', '.'] | train | https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_builtins.py#L95-L119 |
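A self-contained sketch of the two branches described in that docstring, with throwaway `Sleep` and `Text` classes standing in for bernard's real layer types (only the list logic mirrors the method):

```python
class Sleep:
    pass

class Text:
    pass

def clean_stacks(stacks):
    ns = []
    for stack in stacks:
        if isinstance(stack[-1], Sleep):
            # Ends with a sleep: explode it into one-layer stacks.
            ns.extend([x] for x in stack)
        else:
            # Otherwise drop the sleeps but keep the stack together.
            ns.append([x for x in stack if not isinstance(x, Sleep)])
    # A trailing stack that is nothing but a sleep gets dropped.
    if ns and len(ns[-1]) == 1 and isinstance(ns[-1][0], Sleep):
        return ns[:-1]
    return ns

t, s = Text(), Sleep()
assert clean_stacks([[t, s]]) == [[t]]        # exploded, trailing sleep dropped
assert clean_stacks([[t, s, t]]) == [[t, t]]  # sleeps stripped, stack kept whole
```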
635 | JdeRobot/base | src/drivers/MAVLinkServer/MAVProxy/pymavlink/rotmat.py | Matrix3.from_euler | def from_euler(self, roll, pitch, yaw):
'''fill the matrix from Euler angles in radians'''
cp = cos(pitch)
sp = sin(pitch)
sr = sin(roll)
cr = cos(roll)
sy = sin(yaw)
cy = cos(yaw)
self.a.x = cp * cy
self.a.y = (sr * sp * cy) - (cr * sy)
self.a.z = (cr * sp * cy) + (sr * sy)
self.b.x = cp * sy
self.b.y = (sr * sp * sy) + (cr * cy)
self.b.z = (cr * sp * sy) - (sr * cy)
self.c.x = -sp
self.c.y = sr * cp
self.c.z = cr * cp | python | def from_euler(self, roll, pitch, yaw):
'''fill the matrix from Euler angles in radians'''
cp = cos(pitch)
sp = sin(pitch)
sr = sin(roll)
cr = cos(roll)
sy = sin(yaw)
cy = cos(yaw)
self.a.x = cp * cy
self.a.y = (sr * sp * cy) - (cr * sy)
self.a.z = (cr * sp * cy) + (sr * sy)
self.b.x = cp * sy
self.b.y = (sr * sp * sy) + (cr * cy)
self.b.z = (cr * sp * sy) - (sr * cy)
self.c.x = -sp
self.c.y = sr * cp
self.c.z = cr * cp | ['def', 'from_euler', '(', 'self', ',', 'roll', ',', 'pitch', ',', 'yaw', ')', ':', 'cp', '=', 'cos', '(', 'pitch', ')', 'sp', '=', 'sin', '(', 'pitch', ')', 'sr', '=', 'sin', '(', 'roll', ')', 'cr', '=', 'cos', '(', 'roll', ')', 'sy', '=', 'sin', '(', 'yaw', ')', 'cy', '=', 'cos', '(', 'yaw', ')', 'self', '.', 'a', '.', 'x', '=', 'cp', '*', 'cy', 'self', '.', 'a', '.', 'y', '=', '(', 'sr', '*', 'sp', '*', 'cy', ')', '-', '(', 'cr', '*', 'sy', ')', 'self', '.', 'a', '.', 'z', '=', '(', 'cr', '*', 'sp', '*', 'cy', ')', '+', '(', 'sr', '*', 'sy', ')', 'self', '.', 'b', '.', 'x', '=', 'cp', '*', 'sy', 'self', '.', 'b', '.', 'y', '=', '(', 'sr', '*', 'sp', '*', 'sy', ')', '+', '(', 'cr', '*', 'cy', ')', 'self', '.', 'b', '.', 'z', '=', '(', 'cr', '*', 'sp', '*', 'sy', ')', '-', '(', 'sr', '*', 'cy', ')', 'self', '.', 'c', '.', 'x', '=', '-', 'sp', 'self', '.', 'c', '.', 'y', '=', 'sr', '*', 'cp', 'self', '.', 'c', '.', 'z', '=', 'cr', '*', 'cp'] | fill the matrix from Euler angles in radians | ['fill', 'the', 'matrix', 'from', 'Euler', 'angles', 'in', 'radians'] | train | https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/pymavlink/rotmat.py#L154-L171 |
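The closed form above is the standard ZYX (yaw, pitch, roll) composition; it can be cross-checked with plain numpy, independent of pymavlink:

```python
import numpy as np

def rot_from_euler(roll, pitch, yaw):
    """Rz(yaw) @ Ry(pitch) @ Rx(roll), the same matrix from_euler fills in."""
    cr, sr = np.cos(roll), np.sin(roll)
    cp, sp = np.cos(pitch), np.sin(pitch)
    cy, sy = np.cos(yaw), np.sin(yaw)
    Rx = np.array([[1, 0, 0], [0, cr, -sr], [0, sr, cr]])
    Ry = np.array([[cp, 0, sp], [0, 1, 0], [-sp, 0, cp]])
    Rz = np.array([[cy, -sy, 0], [sy, cy, 0], [0, 0, 1]])
    return Rz @ Ry @ Rx

R = rot_from_euler(0.1, 0.2, 0.3)
# The bottom row matches the c vector above: [-sp, sr*cp, cr*cp].
assert np.allclose(R[2], [-np.sin(0.2),
                          np.sin(0.1) * np.cos(0.2),
                          np.cos(0.1) * np.cos(0.2)])
```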
636 | eventbrite/eventbrite-sdk-python | eventbrite/access_methods.py | AccessMethodsMixin.get_user_events | def get_user_events(self, id, **data):
"""
GET /users/:id/events/
Returns a :ref:`paginated <pagination>` response of :format:`events <event>`, under the key ``events``, of all events the user has access to
"""
return self.get("/users/{0}/events/".format(id), data=data) | python | def get_user_events(self, id, **data):
"""
GET /users/:id/events/
Returns a :ref:`paginated <pagination>` response of :format:`events <event>`, under the key ``events``, of all events the user has access to
"""
return self.get("/users/{0}/events/".format(id), data=data) | ['def', 'get_user_events', '(', 'self', ',', 'id', ',', '*', '*', 'data', ')', ':', 'return', 'self', '.', 'get', '(', '"/users/{0}/events/"', '.', 'format', '(', 'id', ')', ',', 'data', '=', 'data', ')'] | GET /users/:id/events/
Returns a :ref:`paginated <pagination>` response of :format:`events <event>`, under the key ``events``, of all events the user has access to | ['GET', '/', 'users', '/', ':', 'id', '/', 'events', '/', 'Returns', 'a', ':', 'ref', ':', 'paginated', '<pagination', '>', 'response', 'of', ':', 'format', ':', 'events', '<event', '>', 'under', 'the', 'key', 'events', 'of', 'all', 'events', 'the', 'user', 'has', 'access', 'to'] | train | https://github.com/eventbrite/eventbrite-sdk-python/blob/f2e5dc5aa1aa3e45766de13f16fd65722163d91a/eventbrite/access_methods.py#L776-L782 |
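A hedged usage sketch, assuming the SDK's usual `Eventbrite` client entry point (the token is a placeholder, and `'me'` is the API's alias for the token's own user):

```python
from eventbrite import Eventbrite

eventbrite = Eventbrite('YOUR_OAUTH_TOKEN')  # placeholder token
response = eventbrite.get_user_events('me')  # 'me' is the API's alias for the token's user
for event in response.get('events', []):
    print(event['name']['text'])
```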
637 | dw/mitogen | mitogen/core.py | Latch.close | def close(self):
"""
Mark the latch as closed, and cause every sleeping thread to be woken,
with :class:`mitogen.core.LatchError` raised in each thread.
"""
self._lock.acquire()
try:
self.closed = True
while self._waking < len(self._sleeping):
wsock, cookie = self._sleeping[self._waking]
self._wake(wsock, cookie)
self._waking += 1
finally:
self._lock.release() | python | def close(self):
"""
Mark the latch as closed, and cause every sleeping thread to be woken,
with :class:`mitogen.core.LatchError` raised in each thread.
"""
self._lock.acquire()
try:
self.closed = True
while self._waking < len(self._sleeping):
wsock, cookie = self._sleeping[self._waking]
self._wake(wsock, cookie)
self._waking += 1
finally:
self._lock.release() | ['def', 'close', '(', 'self', ')', ':', 'self', '.', '_lock', '.', 'acquire', '(', ')', 'try', ':', 'self', '.', 'closed', '=', 'True', 'while', 'self', '.', '_waking', '<', 'len', '(', 'self', '.', '_sleeping', ')', ':', 'wsock', ',', 'cookie', '=', 'self', '.', '_sleeping', '[', 'self', '.', '_waking', ']', 'self', '.', '_wake', '(', 'wsock', ',', 'cookie', ')', 'self', '.', '_waking', '+=', '1', 'finally', ':', 'self', '.', '_lock', '.', 'release', '(', ')'] | Mark the latch as closed, and cause every sleeping thread to be woken,
with :class:`mitogen.core.LatchError` raised in each thread. | ['Mark', 'the', 'latch', 'as', 'closed', 'and', 'cause', 'every', 'sleeping', 'thread', 'to', 'be', 'woken', 'with', ':', 'class', ':', 'mitogen', '.', 'core', '.', 'LatchError', 'raised', 'in', 'each', 'thread', '.'] | train | https://github.com/dw/mitogen/blob/a7fdb55e1300a7e0a5e404b09eb730cf9a525da7/mitogen/core.py#L2101-L2114 |
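The contract in practice: a thread blocked in `get()` with nothing queued is released with `LatchError` as soon as another thread calls `close()`. A small sketch against mitogen's public `Latch` API:

```python
import threading
import mitogen.core

latch = mitogen.core.Latch()

def consumer():
    try:
        latch.get()  # blocks, since nothing was put()
    except mitogen.core.LatchError:
        print('woken by close()')

t = threading.Thread(target=consumer)
t.start()
latch.close()  # wakes every sleeping thread with LatchError
t.join()
```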
638 | bodylabs/lace | lace/topology.py | MeshMixin.remove_redundant_verts | def remove_redundant_verts(self, eps=1e-10):
"""Given verts and faces, this remove colocated vertices"""
import numpy as np
from scipy.spatial import cKDTree # FIXME pylint: disable=no-name-in-module
fshape = self.f.shape
tree = cKDTree(self.v)
close_pairs = list(tree.query_pairs(eps))
if close_pairs:
close_pairs = np.sort(close_pairs, axis=1)
# update faces to not refer to redundant vertices
equivalent_verts = np.arange(self.v.shape[0])
for v1, v2 in close_pairs:
if equivalent_verts[v2] > v1:
equivalent_verts[v2] = v1
self.f = equivalent_verts[self.f.flatten()].reshape((-1, 3))
# get rid of unused verts, and update faces accordingly
vertidxs_left = np.unique(self.f)
repl = np.arange(np.max(self.f)+1)
repl[vertidxs_left] = np.arange(len(vertidxs_left))
self.v = self.v[vertidxs_left]
self.f = repl[self.f].reshape((-1, fshape[1])) | python | def remove_redundant_verts(self, eps=1e-10):
"""Given verts and faces, this remove colocated vertices"""
import numpy as np
from scipy.spatial import cKDTree # FIXME pylint: disable=no-name-in-module
fshape = self.f.shape
tree = cKDTree(self.v)
close_pairs = list(tree.query_pairs(eps))
if close_pairs:
close_pairs = np.sort(close_pairs, axis=1)
# update faces to not refer to redundant vertices
equivalent_verts = np.arange(self.v.shape[0])
for v1, v2 in close_pairs:
if equivalent_verts[v2] > v1:
equivalent_verts[v2] = v1
self.f = equivalent_verts[self.f.flatten()].reshape((-1, 3))
# get rid of unused verts, and update faces accordingly
vertidxs_left = np.unique(self.f)
repl = np.arange(np.max(self.f)+1)
repl[vertidxs_left] = np.arange(len(vertidxs_left))
self.v = self.v[vertidxs_left]
self.f = repl[self.f].reshape((-1, fshape[1])) | ['def', 'remove_redundant_verts', '(', 'self', ',', 'eps', '=', '1e-10', ')', ':', 'import', 'numpy', 'as', 'np', 'from', 'scipy', '.', 'spatial', 'import', 'cKDTree', '# FIXME pylint: disable=no-name-in-module', 'fshape', '=', 'self', '.', 'f', '.', 'shape', 'tree', '=', 'cKDTree', '(', 'self', '.', 'v', ')', 'close_pairs', '=', 'list', '(', 'tree', '.', 'query_pairs', '(', 'eps', ')', ')', 'if', 'close_pairs', ':', 'close_pairs', '=', 'np', '.', 'sort', '(', 'close_pairs', ',', 'axis', '=', '1', ')', '# update faces to not refer to redundant vertices', 'equivalent_verts', '=', 'np', '.', 'arange', '(', 'self', '.', 'v', '.', 'shape', '[', '0', ']', ')', 'for', 'v1', ',', 'v2', 'in', 'close_pairs', ':', 'if', 'equivalent_verts', '[', 'v2', ']', '>', 'v1', ':', 'equivalent_verts', '[', 'v2', ']', '=', 'v1', 'self', '.', 'f', '=', 'equivalent_verts', '[', 'self', '.', 'f', '.', 'flatten', '(', ')', ']', '.', 'reshape', '(', '(', '-', '1', ',', '3', ')', ')', '# get rid of unused verts, and update faces accordingly', 'vertidxs_left', '=', 'np', '.', 'unique', '(', 'self', '.', 'f', ')', 'repl', '=', 'np', '.', 'arange', '(', 'np', '.', 'max', '(', 'self', '.', 'f', ')', '+', '1', ')', 'repl', '[', 'vertidxs_left', ']', '=', 'np', '.', 'arange', '(', 'len', '(', 'vertidxs_left', ')', ')', 'self', '.', 'v', '=', 'self', '.', 'v', '[', 'vertidxs_left', ']', 'self', '.', 'f', '=', 'repl', '[', 'self', '.', 'f', ']', '.', 'reshape', '(', '(', '-', '1', ',', 'fshape', '[', '1', ']', ')', ')'] | Given verts and faces, this remove colocated vertices | ['Given', 'verts', 'and', 'faces', 'this', 'remove', 'colocated', 'vertices'] | train | https://github.com/bodylabs/lace/blob/b68f4a60a4cac66c0607ffbae38ef9d07d37f459/lace/topology.py#L498-L518 |
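The same dedup-then-reindex idea, reduced to a standalone numpy/scipy sketch on a toy mesh with one colocated vertex pair (no lace dependency):

```python
import numpy as np
from scipy.spatial import cKDTree

# Vertices 1 and 3 are colocated; the two triangles share them.
v = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 0., 0.]])
f = np.array([[0, 1, 2], [0, 3, 2]])

pairs = np.sort(list(cKDTree(v).query_pairs(1e-10)), axis=1)
equiv = np.arange(len(v))
for v1, v2 in pairs:
    if equiv[v2] > v1:
        equiv[v2] = v1           # remap each duplicate onto the lower index
f = equiv[f]                      # faces stop referencing the duplicates
keep = np.unique(f)
repl = np.zeros(f.max() + 1, dtype=int)
repl[keep] = np.arange(len(keep))
v, f = v[keep], repl[f]           # compact the verts, reindex the faces
print(v.shape, f.tolist())        # (3, 3) [[0, 1, 2], [0, 1, 2]]
```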
639 | fastai/fastai | fastai/vision/data.py | _db_pre_transform | def _db_pre_transform(self, train_tfm:List[Callable], valid_tfm:List[Callable]):
"Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`"
self.train_ds.x.after_open = compose(train_tfm)
self.valid_ds.x.after_open = compose(valid_tfm)
return self | python | def _db_pre_transform(self, train_tfm:List[Callable], valid_tfm:List[Callable]):
"Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`"
self.train_ds.x.after_open = compose(train_tfm)
self.valid_ds.x.after_open = compose(valid_tfm)
return self | ['def', '_db_pre_transform', '(', 'self', ',', 'train_tfm', ':', 'List', '[', 'Callable', ']', ',', 'valid_tfm', ':', 'List', '[', 'Callable', ']', ')', ':', 'self', '.', 'train_ds', '.', 'x', '.', 'after_open', '=', 'compose', '(', 'train_tfm', ')', 'self', '.', 'valid_ds', '.', 'x', '.', 'after_open', '=', 'compose', '(', 'valid_tfm', ')', 'return', 'self'] | Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image` | ['Call', 'train_tfm', 'and', 'valid_tfm', 'after', 'opening', 'image', 'before', 'converting', 'from', 'PIL', '.', 'Image'] | train | https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L440-L444 |
640 | modin-project/modin | modin/backends/pandas/query_compiler.py | PandasQueryCompiler.numeric_function_clean_dataframe | def numeric_function_clean_dataframe(self, axis):
"""Preprocesses numeric functions to clean dataframe and pick numeric indices.
Args:
axis: '0' if columns and '1' if rows.
Returns:
Tuple with return value(if any), indices to apply func to & cleaned Manager.
"""
result = None
query_compiler = self
# If no numeric columns and over columns, then return empty Series
if not axis and len(self.index) == 0:
result = pandas.Series(dtype=np.int64)
nonnumeric = [
col
for col, dtype in zip(self.columns, self.dtypes)
if not is_numeric_dtype(dtype)
]
if len(nonnumeric) == len(self.columns):
# If over rows and no numeric columns, return this
if axis:
result = pandas.Series([np.nan for _ in self.index])
else:
result = pandas.Series([0 for _ in self.index])
else:
query_compiler = self.drop(columns=nonnumeric)
return result, query_compiler | python | def numeric_function_clean_dataframe(self, axis):
"""Preprocesses numeric functions to clean dataframe and pick numeric indices.
Args:
axis: '0' if columns and '1' if rows.
Returns:
Tuple with return value(if any), indices to apply func to & cleaned Manager.
"""
result = None
query_compiler = self
# If no numeric columns and over columns, then return empty Series
if not axis and len(self.index) == 0:
result = pandas.Series(dtype=np.int64)
nonnumeric = [
col
for col, dtype in zip(self.columns, self.dtypes)
if not is_numeric_dtype(dtype)
]
if len(nonnumeric) == len(self.columns):
# If over rows and no numeric columns, return this
if axis:
result = pandas.Series([np.nan for _ in self.index])
else:
result = pandas.Series([0 for _ in self.index])
else:
query_compiler = self.drop(columns=nonnumeric)
return result, query_compiler | ['def', 'numeric_function_clean_dataframe', '(', 'self', ',', 'axis', ')', ':', 'result', '=', 'None', 'query_compiler', '=', 'self', '# If no numeric columns and over columns, then return empty Series', 'if', 'not', 'axis', 'and', 'len', '(', 'self', '.', 'index', ')', '==', '0', ':', 'result', '=', 'pandas', '.', 'Series', '(', 'dtype', '=', 'np', '.', 'int64', ')', 'nonnumeric', '=', '[', 'col', 'for', 'col', ',', 'dtype', 'in', 'zip', '(', 'self', '.', 'columns', ',', 'self', '.', 'dtypes', ')', 'if', 'not', 'is_numeric_dtype', '(', 'dtype', ')', ']', 'if', 'len', '(', 'nonnumeric', ')', '==', 'len', '(', 'self', '.', 'columns', ')', ':', '# If over rows and no numeric columns, return this', 'if', 'axis', ':', 'result', '=', 'pandas', '.', 'Series', '(', '[', 'np', '.', 'nan', 'for', '_', 'in', 'self', '.', 'index', ']', ')', 'else', ':', 'result', '=', 'pandas', '.', 'Series', '(', '[', '0', 'for', '_', 'in', 'self', '.', 'index', ']', ')', 'else', ':', 'query_compiler', '=', 'self', '.', 'drop', '(', 'columns', '=', 'nonnumeric', ')', 'return', 'result', ',', 'query_compiler'] | Preprocesses numeric functions to clean dataframe and pick numeric indices.
Args:
axis: '0' if columns and '1' if rows.
Returns:
Tuple with return value(if any), indices to apply func to & cleaned Manager. | ['Preprocesses', 'numeric', 'functions', 'to', 'clean', 'dataframe', 'and', 'pick', 'numeric', 'indices', '.'] | train | https://github.com/modin-project/modin/blob/5b77d242596560c646b8405340c9ce64acb183cb/modin/backends/pandas/query_compiler.py#L181-L209 |
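The column filtering itself is plain pandas; a sketch of the cleaning step on its own (drop whatever `is_numeric_dtype` rejects, then reduce safely):

```python
import pandas as pd
from pandas.api.types import is_numeric_dtype

df = pd.DataFrame({'a': [1, 2], 'b': ['x', 'y'], 'c': [0.5, 1.5]})
nonnumeric = [col for col, dtype in zip(df.columns, df.dtypes)
              if not is_numeric_dtype(dtype)]
cleaned = df.drop(columns=nonnumeric)  # only 'a' and 'c' survive
print(cleaned.sum())                   # a: 3, c: 2.0; safe to aggregate now
```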
641 | pmacosta/pmisc | pmisc/member.py | isreal | def isreal(obj):
"""
Test if the argument is a real number (float or integer).
:param obj: Object
:type obj: any
:rtype: boolean
"""
return (
(obj is not None)
and (not isinstance(obj, bool))
and isinstance(obj, (int, float))
) | python | def isreal(obj):
"""
Test if the argument is a real number (float or integer).
:param obj: Object
:type obj: any
:rtype: boolean
"""
return (
(obj is not None)
and (not isinstance(obj, bool))
and isinstance(obj, (int, float))
) | ['def', 'isreal', '(', 'obj', ')', ':', 'return', '(', '(', 'obj', 'is', 'not', 'None', ')', 'and', '(', 'not', 'isinstance', '(', 'obj', ',', 'bool', ')', ')', 'and', 'isinstance', '(', 'obj', ',', '(', 'int', ',', 'float', ')', ')', ')'] | Test if the argument is a real number (float or integer).
:param obj: Object
:type obj: any
:rtype: boolean | ['Test', 'if', 'the', 'argument', 'is', 'a', 'real', 'number', '(', 'float', 'or', 'integer', ')', '.'] | train | https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/member.py#L84-L97 |
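Usage at a glance; note that booleans are rejected on purpose even though `bool` subclasses `int`:

```python
from pmisc.member import isreal  # module path taken from the record above

assert isreal(3) and isreal(-0.5)
assert not isreal(True)          # bool is deliberately excluded
assert not isreal(None) and not isreal(1 + 2j) and not isreal('1')
```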
642 | twilio/twilio-python | twilio/rest/api/v2010/account/__init__.py | AccountContext.new_keys | def new_keys(self):
"""
Access the new_keys
:returns: twilio.rest.api.v2010.account.new_key.NewKeyList
:rtype: twilio.rest.api.v2010.account.new_key.NewKeyList
"""
if self._new_keys is None:
self._new_keys = NewKeyList(self._version, account_sid=self._solution['sid'], )
return self._new_keys | python | def new_keys(self):
"""
Access the new_keys
:returns: twilio.rest.api.v2010.account.new_key.NewKeyList
:rtype: twilio.rest.api.v2010.account.new_key.NewKeyList
"""
if self._new_keys is None:
self._new_keys = NewKeyList(self._version, account_sid=self._solution['sid'], )
return self._new_keys | ['def', 'new_keys', '(', 'self', ')', ':', 'if', 'self', '.', '_new_keys', 'is', 'None', ':', 'self', '.', '_new_keys', '=', 'NewKeyList', '(', 'self', '.', '_version', ',', 'account_sid', '=', 'self', '.', '_solution', '[', "'sid'", ']', ',', ')', 'return', 'self', '.', '_new_keys'] | Access the new_keys
:returns: twilio.rest.api.v2010.account.new_key.NewKeyList
:rtype: twilio.rest.api.v2010.account.new_key.NewKeyList | ['Access', 'the', 'new_keys'] | train | https://github.com/twilio/twilio-python/blob/c867895f55dcc29f522e6e8b8868d0d18483132f/twilio/rest/api/v2010/account/__init__.py#L474-L483 |
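A hedged sketch of reaching this list through the standard REST client and minting a key (the SID and token are placeholders, and `create(friendly_name=...)` follows the usual twilio list-resource pattern rather than anything shown in this snippet):

```python
from twilio.rest import Client

client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'your_auth_token')  # placeholders
account = client.api.accounts('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')
new_key = account.new_keys.create(friendly_name='deploy-key')
print(new_key.sid, new_key.secret)  # the secret is only returned at creation time
```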
643 | ewels/MultiQC | multiqc/modules/bbmap/bbmap.py | MultiqcModule.make_basic_table | def make_basic_table(self, file_type):
""" Create table of key-value items in 'file_type'.
"""
table_data = {sample: items['kv']
for sample, items
in self.mod_data[file_type].items()
}
table_headers = {}
for column_header, (description, header_options) in file_types[file_type]['kv_descriptions'].items():
table_headers[column_header] = {
'rid': '{}_{}_bbmstheader'.format(file_type, column_header),
'title': column_header,
'description': description,
}
table_headers[column_header].update(header_options)
tconfig = {
'id': file_type + '_bbm_table',
'namespace': 'BBTools'
}
for sample in table_data:
for key, value in table_data[sample].items():
try:
table_data[sample][key] = float(value)
except ValueError:
pass
return table.plot(table_data, table_headers, tconfig) | python | def make_basic_table(self, file_type):
""" Create table of key-value items in 'file_type'.
"""
table_data = {sample: items['kv']
for sample, items
in self.mod_data[file_type].items()
}
table_headers = {}
for column_header, (description, header_options) in file_types[file_type]['kv_descriptions'].items():
table_headers[column_header] = {
'rid': '{}_{}_bbmstheader'.format(file_type, column_header),
'title': column_header,
'description': description,
}
table_headers[column_header].update(header_options)
tconfig = {
'id': file_type + '_bbm_table',
'namespace': 'BBTools'
}
for sample in table_data:
for key, value in table_data[sample].items():
try:
table_data[sample][key] = float(value)
except ValueError:
pass
return table.plot(table_data, table_headers, tconfig) | ['def', 'make_basic_table', '(', 'self', ',', 'file_type', ')', ':', 'table_data', '=', '{', 'sample', ':', 'items', '[', "'kv'", ']', 'for', 'sample', ',', 'items', 'in', 'self', '.', 'mod_data', '[', 'file_type', ']', '.', 'items', '(', ')', '}', 'table_headers', '=', '{', '}', 'for', 'column_header', ',', '(', 'description', ',', 'header_options', ')', 'in', 'file_types', '[', 'file_type', ']', '[', "'kv_descriptions'", ']', '.', 'items', '(', ')', ':', 'table_headers', '[', 'column_header', ']', '=', '{', "'rid'", ':', "'{}_{}_bbmstheader'", '.', 'format', '(', 'file_type', ',', 'column_header', ')', ',', "'title'", ':', 'column_header', ',', "'description'", ':', 'description', ',', '}', 'table_headers', '[', 'column_header', ']', '.', 'update', '(', 'header_options', ')', 'tconfig', '=', '{', "'id'", ':', 'file_type', '+', "'_bbm_table'", ',', "'namespace'", ':', "'BBTools'", '}', 'for', 'sample', 'in', 'table_data', ':', 'for', 'key', ',', 'value', 'in', 'table_data', '[', 'sample', ']', '.', 'items', '(', ')', ':', 'try', ':', 'table_data', '[', 'sample', ']', '[', 'key', ']', '=', 'float', '(', 'value', ')', 'except', 'ValueError', ':', 'pass', 'return', 'table', '.', 'plot', '(', 'table_data', ',', 'table_headers', ',', 'tconfig', ')'] | Create table of key-value items in 'file_type'. | ['Create', 'table', 'of', 'key', '-', 'value', 'items', 'in', 'file_type', '.'] | train | https://github.com/ewels/MultiQC/blob/2037d6322b2554146a74efbf869156ad20d4c4ec/multiqc/modules/bbmap/bbmap.py#L165-L192 |
644 | saltstack/salt | salt/modules/systemd_service.py | _check_unmask | def _check_unmask(name, unmask, unmask_runtime, root=None):
'''
Common code for conditionally removing masks before making changes to a
service's state.
'''
if unmask:
unmask_(name, runtime=False, root=root)
if unmask_runtime:
unmask_(name, runtime=True, root=root) | python | def _check_unmask(name, unmask, unmask_runtime, root=None):
'''
Common code for conditionally removing masks before making changes to a
service's state.
'''
if unmask:
unmask_(name, runtime=False, root=root)
if unmask_runtime:
unmask_(name, runtime=True, root=root) | ['def', '_check_unmask', '(', 'name', ',', 'unmask', ',', 'unmask_runtime', ',', 'root', '=', 'None', ')', ':', 'if', 'unmask', ':', 'unmask_', '(', 'name', ',', 'runtime', '=', 'False', ',', 'root', '=', 'root', ')', 'if', 'unmask_runtime', ':', 'unmask_', '(', 'name', ',', 'runtime', '=', 'True', ',', 'root', '=', 'root', ')'] | Common code for conditionally removing masks before making changes to a
service's state. | ['Common', 'code', 'for', 'conditionally', 'removing', 'masks', 'before', 'making', 'changes', 'to', 'a', 'service', 's', 'state', '.'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/systemd_service.py#L141-L149 |
645 | ArtoLabs/SimpleSteem | simplesteem/util.py | Util.minutes_back | def minutes_back(self, date):
''' Gives a number (integer) of minutes
since a given date
'''
elapsed = (datetime.utcnow() - datetime.strptime(date,'%Y-%m-%dT%H:%M:%S'))
if elapsed.days > 0:
secondsback = (elapsed.days * 24 * 60 * 60) + elapsed.seconds
else:
secondsback = elapsed.seconds
minutesback = secondsback / 60
return int(minutesback) | python | def minutes_back(self, date):
''' Gives a number (integer) of minutes
since a given date
'''
elapsed = (datetime.utcnow() - datetime.strptime(date,'%Y-%m-%dT%H:%M:%S'))
if elapsed.days > 0:
secondsback = (elapsed.days * 24 * 60 * 60) + elapsed.seconds
else:
secondsback = elapsed.seconds
minutesback = secondsback / 60
return int(minutesback) | ['def', 'minutes_back', '(', 'self', ',', 'date', ')', ':', 'elapsed', '=', '(', 'datetime', '.', 'utcnow', '(', ')', '-', 'datetime', '.', 'strptime', '(', 'date', ',', "'%Y-%m-%dT%H:%M:%S'", ')', ')', 'if', 'elapsed', '.', 'days', '>', '0', ':', 'secondsback', '=', '(', 'elapsed', '.', 'days', '*', '24', '*', '60', '*', '60', ')', '+', 'elapsed', '.', 'seconds', 'else', ':', 'secondsback', '=', 'elapsed', '.', 'seconds', 'minutesback', '=', 'secondsback', '/', '60', 'return', 'int', '(', 'minutesback', ')'] | Gives a number (integer) of minutes
since a given date | ['Gives', 'a', 'number', '(', 'integer', ')', 'of', 'minutes', 'since', 'a', 'given', 'date'] | train | https://github.com/ArtoLabs/SimpleSteem/blob/ce8be0ae81f8878b460bc156693f1957f7dd34a3/simplesteem/util.py#L87-L97 |
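A standalone version of the same arithmetic (the timestamp format matches the one above); `total_seconds()` folds the `.days` component in, which is exactly what the two branches above do by hand:

```python
from datetime import datetime

def minutes_back(date):
    elapsed = datetime.utcnow() - datetime.strptime(date, '%Y-%m-%dT%H:%M:%S')
    return int(elapsed.total_seconds() // 60)

print(minutes_back('2019-01-01T00:00:00'))  # minutes elapsed since that UTC instant
```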
646 | fulfilio/fulfil-python-api | fulfil_client/client.py | AsyncResult.refresh_if_needed | def refresh_if_needed(self):
"""
Refresh the status of the task from server if required.
"""
if self.state in (self.PENDING, self.STARTED):
try:
response, = self._fetch_result()['tasks']
except (KeyError, ValueError):
raise Exception(
"Unable to find results for task."
)
if 'error' in response:
self.state = self.FAILURE
raise ServerError(response['error'])
if 'state' in response:
self.state = response['state']
self.result = response['result'] | python | def refresh_if_needed(self):
"""
Refresh the status of the task from server if required.
"""
if self.state in (self.PENDING, self.STARTED):
try:
response, = self._fetch_result()['tasks']
except (KeyError, ValueError):
raise Exception(
"Unable to find results for task."
)
if 'error' in response:
self.state = self.FAILURE
raise ServerError(response['error'])
if 'state' in response:
self.state = response['state']
self.result = response['result'] | ['def', 'refresh_if_needed', '(', 'self', ')', ':', 'if', 'self', '.', 'state', 'in', '(', 'self', '.', 'PENDING', ',', 'self', '.', 'STARTED', ')', ':', 'try', ':', 'response', ',', '=', 'self', '.', '_fetch_result', '(', ')', '[', "'tasks'", ']', 'except', '(', 'KeyError', ',', 'ValueError', ')', ':', 'raise', 'Exception', '(', '"Unable to find results for task."', ')', 'if', "'error'", 'in', 'response', ':', 'self', '.', 'state', '==', 'self', '.', 'FAILURE', 'raise', 'ServerError', '(', 'response', '[', "'error'", ']', ')', 'if', "'state'", 'in', 'response', ':', 'self', '.', 'state', '=', 'response', '[', "'state'", ']', 'self', '.', 'result', '=', 'response', '[', "'result'", ']'] | Refresh the status of the task from server if required. | ['Refresh', 'the', 'status', 'of', 'the', 'task', 'from', 'server', 'if', 'required', '.'] | train | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L572-L590 |
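A typical caller polls until the task leaves PENDING/STARTED; a sketch under the assumption that `async_result` is whatever the client handed back (a failure surfaces as the `ServerError` raised above):

```python
import time

def wait_for(async_result, poll=2.0, timeout=60.0):
    """Poll an AsyncResult until it settles or the timeout elapses."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        async_result.refresh_if_needed()  # raises ServerError on task failure
        if async_result.state not in (async_result.PENDING, async_result.STARTED):
            return async_result.result
        time.sleep(poll)
    raise TimeoutError('task did not settle in time')
```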
647 | elapouya/python-textops | textops/base.py | eformat | def eformat(format_str,lst,dct,defvalue='-'):
""" Formats a list and a dictionary, manages unkown keys
It works like :meth:`string.Formatter.vformat` except that it accepts a defvalue for not matching keys.
Defvalue can be a callable that will receive the requested key as argument and return a string
Args:
format_string (str): Same format string as for :meth:`str.format`
lst (dict) : the list to format
dct (dict) : the dict to format
defvalue (str or callable): the default value to display when the data is not in the dict
Examples:
>>> d = {'count': '32591', 'soft': 'textops'}
>>> l = ['Eric','Guido']
>>> eformat('{0} => {soft} : {count} downloads',l,d)
'Eric => textops : 32591 downloads'
>>> eformat('{2} => {software} : {count} downloads',l,d,'N/A')
'N/A => N/A : 32591 downloads'
>>> eformat('{2} => {software} : {count} downloads',l,d,lambda k:'unknown_tag_%s' % k)
'unknown_tag_2 => unknown_tag_software : 32591 downloads'
"""
return vformat(format_str,DefaultList(defvalue,lst),DefaultDict(defvalue,dct)) | python | def eformat(format_str,lst,dct,defvalue='-'):
""" Formats a list and a dictionary, manages unkown keys
It works like :meth:`string.Formatter.vformat` except that it accepts a defvalue for not matching keys.
Defvalue can be a callable that will receive the requested key as argument and return a string
Args:
format_string (str): Same format string as for :meth:`str.format`
lst (dict) : the list to format
dct (dict) : the dict to format
defvalue (str or callable): the default value to display when the data is not in the dict
Examples:
>>> d = {'count': '32591', 'soft': 'textops'}
>>> l = ['Eric','Guido']
>>> eformat('{0} => {soft} : {count} downloads',l,d)
'Eric => textops : 32591 downloads'
>>> eformat('{2} => {software} : {count} downloads',l,d,'N/A')
'N/A => N/A : 32591 downloads'
>>> eformat('{2} => {software} : {count} downloads',l,d,lambda k:'unknown_tag_%s' % k)
'unknown_tag_2 => unknown_tag_software : 32591 downloads'
"""
return vformat(format_str,DefaultList(defvalue,lst),DefaultDict(defvalue,dct)) | ['def', 'eformat', '(', 'format_str', ',', 'lst', ',', 'dct', ',', 'defvalue', '=', "'-'", ')', ':', 'return', 'vformat', '(', 'format_str', ',', 'DefaultList', '(', 'defvalue', ',', 'lst', ')', ',', 'DefaultDict', '(', 'defvalue', ',', 'dct', ')', ')'] | Formats a list and a dictionary, manages unknown keys
It works like :meth:`string.Formatter.vformat` except that it accepts a defvalue for not matching keys.
Defvalue can be a callable that will receive the requested key as argument and return a string
Args:
format_string (str): Same format string as for :meth:`str.format`
lst (dict) : the list to format
dct (dict) : the dict to format
defvalue (str or callable): the default value to display when the data is not in the dict
Examples:
>>> d = {'count': '32591', 'soft': 'textops'}
>>> l = ['Eric','Guido']
>>> eformat('{0} => {soft} : {count} downloads',l,d)
'Eric => textops : 32591 downloads'
>>> eformat('{2} => {software} : {count} downloads',l,d,'N/A')
'N/A => N/A : 32591 downloads'
>>> eformat('{2} => {software} : {count} downloads',l,d,lambda k:'unknown_tag_%s' % k)
'unknown_tag_2 => unknown_tag_software : 32591 downloads' | ['Formats', 'a', 'list', 'and', 'a', 'dictionary', 'manages', 'unknown', 'keys'] | train | https://github.com/elapouya/python-textops/blob/5c63b9074a1acd8dd108725f1b370f6684c941ef/textops/base.py#L1035-L1058 |
648 | saltstack/salt | salt/modules/mysql.py | verify_login | def verify_login(user, password=None, **connection_args):
'''
Attempt to login using the provided credentials.
If successful, return true. Otherwise, return False.
CLI Example:
.. code-block:: bash
salt '*' mysql.verify_login root password
'''
# Override the connection args for username and password
connection_args['connection_user'] = user
connection_args['connection_pass'] = password
dbc = _connect(**connection_args)
if dbc is None:
# Clear the mysql.error if unable to connect
# if the connection fails, we simply return False
if 'mysql.error' in __context__:
del __context__['mysql.error']
return False
return True | python | def verify_login(user, password=None, **connection_args):
'''
Attempt to login using the provided credentials.
If successful, return true. Otherwise, return False.
CLI Example:
.. code-block:: bash
salt '*' mysql.verify_login root password
'''
# Override the connection args for username and password
connection_args['connection_user'] = user
connection_args['connection_pass'] = password
dbc = _connect(**connection_args)
if dbc is None:
# Clear the mysql.error if unable to connect
# if the connection fails, we simply return False
if 'mysql.error' in __context__:
del __context__['mysql.error']
return False
return True | ['def', 'verify_login', '(', 'user', ',', 'password', '=', 'None', ',', '*', '*', 'connection_args', ')', ':', '# Override the connection args for username and password', 'connection_args', '[', "'connection_user'", ']', '=', 'user', 'connection_args', '[', "'connection_pass'", ']', '=', 'password', 'dbc', '=', '_connect', '(', '*', '*', 'connection_args', ')', 'if', 'dbc', 'is', 'None', ':', '# Clear the mysql.error if unable to connect', '# if the connection fails, we simply return False', 'if', "'mysql.error'", 'in', '__context__', ':', 'del', '__context__', '[', "'mysql.error'", ']', 'return', 'False', 'return', 'True'] | Attempt to login using the provided credentials.
If successful, return true. Otherwise, return False.
CLI Example:
.. code-block:: bash
salt '*' mysql.verify_login root password | ['Attempt', 'to', 'login', 'using', 'the', 'provided', 'credentials', '.', 'If', 'successful', 'return', 'true', '.', 'Otherwise', 'return', 'False', '.'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/mysql.py#L2336-L2358 |
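Stripped of the Salt plumbing, the pattern is just "attempt a connection and report success". A standalone equivalent with PyMySQL (an assumption; Salt itself goes through its MySQLdb-compatible bindings and `_connect`):

```python
import pymysql

def verify_login(user, password, host='localhost'):
    """Return True if MySQL accepts the credentials, False otherwise."""
    try:
        conn = pymysql.connect(host=host, user=user, password=password)
    except pymysql.MySQLError:
        return False
    conn.close()
    return True
```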
649 | tgbugs/pyontutils | neurondm/neurondm/lang.py | config | def config(remote_base= 'https://raw.githubusercontent.com/SciCrunch/NIF-Ontology/',
local_base= None, # devconfig.ontology_local_repo by default
branch= devconfig.neurons_branch,
core_graph_paths= ['ttl/phenotype-core.ttl',
'ttl/phenotypes.ttl'],
core_graph= None,
in_graph_paths= tuple(),
out_graph_path= '/tmp/_Neurons.ttl',
out_imports= ['ttl/phenotype-core.ttl'],
out_graph= None,
prefixes= tuple(),
force_remote= False,
checkout_ok= ont_checkout_ok,
scigraph= None, # defaults to devconfig.scigraph_api
iri= None,
sources= tuple(),
source_file= None,
use_local_import_paths=True,
ignore_existing= True):
""" Wraps graphBase.configGraphIO to provide a set of sane defaults
for input ontologies and output files. """
graphBase.configGraphIO(remote_base=remote_base,
local_base=local_base,
branch=branch,
core_graph_paths=core_graph_paths,
core_graph=core_graph,
in_graph_paths=in_graph_paths,
out_graph_path=out_graph_path,
out_imports=out_imports,
out_graph=out_graph,
prefixes=prefixes,
force_remote=force_remote,
checkout_ok=checkout_ok,
scigraph=scigraph,
iri=iri,
sources=sources,
source_file=source_file,
use_local_import_paths=use_local_import_paths,
ignore_existing=ignore_existing)
pred = graphBase._predicates
return pred | python | def config(remote_base= 'https://raw.githubusercontent.com/SciCrunch/NIF-Ontology/',
local_base= None, # devconfig.ontology_local_repo by default
branch= devconfig.neurons_branch,
core_graph_paths= ['ttl/phenotype-core.ttl',
'ttl/phenotypes.ttl'],
core_graph= None,
in_graph_paths= tuple(),
out_graph_path= '/tmp/_Neurons.ttl',
out_imports= ['ttl/phenotype-core.ttl'],
out_graph= None,
prefixes= tuple(),
force_remote= False,
checkout_ok= ont_checkout_ok,
scigraph= None, # defaults to devconfig.scigraph_api
iri= None,
sources= tuple(),
source_file= None,
use_local_import_paths=True,
ignore_existing= True):
""" Wraps graphBase.configGraphIO to provide a set of sane defaults
for input ontologies and output files. """
graphBase.configGraphIO(remote_base=remote_base,
local_base=local_base,
branch=branch,
core_graph_paths=core_graph_paths,
core_graph=core_graph,
in_graph_paths=in_graph_paths,
out_graph_path=out_graph_path,
out_imports=out_imports,
out_graph=out_graph,
prefixes=prefixes,
force_remote=force_remote,
checkout_ok=checkout_ok,
scigraph=scigraph,
iri=iri,
sources=sources,
source_file=source_file,
use_local_import_paths=use_local_import_paths,
ignore_existing=ignore_existing)
pred = graphBase._predicates
return pred | ['def', 'config', '(', 'remote_base', '=', "'https://raw.githubusercontent.com/SciCrunch/NIF-Ontology/'", ',', 'local_base', '=', 'None', ',', '# devconfig.ontology_local_repo by default', 'branch', '=', 'devconfig', '.', 'neurons_branch', ',', 'core_graph_paths', '=', '[', "'ttl/phenotype-core.ttl'", ',', "'ttl/phenotypes.ttl'", ']', ',', 'core_graph', '=', 'None', ',', 'in_graph_paths', '=', 'tuple', '(', ')', ',', 'out_graph_path', '=', "'/tmp/_Neurons.ttl'", ',', 'out_imports', '=', '[', "'ttl/phenotype-core.ttl'", ']', ',', 'out_graph', '=', 'None', ',', 'prefixes', '=', 'tuple', '(', ')', ',', 'force_remote', '=', 'False', ',', 'checkout_ok', '=', 'ont_checkout_ok', ',', 'scigraph', '=', 'None', ',', '# defaults to devconfig.scigraph_api', 'iri', '=', 'None', ',', 'sources', '=', 'tuple', '(', ')', ',', 'source_file', '=', 'None', ',', 'use_local_import_paths', '=', 'True', ',', 'ignore_existing', '=', 'True', ')', ':', 'graphBase', '.', 'configGraphIO', '(', 'remote_base', '=', 'remote_base', ',', 'local_base', '=', 'local_base', ',', 'branch', '=', 'branch', ',', 'core_graph_paths', '=', 'core_graph_paths', ',', 'core_graph', '=', 'core_graph', ',', 'in_graph_paths', '=', 'in_graph_paths', ',', 'out_graph_path', '=', 'out_graph_path', ',', 'out_imports', '=', 'out_imports', ',', 'out_graph', '=', 'out_graph', ',', 'prefixes', '=', 'prefixes', ',', 'force_remote', '=', 'force_remote', ',', 'checkout_ok', '=', 'checkout_ok', ',', 'scigraph', '=', 'scigraph', ',', 'iri', '=', 'iri', ',', 'sources', '=', 'sources', ',', 'source_file', '=', 'source_file', ',', 'use_local_import_paths', '=', 'use_local_import_paths', ',', 'ignore_existing', '=', 'ignore_existing', ')', 'pred', '=', 'graphBase', '.', '_predicates', 'return', 'pred'] | Wraps graphBase.configGraphIO to provide a set of sane defaults
for input ontologies and output files. | ['Wraps', 'graphBase', '.', 'configGraphIO', 'to', 'provide', 'a', 'set', 'of', 'sane', 'defaults', 'for', 'input', 'ontologies', 'and', 'output', 'files', '.'] | train | https://github.com/tgbugs/pyontutils/blob/3d913db29c177db39151592909a4f56170ef8b35/neurondm/neurondm/lang.py#L34-L75 |
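A minimal session sketch (only the import path, taken from the record's file location, and the documented return value are assumed):

```python
from neurondm.lang import config  # module path from the record above

# Keep the defaults but write the output graph elsewhere; the return value
# is the set of phenotype predicates bound during configuration.
pred = config(out_graph_path='/tmp/my-neurons.ttl')
print([p for p in dir(pred) if not p.startswith('_')][:5])  # a few available predicates
```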
650 | gabstopper/smc-python | smc/base/collection.py | ElementCollection.filter | def filter(self, *filter, **kw): # @ReservedAssignment
"""
Filter results for specific element type.
keyword arguments can be used to specify a match against the
elements attribute directly. It's important to note that if the
search filter contains a / or -, the SMC will only search the
name and comment fields. Otherwise other key fields of an element
are searched. In addition, SMC searches are a 'contains' search
meaning you may return more results than wanted. Use a key word
argument to specify the elements attribute and value expected.
::
>>> list(Router.objects.filter('10.10.10.1'))
[Router(name=Router-110.10.10.10), Router(name=Router-10.10.10.10), Router(name=Router-10.10.10.1)]
>>> list(Router.objects.filter(address='10.10.10.1'))
[Router(name=Router-10.10.10.1)]
:param str filter: any parameter to attempt to match on.
For example, if this is a service, you could match on service name
'http' or ports of interest, '80'.
:param bool exact_match: Can be passed as a keyword arg. Specifies whether
the match needs to be exact or not (default: False)
:param bool case_sensitive: Can be passed as a keyword arg. Specifies
whether the match is case sensitive or not. (default: True)
:param kw: keyword args can specify an attribute=value to use as an
exact match against the elements attribute.
:return: :class:`.ElementCollection`
"""
iexact = None
if filter:
_filter = filter[0]
exact_match = kw.pop('exact_match', False)
case_sensitive = kw.pop('case_sensitive', True)
if kw:
_, value = next(iter(kw.items()))
_filter = value
iexact = kw
# Only strip metachars from network and address range
if not exact_match and self._params.get('filter_context', {})\
in ('network', 'address_range', 'network_elements'):
_filter = _strip_metachars(_filter)
return self._clone(
filter=_filter,
iexact=iexact,
exact_match=exact_match,
case_sensitive=case_sensitive) | python | def filter(self, *filter, **kw): # @ReservedAssignment
"""
Filter results for specific element type.
keyword arguments can be used to specify a match against the
elements attribute directly. It's important to note that if the
search filter contains a / or -, the SMC will only search the
name and comment fields. Otherwise other key fields of an element
are searched. In addition, SMC searches are a 'contains' search
meaning you may return more results than wanted. Use a key word
argument to specify the elements attribute and value expected.
::
>>> list(Router.objects.filter('10.10.10.1'))
[Router(name=Router-110.10.10.10), Router(name=Router-10.10.10.10), Router(name=Router-10.10.10.1)]
>>> list(Router.objects.filter(address='10.10.10.1'))
[Router(name=Router-10.10.10.1)]
:param str filter: any parameter to attempt to match on.
For example, if this is a service, you could match on service name
'http' or ports of interest, '80'.
:param bool exact_match: Can be passed as a keyword arg. Specifies whether
the match needs to be exact or not (default: False)
:param bool case_sensitive: Can be passed as a keyword arg. Specifies
whether the match is case sensitive or not. (default: True)
:param kw: keyword args can specify an attribute=value to use as an
exact match against the elements attribute.
:return: :class:`.ElementCollection`
"""
iexact = None
if filter:
_filter = filter[0]
exact_match = kw.pop('exact_match', False)
case_sensitive = kw.pop('case_sensitive', True)
if kw:
_, value = next(iter(kw.items()))
_filter = value
iexact = kw
# Only strip metachars from network and address range
if not exact_match and self._params.get('filter_context', {})\
in ('network', 'address_range', 'network_elements'):
_filter = _strip_metachars(_filter)
return self._clone(
filter=_filter,
iexact=iexact,
exact_match=exact_match,
case_sensitive=case_sensitive) | ['def', 'filter', '(', 'self', ',', '*', 'filter', ',', '*', '*', 'kw', ')', ':', '# @ReservedAssignment', 'iexact', '=', 'None', 'if', 'filter', ':', '_filter', '=', 'filter', '[', '0', ']', 'exact_match', '=', 'kw', '.', 'pop', '(', "'exact_match'", ',', 'False', ')', 'case_sensitive', '=', 'kw', '.', 'pop', '(', "'case_sensitive'", ',', 'True', ')', 'if', 'kw', ':', '_', ',', 'value', '=', 'next', '(', 'iter', '(', 'kw', '.', 'items', '(', ')', ')', ')', '_filter', '=', 'value', 'iexact', '=', 'kw', '# Only strip metachars from network and address range', 'if', 'not', 'exact_match', 'and', 'self', '.', '_params', '.', 'get', '(', "'filter_context'", ',', '{', '}', ')', 'in', '(', "'network'", ',', "'address_range'", ',', "'network_elements'", ')', ':', '_filter', '=', '_strip_metachars', '(', '_filter', ')', 'return', 'self', '.', '_clone', '(', 'filter', '=', '_filter', ',', 'iexact', '=', 'iexact', ',', 'exact_match', '=', 'exact_match', ',', 'case_sensitive', '=', 'case_sensitive', ')'] | Filter results for specific element type.
Keyword arguments can be used to specify a match against the
elements attribute directly. It's important to note that if the
search filter contains a / or -, the SMC will only search the
name and comment fields. Otherwise other key fields of an element
are searched. In addition, SMC searches are a 'contains' search
meaning you may return more results than wanted. Use a keyword
argument to specify the elements attribute and value expected.
::
>>> list(Router.objects.filter('10.10.10.1'))
[Router(name=Router-110.10.10.10), Router(name=Router-10.10.10.10), Router(name=Router-10.10.10.1)]
>>> list(Router.objects.filter(address='10.10.10.1'))
[Router(name=Router-10.10.10.1)]
:param str filter: any parameter to attempt to match on.
For example, if this is a service, you could match on service name
'http' or ports of interest, '80'.
:param bool exact_match: Can be passed as a keyword arg. Specifies whether
the match needs to be exact or not (default: False)
:param bool case_sensitive: Can be passed as a keyword arg. Specifies
whether the match is case sensitive or not. (default: True)
:param kw: keyword args can specify an attribute=value to use as an
exact match against the elements attribute.
:return: :class:`.ElementCollection` | ['Filter', 'results', 'for', 'specific', 'element', 'type', '.', 'keyword', 'arguments', 'can', 'be', 'used', 'to', 'specify', 'a', 'match', 'against', 'the', 'elements', 'attribute', 'directly', '.', 'It', 's', 'important', 'to', 'note', 'that', 'if', 'the', 'search', 'filter', 'contains', 'a', '/', 'or', '-', 'the', 'SMC', 'will', 'only', 'search', 'the', 'name', 'and', 'comment', 'fields', '.', 'Otherwise', 'other', 'key', 'fields', 'of', 'an', 'element', 'are', 'searched', '.', 'In', 'addition', 'SMC', 'searches', 'are', 'a', 'contains', 'search', 'meaning', 'you', 'may', 'return', 'more', 'results', 'than', 'wanted', '.', 'Use', 'a', 'key', 'word', 'argument', 'to', 'specify', 'the', 'elements', 'attribute', 'and', 'value', 'expected', '.', '::', '>>>', 'list', '(', 'Router', '.', 'objects', '.', 'filter', '(', '10', '.', '10', '.', '10', '.', '1', '))', '[', 'Router', '(', 'name', '=', 'Router', '-', '110', '.', '10', '.', '10', '.', '10', ')', 'Router', '(', 'name', '=', 'Router', '-', '10', '.', '10', '.', '10', '.', '10', ')', 'Router', '(', 'name', '=', 'Router', '-', '10', '.', '10', '.', '10', '.', '1', ')', ']', '>>>', 'list', '(', 'Router', '.', 'objects', '.', 'filter', '(', 'address', '=', '10', '.', '10', '.', '10', '.', '1', '))', '[', 'Router', '(', 'name', '=', 'Router', '-', '10', '.', '10', '.', '10', '.', '1', ')', ']', ':', 'param', 'str', 'filter', ':', 'any', 'parameter', 'to', 'attempt', 'to', 'match', 'on', '.', 'For', 'example', 'if', 'this', 'is', 'a', 'service', 'you', 'could', 'match', 'on', 'service', 'name', 'http', 'or', 'ports', 'of', 'interest', '80', '.', ':', 'param', 'bool', 'exact_match', ':', 'Can', 'be', 'passed', 'as', 'a', 'keyword', 'arg', '.', 'Specifies', 'whether', 'the', 'match', 'needs', 'to', 'be', 'exact', 'or', 'not', '(', 'default', ':', 'False', ')', ':', 'param', 'bool', 'case_sensitive', ':', 'Can', 'be', 'passed', 'as', 'a', 'keyword', 'arg', '.', 'Specifies', 'whether', 'the', 'match', 'is', 'case', 'sensitive', 'or', 'not', '.', '(', 'default', ':', 'True', ')', ':', 'param', 'kw', ':', 'keyword', 'args', 'can', 'specify', 'an', 'attribute', '=', 'value', 'to', 'use', 'as', 'an', 'exact', 'match', 'against', 'the', 'elements', 'attribute', '.', ':', 'return', ':', ':', 'class', ':', '.', 'ElementCollection'] | train | https://github.com/gabstopper/smc-python/blob/e027b8a5dcfaf884eada32d113d41c1e56b32457/smc/base/collection.py#L472-L522 |
651 | hardbyte/python-can | can/io/sqlite.py | SqliteWriter.stop | def stop(self):
"""Stops the reader an writes all remaining messages to the database. Thus, this
might take a while and block.
"""
BufferedReader.stop(self)
self._stop_running_event.set()
self._writer_thread.join()
BaseIOHandler.stop(self) | python | def stop(self):
"""Stops the reader an writes all remaining messages to the database. Thus, this
might take a while and block.
"""
BufferedReader.stop(self)
self._stop_running_event.set()
self._writer_thread.join()
        BaseIOHandler.stop(self) | ['def', 'stop', '(', 'self', ')', ':', 'BufferedReader', '.', 'stop', '(', 'self', ')', 'self', '.', '_stop_running_event', '.', 'set', '(', ')', 'self', '.', '_writer_thread', '.', 'join', '(', ')', 'BaseIOHandler', '.', 'stop', '(', 'self', ')'] | Stops the reader and writes all remaining messages to the database. Thus, this
might take a while and block. | ['Stops', 'the', 'reader', 'and', 'writes', 'all', 'remaining', 'messages', 'to', 'the', 'database', '.', 'Thus', 'this', 'might', 'take', 'a', 'while', 'and', 'block', '.'] | train | https://github.com/hardbyte/python-can/blob/cdc5254d96072df7739263623f3e920628a7d214/can/io/sqlite.py#L229-L236 |
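A minimal usage sketch for the writer above (the database path and message values are illustrative, and this assumes python-can's usual top-level exports; SqliteWriter is also a can.Listener, so it can be called directly with a message):

import can

writer = can.SqliteWriter("messages.db")  # hypothetical database path
writer(can.Message(arbitration_id=0x123, data=[1, 2, 3]))
# stop() blocks until the writer thread has flushed all buffered
# messages to the database, then closes the underlying handler.
writer.stop()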
652 | django-fluent/django-fluent-comments | fluent_comments/__init__.py | get_form | def get_form():
"""
Return the form to use for commenting.
"""
global form_class
from fluent_comments import appsettings
if form_class is None:
if appsettings.FLUENT_COMMENTS_FORM_CLASS:
from django.utils.module_loading import import_string
form_class = import_string(appsettings.FLUENT_COMMENTS_FORM_CLASS)
else:
from fluent_comments.forms import FluentCommentForm
form_class = FluentCommentForm
return form_class | python | def get_form():
"""
Return the form to use for commenting.
"""
global form_class
from fluent_comments import appsettings
if form_class is None:
if appsettings.FLUENT_COMMENTS_FORM_CLASS:
from django.utils.module_loading import import_string
form_class = import_string(appsettings.FLUENT_COMMENTS_FORM_CLASS)
else:
from fluent_comments.forms import FluentCommentForm
form_class = FluentCommentForm
return form_class | ['def', 'get_form', '(', ')', ':', 'global', 'form_class', 'from', 'fluent_comments', 'import', 'appsettings', 'if', 'form_class', 'is', 'None', ':', 'if', 'appsettings', '.', 'FLUENT_COMMENTS_FORM_CLASS', ':', 'from', 'django', '.', 'utils', '.', 'module_loading', 'import', 'import_string', 'form_class', '=', 'import_string', '(', 'appsettings', '.', 'FLUENT_COMMENTS_FORM_CLASS', ')', 'else', ':', 'from', 'fluent_comments', '.', 'forms', 'import', 'FluentCommentForm', 'form_class', '=', 'FluentCommentForm', 'return', 'form_class'] | Return the form to use for commenting. | ['Return', 'the', 'form', 'to', 'use', 'for', 'commenting', '.'] | train | https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/__init__.py#L26-L40 |
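For reference, a sketch of the override path this function supports (the dotted path below is a hypothetical example): when FLUENT_COMMENTS_FORM_CLASS is set in Django settings, get_form() resolves it once via import_string() and caches it in the module-level form_class.

# settings.py (hypothetical project)
FLUENT_COMMENTS_FORM_CLASS = 'myapp.forms.MyCommentForm'

# anywhere else in the project
from fluent_comments import get_form
form_class = get_form()  # imports and caches myapp.forms.MyCommentForm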
653 | openclimatedata/pymagicc | pymagicc/definitions/__init__.py | get_magicc6_to_magicc7_variable_mapping | def get_magicc6_to_magicc7_variable_mapping(inverse=False):
"""Get the mappings from MAGICC6 to MAGICC7 variables.
Note that this mapping is not one to one. For example, "HFC4310", "HFC43-10" and
"HFC-43-10" in MAGICC6 both map to "HFC4310" in MAGICC7 but "HFC4310" in
MAGICC7 maps back to "HFC4310".
Note that HFC-245fa was mistakenly labelled as HFC-245ca in MAGICC6. In reality,
they are not the same thing. However, the MAGICC6 labelling was merely a typo so
the mapping between the two is one-to-one.
Parameters
----------
inverse : bool
If True, return the inverse mappings i.e. MAGICC7 to MAGICC6 mappings
Returns
-------
dict
Dictionary of mappings
"""
# we generate the mapping dynamically, the first name in the list
# is the one which will be used for inverse mappings
magicc6_simple_mapping_vars = [
"KYOTO-CO2EQ",
"CO2I",
"CO2B",
"CH4",
"N2O",
"BC",
"OC",
"SOx",
"NOx",
"NMVOC",
"CO",
"SF6",
"NH3",
"CF4",
"C2F6",
"HFC4310",
"HFC43-10",
"HFC-43-10",
"HFC4310",
"HFC134a",
"HFC143a",
"HFC227ea",
"CCl4",
"CH3CCl3",
"HFC245fa",
"Halon 1211",
"Halon 1202",
"Halon 1301",
"Halon 2402",
"Halon1211",
"Halon1202",
"Halon1301",
"Halon2402",
"CH3Br",
"CH3Cl",
"C6F14",
]
magicc6_sometimes_hyphen_vars = [
"CFC-11",
"CFC-12",
"CFC-113",
"CFC-114",
"CFC-115",
"HCFC-22",
"HFC-23",
"HFC-32",
"HFC-125",
"HFC-134a",
"HFC-143a",
"HCFC-141b",
"HCFC-142b",
"HFC-227ea",
"HFC-245fa",
]
magicc6_sometimes_hyphen_vars = [
v.replace("-", "") for v in magicc6_sometimes_hyphen_vars
] + magicc6_sometimes_hyphen_vars
magicc6_sometimes_underscore_vars = [
"HFC43_10",
"CFC_11",
"CFC_12",
"CFC_113",
"CFC_114",
"CFC_115",
"HCFC_22",
"HCFC_141b",
"HCFC_142b",
]
magicc6_sometimes_underscore_replacements = {
v: v.replace("_", "") for v in magicc6_sometimes_underscore_vars
}
special_case_replacements = {
"FossilCO2": "CO2I",
"OtherCO2": "CO2B",
"MCF": "CH3CCL3",
"CARB_TET": "CCL4",
"MHALOSUMCFC12EQ": "MHALOSUMCFC12EQ", # special case to avoid confusion with MCF
}
one_way_replacements = {"HFC-245ca": "HFC245FA", "HFC245ca": "HFC245FA"}
all_possible_magicc6_vars = (
magicc6_simple_mapping_vars
+ magicc6_sometimes_hyphen_vars
+ magicc6_sometimes_underscore_vars
+ list(special_case_replacements.keys())
+ list(one_way_replacements.keys())
)
replacements = {}
for m6v in all_possible_magicc6_vars:
if m6v in special_case_replacements:
replacements[m6v] = special_case_replacements[m6v]
elif (
m6v in magicc6_sometimes_underscore_vars and not inverse
): # underscores one way
replacements[m6v] = magicc6_sometimes_underscore_replacements[m6v]
elif (m6v in one_way_replacements) and not inverse:
replacements[m6v] = one_way_replacements[m6v]
else:
m7v = m6v.replace("-", "").replace(" ", "").upper()
# i.e. if we've already got a value for the inverse, we don't
# want to overwrite it
if (m7v in replacements.values()) and inverse:
continue
replacements[m6v] = m7v
if inverse:
return {v: k for k, v in replacements.items()}
else:
return replacements | python | def get_magicc6_to_magicc7_variable_mapping(inverse=False):
"""Get the mappings from MAGICC6 to MAGICC7 variables.
Note that this mapping is not one to one. For example, "HFC4310", "HFC43-10" and
"HFC-43-10" in MAGICC6 both map to "HFC4310" in MAGICC7 but "HFC4310" in
MAGICC7 maps back to "HFC4310".
Note that HFC-245fa was mistakenly labelled as HFC-245ca in MAGICC6. In reality,
they are not the same thing. However, the MAGICC6 labelling was merely a typo so
the mapping between the two is one-to-one.
Parameters
----------
inverse : bool
If True, return the inverse mappings i.e. MAGICC7 to MAGICC6 mappings
Returns
-------
dict
Dictionary of mappings
"""
# we generate the mapping dynamically, the first name in the list
# is the one which will be used for inverse mappings
magicc6_simple_mapping_vars = [
"KYOTO-CO2EQ",
"CO2I",
"CO2B",
"CH4",
"N2O",
"BC",
"OC",
"SOx",
"NOx",
"NMVOC",
"CO",
"SF6",
"NH3",
"CF4",
"C2F6",
"HFC4310",
"HFC43-10",
"HFC-43-10",
"HFC4310",
"HFC134a",
"HFC143a",
"HFC227ea",
"CCl4",
"CH3CCl3",
"HFC245fa",
"Halon 1211",
"Halon 1202",
"Halon 1301",
"Halon 2402",
"Halon1211",
"Halon1202",
"Halon1301",
"Halon2402",
"CH3Br",
"CH3Cl",
"C6F14",
]
magicc6_sometimes_hyphen_vars = [
"CFC-11",
"CFC-12",
"CFC-113",
"CFC-114",
"CFC-115",
"HCFC-22",
"HFC-23",
"HFC-32",
"HFC-125",
"HFC-134a",
"HFC-143a",
"HCFC-141b",
"HCFC-142b",
"HFC-227ea",
"HFC-245fa",
]
magicc6_sometimes_hyphen_vars = [
v.replace("-", "") for v in magicc6_sometimes_hyphen_vars
] + magicc6_sometimes_hyphen_vars
magicc6_sometimes_underscore_vars = [
"HFC43_10",
"CFC_11",
"CFC_12",
"CFC_113",
"CFC_114",
"CFC_115",
"HCFC_22",
"HCFC_141b",
"HCFC_142b",
]
magicc6_sometimes_underscore_replacements = {
v: v.replace("_", "") for v in magicc6_sometimes_underscore_vars
}
special_case_replacements = {
"FossilCO2": "CO2I",
"OtherCO2": "CO2B",
"MCF": "CH3CCL3",
"CARB_TET": "CCL4",
"MHALOSUMCFC12EQ": "MHALOSUMCFC12EQ", # special case to avoid confusion with MCF
}
one_way_replacements = {"HFC-245ca": "HFC245FA", "HFC245ca": "HFC245FA"}
all_possible_magicc6_vars = (
magicc6_simple_mapping_vars
+ magicc6_sometimes_hyphen_vars
+ magicc6_sometimes_underscore_vars
+ list(special_case_replacements.keys())
+ list(one_way_replacements.keys())
)
replacements = {}
for m6v in all_possible_magicc6_vars:
if m6v in special_case_replacements:
replacements[m6v] = special_case_replacements[m6v]
elif (
m6v in magicc6_sometimes_underscore_vars and not inverse
): # underscores one way
replacements[m6v] = magicc6_sometimes_underscore_replacements[m6v]
elif (m6v in one_way_replacements) and not inverse:
replacements[m6v] = one_way_replacements[m6v]
else:
m7v = m6v.replace("-", "").replace(" ", "").upper()
# i.e. if we've already got a value for the inverse, we don't
# want to overwrite it
if (m7v in replacements.values()) and inverse:
continue
replacements[m6v] = m7v
if inverse:
return {v: k for k, v in replacements.items()}
else:
        return replacements | ['def', 'get_magicc6_to_magicc7_variable_mapping', '(', 'inverse', '=', 'False', ')', ':', '# we generate the mapping dynamically, the first name in the list', '# is the one which will be used for inverse mappings', 'magicc6_simple_mapping_vars', '=', '[', '"KYOTO-CO2EQ"', ',', '"CO2I"', ',', '"CO2B"', ',', '"CH4"', ',', '"N2O"', ',', '"BC"', ',', '"OC"', ',', '"SOx"', ',', '"NOx"', ',', '"NMVOC"', ',', '"CO"', ',', '"SF6"', ',', '"NH3"', ',', '"CF4"', ',', '"C2F6"', ',', '"HFC4310"', ',', '"HFC43-10"', ',', '"HFC-43-10"', ',', '"HFC4310"', ',', '"HFC134a"', ',', '"HFC143a"', ',', '"HFC227ea"', ',', '"CCl4"', ',', '"CH3CCl3"', ',', '"HFC245fa"', ',', '"Halon 1211"', ',', '"Halon 1202"', ',', '"Halon 1301"', ',', '"Halon 2402"', ',', '"Halon1211"', ',', '"Halon1202"', ',', '"Halon1301"', ',', '"Halon2402"', ',', '"CH3Br"', ',', '"CH3Cl"', ',', '"C6F14"', ',', ']', 'magicc6_sometimes_hyphen_vars', '=', '[', '"CFC-11"', ',', '"CFC-12"', ',', '"CFC-113"', ',', '"CFC-114"', ',', '"CFC-115"', ',', '"HCFC-22"', ',', '"HFC-23"', ',', '"HFC-32"', ',', '"HFC-125"', ',', '"HFC-134a"', ',', '"HFC-143a"', ',', '"HCFC-141b"', ',', '"HCFC-142b"', ',', '"HFC-227ea"', ',', '"HFC-245fa"', ',', ']', 'magicc6_sometimes_hyphen_vars', '=', '[', 'v', '.', 'replace', '(', '"-"', ',', '""', ')', 'for', 'v', 'in', 'magicc6_sometimes_hyphen_vars', ']', '+', 'magicc6_sometimes_hyphen_vars', 'magicc6_sometimes_underscore_vars', '=', '[', '"HFC43_10"', ',', '"CFC_11"', ',', '"CFC_12"', ',', '"CFC_113"', ',', '"CFC_114"', ',', '"CFC_115"', ',', '"HCFC_22"', ',', '"HCFC_141b"', ',', '"HCFC_142b"', ',', ']', 'magicc6_sometimes_underscore_replacements', '=', '{', 'v', ':', 'v', '.', 'replace', '(', '"_"', ',', '""', ')', 'for', 'v', 'in', 'magicc6_sometimes_underscore_vars', '}', 'special_case_replacements', '=', '{', '"FossilCO2"', ':', '"CO2I"', ',', '"OtherCO2"', ':', '"CO2B"', ',', '"MCF"', ':', '"CH3CCL3"', ',', '"CARB_TET"', ':', '"CCL4"', ',', '"MHALOSUMCFC12EQ"', ':', '"MHALOSUMCFC12EQ"', ',', '# special case to avoid confusion with MCF', '}', 'one_way_replacements', '=', '{', '"HFC-245ca"', ':', '"HFC245FA"', ',', '"HFC245ca"', ':', '"HFC245FA"', '}', 'all_possible_magicc6_vars', '=', '(', 'magicc6_simple_mapping_vars', '+', 'magicc6_sometimes_hyphen_vars', '+', 'magicc6_sometimes_underscore_vars', '+', 'list', '(', 'special_case_replacements', '.', 'keys', '(', ')', ')', '+', 'list', '(', 'one_way_replacements', '.', 'keys', '(', ')', ')', ')', 'replacements', '=', '{', '}', 'for', 'm6v', 'in', 'all_possible_magicc6_vars', ':', 'if', 'm6v', 'in', 'special_case_replacements', ':', 'replacements', '[', 'm6v', ']', '=', 'special_case_replacements', '[', 'm6v', ']', 'elif', '(', 'm6v', 'in', 'magicc6_sometimes_underscore_vars', 'and', 'not', 'inverse', ')', ':', '# underscores one way', 'replacements', '[', 'm6v', ']', '=', 'magicc6_sometimes_underscore_replacements', '[', 'm6v', ']', 'elif', '(', 'm6v', 'in', 'one_way_replacements', ')', 'and', 'not', 'inverse', ':', 'replacements', '[', 'm6v', ']', '=', 'one_way_replacements', '[', 'm6v', ']', 'else', ':', 'm7v', '=', 'm6v', '.', 'replace', '(', '"-"', ',', '""', ')', '.', 'replace', '(', '" "', ',', '""', ')', '.', 'upper', '(', ')', "# i.e. if we've already got a value for the inverse, we don't", '# want to overwrite it', 'if', '(', 'm7v', 'in', 'replacements', '.', 'values', '(', ')', ')', 'and', 'inverse', ':', 'continue', 'replacements', '[', 'm6v', ']', '=', 'm7v', 'if', 'inverse', ':', 'return', '{', 'v', ':', 'k', 'for', 'k', ',', 'v', 'in', 'replacements', '.', 'items', '(', ')', '}', 'else', ':', 'return', 'replacements'] | Get the mappings from MAGICC6 to MAGICC7 variables.
Note that this mapping is not one to one. For example, "HFC4310", "HFC43-10" and
"HFC-43-10" in MAGICC6 both map to "HFC4310" in MAGICC7 but "HFC4310" in
MAGICC7 maps back to "HFC4310".
Note that HFC-245fa was mistakenly labelled as HFC-245ca in MAGICC6. In reality,
they are not the same thing. However, the MAGICC6 labelling was merely a typo so
the mapping between the two is one-to-one.
Parameters
----------
inverse : bool
If True, return the inverse mappings i.e. MAGICC7 to MAGICC6 mappings
Returns
-------
dict
Dictionary of mappings | ['Get', 'the', 'mappings', 'from', 'MAGICC6', 'to', 'MAGICC7', 'variables', '.'] | train | https://github.com/openclimatedata/pymagicc/blob/d896014832cf458d1e95e5878fd6d5961f3e2e05/pymagicc/definitions/__init__.py#L426-L562 |
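A few spot checks implied by the replacement tables above (derived by reading the code, not by running MAGICC):

mapping = get_magicc6_to_magicc7_variable_mapping()
assert mapping["FossilCO2"] == "CO2I"      # special-case table
assert mapping["HFC-134a"] == "HFC134A"    # hyphen stripped, upper-cased
assert mapping["HFC-245ca"] == "HFC245FA"  # one-way typo correction

inverse = get_magicc6_to_magicc7_variable_mapping(inverse=True)
assert inverse["HFC134A"] == "HFC134a"     # first-listed spelling wins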
654 | brocade/pynos | pynos/versions/ver_7/ver_7_1_0/yang/brocade_fabric_service.py | brocade_fabric_service.show_fabric_trunk_info_output_show_trunk_list_trunk_list_groups_trunk_list_member_trunk_list_nbr_interface_type | def show_fabric_trunk_info_output_show_trunk_list_trunk_list_groups_trunk_list_member_trunk_list_nbr_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_fabric_trunk_info = ET.Element("show_fabric_trunk_info")
config = show_fabric_trunk_info
output = ET.SubElement(show_fabric_trunk_info, "output")
show_trunk_list = ET.SubElement(output, "show-trunk-list")
trunk_list_groups = ET.SubElement(show_trunk_list, "trunk-list-groups")
trunk_list_member = ET.SubElement(trunk_list_groups, "trunk-list-member")
trunk_list_nbr_interface_type = ET.SubElement(trunk_list_member, "trunk-list-nbr-interface-type")
trunk_list_nbr_interface_type.text = kwargs.pop('trunk_list_nbr_interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config) | python | def show_fabric_trunk_info_output_show_trunk_list_trunk_list_groups_trunk_list_member_trunk_list_nbr_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_fabric_trunk_info = ET.Element("show_fabric_trunk_info")
config = show_fabric_trunk_info
output = ET.SubElement(show_fabric_trunk_info, "output")
show_trunk_list = ET.SubElement(output, "show-trunk-list")
trunk_list_groups = ET.SubElement(show_trunk_list, "trunk-list-groups")
trunk_list_member = ET.SubElement(trunk_list_groups, "trunk-list-member")
trunk_list_nbr_interface_type = ET.SubElement(trunk_list_member, "trunk-list-nbr-interface-type")
trunk_list_nbr_interface_type.text = kwargs.pop('trunk_list_nbr_interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config) | ['def', 'show_fabric_trunk_info_output_show_trunk_list_trunk_list_groups_trunk_list_member_trunk_list_nbr_interface_type', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'config', '=', 'ET', '.', 'Element', '(', '"config"', ')', 'show_fabric_trunk_info', '=', 'ET', '.', 'Element', '(', '"show_fabric_trunk_info"', ')', 'config', '=', 'show_fabric_trunk_info', 'output', '=', 'ET', '.', 'SubElement', '(', 'show_fabric_trunk_info', ',', '"output"', ')', 'show_trunk_list', '=', 'ET', '.', 'SubElement', '(', 'output', ',', '"show-trunk-list"', ')', 'trunk_list_groups', '=', 'ET', '.', 'SubElement', '(', 'show_trunk_list', ',', '"trunk-list-groups"', ')', 'trunk_list_member', '=', 'ET', '.', 'SubElement', '(', 'trunk_list_groups', ',', '"trunk-list-member"', ')', 'trunk_list_nbr_interface_type', '=', 'ET', '.', 'SubElement', '(', 'trunk_list_member', ',', '"trunk-list-nbr-interface-type"', ')', 'trunk_list_nbr_interface_type', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'trunk_list_nbr_interface_type'", ')', 'callback', '=', 'kwargs', '.', 'pop', '(', "'callback'", ',', 'self', '.', '_callback', ')', 'return', 'callback', '(', 'config', ')'] | Auto Generated Code | ['Auto', 'Generated', 'Code'] | train | https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/ver_7/ver_7_1_0/yang/brocade_fabric_service.py#L905-L919 |
655 | celery/django-celery | djcelery/compat.py | python_2_unicode_compatible | def python_2_unicode_compatible(cls):
"""Taken from Django project (django/utils/encoding.py) & modified a bit to
always have __unicode__ method available.
"""
if '__str__' not in cls.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
cls.__name__)
cls.__unicode__ = cls.__str__
if PY2:
cls.__str__ = lambda self: self.__unicode__().encode('utf-8')
return cls | python | def python_2_unicode_compatible(cls):
"""Taken from Django project (django/utils/encoding.py) & modified a bit to
always have __unicode__ method available.
"""
if '__str__' not in cls.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
cls.__name__)
cls.__unicode__ = cls.__str__
if PY2:
cls.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return cls | ['def', 'python_2_unicode_compatible', '(', 'cls', ')', ':', 'if', "'__str__'", 'not', 'in', 'cls', '.', '__dict__', ':', 'raise', 'ValueError', '(', '"@python_2_unicode_compatible cannot be applied "', '"to %s because it doesn\'t define __str__()."', '%', 'cls', '.', '__name__', ')', 'cls', '.', '__unicode__', '=', 'cls', '.', '__str__', 'if', 'PY2', ':', 'cls', '.', '__str__', '=', 'lambda', 'self', ':', 'self', '.', '__unicode__', '(', ')', '.', 'encode', '(', "'utf-8'", ')', 'return', 'cls'] | Taken from the Django project (django/utils/encoding.py) and modified a bit to
always have the __unicode__ method available. | ['Taken', 'from', 'the', 'Django', 'project', '(', 'django', '/', 'utils', '/', 'encoding', '.', 'py', ')', 'and', 'modified', 'a', 'bit', 'to', 'always', 'have', 'the', '__unicode__', 'method', 'available', '.'] | train | https://github.com/celery/django-celery/blob/5d1ecb09c6304d22cc447c7c08fba0bd1febc2ef/djcelery/compat.py#L12-L26 |
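A small sketch of the decorator in use (the class name is invented for illustration):

@python_2_unicode_compatible
class Greeting(object):
    def __str__(self):
        return u'h\xe9llo'  # always written as text

g = Greeting()
# Python 3: str(g) and g.__unicode__() both return the text.
# Python 2: unicode(g) returns the text, str(g) its UTF-8 bytes.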
656 | cloudboss/friend | friend/collections.py | select_dict | def select_dict(coll, key, value):
"""
Given an iterable of dictionaries, return the dictionaries
where the values at a given key match the given value.
If the value is an iterable of objects, the function will
consider any to be a match.
This is especially useful when calling REST APIs which
return arrays of JSON objects. When such a response is
converted to a Python list of dictionaries, it may be
easily filtered using this function.
:param iter coll: An iterable containing dictionaries
:param obj key: A key to search in each dictionary
:param value: A value or iterable of values to match
:type value: obj or iter
:returns: A list of dictionaries matching the query
:rtype: list
:Example:
::
>>> dicts = [
... {'hi': 'bye'},
... {10: 2, 30: 4},
... {'hi': 'hello', 'bye': 'goodbye'},
... ]
>>> select_dict(dicts, 'hi', 'bye')
[{'hi': 'bye'}]
>>> select_dict(dicts, 'hi', ('bye', 'hello'))
[{'hi': 'bye'}, {'hi': 'hello', 'bye': 'goodbye'}]
"""
if getattr(value, '__iter__', None):
iterable = value
else:
iterable = [value]
return [v for v in coll if key in v and v[key] in iterable] | python | def select_dict(coll, key, value):
"""
Given an iterable of dictionaries, return the dictionaries
where the values at a given key match the given value.
If the value is an iterable of objects, the function will
consider any to be a match.
This is especially useful when calling REST APIs which
return arrays of JSON objects. When such a response is
converted to a Python list of dictionaries, it may be
easily filtered using this function.
:param iter coll: An iterable containing dictionaries
:param obj key: A key to search in each dictionary
:param value: A value or iterable of values to match
:type value: obj or iter
:returns: A list of dictionaries matching the query
:rtype: list
:Example:
::
>>> dicts = [
... {'hi': 'bye'},
... {10: 2, 30: 4},
... {'hi': 'hello', 'bye': 'goodbye'},
... ]
>>> select_dict(dicts, 'hi', 'bye')
[{'hi': 'bye'}]
>>> select_dict(dicts, 'hi', ('bye', 'hello'))
[{'hi': 'bye'}, {'hi': 'hello', 'bye': 'goodbye'}]
"""
if getattr(value, '__iter__', None):
iterable = value
else:
iterable = [value]
return [v for v in coll if key in v and v[key] in iterable] | ['def', 'select_dict', '(', 'coll', ',', 'key', ',', 'value', ')', ':', 'if', 'getattr', '(', 'value', ',', "'__iter__'", ',', 'None', ')', ':', 'iterable', '=', 'value', 'else', ':', 'iterable', '=', '[', 'value', ']', 'return', '[', 'v', 'for', 'v', 'in', 'coll', 'if', 'key', 'in', 'v', 'and', 'v', '[', 'key', ']', 'in', 'iterable', ']'] | Given an iterable of dictionaries, return the dictionaries
where the values at a given key match the given value.
If the value is an iterable of objects, the function will
consider any to be a match.
This is especially useful when calling REST APIs which
return arrays of JSON objects. When such a response is
converted to a Python list of dictionaries, it may be
easily filtered using this function.
:param iter coll: An iterable containing dictionaries
:param obj key: A key to search in each dictionary
:param value: A value or iterable of values to match
:type value: obj or iter
:returns: A list of dictionaries matching the query
:rtype: list
:Example:
::
>>> dicts = [
... {'hi': 'bye'},
... {10: 2, 30: 4},
... {'hi': 'hello', 'bye': 'goodbye'},
... ]
>>> select_dict(dicts, 'hi', 'bye')
[{'hi': 'bye'}]
>>> select_dict(dicts, 'hi', ('bye', 'hello'))
[{'hi': 'bye'}, {'hi': 'hello', 'bye': 'goodbye'}] | ['Given', 'an', 'iterable', 'of', 'dictionaries', 'return', 'the', 'dictionaries', 'where', 'the', 'values', 'at', 'a', 'given', 'key', 'match', 'the', 'given', 'value', '.', 'If', 'the', 'value', 'is', 'an', 'iterable', 'of', 'objects', 'the', 'function', 'will', 'consider', 'any', 'to', 'be', 'a', 'match', '.'] | train | https://github.com/cloudboss/friend/blob/3357e6ec849552e3ae9ed28017ff0926e4006e4e/friend/collections.py#L22-L59 |
657 | project-ncl/pnc-cli | pnc_cli/common.py | get_entity | def get_entity(api, entity_id):
"""
Generic "getSpecific" call that calls get_specific with the given id
:param api: api to call get_specific on
    :param entity_id: id of the entity to retrieve
:return: REST entity
"""
response = utils.checked_api_call(api, 'get_specific', id=entity_id)
if response:
return response.content
return | python | def get_entity(api, entity_id):
"""
Generic "getSpecific" call that calls get_specific with the given id
:param api: api to call get_specific on
    :param entity_id: id of the entity to retrieve
:return: REST entity
"""
response = utils.checked_api_call(api, 'get_specific', id=entity_id)
if response:
return response.content
return | ['def', 'get_entity', '(', 'api', ',', 'entity_id', ')', ':', 'response', '=', 'utils', '.', 'checked_api_call', '(', 'api', ',', "'get_specific'", ',', 'id', '=', 'entity_id', ')', 'if', 'response', ':', 'return', 'response', '.', 'content', 'return'] | Generic "getSpecific" call that calls get_specific with the given id
:param api: api to call get_specific on
:param entity_id: id of the entity to retrieve
:return: REST entity | ['Generic', 'getSpecific', 'call', 'that', 'calls', 'get_specific', 'with', 'the', 'given', 'id', ':', 'param', 'api', ':', 'api', 'to', 'call', 'get_specific', 'on', ':', 'param', 'entity_id', ':', 'id', 'of', 'the', 'entity', 'to', 'retrieve', ':', 'return', ':', 'REST', 'entity'] | train | https://github.com/project-ncl/pnc-cli/blob/3dc149bf84928f60a8044ac50b58bbaddd451902/pnc_cli/common.py#L52-L62 |
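A sketch of the intended call pattern (the api object and id below are hypothetical; any generated API object that exposes a get_specific(id=...) method fits):

build_config = get_entity(build_configurations_api, 42)
if build_config is None:
    print('No entity with that id, or the call failed.')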
658 | jrmontag/STLDecompose | stldecompose/forecast_funcs.py | mean | def mean(data, n=3, **kwargs):
"""The mean forecast for the next point is the mean value of the previous ``n`` points in
the series.
Args:
data (np.array): Observed data, presumed to be ordered in time.
n (int): period over which to calculate the mean
Returns:
float: a single-valued forecast for the next value in the series.
"""
# don't start averaging until we've seen n points
if len(data[-n:]) < n:
forecast = np.nan
else:
# nb: we'll keep the forecast as a float
forecast = np.mean(data[-n:])
return forecast | python | def mean(data, n=3, **kwargs):
"""The mean forecast for the next point is the mean value of the previous ``n`` points in
the series.
Args:
data (np.array): Observed data, presumed to be ordered in time.
n (int): period over which to calculate the mean
Returns:
float: a single-valued forecast for the next value in the series.
"""
# don't start averaging until we've seen n points
if len(data[-n:]) < n:
forecast = np.nan
else:
# nb: we'll keep the forecast as a float
forecast = np.mean(data[-n:])
return forecast | ['def', 'mean', '(', 'data', ',', 'n', '=', '3', ',', '*', '*', 'kwargs', ')', ':', "# don't start averaging until we've seen n points", 'if', 'len', '(', 'data', '[', '-', 'n', ':', ']', ')', '<', 'n', ':', 'forecast', '=', 'np', '.', 'nan', 'else', ':', "# nb: we'll keep the forecast as a float", 'forecast', '=', 'np', '.', 'mean', '(', 'data', '[', '-', 'n', ':', ']', ')', 'return', 'forecast'] | The mean forecast for the next point is the mean value of the previous ``n`` points in
the series.
Args:
data (np.array): Observed data, presumed to be ordered in time.
n (int): period over which to calculate the mean
Returns:
float: a single-valued forecast for the next value in the series. | ['The', 'mean', 'forecast', 'for', 'the', 'next', 'point', 'is', 'the', 'mean', 'value', 'of', 'the', 'previous', 'n', 'points', 'in', 'the', 'series', '.'] | train | https://github.com/jrmontag/STLDecompose/blob/f53f89dab4b13618c1cf13f88a01e3e3dc8abdec/stldecompose/forecast_funcs.py#L41-L58 |
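A worked example that follows directly from the definition above:

import numpy as np

data = np.array([1.0, 2.0, 3.0, 4.0])
mean(data, n=3)             # np.mean([2.0, 3.0, 4.0]) -> 3.0
mean(np.array([1.0]), n=3)  # fewer than n points seen -> nan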
659 | pywbem/pywbem | pywbem/_statistics.py | OperationStatistic.stop_timer | def stop_timer(self, request_len, reply_len, server_time=None,
exception=False):
"""
        This is a low-level method called by pywbem at the end of an
operation. It completes the measurement for that operation by capturing
the needed data, and updates the statistics data, if statistics is
enabled for the connection.
Parameters:
request_len (:term:`integer`)
Size of the HTTP body of the CIM-XML request message, in Bytes.
reply_len (:term:`integer`)
Size of the HTTP body of the CIM-XML response message, in Bytes.
exception (:class:`py:bool`)
Boolean that specifies whether an exception was raised while
processing the operation.
          server_time (:class:`py:float`)
Time in seconds that the server optionally returns to the
client in the HTTP response defining the time from when the
server received the request to when it started sending the
response. If `None`, there is no time from the server.
Returns:
float: The elapsed time for the operation that just ended, or
`None` if the statistics container holding this object is not
enabled.
"""
if not self.container.enabled:
return None
# stop the timer
if self._start_time is None:
raise RuntimeError('stop_timer() called without preceding '
'start_timer()')
dt = time.time() - self._start_time
self._start_time = None
self._count += 1
self._time_sum += dt
self._request_len_sum += request_len
self._reply_len_sum += reply_len
if exception:
self._exception_count += 1
if dt > self._time_max:
self._time_max = dt
if dt < self._time_min:
self._time_min = dt
if server_time:
self._server_time_stored = True
self._server_time_sum += server_time
if dt > self._server_time_max:
self._server_time_max = server_time
if dt < self._server_time_min:
self._server_time_min = server_time
if request_len > self._request_len_max:
self._request_len_max = request_len
if request_len < self._request_len_min:
self._request_len_min = request_len
if reply_len > self._reply_len_max:
self._reply_len_max = reply_len
if reply_len < self._reply_len_min:
self._reply_len_min = reply_len
return dt | python | def stop_timer(self, request_len, reply_len, server_time=None,
exception=False):
"""
        This is a low-level method called by pywbem at the end of an
operation. It completes the measurement for that operation by capturing
the needed data, and updates the statistics data, if statistics is
enabled for the connection.
Parameters:
request_len (:term:`integer`)
Size of the HTTP body of the CIM-XML request message, in Bytes.
reply_len (:term:`integer`)
Size of the HTTP body of the CIM-XML response message, in Bytes.
exception (:class:`py:bool`)
Boolean that specifies whether an exception was raised while
processing the operation.
          server_time (:class:`py:float`)
Time in seconds that the server optionally returns to the
client in the HTTP response defining the time from when the
server received the request to when it started sending the
response. If `None`, there is no time from the server.
Returns:
float: The elapsed time for the operation that just ended, or
`None` if the statistics container holding this object is not
enabled.
"""
if not self.container.enabled:
return None
# stop the timer
if self._start_time is None:
raise RuntimeError('stop_timer() called without preceding '
'start_timer()')
dt = time.time() - self._start_time
self._start_time = None
self._count += 1
self._time_sum += dt
self._request_len_sum += request_len
self._reply_len_sum += reply_len
if exception:
self._exception_count += 1
if dt > self._time_max:
self._time_max = dt
if dt < self._time_min:
self._time_min = dt
if server_time:
self._server_time_stored = True
self._server_time_sum += server_time
if dt > self._server_time_max:
self._server_time_max = server_time
if dt < self._server_time_min:
self._server_time_min = server_time
if request_len > self._request_len_max:
self._request_len_max = request_len
if request_len < self._request_len_min:
self._request_len_min = request_len
if reply_len > self._reply_len_max:
self._reply_len_max = reply_len
if reply_len < self._reply_len_min:
self._reply_len_min = reply_len
        return dt | ['def', 'stop_timer', '(', 'self', ',', 'request_len', ',', 'reply_len', ',', 'server_time', '=', 'None', ',', 'exception', '=', 'False', ')', ':', 'if', 'not', 'self', '.', 'container', '.', 'enabled', ':', 'return', 'None', '# stop the timer', 'if', 'self', '.', '_start_time', 'is', 'None', ':', 'raise', 'RuntimeError', '(', "'stop_timer() called without preceding '", "'start_timer()'", ')', 'dt', '=', 'time', '.', 'time', '(', ')', '-', 'self', '.', '_start_time', 'self', '.', '_start_time', '=', 'None', 'self', '.', '_count', '+=', '1', 'self', '.', '_time_sum', '+=', 'dt', 'self', '.', '_request_len_sum', '+=', 'request_len', 'self', '.', '_reply_len_sum', '+=', 'reply_len', 'if', 'exception', ':', 'self', '.', '_exception_count', '+=', '1', 'if', 'dt', '>', 'self', '.', '_time_max', ':', 'self', '.', '_time_max', '=', 'dt', 'if', 'dt', '<', 'self', '.', '_time_min', ':', 'self', '.', '_time_min', '=', 'dt', 'if', 'server_time', ':', 'self', '.', '_server_time_stored', '=', 'True', 'self', '.', '_server_time_sum', '+=', 'server_time', 'if', 'dt', '>', 'self', '.', '_server_time_max', ':', 'self', '.', '_server_time_max', '=', 'server_time', 'if', 'dt', '<', 'self', '.', '_server_time_min', ':', 'self', '.', '_server_time_min', '=', 'server_time', 'if', 'request_len', '>', 'self', '.', '_request_len_max', ':', 'self', '.', '_request_len_max', '=', 'request_len', 'if', 'request_len', '<', 'self', '.', '_request_len_min', ':', 'self', '.', '_request_len_min', '=', 'request_len', 'if', 'reply_len', '>', 'self', '.', '_reply_len_max', ':', 'self', '.', '_reply_len_max', '=', 'reply_len', 'if', 'reply_len', '<', 'self', '.', '_reply_len_min', ':', 'self', '.', '_reply_len_min', '=', 'reply_len', 'return', 'dt'] | This is a low-level method called by pywbem at the end of an
operation. It completes the measurement for that operation by capturing
the needed data, and updates the statistics data, if statistics is
enabled for the connection.
Parameters:
request_len (:term:`integer`)
Size of the HTTP body of the CIM-XML request message, in Bytes.
reply_len (:term:`integer`)
Size of the HTTP body of the CIM-XML response message, in Bytes.
exception (:class:`py:bool`)
Boolean that specifies whether an exception was raised while
processing the operation.
  server_time (:class:`py:float`)
Time in seconds that the server optionally returns to the
client in the HTTP response defining the time from when the
server received the request to when it started sending the
response. If `None`, there is no time from the server.
Returns:
float: The elapsed time for the operation that just ended, or
`None` if the statistics container holding this object is not
enabled. | ['This', 'is', 'a', 'low', '-', 'level', 'method', 'called', 'by', 'pywbem', 'at', 'the', 'end', 'of', 'an', 'operation', '.', 'It', 'completes', 'the', 'measurement', 'for', 'that', 'operation', 'by', 'capturing', 'the', 'needed', 'data', 'and', 'updates', 'the', 'statistics', 'data', 'if', 'statistics', 'is', 'enabled', 'for', 'the', 'connection', '.'] | train | https://github.com/pywbem/pywbem/blob/e54ecb82c2211e289a268567443d60fdd489f1e4/pywbem/_statistics.py#L398-L470 |
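A sketch of the intended pairing with start_timer() (the variable name and the start_timer() call are assumptions based on the docstring and the RuntimeError message, not a verbatim pywbem session):

op_stat.start_timer()                   # begins the measurement
# ... send the CIM-XML request and receive the response ...
elapsed = op_stat.stop_timer(request_len=1200, reply_len=5800,
                             server_time=0.04)
# elapsed is None whenever the statistics container is disabled.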
660 | tanghaibao/jcvi | jcvi/compara/quota.py | get_1D_overlap | def get_1D_overlap(eclusters, depth=1):
"""
Find blocks that are 1D overlapping,
returns cliques of block ids that are in conflict
"""
overlap_set = set()
active = set()
ends = []
for i, (chr, left, right) in enumerate(eclusters):
ends.append((chr, left, 0, i)) # 0/1 for left/right-ness
ends.append((chr, right, 1, i))
ends.sort()
chr_last = ""
for chr, pos, left_right, i in ends:
if chr != chr_last:
active.clear()
if left_right == 0:
active.add(i)
else:
active.remove(i)
if len(active) > depth:
overlap_set.add(tuple(sorted(active)))
chr_last = chr
return overlap_set | python | def get_1D_overlap(eclusters, depth=1):
"""
Find blocks that are 1D overlapping,
returns cliques of block ids that are in conflict
"""
overlap_set = set()
active = set()
ends = []
for i, (chr, left, right) in enumerate(eclusters):
ends.append((chr, left, 0, i)) # 0/1 for left/right-ness
ends.append((chr, right, 1, i))
ends.sort()
chr_last = ""
for chr, pos, left_right, i in ends:
if chr != chr_last:
active.clear()
if left_right == 0:
active.add(i)
else:
active.remove(i)
if len(active) > depth:
overlap_set.add(tuple(sorted(active)))
chr_last = chr
return overlap_set | ['def', 'get_1D_overlap', '(', 'eclusters', ',', 'depth', '=', '1', ')', ':', 'overlap_set', '=', 'set', '(', ')', 'active', '=', 'set', '(', ')', 'ends', '=', '[', ']', 'for', 'i', ',', '(', 'chr', ',', 'left', ',', 'right', ')', 'in', 'enumerate', '(', 'eclusters', ')', ':', 'ends', '.', 'append', '(', '(', 'chr', ',', 'left', ',', '0', ',', 'i', ')', ')', '# 0/1 for left/right-ness', 'ends', '.', 'append', '(', '(', 'chr', ',', 'right', ',', '1', ',', 'i', ')', ')', 'ends', '.', 'sort', '(', ')', 'chr_last', '=', '""', 'for', 'chr', ',', 'pos', ',', 'left_right', ',', 'i', 'in', 'ends', ':', 'if', 'chr', '!=', 'chr_last', ':', 'active', '.', 'clear', '(', ')', 'if', 'left_right', '==', '0', ':', 'active', '.', 'add', '(', 'i', ')', 'else', ':', 'active', '.', 'remove', '(', 'i', ')', 'if', 'len', '(', 'active', ')', '>', 'depth', ':', 'overlap_set', '.', 'add', '(', 'tuple', '(', 'sorted', '(', 'active', ')', ')', ')', 'chr_last', '=', 'chr', 'return', 'overlap_set'] | Find blocks that are 1D overlapping,
returns cliques of block ids that are in conflict | ['Find', 'blocks', 'that', 'are', '1D', 'overlapping', 'returns', 'cliques', 'of', 'block', 'ids', 'that', 'are', 'in', 'conflict'] | train | https://github.com/tanghaibao/jcvi/blob/d2e31a77b6ade7f41f3b321febc2b4744d1cdeca/jcvi/compara/quota.py#L33-L61 |
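A worked trace of the sweep above (coordinates invented for illustration). With the default depth=1, every moment at which more than one block is active yields a conflict clique:

eclusters = [("chr1", 0, 10), ("chr1", 5, 15), ("chr1", 8, 20)]
get_1D_overlap(eclusters)
# -> {(0, 1), (0, 1, 2), (1, 2)}, the ids of blocks active together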
661 | spyder-ide/spyder | spyder/plugins/plots/widgets/figurebrowser.py | FigureThumbnail.eventFilter | def eventFilter(self, widget, event):
"""
A filter that is used to send a signal when the figure canvas is
clicked.
"""
if event.type() == QEvent.MouseButtonPress:
if event.button() == Qt.LeftButton:
self.sig_canvas_clicked.emit(self)
return super(FigureThumbnail, self).eventFilter(widget, event) | python | def eventFilter(self, widget, event):
"""
A filter that is used to send a signal when the figure canvas is
clicked.
"""
if event.type() == QEvent.MouseButtonPress:
if event.button() == Qt.LeftButton:
self.sig_canvas_clicked.emit(self)
return super(FigureThumbnail, self).eventFilter(widget, event) | ['def', 'eventFilter', '(', 'self', ',', 'widget', ',', 'event', ')', ':', 'if', 'event', '.', 'type', '(', ')', '==', 'QEvent', '.', 'MouseButtonPress', ':', 'if', 'event', '.', 'button', '(', ')', '==', 'Qt', '.', 'LeftButton', ':', 'self', '.', 'sig_canvas_clicked', '.', 'emit', '(', 'self', ')', 'return', 'super', '(', 'FigureThumbnail', ',', 'self', ')', '.', 'eventFilter', '(', 'widget', ',', 'event', ')'] | A filter that is used to send a signal when the figure canvas is
clicked. | ['A', 'filter', 'that', 'is', 'used', 'to', 'send', 'a', 'signal', 'when', 'the', 'figure', 'canvas', 'is', 'clicked', '.'] | train | https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/plots/widgets/figurebrowser.py#L809-L817 |
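For this filter to receive canvas events, the thumbnail must be installed as an event filter on its canvas widget; a sketch of the usual Qt wiring (the canvas attribute name is an assumption):

# typically done once when the thumbnail is constructed
thumbnail.canvas.installEventFilter(thumbnail)
# consumers then react to the click signal
thumbnail.sig_canvas_clicked.connect(on_thumbnail_clicked)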
662 | RudolfCardinal/pythonlib | cardinal_pythonlib/extract_text.py | convert_odt_to_text | def convert_odt_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts an OpenOffice ODT file to text.
Pass either a filename or a binary object.
"""
# We can't use exactly the same method as for DOCX files, using docx:
# sometimes that works, but sometimes it falls over with:
# KeyError: "There is no item named 'word/document.xml' in the archive"
with get_filelikeobject(filename, blob) as fp:
z = zipfile.ZipFile(fp)
tree = ElementTree.fromstring(z.read('content.xml'))
# ... may raise zipfile.BadZipfile
textlist = [] # type: List[str]
for element in tree.iter():
if element.text:
textlist.append(element.text.strip())
return '\n\n'.join(textlist) | python | def convert_odt_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts an OpenOffice ODT file to text.
Pass either a filename or a binary object.
"""
# We can't use exactly the same method as for DOCX files, using docx:
# sometimes that works, but sometimes it falls over with:
# KeyError: "There is no item named 'word/document.xml' in the archive"
with get_filelikeobject(filename, blob) as fp:
z = zipfile.ZipFile(fp)
tree = ElementTree.fromstring(z.read('content.xml'))
# ... may raise zipfile.BadZipfile
textlist = [] # type: List[str]
for element in tree.iter():
if element.text:
textlist.append(element.text.strip())
return '\n\n'.join(textlist) | ['def', 'convert_odt_to_text', '(', 'filename', ':', 'str', '=', 'None', ',', 'blob', ':', 'bytes', '=', 'None', ',', 'config', ':', 'TextProcessingConfig', '=', '_DEFAULT_CONFIG', ')', '->', 'str', ':', "# We can't use exactly the same method as for DOCX files, using docx:", '# sometimes that works, but sometimes it falls over with:', '# KeyError: "There is no item named \'word/document.xml\' in the archive"', 'with', 'get_filelikeobject', '(', 'filename', ',', 'blob', ')', 'as', 'fp', ':', 'z', '=', 'zipfile', '.', 'ZipFile', '(', 'fp', ')', 'tree', '=', 'ElementTree', '.', 'fromstring', '(', 'z', '.', 'read', '(', "'content.xml'", ')', ')', '# ... may raise zipfile.BadZipfile', 'textlist', '=', '[', ']', '# type: List[str]', 'for', 'element', 'in', 'tree', '.', 'iter', '(', ')', ':', 'if', 'element', '.', 'text', ':', 'textlist', '.', 'append', '(', 'element', '.', 'text', '.', 'strip', '(', ')', ')', 'return', "'\\n\\n'", '.', 'join', '(', 'textlist', ')'] | Converts an OpenOffice ODT file to text.
Pass either a filename or a binary object. | ['Converts', 'an', 'OpenOffice', 'ODT', 'file', 'to', 'text', '.'] | train | https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/extract_text.py#L972-L991 |
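A usage sketch for the converter above (the file name is hypothetical; pass either filename= or blob=, as the docstring says):

text = convert_odt_to_text(filename='report.odt')

with open('report.odt', 'rb') as f:
    same_text = convert_odt_to_text(blob=f.read())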
663 | hotdoc/hotdoc | hotdoc/extensions/c/clang/cindex.py | Type.element_count | def element_count(self):
"""Retrieve the number of elements in this type.
Returns an int.
If the Type is not an array or vector, this raises.
"""
result = conf.lib.clang_getNumElements(self)
if result < 0:
raise Exception('Type does not have elements.')
return result | python | def element_count(self):
"""Retrieve the number of elements in this type.
Returns an int.
If the Type is not an array or vector, this raises.
"""
result = conf.lib.clang_getNumElements(self)
if result < 0:
raise Exception('Type does not have elements.')
return result | ['def', 'element_count', '(', 'self', ')', ':', 'result', '=', 'conf', '.', 'lib', '.', 'clang_getNumElements', '(', 'self', ')', 'if', 'result', '<', '0', ':', 'raise', 'Exception', '(', "'Type does not have elements.'", ')', 'return', 'result'] | Retrieve the number of elements in this type.
Returns an int.
If the Type is not an array or vector, this raises. | ['Retrieve', 'the', 'number', 'of', 'elements', 'in', 'this', 'type', '.'] | train | https://github.com/hotdoc/hotdoc/blob/1067cdc8482b585b364a38fb52ca5d904e486280/hotdoc/extensions/c/clang/cindex.py#L2026-L2037 |
664 | ska-sa/katcp-python | katcp/server.py | MessageHandlerThread.stop | def stop(self, timeout=1.0):
"""Stop the handler thread (from another thread).
Parameters
----------
timeout : float, optional
Seconds to wait for server to have *started*.
"""
if timeout:
self._running.wait(timeout)
self._running.clear()
# Make sure to wake the run thread.
self._wake.set() | python | def stop(self, timeout=1.0):
"""Stop the handler thread (from another thread).
Parameters
----------
timeout : float, optional
Seconds to wait for server to have *started*.
"""
if timeout:
self._running.wait(timeout)
self._running.clear()
# Make sure to wake the run thread.
self._wake.set() | ['def', 'stop', '(', 'self', ',', 'timeout', '=', '1.0', ')', ':', 'if', 'timeout', ':', 'self', '.', '_running', '.', 'wait', '(', 'timeout', ')', 'self', '.', '_running', '.', 'clear', '(', ')', '# Make sure to wake the run thread.', 'self', '.', '_wake', '.', 'set', '(', ')'] | Stop the handler thread (from another thread).
Parameters
----------
timeout : float, optional
Seconds to wait for server to have *started*. | ['Stop', 'the', 'handler', 'thread', '(', 'from', 'another', 'thread', ')', '.'] | train | https://github.com/ska-sa/katcp-python/blob/9127c826a1d030c53b84d0e95743e20e5c5ea153/katcp/server.py#L877-L890 |
665 | pantsbuild/pants | src/python/pants/build_graph/target.py | Target.create_sources_field | def create_sources_field(self, sources, sources_rel_path, key_arg=None):
"""Factory method to create a SourcesField appropriate for the type of the sources object.
Note that this method is called before the call to Target.__init__ so don't expect fields to
be populated!
:API: public
:return: a payload field object representing the sources parameter
:rtype: SourcesField
"""
if not sources:
sources = FilesetWithSpec.empty(sources_rel_path)
elif not isinstance(sources, FilesetWithSpec):
key_arg_section = "'{}' to be ".format(key_arg) if key_arg else ""
raise TargetDefinitionException(self, "Expected {}a glob, an address or a list, but was {}"
.format(key_arg_section, type(sources)))
return SourcesField(sources=sources) | python | def create_sources_field(self, sources, sources_rel_path, key_arg=None):
"""Factory method to create a SourcesField appropriate for the type of the sources object.
Note that this method is called before the call to Target.__init__ so don't expect fields to
be populated!
:API: public
:return: a payload field object representing the sources parameter
:rtype: SourcesField
"""
if not sources:
sources = FilesetWithSpec.empty(sources_rel_path)
elif not isinstance(sources, FilesetWithSpec):
key_arg_section = "'{}' to be ".format(key_arg) if key_arg else ""
raise TargetDefinitionException(self, "Expected {}a glob, an address or a list, but was {}"
.format(key_arg_section, type(sources)))
return SourcesField(sources=sources) | ['def', 'create_sources_field', '(', 'self', ',', 'sources', ',', 'sources_rel_path', ',', 'key_arg', '=', 'None', ')', ':', 'if', 'not', 'sources', ':', 'sources', '=', 'FilesetWithSpec', '.', 'empty', '(', 'sources_rel_path', ')', 'elif', 'not', 'isinstance', '(', 'sources', ',', 'FilesetWithSpec', ')', ':', 'key_arg_section', '=', '"\'{}\' to be "', '.', 'format', '(', 'key_arg', ')', 'if', 'key_arg', 'else', '""', 'raise', 'TargetDefinitionException', '(', 'self', ',', '"Expected {}a glob, an address or a list, but was {}"', '.', 'format', '(', 'key_arg_section', ',', 'type', '(', 'sources', ')', ')', ')', 'return', 'SourcesField', '(', 'sources', '=', 'sources', ')'] | Factory method to create a SourcesField appropriate for the type of the sources object.
Note that this method is called before the call to Target.__init__ so don't expect fields to
be populated!
:API: public
:return: a payload field object representing the sources parameter
:rtype: SourcesField | ['Factory', 'method', 'to', 'create', 'a', 'SourcesField', 'appropriate', 'for', 'the', 'type', 'of', 'the', 'sources', 'object', '.'] | train | https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L852-L870 |
666 | AndrewAnnex/SpiceyPy | spiceypy/spiceypy.py | surfnm | def surfnm(a, b, c, point):
"""
This routine computes the outward-pointing, unit normal vector
from a point on the surface of an ellipsoid.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/surfnm_c.html
    :param a: Length of the ellipsoid semi-axis along the x-axis.
    :type a: float
    :param b: Length of the ellipsoid semi-axis along the y-axis.
    :type b: float
    :param c: Length of the ellipsoid semi-axis along the z-axis.
    :type c: float
    :param point: Body-fixed coordinates of a point on the ellipsoid.
:type point: 3-Element Array of floats
:return: Outward pointing unit normal to ellipsoid at point.
:rtype: 3-Element Array of floats
"""
a = ctypes.c_double(a)
b = ctypes.c_double(b)
c = ctypes.c_double(c)
point = stypes.toDoubleVector(point)
normal = stypes.emptyDoubleVector(3)
libspice.surfnm_c(a, b, c, point, normal)
return stypes.cVectorToPython(normal) | python | def surfnm(a, b, c, point):
"""
This routine computes the outward-pointing, unit normal vector
from a point on the surface of an ellipsoid.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/surfnm_c.html
    :param a: Length of the ellipsoid semi-axis along the x-axis.
    :type a: float
    :param b: Length of the ellipsoid semi-axis along the y-axis.
    :type b: float
    :param c: Length of the ellipsoid semi-axis along the z-axis.
    :type c: float
    :param point: Body-fixed coordinates of a point on the ellipsoid.
:type point: 3-Element Array of floats
:return: Outward pointing unit normal to ellipsoid at point.
:rtype: 3-Element Array of floats
"""
a = ctypes.c_double(a)
b = ctypes.c_double(b)
c = ctypes.c_double(c)
point = stypes.toDoubleVector(point)
normal = stypes.emptyDoubleVector(3)
libspice.surfnm_c(a, b, c, point, normal)
return stypes.cVectorToPython(normal) | ['def', 'surfnm', '(', 'a', ',', 'b', ',', 'c', ',', 'point', ')', ':', 'a', '=', 'ctypes', '.', 'c_double', '(', 'a', ')', 'b', '=', 'ctypes', '.', 'c_double', '(', 'b', ')', 'c', '=', 'ctypes', '.', 'c_double', '(', 'c', ')', 'point', '=', 'stypes', '.', 'toDoubleVector', '(', 'point', ')', 'normal', '=', 'stypes', '.', 'emptyDoubleVector', '(', '3', ')', 'libspice', '.', 'surfnm_c', '(', 'a', ',', 'b', ',', 'c', ',', 'point', ',', 'normal', ')', 'return', 'stypes', '.', 'cVectorToPython', '(', 'normal', ')'] | This routine computes the outward-pointing, unit normal vector
from a point on the surface of an ellipsoid.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/surfnm_c.html
:param a: Length of the ellipsoid semi-axis along the x-axis.
:type a: float
:param b: Length of the ellipsoid semi-axis along the y-axis.
:type b: float
:param c: Length of the ellipsoid semi-axis along the z-axis.
:type c: float
:param point: Body-fixed coordinates of a point on the ellipsoid.
:type point: 3-Element Array of floats
:return: Outward pointing unit normal to ellipsoid at point.
:rtype: 3-Element Array of floats | ['This', 'routine', 'computes', 'the', 'outward', '-', 'pointing', 'unit', 'normal', 'vector', 'from', 'a', 'point', 'on', 'the', 'surface', 'of', 'an', 'ellipsoid', '.'] | train | https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L13575-L13599 |
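A quick numeric check of the routine above (the call is purely geometric, so no SPICE kernels need to be loaded):

import spiceypy

# Unit sphere: the outward normal at a surface point is the point itself.
spiceypy.surfnm(1.0, 1.0, 1.0, [1.0, 0.0, 0.0])  # -> array([1., 0., 0.])
# Oblate ellipsoid: at the pole the normal is still the polar axis.
spiceypy.surfnm(2.0, 2.0, 1.0, [0.0, 0.0, 1.0])  # -> array([0., 0., 1.])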
667 | tensorflow/tensor2tensor | tensor2tensor/layers/common_attention.py | masked_local_attention_2d | def masked_local_attention_2d(q,
k,
v,
query_shape=(8, 16),
memory_flange=(8, 16),
name=None):
"""Strided block local self-attention.
Each position in a query block can attend to all the generated queries in
the query block, which are generated in raster scan, and positions that are
generated to the left and top. The shapes are specified by query shape and
memory flange. Note that if you're using this function, you do not need to
right shift. Right shifting happens inside this function separately for each
block.
Args:
q: a Tensor with shape [batch, heads, h, w, depth_k]
k: a Tensor with shape [batch, heads, h, w, depth_k]
v: a Tensor with shape [batch, heads, h, w, depth_v]. In the current
implementation, depth_v must be equal to depth_k.
    query_shape: a tuple indicating the height and width of each query block.
query_shape = block_shape
    memory_flange: an integer indicating how far to look in height and width
from each query block.
memory shape = query_shape + (block_flange[0], 2*block_flange[1])
name: an optional string
Returns:
a Tensor of shape [batch, heads, h, w, depth_v]
"""
with tf.variable_scope(
name, default_name="local_masked_self_attention_2d", values=[q, k, v]):
v_shape = common_layers.shape_list(v)
# Pad query to ensure multiple of corresponding lengths.
q = pad_to_multiple_2d(q, query_shape)
# Set up query blocks.
q_indices = gather_indices_2d(q, query_shape, query_shape)
q_new = gather_blocks_2d(q, q_indices)
# Set up key and value blocks.
k_flange, k_center = get_memory_region(k, query_shape, memory_flange,
q_indices)
v_flange, v_center = get_memory_region(v, query_shape, memory_flange,
q_indices)
if k_flange is not None:
k_new = tf.concat([k_flange, k_center], axis=3)
v_new = tf.concat([v_flange, v_center], axis=3)
else:
k_new = k_center
v_new = v_center
# Set up the masks.
query_elements = np.prod(query_shape)
padding_mask = None
if k_flange is not None:
padding_mask = tf.expand_dims(
embedding_to_padding(k_flange) * -1e9, axis=-2)
padding_mask = tf.tile(padding_mask, [1, 1, 1, query_elements, 1])
center_attention_bias = attention_bias_lower_triangle(
np.prod(query_elements))
center_attention_bias = tf.reshape(
center_attention_bias, [1, 1, 1, query_elements, query_elements])
v_center_shape = common_layers.shape_list(v_center)
center_attention_bias = tf.tile(
center_attention_bias,
[v_center_shape[0], v_center_shape[1], v_center_shape[2], 1, 1])
if padding_mask is not None:
# Combine the mask for padding and visible region.
attention_bias = tf.concat([padding_mask, center_attention_bias], axis=4)
else:
attention_bias = center_attention_bias
output = dot_product_attention(
q_new,
k_new,
v_new,
attention_bias,
dropout_rate=0.,
name="masked_local_2d",
make_image_summary=False)
# Put representations back into original shapes.
padded_q_shape = common_layers.shape_list(q)
output = scatter_blocks_2d(output, q_indices, padded_q_shape)
# Remove the padding if introduced.
output = tf.slice(output, [0, 0, 0, 0, 0],
[-1, -1, v_shape[2], v_shape[3], -1])
return output | python | def masked_local_attention_2d(q,
k,
v,
query_shape=(8, 16),
memory_flange=(8, 16),
name=None):
"""Strided block local self-attention.
Each position in a query block can attend to all the generated queries in
the query block, which are generated in raster scan, and positions that are
generated to the left and top. The shapes are specified by query shape and
memory flange. Note that if you're using this function, you do not need to
right shift. Right shifting happens inside this function separately for each
block.
Args:
q: a Tensor with shape [batch, heads, h, w, depth_k]
k: a Tensor with shape [batch, heads, h, w, depth_k]
v: a Tensor with shape [batch, heads, h, w, depth_v]. In the current
implementation, depth_v must be equal to depth_k.
query_shape: an tuple indicating the height and width of each query block.
query_shape = block_shape
memory_flange: an integer indicating how much to look in height and width
from each query block.
memory shape = query_shape + (block_flange[0], 2*block_flange[1])
name: an optional string
Returns:
a Tensor of shape [batch, heads, h, w, depth_v]
"""
with tf.variable_scope(
name, default_name="local_masked_self_attention_2d", values=[q, k, v]):
v_shape = common_layers.shape_list(v)
# Pad query to ensure multiple of corresponding lengths.
q = pad_to_multiple_2d(q, query_shape)
# Set up query blocks.
q_indices = gather_indices_2d(q, query_shape, query_shape)
q_new = gather_blocks_2d(q, q_indices)
# Set up key and value blocks.
k_flange, k_center = get_memory_region(k, query_shape, memory_flange,
q_indices)
v_flange, v_center = get_memory_region(v, query_shape, memory_flange,
q_indices)
if k_flange is not None:
k_new = tf.concat([k_flange, k_center], axis=3)
v_new = tf.concat([v_flange, v_center], axis=3)
else:
k_new = k_center
v_new = v_center
# Set up the masks.
query_elements = np.prod(query_shape)
padding_mask = None
if k_flange is not None:
padding_mask = tf.expand_dims(
embedding_to_padding(k_flange) * -1e9, axis=-2)
padding_mask = tf.tile(padding_mask, [1, 1, 1, query_elements, 1])
center_attention_bias = attention_bias_lower_triangle(
np.prod(query_elements))
center_attention_bias = tf.reshape(
center_attention_bias, [1, 1, 1, query_elements, query_elements])
v_center_shape = common_layers.shape_list(v_center)
center_attention_bias = tf.tile(
center_attention_bias,
[v_center_shape[0], v_center_shape[1], v_center_shape[2], 1, 1])
if padding_mask is not None:
# Combine the mask for padding and visible region.
attention_bias = tf.concat([padding_mask, center_attention_bias], axis=4)
else:
attention_bias = center_attention_bias
output = dot_product_attention(
q_new,
k_new,
v_new,
attention_bias,
dropout_rate=0.,
name="masked_local_2d",
make_image_summary=False)
# Put representations back into original shapes.
padded_q_shape = common_layers.shape_list(q)
output = scatter_blocks_2d(output, q_indices, padded_q_shape)
# Remove the padding if introduced.
output = tf.slice(output, [0, 0, 0, 0, 0],
[-1, -1, v_shape[2], v_shape[3], -1])
return output | ['def', 'masked_local_attention_2d', '(', 'q', ',', 'k', ',', 'v', ',', 'query_shape', '=', '(', '8', ',', '16', ')', ',', 'memory_flange', '=', '(', '8', ',', '16', ')', ',', 'name', '=', 'None', ')', ':', 'with', 'tf', '.', 'variable_scope', '(', 'name', ',', 'default_name', '=', '"local_masked_self_attention_2d"', ',', 'values', '=', '[', 'q', ',', 'k', ',', 'v', ']', ')', ':', 'v_shape', '=', 'common_layers', '.', 'shape_list', '(', 'v', ')', '# Pad query to ensure multiple of corresponding lengths.', 'q', '=', 'pad_to_multiple_2d', '(', 'q', ',', 'query_shape', ')', '# Set up query blocks.', 'q_indices', '=', 'gather_indices_2d', '(', 'q', ',', 'query_shape', ',', 'query_shape', ')', 'q_new', '=', 'gather_blocks_2d', '(', 'q', ',', 'q_indices', ')', '# Set up key and value blocks.', 'k_flange', ',', 'k_center', '=', 'get_memory_region', '(', 'k', ',', 'query_shape', ',', 'memory_flange', ',', 'q_indices', ')', 'v_flange', ',', 'v_center', '=', 'get_memory_region', '(', 'v', ',', 'query_shape', ',', 'memory_flange', ',', 'q_indices', ')', 'if', 'k_flange', 'is', 'not', 'None', ':', 'k_new', '=', 'tf', '.', 'concat', '(', '[', 'k_flange', ',', 'k_center', ']', ',', 'axis', '=', '3', ')', 'v_new', '=', 'tf', '.', 'concat', '(', '[', 'v_flange', ',', 'v_center', ']', ',', 'axis', '=', '3', ')', 'else', ':', 'k_new', '=', 'k_center', 'v_new', '=', 'v_center', '# Set up the masks.', 'query_elements', '=', 'np', '.', 'prod', '(', 'query_shape', ')', 'padding_mask', '=', 'None', 'if', 'k_flange', 'is', 'not', 'None', ':', 'padding_mask', '=', 'tf', '.', 'expand_dims', '(', 'embedding_to_padding', '(', 'k_flange', ')', '*', '-', '1e9', ',', 'axis', '=', '-', '2', ')', 'padding_mask', '=', 'tf', '.', 'tile', '(', 'padding_mask', ',', '[', '1', ',', '1', ',', '1', ',', 'query_elements', ',', '1', ']', ')', 'center_attention_bias', '=', 'attention_bias_lower_triangle', '(', 'np', '.', 'prod', '(', 'query_elements', ')', ')', 'center_attention_bias', '=', 'tf', '.', 'reshape', '(', 'center_attention_bias', ',', '[', '1', ',', '1', ',', '1', ',', 'query_elements', ',', 'query_elements', ']', ')', 'v_center_shape', '=', 'common_layers', '.', 'shape_list', '(', 'v_center', ')', 'center_attention_bias', '=', 'tf', '.', 'tile', '(', 'center_attention_bias', ',', '[', 'v_center_shape', '[', '0', ']', ',', 'v_center_shape', '[', '1', ']', ',', 'v_center_shape', '[', '2', ']', ',', '1', ',', '1', ']', ')', 'if', 'padding_mask', 'is', 'not', 'None', ':', '# Combine the mask for padding and visible region.', 'attention_bias', '=', 'tf', '.', 'concat', '(', '[', 'padding_mask', ',', 'center_attention_bias', ']', ',', 'axis', '=', '4', ')', 'else', ':', 'attention_bias', '=', 'center_attention_bias', 'output', '=', 'dot_product_attention', '(', 'q_new', ',', 'k_new', ',', 'v_new', ',', 'attention_bias', ',', 'dropout_rate', '=', '0.', ',', 'name', '=', '"masked_local_2d"', ',', 'make_image_summary', '=', 'False', ')', '# Put representations back into original shapes.', 'padded_q_shape', '=', 'common_layers', '.', 'shape_list', '(', 'q', ')', 'output', '=', 'scatter_blocks_2d', '(', 'output', ',', 'q_indices', ',', 'padded_q_shape', ')', '# Remove the padding if introduced.', 'output', '=', 'tf', '.', 'slice', '(', 'output', ',', '[', '0', ',', '0', ',', '0', ',', '0', ',', '0', ']', ',', '[', '-', '1', ',', '-', '1', ',', 'v_shape', '[', '2', ']', ',', 'v_shape', '[', '3', ']', ',', '-', '1', ']', ')', 'return', 'output'] | Strided block local self-attention.
Each position in a query block can attend to all the generated queries in
the query block, which are generated in raster scan, and positions that are
generated to the left and top. The shapes are specified by query shape and
memory flange. Note that if you're using this function, you do not need to
right shift. Right shifting happens inside this function separately for each
block.
Args:
q: a Tensor with shape [batch, heads, h, w, depth_k]
k: a Tensor with shape [batch, heads, h, w, depth_k]
v: a Tensor with shape [batch, heads, h, w, depth_v]. In the current
implementation, depth_v must be equal to depth_k.
query_shape: an tuple indicating the height and width of each query block.
query_shape = block_shape
memory_flange: an integer indicating how much to look in height and width
from each query block.
memory shape = query_shape + (block_flange[0], 2*block_flange[1])
name: an optional string
Returns:
a Tensor of shape [batch, heads, h, w, depth_v] | ['Strided', 'block', 'local', 'self', '-', 'attention', '.'] | train | https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/layers/common_attention.py#L3760-L3850 |
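
A hedged call sketch, assuming TensorFlow 1.x graph mode and an installed tensor2tensor; the shapes are illustrative and the spatial dims must be multiples of query_shape:

    import tensorflow as tf
    from tensor2tensor.layers import common_attention

    # [batch, heads, h, w, depth]; depth_v must equal depth_k here.
    q = tf.random_normal([2, 4, 16, 32, 64])
    k = tf.random_normal([2, 4, 16, 32, 64])
    v = tf.random_normal([2, 4, 16, 32, 64])
    out = common_attention.masked_local_attention_2d(
        q, k, v, query_shape=(8, 16), memory_flange=(8, 16))
    # out keeps the input shape: [2, 4, 16, 32, 64]
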
668 | ueg1990/faker-schema | faker_schema/schema_loader.py | load_json_from_file | python | train | https://github.com/ueg1990/faker-schema/blob/419175eaf34baa43b306dd05c17362763948ec49/faker_schema/schema_loader.py#L4-L12

def load_json_from_file(file_path):
    """Load schema from a JSON file"""
    try:
        with open(file_path) as f:
            json_data = json.load(f)
    except ValueError as e:
        raise ValueError('Given file {} is not a valid JSON file: {}'.format(file_path, e))
    else:
        return json_data
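
A short usage sketch; 'schema.json' is a hypothetical file name:

    from faker_schema.schema_loader import load_json_from_file

    schema = load_json_from_file('schema.json')
    print(schema)
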
669 | google/grr | grr/server/grr_response_server/gui/api_plugins/report_plugins/server_report_plugins.py | _ExtractHuntIdFromPath | python | train | https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/gui/api_plugins/report_plugins/server_report_plugins.py#L82-L86

def _ExtractHuntIdFromPath(entry, event):
  """Extracts a Hunt ID from an APIAuditEntry's HTTP request path."""
  match = re.match(r".*hunt/([^/]+).*", entry.http_request_path)
  if match:
    event.urn = "aff4:/hunts/{}".format(match.group(1))
670 | tensorflow/mesh | mesh_tensorflow/ops.py | MeshImpl.slice_begin | python | train | https://github.com/tensorflow/mesh/blob/3921196e5e43302e820da0a87329f25d7e2a3016/mesh_tensorflow/ops.py#L859-L879

def slice_begin(self, tensor_shape, pnum):
  """Begin position for the tensor slice for the given processor.

  Args:
    tensor_shape: Shape.
    pnum: int <= self.size.

  Returns:
    list of integers with length tensor_shape.ndims.
  """
  tensor_layout = self.tensor_layout(tensor_shape)
  coordinates = pnum_to_processor_coordinates(self.shape, pnum)
  ret = []
  for dim_size, mesh_axis in zip(
      tensor_shape.to_integer_list, tensor_layout.tensor_axis_to_mesh_axis):
    if mesh_axis is None:
      ret.append(0)
    else:
      ret.append(
          dim_size // self.shape[mesh_axis].size * coordinates[mesh_axis])
  return ret
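
A worked example of the begin-offset arithmetic, independent of the Mesh TensorFlow classes: a tensor dimension of size 8 split across a mesh axis of size 4 starts at 8 // 4 * 2 = 4 on the processor with coordinate 2 along that axis, while unsplit dimensions always start at 0.

    # Standalone sketch with assumed toy values.
    dim_sizes = [8, 6]        # tensor dimension sizes
    mesh_axis_sizes = [4]     # one mesh axis of size 4
    layout = [0, None]        # dim 0 split on mesh axis 0, dim 1 unsplit
    coords = [2]              # this processor's coordinate on mesh axis 0

    begin = [0 if ax is None else d // mesh_axis_sizes[ax] * coords[ax]
             for d, ax in zip(dim_sizes, layout)]
    print(begin)  # -> [4, 0]
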
671 | mottosso/be | be/util.py | ls | python | train | https://github.com/mottosso/be/blob/0f3d4f3597c71223f616d78c6d9b2c8dffcd8a71/be/util.py#L7-L82

def ls(*topic, **kwargs):
    """List topic from external datastore

    Arguments:
        topic (str): One or more topics, e.g. ("project", "item", "task")
        root (str, optional): Absolute path to where projects reside,
            defaults to os.getcwd()
        backend (callable, optional): Function to call with absolute path as
            argument to retrieve children. Defaults to os.listdir
        absolute (bool, optional): Whether to return relative or absolute paths

    Example:
        >> ls()
        /projects/thedeal
        /projects/hulk
        >> ls("thedeal")
        /projects/thedeal/assets/ben
        /projects/thedeal/assets/table
        >> ls("thedeal", "ben")
        /projects/thedeal/assets/ben/rigging
        /projects/thedeal/assets/ben/modeling

    """
    context = dump()
    root = kwargs.get("root") or context.get("cwd") or os.getcwd()
    backend = kwargs.get("backend", os.listdir)
    absolute = kwargs.get("absolute", True)

    content = {
        0: "projects",
        1: "inventory",
        2: "template"
    }[min(2, len(topic))]

    # List projects
    if content == "projects":
        projects = lib.list_projects(root=root, backend=backend)
        if absolute:
            return map(lambda p: os.path.join(root, p), projects)
        else:
            return projects

    # List items
    if content == "inventory":
        project = topic[0]
        be = _extern.load(project, "be", root=root)
        inventory = _extern.load(project, "inventory", root=root)
        inventory = lib.invert_inventory(inventory)
        templates = _extern.load(project, "templates", root=root)

        if absolute:
            paths = list()
            for item, binding in inventory.iteritems():
                template = templates.get(binding)
                index = len(topic)
                sliced = lib.slice(index, template)
                paths.append(sliced.format(*(topic + (item,)), **context))
            return paths
        else:
            return inventory.keys()

    # List template
    if content == "template":
        project = topic[0]
        be = _extern.load(project, "be", root=root)
        templates = _extern.load(project, "templates", root=root)
        inventory = _extern.load(project, "inventory", root=root)

        return lib.list_template(root=root,
                                 topics=topic,
                                 templates=templates,
                                 inventory=inventory,
                                 be=be,
                                 absolute=absolute)
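
A usage sketch, assuming a be project tree exists under the given root; the paths and project name are hypothetical, and note the function itself uses Python 2 idioms such as iteritems:

    from be import util

    # All projects under an assumed projects directory.
    for path in util.ls(root="/projects"):
        print(path)

    # Inventory of a hypothetical project named "thedeal".
    for path in util.ls("thedeal", root="/projects"):
        print(path)
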
672 | openstack/pyghmi | pyghmi/ipmi/command.py | Command.set_channel_access | python | train | https://github.com/openstack/pyghmi/blob/f710b1d30a8eed19a9e86f01f9351c737666f3e5/pyghmi/ipmi/command.py#L1281-L1392

def set_channel_access(self, channel=None,
                       access_update_mode='non_volatile',
                       alerting=False, per_msg_auth=False,
                       user_level_auth=False, access_mode='always',
                       privilege_update_mode='non_volatile',
                       privilege_level='administrator'):
    """Set channel access

    :param channel: number [1:7]

    :param access_update_mode:
        dont_change = don't set or change Channel Access
        non_volatile = set non-volatile Channel Access
        volatile = set volatile (active) setting of Channel Access

    :param alerting: PEF Alerting Enable/Disable
        True = enable PEF Alerting
        False = disable PEF Alerting on this channel
        (Alert Immediate command can still be used to generate alerts)

    :param per_msg_auth: Per-message Authentication
        True = enable
        False = disable Per-message Authentication. [Authentication required
        to activate any session on this channel, but authentication not
        used on subsequent packets for the session.]

    :param user_level_auth: User Level Authentication Enable/Disable.
        True = enable User Level Authentication. All User Level commands are
        to be authenticated per the Authentication Type that was
        negotiated when the session was activated.
        False = disable User Level Authentication. Allow User Level commands
        to be executed without being authenticated.
        If the option to disable User Level Command authentication is
        accepted, the BMC will accept packets with Authentication Type
        set to None if they contain user level commands.
        For outgoing packets, the BMC returns responses with the same
        Authentication Type that was used for the request.

    :param access_mode: Access Mode for IPMI messaging
        (PEF Alerting is enabled/disabled separately from IPMI messaging)
        disabled = disabled for IPMI messaging
        pre_boot = pre-boot only; channel only available when system is in a
        powered down state or in BIOS prior to start of boot.
        always = channel always available regardless of system mode.
        BIOS typically dedicates the serial connection to the BMC.
        shared = same as always available, but BIOS typically leaves the
        serial port available for software use.

    :param privilege_update_mode: Channel Privilege Level Limit.
        This value sets the maximum privilege level
        that can be accepted on the specified channel.
        dont_change = don't set or change channel Privilege Level Limit
        non_volatile = set non-volatile Privilege Level Limit
        volatile = set volatile setting of Privilege Level Limit

    :param privilege_level: Channel Privilege Level Limit
        * reserved = unused
        * callback
        * user
        * operator
        * administrator
        * proprietary = used by OEM
    """
    if channel is None:
        channel = self.get_network_channel()
    data = []
    data.append(channel & 0b00001111)
    access_update_modes = {
        'dont_change': 0,
        'non_volatile': 1,
        'volatile': 2,
        # 'reserved': 3
    }
    b = 0
    b |= (access_update_modes[access_update_mode] << 6) & 0b11000000
    if alerting:
        b |= 0b00100000
    if per_msg_auth:
        b |= 0b00010000
    if user_level_auth:
        b |= 0b00001000
    access_modes = {
        'disabled': 0,
        'pre_boot': 1,
        'always': 2,
        'shared': 3,
    }
    b |= access_modes[access_mode] & 0b00000111
    data.append(b)
    b = 0
    privilege_update_modes = {
        'dont_change': 0,
        'non_volatile': 1,
        'volatile': 2,
        # 'reserved': 3
    }
    b |= (privilege_update_modes[privilege_update_mode] << 6) & 0b11000000
    privilege_levels = {
        'reserved': 0,
        'callback': 1,
        'user': 2,
        'operator': 3,
        'administrator': 4,
        'proprietary': 5,
        # 'no_access': 0x0F,
    }
    b |= privilege_levels[privilege_level] & 0b00000111
    data.append(b)
    response = self.raw_command(netfn=0x06, command=0x40, data=data)
    if 'error' in response:
        raise Exception(response['error'])
    return True
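
A hedged usage sketch, assuming pyghmi is installed and a reachable BMC; the address and credentials are placeholders:

    from pyghmi.ipmi import command

    ipmi = command.Command(bmc="10.0.0.1", userid="admin", password="secret")
    ipmi.set_channel_access(
        channel=1,
        access_mode="always",
        per_msg_auth=True,
        user_level_auth=True,
        privilege_level="administrator")
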
673 | nugget/python-insteonplm | insteonplm/tools.py | Commander.do_add_device_override | python | train | https://github.com/nugget/python-insteonplm/blob/65548041f1b0729ae1ae904443dd81b0c6cbf1bf/insteonplm/tools.py#L945-L992

def do_add_device_override(self, args):
    """Add a device override to the IM.

    Usage:
        add_device_override address cat subcat [firmware]

    Arguments:
        address: Insteon address of the device to override
        cat: Device category
        subcat: Device subcategory
        firmware: Optional - Device firmware

    The device address can be written with or without the dots and in
    upper or lower case, for example: 1a2b3c or 1A.2B.3C.

    The category, subcategory and firmware numbers are written in hex
    format, for example: 0x01 0x1b

    Example:
        add_device_override 1a2b3c 0x02 0x1a
    """
    params = args.split()
    addr = None
    cat = None
    subcat = None
    firmware = None
    error = None
    try:
        addr = Address(params[0])
        cat = binascii.unhexlify(params[1][2:])
        subcat = binascii.unhexlify(params[2][2:])
        firmware = binascii.unhexlify(params[3][2:])
    except IndexError:
        error = 'missing'
    except ValueError:
        error = 'value'
    if addr and cat and subcat:
        self.tools.add_device_override(addr, cat, subcat, firmware)
    else:
        if error == 'missing':
            _LOGGING.error('Device address, category and subcategory are '
                           'required.')
        else:
            _LOGGING.error('Check the values for address, category and '
                           'subcategory.')
        self.do_help('add_device_override')
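
A standalone sketch of the argument parsing used above, assuming insteonplm's Address class lives in insteonplm.address:

    import binascii
    from insteonplm.address import Address

    params = "1a2b3c 0x02 0x1a 0x44".split()
    addr = Address(params[0])
    cat = binascii.unhexlify(params[1][2:])       # b'\x02'
    subcat = binascii.unhexlify(params[2][2:])    # b'\x1a'
    firmware = binascii.unhexlify(params[3][2:])  # b'D' (0x44)
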
674 | saltstack/salt | salt/spm/__init__.py | SPMClient._traverse_repos | python | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/spm/__init__.py#L620-L644

def _traverse_repos(self, callback, repo_name=None):
    '''
    Traverse through all repo files and apply the functionality provided in
    the callback to them
    '''
    repo_files = []
    if os.path.exists(self.opts['spm_repos_config']):
        repo_files.append(self.opts['spm_repos_config'])

    for (dirpath, dirnames, filenames) in salt.utils.path.os_walk('{0}.d'.format(self.opts['spm_repos_config'])):
        for repo_file in filenames:
            if not repo_file.endswith('.repo'):
                continue
            repo_files.append(repo_file)

    for repo_file in repo_files:
        repo_path = '{0}.d/{1}'.format(self.opts['spm_repos_config'], repo_file)
        with salt.utils.files.fopen(repo_path) as rph:
            repo_data = salt.utils.yaml.safe_load(rph)
            for repo in repo_data:
                if repo_data[repo].get('enabled', True) is False:
                    continue
                if repo_name is not None and repo != repo_name:
                    continue
                callback(repo, repo_data[repo])
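
A sketch of the callback contract, independent of Salt internals: the callback receives each enabled repo's name and its parsed config mapping.

    def print_repo(repo_name, repo_info):
        # repo_info is the YAML mapping for this repo entry.
        print(repo_name, repo_info.get('url'))

    # Hypothetical calls on an SPMClient instance:
    # client._traverse_repos(print_repo)             # every enabled repo
    # client._traverse_repos(print_repo, 'my_repo')  # a single repo
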
675 | striglia/pyramid_swagger | pyramid_swagger/tween.py | get_swagger_objects | python | train | https://github.com/striglia/pyramid_swagger/blob/1dbc0b4f23e2e5f4ed575c116f3f7d0e83e30d45/pyramid_swagger/tween.py#L107-L141

def get_swagger_objects(settings, route_info, registry):
    """Returns appropriate swagger handler and swagger spec schema.

    Swagger Handler contains callables that isolate implementation differences
    in the tween to handle both Swagger 1.2 and Swagger 2.0.

    An exception is made when `settings.prefer_20_routes` is non-empty and
    ['1.2', '2.0'] are both present in the available swagger versions. In this
    special scenario, the '2.0' spec is chosen only for requests which are
    listed in `prefer_20_routes`. This helps in incremental migration of
    routes from v1.2 to v2.0 by making the move to v2.0 opt-in.

    :rtype: (:class:`SwaggerHandler`,
             :class:`pyramid_swagger.model.SwaggerSchema` OR
             :class:`bravado_core.spec.Spec`)
    """
    enabled_swagger_versions = get_swagger_versions(registry.settings)
    schema12 = registry.settings['pyramid_swagger.schema12']
    schema20 = registry.settings['pyramid_swagger.schema20']

    fallback_to_swagger12_route = (
        SWAGGER_20 in enabled_swagger_versions and
        SWAGGER_12 in enabled_swagger_versions and
        settings.prefer_20_routes and
        route_info.get('route') and
        route_info['route'].name not in settings.prefer_20_routes
    )

    if fallback_to_swagger12_route:
        return settings.swagger12_handler, schema12

    if SWAGGER_20 in enabled_swagger_versions:
        return settings.swagger20_handler, schema20

    if SWAGGER_12 in enabled_swagger_versions:
        return settings.swagger12_handler, schema12
676 | yero13/na3x | na3x/transformation/transformer.py | Transformation.perform | python | train | https://github.com/yero13/na3x/blob/b31ef801ea574081125020a7d0f9c4242f8f8b02/na3x/transformation/transformer.py#L127-L135

def perform(self, cfg):
    """
    Performs transformation according to configuration
    :param cfg: transformation configuration
    """
    self.__src = self._load(cfg[Transformation.__CFG_KEY_LOAD])
    self.__transform(cfg[Transformation.__CFG_KEY_TRANSFORM])
    self.__cleanup(cfg[Transformation.__CFG_KEY_CLEANUP])
    self.__save(cfg[Transformation.__CFG_KEY_SAVE])
677 | dhylands/rshell | rshell/main.py | extra_info | python | train | https://github.com/dhylands/rshell/blob/a92a8fa8074ac792241c83c640a51b394667c324/rshell/main.py#L320-L332

def extra_info(port):
    """Collects the serial number and manufacturer into a string, if
       the fields are available."""
    extra_items = []
    if port.manufacturer:
        extra_items.append("vendor '{}'".format(port.manufacturer))
    if port.serial_number:
        extra_items.append("serial '{}'".format(port.serial_number))
    if port.interface:
        extra_items.append("intf '{}'".format(port.interface))
    if extra_items:
        return ' with ' + ' '.join(extra_items)
    return ''
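
A standalone sketch, using SimpleNamespace in place of a pyserial port object and assuming rshell.main imports cleanly in your environment:

    from types import SimpleNamespace
    from rshell.main import extra_info

    port = SimpleNamespace(manufacturer="Acme", serial_number="1234",
                           interface=None)
    print(extra_info(port))  # -> " with vendor 'Acme' serial '1234'"
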
678 | mattmakai/underwear | underwear/run_underwear.py | colorize | python | train | https://github.com/mattmakai/underwear/blob/7c484c7937d2df86dc569d411249ba366ed43ead/underwear/run_underwear.py#L24-L29

def colorize(lead, num, color):
    """ Print 'lead' = 'num' in 'color' """
    if num != 0 and ANSIBLE_COLOR and color is not None:
        return "%s%s%-15s" % (stringc(lead, color), stringc("=", color), stringc(str(num), color))
    else:
        return "%s=%-4s" % (lead, str(num))
679 | bartromgens/geojsoncontour | geojsoncontour/utilities/multipoly.py | angle | python | train | https://github.com/bartromgens/geojsoncontour/blob/79e30718fa0c1d96a2459eb1f45d06d699d240ed/geojsoncontour/utilities/multipoly.py#L39-L43

def angle(v1, v2):
    """Return the angle in radians between vectors 'v1' and 'v2'."""
    v1_u = unit_vector(v1)
    v2_u = unit_vector(v2)
    return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
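
A worked example; angle() relies on the module's unit_vector helper, so the inputs need not be normalized:

    from geojsoncontour.utilities.multipoly import angle

    print(angle([1, 0, 0], [0, 1, 0]))  # -> 1.5707963... (pi/2)
    print(angle([2, 0, 0], [1, 0, 0]))  # -> 0.0
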
680 | mikedh/trimesh | trimesh/triangles.py | points_to_barycentric | def points_to_barycentric(triangles,
points,
method='cramer'):
"""
Find the barycentric coordinates of points relative to triangles.
The Cramer's rule solution implements:
http://blackpawn.com/texts/pointinpoly
The cross product solution implements:
https://www.cs.ubc.ca/~heidrich/Papers/JGT.05.pdf
Parameters
-----------
triangles : (n, 3, 3) float
Triangles vertices in space
points : (n, 3) float
Point in space associated with a triangle
method : str
Which method to compute the barycentric coordinates with:
- 'cross': uses a method using cross products, roughly 2x slower but
different numerical robustness properties
- anything else: uses a cramer's rule solution
Returns
-----------
barycentric : (n, 3) float
Barycentric coordinates of each point
"""
def method_cross():
n = np.cross(edge_vectors[:, 0], edge_vectors[:, 1])
denominator = util.diagonal_dot(n, n)
barycentric = np.zeros((len(triangles), 3), dtype=np.float64)
barycentric[:, 2] = util.diagonal_dot(
np.cross(edge_vectors[:, 0], w), n) / denominator
barycentric[:, 1] = util.diagonal_dot(
np.cross(w, edge_vectors[:, 1]), n) / denominator
barycentric[:, 0] = 1 - barycentric[:, 1] - barycentric[:, 2]
return barycentric
def method_cramer():
dot00 = util.diagonal_dot(edge_vectors[:, 0], edge_vectors[:, 0])
dot01 = util.diagonal_dot(edge_vectors[:, 0], edge_vectors[:, 1])
dot02 = util.diagonal_dot(edge_vectors[:, 0], w)
dot11 = util.diagonal_dot(edge_vectors[:, 1], edge_vectors[:, 1])
dot12 = util.diagonal_dot(edge_vectors[:, 1], w)
inverse_denominator = 1.0 / (dot00 * dot11 - dot01 * dot01)
barycentric = np.zeros((len(triangles), 3), dtype=np.float64)
barycentric[:, 2] = (dot00 * dot12 - dot01 *
dot02) * inverse_denominator
barycentric[:, 1] = (dot11 * dot02 - dot01 *
dot12) * inverse_denominator
barycentric[:, 0] = 1 - barycentric[:, 1] - barycentric[:, 2]
return barycentric
# establish that input triangles and points are sane
triangles = np.asanyarray(triangles, dtype=np.float64)
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(triangles, (-1, 3, 3)):
raise ValueError('triangles shape incorrect')
if not util.is_shape(points, (len(triangles), 3)):
raise ValueError('triangles and points must correspond')
edge_vectors = triangles[:, 1:] - triangles[:, :1]
w = points - triangles[:, 0].reshape((-1, 3))
if method == 'cross':
return method_cross()
return method_cramer() | python | def points_to_barycentric(triangles,
points,
method='cramer'):
"""
Find the barycentric coordinates of points relative to triangles.
The Cramer's rule solution implements:
http://blackpawn.com/texts/pointinpoly
The cross product solution implements:
https://www.cs.ubc.ca/~heidrich/Papers/JGT.05.pdf
Parameters
-----------
triangles : (n, 3, 3) float
Triangles vertices in space
points : (n, 3) float
Point in space associated with a triangle
method : str
Which method to compute the barycentric coordinates with:
- 'cross': uses a method using cross products, roughly 2x slower but
different numerical robustness properties
- anything else: uses a cramer's rule solution
Returns
-----------
barycentric : (n, 3) float
Barycentric coordinates of each point
"""
def method_cross():
n = np.cross(edge_vectors[:, 0], edge_vectors[:, 1])
denominator = util.diagonal_dot(n, n)
barycentric = np.zeros((len(triangles), 3), dtype=np.float64)
barycentric[:, 2] = util.diagonal_dot(
np.cross(edge_vectors[:, 0], w), n) / denominator
barycentric[:, 1] = util.diagonal_dot(
np.cross(w, edge_vectors[:, 1]), n) / denominator
barycentric[:, 0] = 1 - barycentric[:, 1] - barycentric[:, 2]
return barycentric
def method_cramer():
dot00 = util.diagonal_dot(edge_vectors[:, 0], edge_vectors[:, 0])
dot01 = util.diagonal_dot(edge_vectors[:, 0], edge_vectors[:, 1])
dot02 = util.diagonal_dot(edge_vectors[:, 0], w)
dot11 = util.diagonal_dot(edge_vectors[:, 1], edge_vectors[:, 1])
dot12 = util.diagonal_dot(edge_vectors[:, 1], w)
inverse_denominator = 1.0 / (dot00 * dot11 - dot01 * dot01)
barycentric = np.zeros((len(triangles), 3), dtype=np.float64)
barycentric[:, 2] = (dot00 * dot12 - dot01 *
dot02) * inverse_denominator
barycentric[:, 1] = (dot11 * dot02 - dot01 *
dot12) * inverse_denominator
barycentric[:, 0] = 1 - barycentric[:, 1] - barycentric[:, 2]
return barycentric
# establish that input triangles and points are sane
triangles = np.asanyarray(triangles, dtype=np.float64)
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(triangles, (-1, 3, 3)):
raise ValueError('triangles shape incorrect')
if not util.is_shape(points, (len(triangles), 3)):
raise ValueError('triangles and points must correspond')
edge_vectors = triangles[:, 1:] - triangles[:, :1]
w = points - triangles[:, 0].reshape((-1, 3))
if method == 'cross':
return method_cross()
return method_cramer() | ['def', 'points_to_barycentric', '(', 'triangles', ',', 'points', ',', 'method', '=', "'cramer'", ')', ':', 'def', 'method_cross', '(', ')', ':', 'n', '=', 'np', '.', 'cross', '(', 'edge_vectors', '[', ':', ',', '0', ']', ',', 'edge_vectors', '[', ':', ',', '1', ']', ')', 'denominator', '=', 'util', '.', 'diagonal_dot', '(', 'n', ',', 'n', ')', 'barycentric', '=', 'np', '.', 'zeros', '(', '(', 'len', '(', 'triangles', ')', ',', '3', ')', ',', 'dtype', '=', 'np', '.', 'float64', ')', 'barycentric', '[', ':', ',', '2', ']', '=', 'util', '.', 'diagonal_dot', '(', 'np', '.', 'cross', '(', 'edge_vectors', '[', ':', ',', '0', ']', ',', 'w', ')', ',', 'n', ')', '/', 'denominator', 'barycentric', '[', ':', ',', '1', ']', '=', 'util', '.', 'diagonal_dot', '(', 'np', '.', 'cross', '(', 'w', ',', 'edge_vectors', '[', ':', ',', '1', ']', ')', ',', 'n', ')', '/', 'denominator', 'barycentric', '[', ':', ',', '0', ']', '=', '1', '-', 'barycentric', '[', ':', ',', '1', ']', '-', 'barycentric', '[', ':', ',', '2', ']', 'return', 'barycentric', 'def', 'method_cramer', '(', ')', ':', 'dot00', '=', 'util', '.', 'diagonal_dot', '(', 'edge_vectors', '[', ':', ',', '0', ']', ',', 'edge_vectors', '[', ':', ',', '0', ']', ')', 'dot01', '=', 'util', '.', 'diagonal_dot', '(', 'edge_vectors', '[', ':', ',', '0', ']', ',', 'edge_vectors', '[', ':', ',', '1', ']', ')', 'dot02', '=', 'util', '.', 'diagonal_dot', '(', 'edge_vectors', '[', ':', ',', '0', ']', ',', 'w', ')', 'dot11', '=', 'util', '.', 'diagonal_dot', '(', 'edge_vectors', '[', ':', ',', '1', ']', ',', 'edge_vectors', '[', ':', ',', '1', ']', ')', 'dot12', '=', 'util', '.', 'diagonal_dot', '(', 'edge_vectors', '[', ':', ',', '1', ']', ',', 'w', ')', 'inverse_denominator', '=', '1.0', '/', '(', 'dot00', '*', 'dot11', '-', 'dot01', '*', 'dot01', ')', 'barycentric', '=', 'np', '.', 'zeros', '(', '(', 'len', '(', 'triangles', ')', ',', '3', ')', ',', 'dtype', '=', 'np', '.', 'float64', ')', 'barycentric', '[', ':', ',', '2', ']', '=', '(', 'dot00', '*', 'dot12', '-', 'dot01', '*', 'dot02', ')', '*', 'inverse_denominator', 'barycentric', '[', ':', ',', '1', ']', '=', '(', 'dot11', '*', 'dot02', '-', 'dot01', '*', 'dot12', ')', '*', 'inverse_denominator', 'barycentric', '[', ':', ',', '0', ']', '=', '1', '-', 'barycentric', '[', ':', ',', '1', ']', '-', 'barycentric', '[', ':', ',', '2', ']', 'return', 'barycentric', '# establish that input triangles and points are sane', 'triangles', '=', 'np', '.', 'asanyarray', '(', 'triangles', ',', 'dtype', '=', 'np', '.', 'float64', ')', 'points', '=', 'np', '.', 'asanyarray', '(', 'points', ',', 'dtype', '=', 'np', '.', 'float64', ')', 'if', 'not', 'util', '.', 'is_shape', '(', 'triangles', ',', '(', '-', '1', ',', '3', ',', '3', ')', ')', ':', 'raise', 'ValueError', '(', "'triangles shape incorrect'", ')', 'if', 'not', 'util', '.', 'is_shape', '(', 'points', ',', '(', 'len', '(', 'triangles', ')', ',', '3', ')', ')', ':', 'raise', 'ValueError', '(', "'triangles and points must correspond'", ')', 'edge_vectors', '=', 'triangles', '[', ':', ',', '1', ':', ']', '-', 'triangles', '[', ':', ',', ':', '1', ']', 'w', '=', 'points', '-', 'triangles', '[', ':', ',', '0', ']', '.', 'reshape', '(', '(', '-', '1', ',', '3', ')', ')', 'if', 'method', '==', "'cross'", ':', 'return', 'method_cross', '(', ')', 'return', 'method_cramer', '(', ')'] | Find the barycentric coordinates of points relative to triangles.
The Cramer's rule solution implements:
http://blackpawn.com/texts/pointinpoly
The cross product solution implements:
https://www.cs.ubc.ca/~heidrich/Papers/JGT.05.pdf
Parameters
-----------
triangles : (n, 3, 3) float
Triangles vertices in space
points : (n, 3) float
Point in space associated with a triangle
method : str
Which method to compute the barycentric coordinates with:
- 'cross': uses cross products; roughly 2x slower but has
different numerical robustness properties
- anything else: uses a Cramer's rule solution
Returns
-----------
barycentric : (n, 3) float
Barycentric coordinates of each point | ['Find', 'the', 'barycentric', 'coordinates', 'of', 'points', 'relative', 'to', 'triangles', '.'] | train | https://github.com/mikedh/trimesh/blob/25e059bf6d4caa74f62ffd58ce4f61a90ee4e518/trimesh/triangles.py#L452-L525 |
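A minimal usage sketch for the record above, assuming trimesh and numpy are installed; the unit triangle and its centroid query point are illustrative values:

import numpy as np
from trimesh import triangles as tri

# one unit triangle in the XY plane, queried at its centroid
tris = np.array([[[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]])
pts = np.array([[1.0 / 3.0, 1.0 / 3.0, 0.0]])
bary = tri.points_to_barycentric(tris, pts)                         # ~[[1/3, 1/3, 1/3]]
bary_cross = tri.points_to_barycentric(tris, pts, method='cross')   # should agree here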
681 | andreikop/qutepart | qutepart/__init__.py | Qutepart.terminate | def terminate(self):
""" Terminate Qutepart instance.
This method MUST be called before the application stops to avoid crashes and
some other interesting effects
Call it on close to free memory and stop background highlighting
"""
self.text = ''
self._completer.terminate()
if self._highlighter is not None:
self._highlighter.terminate()
if self._vim is not None:
self._vim.terminate() | python | def terminate(self):
""" Terminate Qutepart instance.
This method MUST be called before the application stops to avoid crashes and
some other interesting effects
Call it on close to free memory and stop background highlighting
"""
self.text = ''
self._completer.terminate()
if self._highlighter is not None:
self._highlighter.terminate()
if self._vim is not None:
self._vim.terminate() | ['def', 'terminate', '(', 'self', ')', ':', 'self', '.', 'text', '=', "''", 'self', '.', '_completer', '.', 'terminate', '(', ')', 'if', 'self', '.', '_highlighter', 'is', 'not', 'None', ':', 'self', '.', '_highlighter', '.', 'terminate', '(', ')', 'if', 'self', '.', '_vim', 'is', 'not', 'None', ':', 'self', '.', '_vim', '.', 'terminate', '(', ')'] | Terminate Qutepart instance.
This method MUST be called before the application stops to avoid crashes and
some other interesting effects
Call it on close to free memory and stop background highlighting | ['Terminate', 'Qutepart', 'instance', '.', 'This', 'method', 'MUST', 'be', 'called', 'before', 'application', 'stop', 'to', 'avoid', 'crashes', 'and', 'some', 'other', 'interesting', 'effects', 'Call', 'it', 'on', 'close', 'to', 'free', 'memory', 'and', 'stop', 'background', 'highlighting'] | train | https://github.com/andreikop/qutepart/blob/109d76b239751318bcef06f39b2fbbf18687a40b/qutepart/__init__.py#L335-L348 |
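A hedged teardown sketch for the record above; it assumes a PyQt5 environment in which the Qutepart widget can be constructed:

from PyQt5.QtWidgets import QApplication   # qutepart is a Qt widget
import qutepart

app = QApplication([])
editor = qutepart.Qutepart()
editor.text = 'print("hello")'
# ... use the editor ...
editor.terminate()   # per the docstring: call before the application stops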
682 | Yipit/ejson | ejson/__init__.py | dumps | def dumps(data, escape=False, **kwargs):
"""A wrapper around `json.dumps` that can handle objects that json
module is not aware.
This function is aware of a list of custom serializers that can be
registered by the API user, making it possible to convert any kind
of object to types that the json library can handle.
"""
if 'sort_keys' not in kwargs:
kwargs['sort_keys'] = True
converted = json.dumps(data, default=_converter, **kwargs)
if escape:
# We're escaping the whole dumped string here cause there's no (easy)
# way to hook into the native json library and change how they process
# values like strings, None objects and some other "literal" stuff.
#
# Also, we're not escaping quotes here cause they're escaped by the
# native json library already. So, we just escape basic html entities,
# like <, > and &;
return cgi.escape(converted)
return converted | python | def dumps(data, escape=False, **kwargs):
"""A wrapper around `json.dumps` that can handle objects that json
module is not aware.
This function is aware of a list of custom serializers that can be
registered by the API user, making it possible to convert any kind
of object to types that the json library can handle.
"""
if 'sort_keys' not in kwargs:
kwargs['sort_keys'] = True
converted = json.dumps(data, default=_converter, **kwargs)
if escape:
# We're escaping the whole dumped string here cause there's no (easy)
# way to hook into the native json library and change how they process
# values like strings, None objects and some other "literal" stuff.
#
# Also, we're not escaping quotes here cause they're escaped by the
# native json library already. So, we just escape basic html entities,
# like <, > and &;
return cgi.escape(converted)
return converted | ['def', 'dumps', '(', 'data', ',', 'escape', '=', 'False', ',', '*', '*', 'kwargs', ')', ':', 'if', "'sort_keys'", 'not', 'in', 'kwargs', ':', 'kwargs', '[', "'sort_keys'", ']', '=', 'True', 'converted', '=', 'json', '.', 'dumps', '(', 'data', ',', 'default', '=', '_converter', ',', '*', '*', 'kwargs', ')', 'if', 'escape', ':', "# We're escaping the whole dumped string here cause there's no (easy)", '# way to hook into the native json library and change how they process', '# values like strings, None objects and some other "literal" stuff.', '#', "# Also, we're not escaping quotes here cause they're escaped by the", '# native json library already. So, we just escape basic html entities,', '# like <, > and &;', 'return', 'cgi', '.', 'escape', '(', 'converted', ')', 'return', 'converted'] | A wrapper around `json.dumps` that can handle objects that json
module is not aware.
This function is aware of a list of custom serializers that can be
registered by the API user, making it possible to convert any kind
of object to types that the json library can handle. | ['A', 'wrapper', 'around', 'json', '.', 'dumps', 'that', 'can', 'handle', 'objects', 'that', 'json', 'module', 'is', 'not', 'aware', '.'] | train | https://github.com/Yipit/ejson/blob/6665703f1534923d1c30849e08339f0ff97d8230/ejson/__init__.py#L49-L70 |
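The escape branch above can be exercised directly; a small sketch grounded in the code shown (keys are sorted by default, and cgi.escape handles <, > and &):

import ejson

ejson.dumps({'msg': '<b>hi</b>'})                # '{"msg": "<b>hi</b>"}'
ejson.dumps({'msg': '<b>hi</b>'}, escape=True)   # '{"msg": "&lt;b&gt;hi&lt;/b&gt;"}'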
683 | rackerlabs/rackspace-python-neutronclient | neutronclient/v2_0/client.py | Client.list_ext | def list_ext(self, collection, path, retrieve_all, **_params):
"""Client extension hook for list."""
return self.list(collection, path, retrieve_all, **_params) | python | def list_ext(self, collection, path, retrieve_all, **_params):
"""Client extension hook for list."""
return self.list(collection, path, retrieve_all, **_params) | ['def', 'list_ext', '(', 'self', ',', 'collection', ',', 'path', ',', 'retrieve_all', ',', '*', '*', '_params', ')', ':', 'return', 'self', '.', 'list', '(', 'collection', ',', 'path', ',', 'retrieve_all', ',', '*', '*', '_params', ')'] | Client extension hook for list. | ['Client', 'extension', 'hook', 'for', 'list', '.'] | train | https://github.com/rackerlabs/rackspace-python-neutronclient/blob/5a5009a8fe078e3aa1d582176669f1b28ab26bef/neutronclient/v2_0/client.py#L571-L573 |
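A hedged sketch of calling this extension hook; the constructor keywords and the 'widgets' collection/path are placeholders for illustration, not documented endpoints:

from neutronclient.v2_0.client import Client

client = Client(username='demo', password='secret',
                tenant_name='demo', auth_url='http://controller:5000/v2.0')
widgets = client.list_ext('widgets', '/widgets', retrieve_all=True)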
684 | materialsproject/pymatgen | pymatgen/io/abinit/qadapters.py | MpiRunner.string_to_run | def string_to_run(self, qad, executable, stdin=None, stdout=None, stderr=None, exec_args=None):
"""
Build and return a string with the command required to launch `executable` with the qadapter `qad`.
Args
qad: Qadapter instance.
executable (str): Executable name or path
stdin (str): Name of the file to be used as standard input. None means no redirection.
stdout (str): Name of the file to be used as standard output. None means no redirection.
stderr (str): Name of the file to be used as standard error. None means no redirection.
exec_args: Optional list of strings with options passed to `executable`.
Return:
String with command to execute.
"""
stdin = "< " + stdin if stdin is not None else ""
stdout = "> " + stdout if stdout is not None else ""
stderr = "2> " + stderr if stderr is not None else ""
if exec_args:
executable = executable + " " + " ".join(list_strings(exec_args))
basename = os.path.basename(self.name)
if basename in ["mpirun", "mpiexec", "srun"]:
if self.type is None:
# $MPIRUN -n $MPI_PROCS $EXECUTABLE < $STDIN > $STDOUT 2> $STDERR
num_opt = "-n " + str(qad.mpi_procs)
cmd = " ".join([self.name, self.options, num_opt, executable, stdin, stdout, stderr])
else:
raise NotImplementedError("type %s is not supported!" % self.type)
elif basename == "runjob":
#runjob --ranks-per-node 2 --exp-env OMP_NUM_THREADS --exe $ABINIT < $STDIN > $STDOUT 2> $STDERR
#runjob -n 2 --exp-env=OMP_NUM_THREADS --exe $ABINIT < $STDIN > $STDOUT 2> $STDERR
# exe must be absolute path or relative to cwd.
bg_size, rpn = qad.bgsize_rankspernode()
#num_opt = "-n " + str(qad.mpi_procs)
num_opt = "--ranks-per-node " + str(rpn)
cmd = " ".join([self.name, self.options, num_opt, "--exp-env OMP_NUM_THREADS",
"--exe `which " + executable + "` ", stdin, stdout, stderr])
else:
if qad.mpi_procs != 1:
raise ValueError("Cannot use mpi_procs > when mpi_runner basename=%s" % basename)
cmd = " ".join([executable, stdin, stdout, stderr])
return cmd | python | def string_to_run(self, qad, executable, stdin=None, stdout=None, stderr=None, exec_args=None):
"""
Build and return a string with the command required to launch `executable` with the qadapter `qad`.
Args
qad: Qadapter instance.
executable (str): Executable name or path
stdin (str): Name of the file to be used as standard input. None means no redirection.
stdout (str): Name of the file to be used as standard output. None means no redirection.
stderr (str): Name of the file to be used as standard error. None means no redirection.
exec_args: Optional list of strings with options passed to `executable`.
Return:
String with command to execute.
"""
stdin = "< " + stdin if stdin is not None else ""
stdout = "> " + stdout if stdout is not None else ""
stderr = "2> " + stderr if stderr is not None else ""
if exec_args:
executable = executable + " " + " ".join(list_strings(exec_args))
basename = os.path.basename(self.name)
if basename in ["mpirun", "mpiexec", "srun"]:
if self.type is None:
# $MPIRUN -n $MPI_PROCS $EXECUTABLE < $STDIN > $STDOUT 2> $STDERR
num_opt = "-n " + str(qad.mpi_procs)
cmd = " ".join([self.name, self.options, num_opt, executable, stdin, stdout, stderr])
else:
raise NotImplementedError("type %s is not supported!" % self.type)
elif basename == "runjob":
#runjob --ranks-per-node 2 --exp-env OMP_NUM_THREADS --exe $ABINIT < $STDIN > $STDOUT 2> $STDERR
#runjob -n 2 --exp-env=OMP_NUM_THREADS --exe $ABINIT < $STDIN > $STDOUT 2> $STDERR
# exe must be absolute path or relative to cwd.
bg_size, rpn = qad.bgsize_rankspernode()
#num_opt = "-n " + str(qad.mpi_procs)
num_opt = "--ranks-per-node " + str(rpn)
cmd = " ".join([self.name, self.options, num_opt, "--exp-env OMP_NUM_THREADS",
"--exe `which " + executable + "` ", stdin, stdout, stderr])
else:
if qad.mpi_procs != 1:
raise ValueError("Cannot use mpi_procs > when mpi_runner basename=%s" % basename)
cmd = " ".join([executable, stdin, stdout, stderr])
return cmd | ['def', 'string_to_run', '(', 'self', ',', 'qad', ',', 'executable', ',', 'stdin', '=', 'None', ',', 'stdout', '=', 'None', ',', 'stderr', '=', 'None', ',', 'exec_args', '=', 'None', ')', ':', 'stdin', '=', '"< "', '+', 'stdin', 'if', 'stdin', 'is', 'not', 'None', 'else', '""', 'stdout', '=', '"> "', '+', 'stdout', 'if', 'stdout', 'is', 'not', 'None', 'else', '""', 'stderr', '=', '"2> "', '+', 'stderr', 'if', 'stderr', 'is', 'not', 'None', 'else', '""', 'if', 'exec_args', ':', 'executable', '=', 'executable', '+', '" "', '+', '" "', '.', 'join', '(', 'list_strings', '(', 'exec_args', ')', ')', 'basename', '=', 'os', '.', 'path', '.', 'basename', '(', 'self', '.', 'name', ')', 'if', 'basename', 'in', '[', '"mpirun"', ',', '"mpiexec"', ',', '"srun"', ']', ':', 'if', 'self', '.', 'type', 'is', 'None', ':', '# $MPIRUN -n $MPI_PROCS $EXECUTABLE < $STDIN > $STDOUT 2> $STDERR', 'num_opt', '=', '"-n "', '+', 'str', '(', 'qad', '.', 'mpi_procs', ')', 'cmd', '=', '" "', '.', 'join', '(', '[', 'self', '.', 'name', ',', 'self', '.', 'options', ',', 'num_opt', ',', 'executable', ',', 'stdin', ',', 'stdout', ',', 'stderr', ']', ')', 'else', ':', 'raise', 'NotImplementedError', '(', '"type %s is not supported!"', '%', 'self', '.', 'type', ')', 'elif', 'basename', '==', '"runjob"', ':', '#runjob --ranks-per-node 2 --exp-env OMP_NUM_THREADS --exe $ABINIT < $STDIN > $STDOUT 2> $STDERR', '#runjob -n 2 --exp-env=OMP_NUM_THREADS --exe $ABINIT < $STDIN > $STDOUT 2> $STDERR', '# exe must be absolute path or relative to cwd.', 'bg_size', ',', 'rpn', '=', 'qad', '.', 'bgsize_rankspernode', '(', ')', '#num_opt = "-n " + str(qad.mpi_procs)', 'num_opt', '=', '"--ranks-per-node "', '+', 'str', '(', 'rpn', ')', 'cmd', '=', '" "', '.', 'join', '(', '[', 'self', '.', 'name', ',', 'self', '.', 'options', ',', 'num_opt', ',', '"--exp-env OMP_NUM_THREADS"', ',', '"--exe `which "', '+', 'executable', '+', '"` "', ',', 'stdin', ',', 'stdout', ',', 'stderr', ']', ')', 'else', ':', 'if', 'qad', '.', 'mpi_procs', '!=', '1', ':', 'raise', 'ValueError', '(', '"Cannot use mpi_procs > when mpi_runner basename=%s"', '%', 'basename', ')', 'cmd', '=', '" "', '.', 'join', '(', '[', 'executable', ',', 'stdin', ',', 'stdout', ',', 'stderr', ']', ')', 'return', 'cmd'] | Build and return a string with the command required to launch `executable` with the qadapter `qad`.
Args
qad: Qadapter instance.
executable (str): Executable name or path
stdin (str): Name of the file to be used as standard input. None means no redirection.
stdout (str): Name of the file to be used as standard output. None means no redirection.
stderr (str): Name of the file to be used as standard error. None means no redirection.
exec_args: Optional list of strings with options passed to `executable`.
Return:
String with command to execute. | ['Build', 'and', 'return', 'a', 'string', 'with', 'the', 'command', 'required', 'to', 'launch', 'executable', 'with', 'the', 'qadapter', 'qad', '.'] | train | https://github.com/materialsproject/pymatgen/blob/4ca558cf72f8d5f8a1f21dfdfc0181a971c186da/pymatgen/io/abinit/qadapters.py#L83-L128 |
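A hedged sketch of the 'mpirun' branch above; the qadapter stand-in is invented for illustration (only mpi_procs is consulted on that path), the constructor is assumed to take the runner name, and exact whitespace in the result may vary:

from pymatgen.io.abinit.qadapters import MpiRunner

class FakeQad:          # illustrative stand-in for a real QueueAdapter
    mpi_procs = 4

runner = MpiRunner('mpirun')
cmd = runner.string_to_run(FakeQad(), 'abinit',
                           stdin='run.files', stdout='run.log', stderr='run.err')
# roughly: 'mpirun -n 4 abinit < run.files > run.log 2> run.err'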
685 | O365/python-o365 | O365/drive.py | DriveItemPermission.update_roles | def update_roles(self, roles='view'):
""" Updates the roles of this permission
:return: Success / Failure
:rtype: bool
"""
if not self.object_id:
return False
url = self.build_url(self._endpoints.get('permission').format(
driveitem_id=self.driveitem_id, id=self.object_id))
if roles in {'view', 'read'}:
data = {'roles': ['read']}
elif roles in {'edit', 'write'}:
data = {'roles': ['write']}
else:
raise ValueError('"{}" is not a valid share_type'.format(roles))
response = self.con.patch(url, data=data)
if not response:
return False
self.roles = data.get('roles', [])
return True | python | def update_roles(self, roles='view'):
""" Updates the roles of this permission
:return: Success / Failure
:rtype: bool
"""
if not self.object_id:
return False
url = self.build_url(self._endpoints.get('permission').format(
driveitem_id=self.driveitem_id, id=self.object_id))
if roles in {'view', 'read'}:
data = {'roles': ['read']}
elif roles in {'edit', 'write'}:
data = {'roles': ['write']}
else:
raise ValueError('"{}" is not a valid share_type'.format(roles))
response = self.con.patch(url, data=data)
if not response:
return False
self.roles = data.get('roles', [])
return True | ['def', 'update_roles', '(', 'self', ',', 'roles', '=', "'view'", ')', ':', 'if', 'not', 'self', '.', 'object_id', ':', 'return', 'False', 'url', '=', 'self', '.', 'build_url', '(', 'self', '.', '_endpoints', '.', 'get', '(', "'permission'", ')', '.', 'format', '(', 'driveitem_id', '=', 'self', '.', 'driveitem_id', ',', 'id', '=', 'self', '.', 'object_id', ')', ')', 'if', 'roles', 'in', '{', "'view'", ',', "'read'", '}', ':', 'data', '=', '{', "'roles'", ':', '[', "'read'", ']', '}', 'elif', 'roles', '==', '{', "'edit'", ',', "'write'", '}', ':', 'data', '=', '{', "'roles'", ':', '[', "'write'", ']', '}', 'else', ':', 'raise', 'ValueError', '(', '\'"{}" is not a valid share_type\'', '.', 'format', '(', 'roles', ')', ')', 'response', '=', 'self', '.', 'con', '.', 'patch', '(', 'url', ',', 'data', '=', 'data', ')', 'if', 'not', 'response', ':', 'return', 'False', 'self', '.', 'roles', '=', 'data', '.', 'get', '(', "'roles'", ',', '[', ']', ')', 'return', 'True'] | Updates the roles of this permission
:return: Success / Failure
:rtype: bool | ['Updates', 'the', 'roles', 'of', 'this', 'permission'] | train | https://github.com/O365/python-o365/blob/02a71cf3775cc6a3c042e003365d6a07c8c75a73/O365/drive.py#L357-L381 |
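A hedged usage sketch; it assumes an authenticated O365 session and that the drive item exposes its share permissions via get_permissions(), per the library's Drive API:

for permission in drive_item.get_permissions():   # drive_item obtained elsewhere
    if 'read' in permission.roles:
        permission.update_roles('edit')           # promote read-only shares to write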
686 | bokeh/bokeh | bokeh/driving.py | bounce | def bounce(sequence):
''' Return a driver function that can advance a "bounced" sequence
of values.
.. code-block:: none
seq = [0, 1, 2, 3]
# bounce(seq) => [0, 1, 2, 3, 3, 2, 1, 0, 0, 1, 2, ...]
Args:
sequence (seq) : a sequence of values for the driver to bounce
'''
N = len(sequence)
def f(i):
div, mod = divmod(i, N)
if div % 2 == 0:
return sequence[mod]
else:
return sequence[N-mod-1]
return partial(force, sequence=_advance(f)) | python | def bounce(sequence):
''' Return a driver function that can advance a "bounced" sequence
of values.
.. code-block:: none
seq = [0, 1, 2, 3]
# bounce(seq) => [0, 1, 2, 3, 3, 2, 1, 0, 0, 1, 2, ...]
Args:
sequence (seq) : a sequence of values for the driver to bounce
'''
N = len(sequence)
def f(i):
div, mod = divmod(i, N)
if div % 2 == 0:
return sequence[mod]
else:
return sequence[N-mod-1]
return partial(force, sequence=_advance(f)) | ['def', 'bounce', '(', 'sequence', ')', ':', 'N', '=', 'len', '(', 'sequence', ')', 'def', 'f', '(', 'i', ')', ':', 'div', ',', 'mod', '=', 'divmod', '(', 'i', ',', 'N', ')', 'if', 'div', '%', '2', '==', '0', ':', 'return', 'sequence', '[', 'mod', ']', 'else', ':', 'return', 'sequence', '[', 'N', '-', 'mod', '-', '1', ']', 'return', 'partial', '(', 'force', ',', 'sequence', '=', '_advance', '(', 'f', ')', ')'] | Return a driver function that can advance a "bounced" sequence
of values.
.. code-block:: none
seq = [0, 1, 2, 3]
# bounce(seq) => [0, 1, 2, 3, 3, 2, 1, 0, 0, 1, 2, ...]
Args:
sequence (seq) : a sequence of values for the driver to bounce | ['Return', 'a', 'driver', 'function', 'that', 'can', 'advance', 'a', 'bounced', 'sequence', 'of', 'values', '.'] | train | https://github.com/bokeh/bokeh/blob/dc8cf49e4e4302fd38537ad089ece81fbcca4737/bokeh/driving.py#L73-L94 |
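A usage sketch in Bokeh's server style; the ColumnDataSource wiring is illustrative:

from bokeh.io import curdoc
from bokeh.models import ColumnDataSource
from bokeh.driving import bounce

source = ColumnDataSource(data=dict(y=[0]))

@bounce([0, 1, 2, 3])
def update(value):
    # receives 0, 1, 2, 3, 3, 2, 1, 0, 0, 1, ... on successive ticks
    source.data = dict(y=[value])

curdoc().add_periodic_callback(update, 100)   # period in milliseconds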
687 | Robpol86/libnl | libnl/genl/ctrl.py | genl_ctrl_grp_by_name | def genl_ctrl_grp_by_name(family, grp_name):
"""https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L446.
Positional arguments:
family -- genl_family class instance.
grp_name -- bytes.
Returns:
group ID or negative error code.
"""
for grp in nl_list_for_each_entry(genl_family_grp(), family.gf_mc_grps, 'list_'):
if grp.name == grp_name:
return grp.id_
return -NLE_OBJ_NOTFOUND | python | def genl_ctrl_grp_by_name(family, grp_name):
"""https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L446.
Positional arguments:
family -- genl_family class instance.
grp_name -- bytes.
Returns:
group ID or negative error code.
"""
for grp in nl_list_for_each_entry(genl_family_grp(), family.gf_mc_grps, 'list_'):
if grp.name == grp_name:
return grp.id_
return -NLE_OBJ_NOTFOUND | ['def', 'genl_ctrl_grp_by_name', '(', 'family', ',', 'grp_name', ')', ':', 'for', 'grp', 'in', 'nl_list_for_each_entry', '(', 'genl_family_grp', '(', ')', ',', 'family', '.', 'gf_mc_grps', ',', "'list_'", ')', ':', 'if', 'grp', '.', 'name', '==', 'grp_name', ':', 'return', 'grp', '.', 'id_', 'return', '-', 'NLE_OBJ_NOTFOUND'] | https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L446.
Positional arguments:
family -- genl_family class instance.
grp_name -- bytes.
Returns:
group ID or negative error code. | ['https', ':', '//', 'github', '.', 'com', '/', 'thom311', '/', 'libnl', '/', 'blob', '/', 'libnl3_2_25', '/', 'lib', '/', 'genl', '/', 'ctrl', '.', 'c#L446', '.'] | train | https://github.com/Robpol86/libnl/blob/274e9fdaa39822d06ef70b799ed4a95937a4d923/libnl/genl/ctrl.py#L211-L224 |
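A short sketch grounded in the function above; the family object comes from an earlier ctrl lookup, and b'scan' (nl80211's multicast group name) is illustrative:

from libnl.genl.ctrl import genl_ctrl_grp_by_name

grp_id = genl_ctrl_grp_by_name(family, b'scan')   # family: a resolved genl_family
if grp_id < 0:
    print('multicast group not found (-NLE_OBJ_NOTFOUND)')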
688 | jmbhughes/suvi-trainer | suvitrainer/gui.py | App.make_classify_tab | def make_classify_tab(self):
""" initial set up of classification tab"""
self.pick_frame = tk.Frame(self.tab_classify)
self.pick_frame2 = tk.Frame(self.tab_classify)
self.solar_class_var = tk.IntVar()
self.solar_class_var.set(0) # initialize to unlabeled
buttonnum = 0
frame = [self.pick_frame, self.pick_frame2]
for text, value in self.config.solar_classes:
b = tk.Radiobutton(frame[buttonnum % 2], text=text,
variable=self.solar_class_var,
value=value, background=self.config.solar_colors[text],
indicatoron=0, width=50, height=2, command=self.change_class)
b.pack(fill=tk.BOTH, expand=1)
buttonnum += 1
self.pick_frame.grid(row=0, column=0, rowspan=5, sticky=tk.W + tk.E + tk.N + tk.S)
self.pick_frame2.grid(row=0, column=1, rowspan=5, sticky=tk.W + tk.E + tk.N + tk.S)
undobutton = tk.Button(master=self.tab_classify, text="Undo",
command=self.undobutton_action)
undobutton.grid(row=6, column=0, columnspan=2, sticky=tk.W + tk.E) | python | def make_classify_tab(self):
""" initial set up of classification tab"""
self.pick_frame = tk.Frame(self.tab_classify)
self.pick_frame2 = tk.Frame(self.tab_classify)
self.solar_class_var = tk.IntVar()
self.solar_class_var.set(0) # initialize to unlabeled
buttonnum = 0
frame = [self.pick_frame, self.pick_frame2]
for text, value in self.config.solar_classes:
b = tk.Radiobutton(frame[buttonnum % 2], text=text,
variable=self.solar_class_var,
value=value, background=self.config.solar_colors[text],
indicatoron=0, width=50, height=2, command=self.change_class)
b.pack(fill=tk.BOTH, expand=1)
buttonnum += 1
self.pick_frame.grid(row=0, column=0, rowspan=5, sticky=tk.W + tk.E + tk.N + tk.S)
self.pick_frame2.grid(row=0, column=1, rowspan=5, sticky=tk.W + tk.E + tk.N + tk.S)
undobutton = tk.Button(master=self.tab_classify, text="Undo",
command=self.undobutton_action)
undobutton.grid(row=6, column=0, columnspan=2, sticky=tk.W + tk.E) | ['def', 'make_classify_tab', '(', 'self', ')', ':', 'self', '.', 'pick_frame', '=', 'tk', '.', 'Frame', '(', 'self', '.', 'tab_classify', ')', 'self', '.', 'pick_frame2', '=', 'tk', '.', 'Frame', '(', 'self', '.', 'tab_classify', ')', 'self', '.', 'solar_class_var', '=', 'tk', '.', 'IntVar', '(', ')', 'self', '.', 'solar_class_var', '.', 'set', '(', '0', ')', '# initialize to unlabeled', 'buttonnum', '=', '0', 'frame', '=', '[', 'self', '.', 'pick_frame', ',', 'self', '.', 'pick_frame2', ']', 'for', 'text', ',', 'value', 'in', 'self', '.', 'config', '.', 'solar_classes', ':', 'b', '=', 'tk', '.', 'Radiobutton', '(', 'frame', '[', 'buttonnum', '%', '2', ']', ',', 'text', '=', 'text', ',', 'variable', '=', 'self', '.', 'solar_class_var', ',', 'value', '=', 'value', ',', 'background', '=', 'self', '.', 'config', '.', 'solar_colors', '[', 'text', ']', ',', 'indicatoron', '=', '0', ',', 'width', '=', '50', ',', 'height', '=', '2', ',', 'command', '=', 'self', '.', 'change_class', ')', 'b', '.', 'pack', '(', 'fill', '=', 'tk', '.', 'BOTH', ',', 'expand', '=', '1', ')', 'buttonnum', '+=', '1', 'self', '.', 'pick_frame', '.', 'grid', '(', 'row', '=', '0', ',', 'column', '=', '0', ',', 'rowspan', '=', '5', ',', 'sticky', '=', 'tk', '.', 'W', '+', 'tk', '.', 'E', '+', 'tk', '.', 'N', '+', 'tk', '.', 'S', ')', 'self', '.', 'pick_frame2', '.', 'grid', '(', 'row', '=', '0', ',', 'column', '=', '1', ',', 'rowspan', '=', '5', ',', 'sticky', '=', 'tk', '.', 'W', '+', 'tk', '.', 'E', '+', 'tk', '.', 'N', '+', 'tk', '.', 'S', ')', 'undobutton', '=', 'tk', '.', 'Button', '(', 'master', '=', 'self', '.', 'tab_classify', ',', 'text', '=', '"Undo"', ',', 'command', '=', 'self', '.', 'undobutton_action', ')', 'undobutton', '.', 'grid', '(', 'row', '=', '6', ',', 'column', '=', '0', ',', 'columnspan', '=', '2', ',', 'sticky', '=', 'tk', '.', 'W', '+', 'tk', '.', 'E', ')'] | initial set up of classification tab | ['initial', 'set', 'up', 'of', 'classification', 'tab'] | train | https://github.com/jmbhughes/suvi-trainer/blob/3d89894a4a037286221974c7eb5634d229b4f5d4/suvitrainer/gui.py#L531-L554 |
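The tab above is a two-column bank of Radiobuttons sharing one IntVar; a standalone sketch of that tkinter pattern with made-up class labels:

import tkinter as tk

root = tk.Tk()
var = tk.IntVar(value=0)   # 0 = unlabeled, matching the record's default
for text, value in [('unlabeled', 0), ('flare', 1), ('quiet sun', 2)]:
    tk.Radiobutton(root, text=text, variable=var, value=value,
                   indicatoron=0, width=30, height=2).pack(fill=tk.BOTH, expand=1)
root.mainloop()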
689 | Erotemic/utool | utool/util_dev.py | autopep8_diff | def autopep8_diff(fpath):
r"""
Args:
fpath (str): file path string
CommandLine:
python -m utool.util_dev --test-autopep8_diff --fpath ingest_data.py
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_dev import * # NOQA
>>> fpath = ut.get_argval('--fpath', type_=str, default='ingest_data.py')
>>> result = autopep8_diff(fpath)
>>> print(result)
"""
import utool as ut
args = ('autopep8', fpath, '--diff')
res = ut.cmd(args, verbose=False)
out, err, ret = res
ut.print_difftext(out) | python | def autopep8_diff(fpath):
r"""
Args:
fpath (str): file path string
CommandLine:
python -m utool.util_dev --test-autopep8_diff --fpath ingest_data.py
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_dev import * # NOQA
>>> fpath = ut.get_argval('--fpath', type_=str, default='ingest_data.py')
>>> result = autopep8_diff(fpath)
>>> print(result)
"""
import utool as ut
args = ('autopep8', fpath, '--diff')
res = ut.cmd(args, verbose=False)
out, err, ret = res
ut.print_difftext(out) | ['def', 'autopep8_diff', '(', 'fpath', ')', ':', 'import', 'utool', 'as', 'ut', 'args', '=', '(', "'autopep8'", ',', 'fpath', ',', "'--diff'", ')', 'res', '=', 'ut', '.', 'cmd', '(', 'args', ',', 'verbose', '=', 'False', ')', 'out', ',', 'err', ',', 'ret', '=', 'res', 'ut', '.', 'print_difftext', '(', 'out', ')'] | r"""
Args:
fpath (str): file path string
CommandLine:
python -m utool.util_dev --test-autopep8_diff --fpath ingest_data.py
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_dev import * # NOQA
>>> fpath = ut.get_argval('--fpath', type_=str, default='ingest_data.py')
>>> result = autopep8_diff(fpath)
>>> print(result) | ['r', 'Args', ':', 'fpath', '(', 'str', ')', ':', 'file', 'path', 'string'] | train | https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_dev.py#L2209-L2228 |
690 | geertj/gruvi | lib/gruvi/stream.py | Stream.wrap | def wrap(self, encoding, **textio_args):
"""Return a :class:`io.TextIOWrapper` that wraps the stream.
The wrapper provides text IO on top of the byte stream, using the
specified *encoding*. The *textio_args* keyword arguments are
additional keyword arguments passed to the :class:`~io.TextIOWrapper`
constructor. Unless another buffering scheme is specified, the
*write_through* option is enabled.
"""
# By default we want write_through behavior, unless the user specifies
# something else.
if 'line_buffering' not in textio_args and 'write_through' not in textio_args:
textio_args['write_through'] = True
return compat.TextIOWrapper(self, encoding, **textio_args) | python | def wrap(self, encoding, **textio_args):
"""Return a :class:`io.TextIOWrapper` that wraps the stream.
The wrapper provides text IO on top of the byte stream, using the
specified *encoding*. The *textio_args* keyword arguments are
additional keyword arguments passed to the :class:`~io.TextIOWrapper`
constructor. Unless another buffering scheme is specified, the
*write_through* option is enabled.
"""
# By default we want write_through behavior, unless the user specifies
# something else.
if 'line_buffering' not in textio_args and 'write_through' not in textio_args:
textio_args['write_through'] = True
return compat.TextIOWrapper(self, encoding, **textio_args) | ['def', 'wrap', '(', 'self', ',', 'encoding', ',', '*', '*', 'textio_args', ')', ':', '# By default we want write_through behavior, unless the user specifies', '# something else.', 'if', "'line_buffering'", 'not', 'in', 'textio_args', 'and', "'write_through'", 'not', 'in', 'textio_args', ':', 'textio_args', '[', "'write_through'", ']', '=', 'True', 'return', 'compat', '.', 'TextIOWrapper', '(', 'self', ',', 'encoding', ',', '*', '*', 'textio_args', ')'] | Return a :class:`io.TextIOWrapper` that wraps the stream.
The wrapper provides text IO on top of the byte stream, using the
specified *encoding*. The *textio_args* keyword arguments are
additional keyword arguments passed to the :class:`~io.TextIOWrapper`
constructor. Unless another buffering scheme is specified, the
*write_through* option is enabled. | ['Return', 'a', ':', 'class', ':', 'io', '.', 'TextIOWrapper', 'that', 'wraps', 'the', 'stream', '.'] | train | https://github.com/geertj/gruvi/blob/1d77ca439600b6ea7a19aa1ee85dca0f3be3f3f8/lib/gruvi/stream.py#L177-L190 |
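A hedged sketch; stream stands for an already-connected gruvi stream object obtained elsewhere:

textio = stream.wrap('utf-8')   # io.TextIOWrapper with write_through=True
textio.write(u'hello\n')        # flushed through to the byte stream immediately
reply = textio.readline()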
691 | redhat-openstack/python-tripleo-helper | tripleohelper/server.py | Server.yum_update | def yum_update(self, allow_reboot=False):
"""Do a yum update on the system.
:param allow_reboot: If True and if a new kernel has been installed,
the system will be rebooted
"""
self.run('yum clean all')
self.run('test -f /usr/bin/subscription-manager && subscription-manager repos --list-enabled',
ignore_error=True)
self.run('yum repolist')
self.run('yum update -y --quiet', retry=3)
# reboot if a new initrd has been generated since the boot
if allow_reboot:
self.run('grubby --set-default $(ls /boot/vmlinuz-*.x86_64|tail -1)')
default_kernel = self.run('grubby --default-kernel')[0].rstrip()
cur_kernel = self.run('uname -r')[0].rstrip()
if cur_kernel not in default_kernel:
self.run('reboot', ignore_error=True)
self.ssh_pool.stop_all() | python | def yum_update(self, allow_reboot=False):
"""Do a yum update on the system.
:param allow_reboot: If True and if a new kernel has been installed,
the system will be rebooted
"""
self.run('yum clean all')
self.run('test -f /usr/bin/subscription-manager && subscription-manager repos --list-enabled',
ignore_error=True)
self.run('yum repolist')
self.run('yum update -y --quiet', retry=3)
# reboot if a new initrd has been generated since the boot
if allow_reboot:
self.run('grubby --set-default $(ls /boot/vmlinuz-*.x86_64|tail -1)')
default_kernel = self.run('grubby --default-kernel')[0].rstrip()
cur_kernel = self.run('uname -r')[0].rstrip()
if cur_kernel not in default_kernel:
self.run('reboot', ignore_error=True)
self.ssh_pool.stop_all() | ['def', 'yum_update', '(', 'self', ',', 'allow_reboot', '=', 'False', ')', ':', 'self', '.', 'run', '(', "'yum clean all'", ')', 'self', '.', 'run', '(', "'test -f /usr/bin/subscription-manager && subscription-manager repos --list-enabled'", ',', 'ignore_error', '=', 'True', ')', 'self', '.', 'run', '(', "'yum repolist'", ')', 'self', '.', 'run', '(', "'yum update -y --quiet'", ',', 'retry', '=', '3', ')', '# reboot if a new initrd has been generated since the boot', 'if', 'allow_reboot', ':', 'self', '.', 'run', '(', "'grubby --set-default $(ls /boot/vmlinuz-*.x86_64|tail -1)'", ')', 'default_kernel', '=', 'self', '.', 'run', '(', "'grubby --default-kernel'", ')', '[', '0', ']', '.', 'rstrip', '(', ')', 'cur_kernel', '=', 'self', '.', 'run', '(', "'uname -r'", ')', '[', '0', ']', '.', 'rstrip', '(', ')', 'if', 'cur_kernel', 'not', 'in', 'default_kernel', ':', 'self', '.', 'run', '(', "'reboot'", ',', 'ignore_error', '=', 'True', ')', 'self', '.', 'ssh_pool', '.', 'stop_all', '(', ')'] | Do a yum update on the system.
:param allow_reboot: If True and if a new kernel has been installed,
the system will be rebooted | ['Do', 'a', 'yum', 'update', 'on', 'the', 'system', '.'] | train | https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/server.py#L242-L260 |
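A hedged sketch; the Server import path follows the record's file location, but its constructor arguments vary across tripleo-helper versions, so the hostname/user pair below is illustrative:

from tripleohelper.server import Server

server = Server(hostname='192.0.2.10', user='root')   # illustrative arguments
server.yum_update(allow_reboot=True)   # reboots only if a newer kernel was installed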
692 | bcbio/bcbio-nextgen | bcbio/structural/titancna.py | _run_titancna | def _run_titancna(cn_file, het_file, ploidy, num_clusters, work_dir, data):
"""Run titanCNA wrapper script on given ploidy and clusters.
"""
sample = dd.get_sample_name(data)
cores = dd.get_num_cores(data)
export_cmd = utils.get_R_exports()
ploidy_dir = utils.safe_makedir(os.path.join(work_dir, "run_ploidy%s" % ploidy))
cluster_dir = "%s_cluster%02d" % (sample, num_clusters)
out_dir = os.path.join(ploidy_dir, cluster_dir)
if not utils.file_uptodate(out_dir + ".titan.txt", cn_file):
with tx_tmpdir(data) as tmp_dir:
with utils.chdir(tmp_dir):
cmd = ("{export_cmd} && titanCNA.R --id {sample} --hetFile {het_file} --cnFile {cn_file} "
"--numClusters {num_clusters} --ploidy {ploidy} --numCores {cores} --outDir {tmp_dir} "
"--libdir None")
chroms = ["'%s'" % c.name.replace("chr", "") for c in ref.file_contigs(dd.get_ref_file(data))
if chromhacks.is_autosomal_or_x(c.name)]
if "'X'" not in chroms:
chroms += ["'X'"]
# Use UCSC style naming for human builds to support BSgenome
genome_build = ("hg19" if dd.get_genome_build(data) in ["GRCh37", "hg19"]
else dd.get_genome_build(data))
cmd += """ --chrs "c(%s)" """ % ",".join(chroms)
cmd += " --genomeBuild {genome_build}"
if data["genome_build"] in ("hg19", "hg38"):
cmd += " --genomeStyle UCSC"
if data["genome_build"] in ["hg38"]:
data_dir = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(os.path.join(
os.path.dirname(utils.Rscript_cmd()), "titanCNA.R"))),
os.pardir, os.pardir, "data"))
cytoband_file = os.path.join(data_dir, "cytoBand_hg38.txt")
assert os.path.exists(cytoband_file), cytoband_file
cmd += " --cytobandFile %s" % cytoband_file
# TitanCNA's model is influenced by the variance in read coverage data
# and data type: set reasonable defaults for non-WGS runs
# (see https://github.com/gavinha/TitanCNA/tree/master/scripts/R_scripts)
if dd.get_coverage_interval(data) != "genome":
cmd += " --alphaK=2500 --alphaKHigh=2500"
do.run(cmd.format(**locals()), "TitanCNA CNV detection: ploidy %s, cluster %s" % (ploidy, num_clusters))
for fname in glob.glob(os.path.join(tmp_dir, cluster_dir + "*")):
shutil.move(fname, ploidy_dir)
if os.path.exists(os.path.join(tmp_dir, "Rplots.pdf")):
shutil.move(os.path.join(tmp_dir, "Rplots.pdf"),
os.path.join(ploidy_dir, "%s.Rplots.pdf" % cluster_dir))
return ploidy_dir | python | def _run_titancna(cn_file, het_file, ploidy, num_clusters, work_dir, data):
"""Run titanCNA wrapper script on given ploidy and clusters.
"""
sample = dd.get_sample_name(data)
cores = dd.get_num_cores(data)
export_cmd = utils.get_R_exports()
ploidy_dir = utils.safe_makedir(os.path.join(work_dir, "run_ploidy%s" % ploidy))
cluster_dir = "%s_cluster%02d" % (sample, num_clusters)
out_dir = os.path.join(ploidy_dir, cluster_dir)
if not utils.file_uptodate(out_dir + ".titan.txt", cn_file):
with tx_tmpdir(data) as tmp_dir:
with utils.chdir(tmp_dir):
cmd = ("{export_cmd} && titanCNA.R --id {sample} --hetFile {het_file} --cnFile {cn_file} "
"--numClusters {num_clusters} --ploidy {ploidy} --numCores {cores} --outDir {tmp_dir} "
"--libdir None")
chroms = ["'%s'" % c.name.replace("chr", "") for c in ref.file_contigs(dd.get_ref_file(data))
if chromhacks.is_autosomal_or_x(c.name)]
if "'X'" not in chroms:
chroms += ["'X'"]
# Use UCSC style naming for human builds to support BSgenome
genome_build = ("hg19" if dd.get_genome_build(data) in ["GRCh37", "hg19"]
else dd.get_genome_build(data))
cmd += """ --chrs "c(%s)" """ % ",".join(chroms)
cmd += " --genomeBuild {genome_build}"
if data["genome_build"] in ("hg19", "hg38"):
cmd += " --genomeStyle UCSC"
if data["genome_build"] in ["hg38"]:
data_dir = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(os.path.join(
os.path.dirname(utils.Rscript_cmd()), "titanCNA.R"))),
os.pardir, os.pardir, "data"))
cytoband_file = os.path.join(data_dir, "cytoBand_hg38.txt")
assert os.path.exists(cytoband_file), cytoband_file
cmd += " --cytobandFile %s" % cytoband_file
# TitanCNA's model is influenced by the variance in read coverage data
# and data type: set reasonable defaults for non-WGS runs
# (see https://github.com/gavinha/TitanCNA/tree/master/scripts/R_scripts)
if dd.get_coverage_interval(data) != "genome":
cmd += " --alphaK=2500 --alphaKHigh=2500"
do.run(cmd.format(**locals()), "TitanCNA CNV detection: ploidy %s, cluster %s" % (ploidy, num_clusters))
for fname in glob.glob(os.path.join(tmp_dir, cluster_dir + "*")):
shutil.move(fname, ploidy_dir)
if os.path.exists(os.path.join(tmp_dir, "Rplots.pdf")):
shutil.move(os.path.join(tmp_dir, "Rplots.pdf"),
os.path.join(ploidy_dir, "%s.Rplots.pdf" % cluster_dir))
return ploidy_dir | ['def', '_run_titancna', '(', 'cn_file', ',', 'het_file', ',', 'ploidy', ',', 'num_clusters', ',', 'work_dir', ',', 'data', ')', ':', 'sample', '=', 'dd', '.', 'get_sample_name', '(', 'data', ')', 'cores', '=', 'dd', '.', 'get_num_cores', '(', 'data', ')', 'export_cmd', '=', 'utils', '.', 'get_R_exports', '(', ')', 'ploidy_dir', '=', 'utils', '.', 'safe_makedir', '(', 'os', '.', 'path', '.', 'join', '(', 'work_dir', ',', '"run_ploidy%s"', '%', 'ploidy', ')', ')', 'cluster_dir', '=', '"%s_cluster%02d"', '%', '(', 'sample', ',', 'num_clusters', ')', 'out_dir', '=', 'os', '.', 'path', '.', 'join', '(', 'ploidy_dir', ',', 'cluster_dir', ')', 'if', 'not', 'utils', '.', 'file_uptodate', '(', 'out_dir', '+', '".titan.txt"', ',', 'cn_file', ')', ':', 'with', 'tx_tmpdir', '(', 'data', ')', 'as', 'tmp_dir', ':', 'with', 'utils', '.', 'chdir', '(', 'tmp_dir', ')', ':', 'cmd', '=', '(', '"{export_cmd} && titanCNA.R --id {sample} --hetFile {het_file} --cnFile {cn_file} "', '"--numClusters {num_clusters} --ploidy {ploidy} --numCores {cores} --outDir {tmp_dir} "', '"--libdir None"', ')', 'chroms', '=', '[', '"\'%s\'"', '%', 'c', '.', 'name', '.', 'replace', '(', '"chr"', ',', '""', ')', 'for', 'c', 'in', 'ref', '.', 'file_contigs', '(', 'dd', '.', 'get_ref_file', '(', 'data', ')', ')', 'if', 'chromhacks', '.', 'is_autosomal_or_x', '(', 'c', '.', 'name', ')', ']', 'if', '"\'X\'"', 'not', 'in', 'chroms', ':', 'chroms', '+=', '[', '"\'X\'"', ']', '# Use UCSC style naming for human builds to support BSgenome', 'genome_build', '=', '(', '"hg19"', 'if', 'dd', '.', 'get_genome_build', '(', 'data', ')', 'in', '[', '"GRCh37"', ',', '"hg19"', ']', 'else', 'dd', '.', 'get_genome_build', '(', 'data', ')', ')', 'cmd', '+=', '""" --chrs "c(%s)" """', '%', '","', '.', 'join', '(', 'chroms', ')', 'cmd', '+=', '" --genomeBuild {genome_build}"', 'if', 'data', '[', '"genome_build"', ']', 'in', '(', '"hg19"', ',', '"hg38"', ')', ':', 'cmd', '+=', '" --genomeStyle UCSC"', 'if', 'data', '[', '"genome_build"', ']', 'in', '[', '"hg38"', ']', ':', 'data_dir', '=', 'os', '.', 'path', '.', 'normpath', '(', 'os', '.', 'path', '.', 'join', '(', 'os', '.', 'path', '.', 'dirname', '(', 'os', '.', 'path', '.', 'realpath', '(', 'os', '.', 'path', '.', 'join', '(', 'os', '.', 'path', '.', 'dirname', '(', 'utils', '.', 'Rscript_cmd', '(', ')', ')', ',', '"titanCNA.R"', ')', ')', ')', ',', 'os', '.', 'pardir', ',', 'os', '.', 'pardir', ',', '"data"', ')', ')', 'cytoband_file', '=', 'os', '.', 'path', '.', 'join', '(', 'data_dir', ',', '"cytoBand_hg38.txt"', ')', 'assert', 'os', '.', 'path', '.', 'exists', '(', 'cytoband_file', ')', ',', 'cytoband_file', 'cmd', '+=', '" --cytobandFile %s"', '%', 'cytoband_file', "# TitanCNA's model is influenced by the variance in read coverage data", '# and data type: set reasonable defaults for non-WGS runs', '# (see https://github.com/gavinha/TitanCNA/tree/master/scripts/R_scripts)', 'if', 'dd', '.', 'get_coverage_interval', '(', 'data', ')', '!=', '"genome"', ':', 'cmd', '+=', '" --alphaK=2500 --alphaKHigh=2500"', 'do', '.', 'run', '(', 'cmd', '.', 'format', '(', '*', '*', 'locals', '(', ')', ')', ',', '"TitanCNA CNV detection: ploidy %s, cluster %s"', '%', '(', 'ploidy', ',', 'num_clusters', ')', ')', 'for', 'fname', 'in', 'glob', '.', 'glob', '(', 'os', '.', 'path', '.', 'join', '(', 'tmp_dir', ',', 'cluster_dir', '+', '"*"', ')', ')', ':', 'shutil', '.', 'move', '(', 'fname', ',', 'ploidy_dir', ')', 'if', 'os', '.', 'path', '.', 'exists', '(', 'os', '.', 'path', '.', 'join', '(', 
'tmp_dir', ',', '"Rplots.pdf"', ')', ')', ':', 'shutil', '.', 'move', '(', 'os', '.', 'path', '.', 'join', '(', 'tmp_dir', ',', '"Rplots.pdf"', ')', ',', 'os', '.', 'path', '.', 'join', '(', 'ploidy_dir', ',', '"%s.Rplots.pdf"', '%', 'cluster_dir', ')', ')', 'return', 'ploidy_dir'] | Run titanCNA wrapper script on given ploidy and clusters. | ['Run', 'titanCNA', 'wrapper', 'script', 'on', 'given', 'ploidy', 'and', 'clusters', '.'] | train | https://github.com/bcbio/bcbio-nextgen/blob/6a9348c0054ccd5baffd22f1bb7d0422f6978b20/bcbio/structural/titancna.py#L97-L143 |
693 | Erotemic/utool | utool/util_project.py | ensure_user_profile | def ensure_user_profile(user_profile=None):
r"""
Args:
user_profile (UserProfile): (default = None)
Returns:
UserProfile: user_profile
CommandLine:
python -m utool.util_project --exec-ensure_user_profile --show
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_project import * # NOQA
>>> import utool as ut
>>> user_profile = None
>>> user_profile = ensure_user_profile(user_profile)
>>> result = ('user_profile = %s' % (ut.repr2(user_profile),))
>>> print(ut.repr3(user_profile.project_dpaths))
>>> print(result)
"""
global __GLOBAL_PROFILE__
if __GLOBAL_PROFILE__ is None:
import utool as ut
if ut.is_developer():
__GLOBAL_PROFILE__ = ibeis_user_profile()
else:
__GLOBAL_PROFILE__ = UserProfile('default')
if user_profile is None:
user_profile = __GLOBAL_PROFILE__
return user_profile | python | def ensure_user_profile(user_profile=None):
r"""
Args:
user_profile (UserProfile): (default = None)
Returns:
UserProfile: user_profile
CommandLine:
python -m utool.util_project --exec-ensure_user_profile --show
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_project import * # NOQA
>>> import utool as ut
>>> user_profile = None
>>> user_profile = ensure_user_profile(user_profile)
>>> result = ('user_profile = %s' % (ut.repr2(user_profile),))
>>> print(ut.repr3(user_profile.project_dpaths))
>>> print(result)
"""
global __GLOBAL_PROFILE__
if __GLOBAL_PROFILE__ is None:
import utool as ut
if ut.is_developer():
__GLOBAL_PROFILE__ = ibeis_user_profile()
else:
__GLOBAL_PROFILE__ = UserProfile('default')
if user_profile is None:
user_profile = __GLOBAL_PROFILE__
return user_profile | ['def', 'ensure_user_profile', '(', 'user_profile', '=', 'None', ')', ':', 'global', '__GLOBAL_PROFILE__', 'if', '__GLOBAL_PROFILE__', 'is', 'None', ':', 'import', 'utool', 'as', 'ut', 'if', 'ut', '.', 'is_developer', '(', ')', ':', '__GLOBAL_PROFILE__', '=', 'ibeis_user_profile', '(', ')', 'else', ':', '__GLOBAL_PROFILE__', '=', 'UserProfile', '(', "'default'", ')', 'if', 'user_profile', 'is', 'None', ':', 'user_profile', '=', '__GLOBAL_PROFILE__', 'return', 'user_profile'] | r"""
Args:
user_profile (UserProfile): (default = None)
Returns:
UserProfile: user_profile
CommandLine:
python -m utool.util_project --exec-ensure_user_profile --show
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_project import * # NOQA
>>> import utool as ut
>>> user_profile = None
>>> user_profile = ensure_user_profile(user_profile)
>>> result = ('user_profile = %s' % (ut.repr2(user_profile),))
>>> print(ut.repr3(user_profile.project_dpaths))
>>> print(result) | ['r', 'Args', ':', 'user_profile', '(', 'UserProfile', ')', ':', '(', 'default', '=', 'None', ')'] | train | https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_project.py#L573-L603 |
694 | Xython/Linq.py | linq/standard/general.py | Skip | def Skip(self: Iterable, n):
"""
[
{
'self': [1, 2, 3, 4, 5],
'n': 3,
'assert': lambda ret: list(ret) == [4, 5]
}
]
"""
con = iter(self)
for i, _ in enumerate(con, 1):  # start at 1 so exactly n items are consumed
if i == n:
break
return con | python | def Skip(self: Iterable, n):
"""
[
{
'self': [1, 2, 3, 4, 5],
'n': 3,
'assert': lambda ret: list(ret) == [4, 5]
}
]
"""
con = iter(self)
for i, _ in enumerate(con, 1):  # start at 1 so exactly n items are consumed
if i == n:
break
return con | ['def', 'Skip', '(', 'self', ':', 'Iterable', ',', 'n', ')', ':', 'con', '=', 'iter', '(', 'self', ')', 'for', 'i', ',', '_', 'in', 'enumerate', '(', 'con', ')', ':', 'if', 'i', '==', 'n', ':', 'break', 'return', 'con'] | [
{
'self': [1, 2, 3, 4, 5],
'n': 3,
'assert': lambda ret: list(ret) == [4, 5]
}
] | ['[', '{', 'self', ':', '[', '1', '2', '3', '4', '5', ']', 'n', ':', '3', 'assert', ':', 'lambda', 'ret', ':', 'list', '(', 'ret', ')', '==', '[', '4', '5', ']', '}', ']'] | train | https://github.com/Xython/Linq.py/blob/ffb65f92f1df0d8161d5f835f5947554f6f33d72/linq/standard/general.py#L209-L223 |
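The embedded test spec above doubles as usage; calling the function directly, with the import path assumed from the record's file location:

from linq.standard.general import Skip   # path assumed from the record

list(Skip([1, 2, 3, 4, 5], 3))           # -> [4, 5], matching the test spec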
695 | michaelliao/sinaweibopy | weibo.py | APIClient.get_authorize_url | def get_authorize_url(self, redirect_uri=None, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return '%s%s?%s' % (self.auth_url, 'authorize',
_encode_params(client_id=self.client_id,
response_type=response_type,
redirect_uri=redirect, **kw)) | python | def get_authorize_url(self, redirect_uri=None, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return '%s%s?%s' % (self.auth_url, 'authorize',
_encode_params(client_id=self.client_id,
response_type=response_type,
redirect_uri=redirect, **kw)) | ['def', 'get_authorize_url', '(', 'self', ',', 'redirect_uri', '=', 'None', ',', '*', '*', 'kw', ')', ':', 'redirect', '=', 'redirect_uri', 'if', 'redirect_uri', 'else', 'self', '.', 'redirect_uri', 'if', 'not', 'redirect', ':', 'raise', 'APIError', '(', "'21305'", ',', "'Parameter absent: redirect_uri'", ',', "'OAuth2 request'", ')', 'response_type', '=', 'kw', '.', 'pop', '(', "'response_type'", ',', "'code'", ')', 'return', "'%s%s?%s'", '%', '(', 'self', '.', 'auth_url', ',', "'authorize'", ',', '_encode_params', '(', 'client_id', '=', 'self', '.', 'client_id', ',', 'response_type', '=', 'response_type', ',', 'redirect_uri', '=', 'redirect', ',', '*', '*', 'kw', ')', ')'] | return the authorization url that the user should be redirected to. | ['return', 'the', 'authorization', 'url', 'that', 'the', 'user', 'should', 'be', 'redirected', 'to', '.'] | train | https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/weibo.py#L259-L270 |
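A sketch of the documented OAuth2 flow for this client; the credentials are placeholders:

from weibo import APIClient

client = APIClient(app_key='YOUR_KEY', app_secret='YOUR_SECRET',
                   redirect_uri='http://example.com/callback')
url = client.get_authorize_url()   # redirect the user here to grant access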
696 | influxdata/influxdb-python | influxdb/influxdb08/client.py | InfluxDBClient.get_database_users | def get_database_users(self):
"""Get list of database users."""
url = "db/{0}/users".format(self._database)
response = self.request(
url=url,
method='GET',
expected_response_code=200
)
return response.json() | python | def get_database_users(self):
"""Get list of database users."""
url = "db/{0}/users".format(self._database)
response = self.request(
url=url,
method='GET',
expected_response_code=200
)
return response.json() | ['def', 'get_database_users', '(', 'self', ')', ':', 'url', '=', '"db/{0}/users"', '.', 'format', '(', 'self', '.', '_database', ')', 'response', '=', 'self', '.', 'request', '(', 'url', '=', 'url', ',', 'method', '=', "'GET'", ',', 'expected_response_code', '=', '200', ')', 'return', 'response', '.', 'json', '(', ')'] | Get list of database users. | ['Get', 'list', 'of', 'database', 'users', '.'] | train | https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L733-L743 |
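A short sketch for the legacy 0.8 client shown above; the connection details are placeholders:

from influxdb.influxdb08 import InfluxDBClient

client = InfluxDBClient('localhost', 8086, 'root', 'root', 'example_db')
for user in client.get_database_users():
    print(user)   # user dicts as returned by the 0.8 HTTP API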
697 | nugget/python-insteonplm | insteonplm/devices/__init__.py | ALDB.find_matching_link | def find_matching_link(self, mode, group, addr):
"""Find a matching link in the current device.
Mode: r | c is the mode of the link in the linked device
This method will search for a corresponding link in the
reverse direction.
group: All-Link group number
addr: Inteon address of the linked device
"""
found_rec = None
mode_test = None
if mode.lower() in ['c', 'r']:
link_group = int(group)
link_addr = Address(addr)
for mem_addr in self:
rec = self[mem_addr]
if mode.lower() == 'r':
mode_test = rec.control_flags.is_controller
else:
mode_test = rec.control_flags.is_responder
if (mode_test and
rec.group == link_group and
rec.address == link_addr):
found_rec = rec
return found_rec | python | def find_matching_link(self, mode, group, addr):
"""Find a matching link in the current device.
Mode: r | c is the mode of the link in the linked device
This method will search for a corresponding link in the
reverse direction.
group: All-Link group number
addr: Inteon address of the linked device
"""
found_rec = None
mode_test = None
if mode.lower() in ['c', 'r']:
link_group = int(group)
link_addr = Address(addr)
for mem_addr in self:
rec = self[mem_addr]
if mode.lower() == 'r':
mode_test = rec.control_flags.is_controller
else:
mode_test = rec.control_flags.is_responder
if (mode_test and
rec.group == link_group and
rec.address == link_addr):
found_rec = rec
return found_rec | ['def', 'find_matching_link', '(', 'self', ',', 'mode', ',', 'group', ',', 'addr', ')', ':', 'found_rec', '=', 'None', 'mode_test', '=', 'None', 'if', 'mode', '.', 'lower', '(', ')', 'in', '[', "'c'", ',', "'r'", ']', ':', 'link_group', '=', 'int', '(', 'group', ')', 'link_addr', '=', 'Address', '(', 'addr', ')', 'for', 'mem_addr', 'in', 'self', ':', 'rec', '=', 'self', '[', 'mem_addr', ']', 'if', 'mode', '.', 'lower', '(', ')', '==', "'r'", ':', 'mode_test', '=', 'rec', '.', 'control_flags', '.', 'is_controller', 'else', ':', 'mode_test', '=', 'rec', '.', 'control_flags', '.', 'is_responder', 'if', '(', 'mode_test', 'and', 'rec', '.', 'group', '==', 'link_group', 'and', 'rec', '.', 'address', '==', 'link_addr', ')', ':', 'found_rec', '=', 'rec', 'return', 'found_rec'] | Find a matching link in the current device.
Mode: r | c is the mode of the link in the linked device
This method will search for a corresponding link in the
reverse direction.
group: All-Link group number
addr: Insteon address of the linked device | ['Find', 'a', 'matching', 'link', 'in', 'the', 'current', 'device', '.'] | train | https://github.com/nugget/python-insteonplm/blob/65548041f1b0729ae1ae904443dd81b0c6cbf1bf/insteonplm/devices/__init__.py#L1347-L1371 |
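A hedged sketch; device stands for a loaded insteonplm device whose ALDB has already been read back, and the group/address values are illustrative:

rec = device.aldb.find_matching_link('c', 0x01, '1a2b3c')
if rec is not None:
    print('matching reverse link:', rec)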
698 | log2timeline/dfvfs | dfvfs/file_io/encoded_stream_io.py | EncodedStream._ReadEncodedData | def _ReadEncodedData(self, read_size):
"""Reads encoded data from the file-like object.
Args:
read_size (int): number of bytes of encoded data to read.
Returns:
int: number of bytes of encoded data read.
"""
encoded_data = self._file_object.read(read_size)
read_count = len(encoded_data)
self._encoded_data = b''.join([self._encoded_data, encoded_data])
self._decoded_data, self._encoded_data = (
self._decoder.Decode(self._encoded_data))
self._decoded_data_size = len(self._decoded_data)
return read_count | python | def _ReadEncodedData(self, read_size):
"""Reads encoded data from the file-like object.
Args:
read_size (int): number of bytes of encoded data to read.
Returns:
int: number of bytes of encoded data read.
"""
encoded_data = self._file_object.read(read_size)
read_count = len(encoded_data)
self._encoded_data = b''.join([self._encoded_data, encoded_data])
self._decoded_data, self._encoded_data = (
self._decoder.Decode(self._encoded_data))
self._decoded_data_size = len(self._decoded_data)
return read_count | ['def', '_ReadEncodedData', '(', 'self', ',', 'read_size', ')', ':', 'encoded_data', '=', 'self', '.', '_file_object', '.', 'read', '(', 'read_size', ')', 'read_count', '=', 'len', '(', 'encoded_data', ')', 'self', '.', '_encoded_data', '=', "b''", '.', 'join', '(', '[', 'self', '.', '_encoded_data', ',', 'encoded_data', ']', ')', 'self', '.', '_decoded_data', ',', 'self', '.', '_encoded_data', '=', '(', 'self', '.', '_decoder', '.', 'Decode', '(', 'self', '.', '_encoded_data', ')', ')', 'self', '.', '_decoded_data_size', '=', 'len', '(', 'self', '.', '_decoded_data', ')', 'return', 'read_count'] | Reads encoded data from the file-like object.
Args:
read_size (int): number of bytes of encoded data to read.
Returns:
int: number of bytes of encoded data read. | ['Reads', 'encoded', 'data', 'from', 'the', 'file', '-', 'like', 'object', '.'] | train | https://github.com/log2timeline/dfvfs/blob/2b3ccd115f9901d89f383397d4a1376a873c83c4/dfvfs/file_io/encoded_stream_io.py#L158-L178 |
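The Decode contract above returns (decoded, remaining); a standalone sketch of that accumulate-and-decode pattern, with base64 standing in for a dfvfs decoder object:

import base64

def decode(encoded_data):
    # base64 decodes in 4-byte units; keep any partial unit as remaining data
    usable = len(encoded_data) - (len(encoded_data) % 4)
    return base64.b64decode(encoded_data[:usable]), encoded_data[usable:]

decoded, remaining = decode(b'aGVsbG8gd2')   # -> (b'hello ', b'd2')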
699 | sporteasy/python-poeditor | poeditor/client.py | POEditorAPI.add_language_to_project | def add_language_to_project(self, project_id, language_code):
"""
Adds a new language to a project
"""
self._run(
url_path="languages/add",
id=project_id,
language=language_code
)
return True | python | def add_language_to_project(self, project_id, language_code):
"""
Adds a new language to a project
"""
self._run(
url_path="languages/add",
id=project_id,
language=language_code
)
return True | ['def', 'add_language_to_project', '(', 'self', ',', 'project_id', ',', 'language_code', ')', ':', 'self', '.', '_run', '(', 'url_path', '=', '"languages/add"', ',', 'id', '=', 'project_id', ',', 'language', '=', 'language_code', ')', 'return', 'True'] | Adds a new language to project | ['Adds', 'a', 'new', 'language', 'to', 'project'] | train | https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L253-L262 |