Dataset schema (12 columns; every data row below lists its fields in this order, separated by `|`):

| column | type | observed values |
|---|---|---|
| Unnamed: 0 | int64 | 0 to 10k |
| repository_name | string | lengths 7 to 54 |
| func_path_in_repository | string | lengths 5 to 223 |
| func_name | string | lengths 1 to 134 |
| whole_func_string | string | lengths 100 to 30.3k |
| language | string | 1 class (constant value) |
| func_code_string | string | lengths 100 to 30.3k |
| func_code_tokens | string | lengths 138 to 33.2k |
| func_documentation_string | string | lengths 1 to 15k |
| func_documentation_tokens | string | lengths 5 to 5.14k |
| split_name | string | 1 class (constant value) |
| func_code_url | string | lengths 91 to 315 |

idx | repository_name | func_path_in_repository | func_name | whole_func_string | language | func_code_string | func_code_tokens | func_documentation_string | func_documentation_tokens | split_name | func_code_url
---|---|---|---|---|---|---|---|---|---|---|---|
100 | etingof/pysnmp | pysnmp/smi/mibs/SNMPv2-SMI.py | ManagedMibObject.getNode | def getNode(self, name, **context):
"""Return tree node found by name"""
if name == self.name:
return self
else:
return self.getBranch(name, **context).getNode(name, **context) | python | def getNode(self, name, **context):
"""Return tree node found by name"""
if name == self.name:
return self
else:
return self.getBranch(name, **context).getNode(name, **context) | ['def', 'getNode', '(', 'self', ',', 'name', ',', '*', '*', 'context', ')', ':', 'if', 'name', '==', 'self', '.', 'name', ':', 'return', 'self', 'else', ':', 'return', 'self', '.', 'getBranch', '(', 'name', ',', '*', '*', 'context', ')', '.', 'getNode', '(', 'name', ',', '*', '*', 'context', ')'] | Return tree node found by name | ['Return', 'tree', 'node', 'found', 'by', 'name'] | train | https://github.com/etingof/pysnmp/blob/cde062dd42f67dfd2d7686286a322d40e9c3a4b7/pysnmp/smi/mibs/SNMPv2-SMI.py#L478-L483 |
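
The `getNode` method above resolves a name by recursive descent through the MIB tree. A minimal self-contained sketch of the same pattern; `Node`, `get_branch`, and the tuple names here are hypothetical stand-ins, not pysnmp's actual `ManagedMibObject` API:

```python
# Hypothetical stand-in for the recursive lookup pattern in getNode above.
class Node:
    def __init__(self, name, children=()):
        self.name = name
        self.children = {c.name: c for c in children}

    def get_branch(self, name):
        # Pick the child whose own name is a prefix of the requested name.
        for child_name, child in self.children.items():
            if name[:len(child_name)] == child_name:
                return child
        raise KeyError(name)

    def get_node(self, name):
        if name == self.name:
            return self          # base case: this node is the target
        return self.get_branch(name).get_node(name)

root = Node((1,), [Node((1, 3), [Node((1, 3, 6))])])
assert root.get_node((1, 3, 6)).name == (1, 3, 6)
```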
101 | aliyun/aliyun-log-python-sdk | aliyun/log/consumer/shard_worker.py | ShardConsumerWorker.check_and_generate_next_task | def check_and_generate_next_task(self):
"""
check if the previous task is done and proceed to fire another task
:return:
"""
# if self.task_future is None:
# # there's no any ongoing task
# self._update_status(False)
# self.generate_next_task()
# return
if self.task_future is None or self.task_future.done():
task_success = False
task_result = self.get_task_result(self.task_future)
self.task_future = None
if task_result is not None and task_result.get_exception() is None:
task_success = True
if isinstance(task_result, InitTaskResult):
# maintain check points
assert self.consumer_status == ConsumerStatus.INITIALIZING, \
ClientWorkerException("get init task result, but status is: " + str(self.consumer_status))
init_result = task_result
self.next_fetch_cursor = init_result.get_cursor()
self.fetch_end_cursor = init_result.end_cursor
self.checkpoint_tracker.set_memory_check_point(self.next_fetch_cursor)
if init_result.is_cursor_persistent():
self.checkpoint_tracker.set_persistent_check_point(self.next_fetch_cursor)
elif isinstance(task_result, ProcessTaskResult):
# maintain check points
process_task_result = task_result
roll_back_checkpoint = process_task_result.get_rollback_check_point()
if roll_back_checkpoint:
self.last_fetch_log_group = None
self.logger.info("user defined to roll-back check-point, cancel current fetching task")
self.cancel_current_fetch()
self.next_fetch_cursor = roll_back_checkpoint
# log task status
self._sample_log_error(task_result)
# update status basing on task results
self._update_status(task_success)
#
self._generate_next_task() | python | def check_and_generate_next_task(self):
"""
check if the previous task is done and proceed to fire another task
:return:
"""
# if self.task_future is None:
# # there's no any ongoing task
# self._update_status(False)
# self.generate_next_task()
# return
if self.task_future is None or self.task_future.done():
task_success = False
task_result = self.get_task_result(self.task_future)
self.task_future = None
if task_result is not None and task_result.get_exception() is None:
task_success = True
if isinstance(task_result, InitTaskResult):
# maintain check points
assert self.consumer_status == ConsumerStatus.INITIALIZING, \
ClientWorkerException("get init task result, but status is: " + str(self.consumer_status))
init_result = task_result
self.next_fetch_cursor = init_result.get_cursor()
self.fetch_end_cursor = init_result.end_cursor
self.checkpoint_tracker.set_memory_check_point(self.next_fetch_cursor)
if init_result.is_cursor_persistent():
self.checkpoint_tracker.set_persistent_check_point(self.next_fetch_cursor)
elif isinstance(task_result, ProcessTaskResult):
# maintain check points
process_task_result = task_result
roll_back_checkpoint = process_task_result.get_rollback_check_point()
if roll_back_checkpoint:
self.last_fetch_log_group = None
self.logger.info("user defined to roll-back check-point, cancel current fetching task")
self.cancel_current_fetch()
self.next_fetch_cursor = roll_back_checkpoint
# log task status
self._sample_log_error(task_result)
# update status basing on task results
self._update_status(task_success)
#
self._generate_next_task() | ['def', 'check_and_generate_next_task', '(', 'self', ')', ':', '# if self.task_future is None:', "# # there's no any ongoing task", '# self._update_status(False)', '# self.generate_next_task()', '# return', 'if', 'self', '.', 'task_future', 'is', 'None', 'or', 'self', '.', 'task_future', '.', 'done', '(', ')', ':', 'task_success', '=', 'False', 'task_result', '=', 'self', '.', 'get_task_result', '(', 'self', '.', 'task_future', ')', 'self', '.', 'task_future', '=', 'None', 'if', 'task_result', 'is', 'not', 'None', 'and', 'task_result', '.', 'get_exception', '(', ')', 'is', 'None', ':', 'task_success', '=', 'True', 'if', 'isinstance', '(', 'task_result', ',', 'InitTaskResult', ')', ':', '# maintain check points', 'assert', 'self', '.', 'consumer_status', '==', 'ConsumerStatus', '.', 'INITIALIZING', ',', 'ClientWorkerException', '(', '"get init task result, but status is: "', '+', 'str', '(', 'self', '.', 'consumer_status', ')', ')', 'init_result', '=', 'task_result', 'self', '.', 'next_fetch_cursor', '=', 'init_result', '.', 'get_cursor', '(', ')', 'self', '.', 'fetch_end_cursor', '=', 'init_result', '.', 'end_cursor', 'self', '.', 'checkpoint_tracker', '.', 'set_memory_check_point', '(', 'self', '.', 'next_fetch_cursor', ')', 'if', 'init_result', '.', 'is_cursor_persistent', '(', ')', ':', 'self', '.', 'checkpoint_tracker', '.', 'set_persistent_check_point', '(', 'self', '.', 'next_fetch_cursor', ')', 'elif', 'isinstance', '(', 'task_result', ',', 'ProcessTaskResult', ')', ':', '# maintain check points', 'process_task_result', '=', 'task_result', 'roll_back_checkpoint', '=', 'process_task_result', '.', 'get_rollback_check_point', '(', ')', 'if', 'roll_back_checkpoint', ':', 'self', '.', 'last_fetch_log_group', '=', 'None', 'self', '.', 'logger', '.', 'info', '(', '"user defined to roll-back check-point, cancel current fetching task"', ')', 'self', '.', 'cancel_current_fetch', '(', ')', 'self', '.', 'next_fetch_cursor', '=', 'roll_back_checkpoint', '# log task status', 'self', '.', '_sample_log_error', '(', 'task_result', ')', '# update status basing on task results', 'self', '.', '_update_status', '(', 'task_success', ')', '#', 'self', '.', '_generate_next_task', '(', ')'] | check if the previous task is done and proceed to fire another task
:return: | ['check', 'if', 'the', 'previous', 'task', 'is', 'done', 'and', 'proceed', 'to', 'fire', 'another', 'task', ':', 'return', ':'] | train | https://github.com/aliyun/aliyun-log-python-sdk/blob/ac383db0a16abf1e5ef7df36074374184b43516e/aliyun/log/consumer/shard_worker.py#L137-L186 |
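
The method above polls a future, harvests its result, and schedules the next task. A bare-bones sketch of that poll-and-reschedule loop using `concurrent.futures` in place of the SDK's task machinery; the `Worker` class is illustrative, not part of the SDK:

```python
from concurrent.futures import ThreadPoolExecutor

pool = ThreadPoolExecutor(max_workers=1)

class Worker:
    def __init__(self):
        self.task_future = None
        self.count = 0

    def check_and_generate_next_task(self):
        # Only act once the previous task (if any) has finished.
        if self.task_future is None or self.task_future.done():
            if self.task_future is not None:
                result = self.task_future.result()   # collect the outcome
                print('previous task returned', result)
            self.task_future = pool.submit(self._work)

    def _work(self):
        self.count += 1
        return self.count

w = Worker()
for _ in range(3):
    w.check_and_generate_next_task()
    w.task_future.result()     # wait so the next check sees it done
```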
102 | tBaxter/django-fretboard | fretboard/templatetags/fretboard_tags.py | get_active_topics_count | def get_active_topics_count(last_seen_timestamp=None):
"""
Returns count of new topics since last visit, or one day.
{% get_active_topics_count as active_topic_count %}
"""
if not last_seen_timestamp:
last_seen_timestamp = yesterday_timestamp()
return Topic.objects.filter(modified_int__gt=last_seen_timestamp).count() | python | def get_active_topics_count(last_seen_timestamp=None):
"""
Returns count of new topics since last visit, or one day.
{% get_active_topics_count as active_topic_count %}
"""
if not last_seen_timestamp:
last_seen_timestamp = yesterday_timestamp()
return Topic.objects.filter(modified_int__gt=last_seen_timestamp).count() | ['def', 'get_active_topics_count', '(', 'last_seen_timestamp', '=', 'None', ')', ':', 'if', 'not', 'last_seen_timestamp', ':', 'last_seen_timestamp', '=', 'yesterday_timestamp', '(', ')', 'return', 'Topic', '.', 'objects', '.', 'filter', '(', 'modified_int__gt', '=', 'last_seen_timestamp', ')', '.', 'count', '(', ')'] | Returns count of new topics since last visit, or one day.
{% get_active_topics_count as active_topic_count %} | ['Returns', 'count', 'of', 'new', 'topics', 'since', 'last', 'visit', 'or', 'one', 'day', '.', '{', '%', 'get_active_topics_count', 'as', 'active_topic_count', '%', '}'] | train | https://github.com/tBaxter/django-fretboard/blob/3c3f9557089821283f315a07f3e5a57a2725ab3b/fretboard/templatetags/fretboard_tags.py#L35-L42 |
103 | jciskey/pygraph | pygraph/functions/spanning_tree.py | find_minimum_spanning_tree | def find_minimum_spanning_tree(graph):
"""Calculates a minimum spanning tree for a graph.
Returns a list of edges that define the tree.
Returns an empty list for an empty graph.
"""
mst = []
if graph.num_nodes() == 0:
return mst
if graph.num_edges() == 0:
return mst
connected_components = get_connected_components(graph)
if len(connected_components) > 1:
raise DisconnectedGraphError
edge_list = kruskal_mst(graph)
return edge_list | python | def find_minimum_spanning_tree(graph):
"""Calculates a minimum spanning tree for a graph.
Returns a list of edges that define the tree.
Returns an empty list for an empty graph.
"""
mst = []
if graph.num_nodes() == 0:
return mst
if graph.num_edges() == 0:
return mst
connected_components = get_connected_components(graph)
if len(connected_components) > 1:
raise DisconnectedGraphError
edge_list = kruskal_mst(graph)
return edge_list | ['def', 'find_minimum_spanning_tree', '(', 'graph', ')', ':', 'mst', '=', '[', ']', 'if', 'graph', '.', 'num_nodes', '(', ')', '==', '0', ':', 'return', 'mst', 'if', 'graph', '.', 'num_edges', '(', ')', '==', '0', ':', 'return', 'mst', 'connected_components', '=', 'get_connected_components', '(', 'graph', ')', 'if', 'len', '(', 'connected_components', ')', '>', '1', ':', 'raise', 'DisconnectedGraphError', 'edge_list', '=', 'kruskal_mst', '(', 'graph', ')', 'return', 'edge_list'] | Calculates a minimum spanning tree for a graph.
Returns a list of edges that define the tree.
Returns an empty list for an empty graph. | ['Calculates', 'a', 'minimum', 'spanning', 'tree', 'for', 'a', 'graph', '.', 'Returns', 'a', 'list', 'of', 'edges', 'that', 'define', 'the', 'tree', '.', 'Returns', 'an', 'empty', 'list', 'for', 'an', 'empty', 'graph', '.'] | train | https://github.com/jciskey/pygraph/blob/037bb2f32503fecb60d62921f9766d54109f15e2/pygraph/functions/spanning_tree.py#L8-L26 |
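
`find_minimum_spanning_tree` delegates the actual work to `kruskal_mst`. For readers unfamiliar with that step, here is an illustrative Kruskal's algorithm over a plain edge list with union-find; it does not use pygraph's own API:

```python
# Illustrative Kruskal's MST; edges are (weight, u, v) tuples and the
# graph is assumed connected.
def kruskal(num_nodes, edges):
    parent = list(range(num_nodes))

    def find(x):                      # union-find with path compression
        while parent[x] != x:
            parent[x] = parent[parent[x]]
            x = parent[x]
        return x

    mst = []
    for w, u, v in sorted(edges):     # consider edges cheapest-first
        ru, rv = find(u), find(v)
        if ru != rv:                  # skip edges that would form a cycle
            parent[ru] = rv
            mst.append((u, v, w))
    return mst

print(kruskal(3, [(1, 0, 1), (2, 1, 2), (3, 0, 2)]))
# [(0, 1, 1), (1, 2, 2)]
```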
104 | totalgood/nlpia | src/nlpia/anki.py | get_anki_phrases | def get_anki_phrases(lang='english', limit=None):
""" Retrieve as many anki paired-statement corpora as you can for the requested language
If `ankis` (requested languages) is more than one, then get the english texts associated with those languages.
TODO: improve modularity: def function that takes a single language and call it recursively if necessary
>>> get_anki_phrases('afr')[:2]
["'n Groen piesang is nie ryp genoeg om te eet nie.",
"'n Hond het agter die kat aan gehardloop."]
"""
lang = lang.strip().lower()[:3]
lang = LANG2ANKI[lang[:2]] if lang not in ANKI_LANGUAGES else lang
if lang[:2] == 'en':
return get_anki_phrases_english(limit=limit)
return sorted(get_data(lang).iloc[:, -1].str.strip().values) | python | def get_anki_phrases(lang='english', limit=None):
""" Retrieve as many anki paired-statement corpora as you can for the requested language
If `ankis` (requested languages) is more than one, then get the english texts associated with those languages.
TODO: improve modularity: def function that takes a single language and call it recursively if necessary
>>> get_anki_phrases('afr')[:2]
["'n Groen piesang is nie ryp genoeg om te eet nie.",
"'n Hond het agter die kat aan gehardloop."]
"""
lang = lang.strip().lower()[:3]
lang = LANG2ANKI[lang[:2]] if lang not in ANKI_LANGUAGES else lang
if lang[:2] == 'en':
return get_anki_phrases_english(limit=limit)
return sorted(get_data(lang).iloc[:, -1].str.strip().values) | ['def', 'get_anki_phrases', '(', 'lang', '=', "'english'", ',', 'limit', '=', 'None', ')', ':', 'lang', '=', 'lang', '.', 'strip', '(', ')', '.', 'lower', '(', ')', '[', ':', '3', ']', 'lang', '=', 'LANG2ANKI', '[', 'lang', '[', ':', '2', ']', ']', 'if', 'lang', 'not', 'in', 'ANKI_LANGUAGES', 'else', 'lang', 'if', 'lang', '[', ':', '2', ']', '==', "'en'", ':', 'return', 'get_anki_phrases_english', '(', 'limit', '=', 'limit', ')', 'return', 'sorted', '(', 'get_data', '(', 'lang', ')', '.', 'iloc', '[', ':', ',', '-', '1', ']', '.', 'str', '.', 'strip', '(', ')', '.', 'values', ')'] | Retrieve as many anki paired-statement corpora as you can for the requested language
If `ankis` (requested languages) is more than one, then get the english texts associated with those languages.
TODO: improve modularity: def function that takes a single language and call it recursively if necessary
>>> get_anki_phrases('afr')[:2]
["'n Groen piesang is nie ryp genoeg om te eet nie.",
"'n Hond het agter die kat aan gehardloop."] | ['Retrieve', 'as', 'many', 'anki', 'paired', '-', 'statement', 'corpora', 'as', 'you', 'can', 'for', 'the', 'requested', 'language'] | train | https://github.com/totalgood/nlpia/blob/efa01126275e9cd3c3a5151a644f1c798a9ec53f/src/nlpia/anki.py#L16-L30 |
105 | tkf/python-epc | epc/server.py | EPCServer.print_port | def print_port(self, stream=sys.stdout):
"""
Print port this EPC server runs on.
As Emacs client reads port number from STDOUT, you need to
call this just before calling :meth:`serve_forever`.
:type stream: text stream
:arg stream: A stream object to write port on.
Default is :data:`sys.stdout`.
"""
stream.write(str(self.server_address[1]))
stream.write("\n")
stream.flush() | python | def print_port(self, stream=sys.stdout):
"""
Print port this EPC server runs on.
As Emacs client reads port number from STDOUT, you need to
call this just before calling :meth:`serve_forever`.
:type stream: text stream
:arg stream: A stream object to write port on.
Default is :data:`sys.stdout`.
"""
stream.write(str(self.server_address[1]))
stream.write("\n")
stream.flush() | ['def', 'print_port', '(', 'self', ',', 'stream', '=', 'sys', '.', 'stdout', ')', ':', 'stream', '.', 'write', '(', 'str', '(', 'self', '.', 'server_address', '[', '1', ']', ')', ')', 'stream', '.', 'write', '(', '"\\n"', ')', 'stream', '.', 'flush', '(', ')'] | Print port this EPC server runs on.
As Emacs client reads port number from STDOUT, you need to
call this just before calling :meth:`serve_forever`.
:type stream: text stream
:arg stream: A stream object to write port on.
Default is :data:`sys.stdout`. | ['Print', 'port', 'this', 'EPC', 'server', 'runs', 'on', '.'] | train | https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/server.py#L138-L152 |
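
A sketch of the usage pattern this docstring describes, following the python-epc README: bind to port 0 so the OS picks a free port, write it to stdout with `print_port()`, then block in `serve_forever()`:

```python
from epc.server import EPCServer

server = EPCServer(('localhost', 0))   # port 0: let the OS choose

@server.register_function
def echo(*args):
    return args

server.print_port()                    # Emacs reads the port from stdout
server.serve_forever()
```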
106 | ARMmbed/mbed-cloud-sdk-python | src/mbed_cloud/_backends/device_directory/models/device_data.py | DeviceData.mechanism | def mechanism(self, mechanism):
"""
Sets the mechanism of this DeviceData.
The ID of the channel used to communicate with the device.
:param mechanism: The mechanism of this DeviceData.
:type: str
"""
allowed_values = ["connector", "direct"]
if mechanism not in allowed_values:
raise ValueError(
"Invalid value for `mechanism` ({0}), must be one of {1}"
.format(mechanism, allowed_values)
)
self._mechanism = mechanism | python | def mechanism(self, mechanism):
"""
Sets the mechanism of this DeviceData.
The ID of the channel used to communicate with the device.
:param mechanism: The mechanism of this DeviceData.
:type: str
"""
allowed_values = ["connector", "direct"]
if mechanism not in allowed_values:
raise ValueError(
"Invalid value for `mechanism` ({0}), must be one of {1}"
.format(mechanism, allowed_values)
)
self._mechanism = mechanism | ['def', 'mechanism', '(', 'self', ',', 'mechanism', ')', ':', 'allowed_values', '=', '[', '"connector"', ',', '"direct"', ']', 'if', 'mechanism', 'not', 'in', 'allowed_values', ':', 'raise', 'ValueError', '(', '"Invalid value for `mechanism` ({0}), must be one of {1}"', '.', 'format', '(', 'mechanism', ',', 'allowed_values', ')', ')', 'self', '.', '_mechanism', '=', 'mechanism'] | Sets the mechanism of this DeviceData.
The ID of the channel used to communicate with the device.
:param mechanism: The mechanism of this DeviceData.
:type: str | ['Sets', 'the', 'mechanism', 'of', 'this', 'DeviceData', '.', 'The', 'ID', 'of', 'the', 'channel', 'used', 'to', 'communicate', 'with', 'the', 'device', '.'] | train | https://github.com/ARMmbed/mbed-cloud-sdk-python/blob/c0af86fb2cdd4dc7ed26f236139241067d293509/src/mbed_cloud/_backends/device_directory/models/device_data.py#L722-L737 |
107 | CodyKochmann/strict_functions | strict_functions/trace2.py | default_profiler | def default_profiler(f, _type, _value):
''' inspects an input frame and pretty prints the following:
<src-path>:<src-line> -> <function-name>
<source-code>
<local-variables>
----------------------------------------
'''
try:
profile_print(
'\n'.join([
get_frame_src(f),
get_locals(f),
'----------------------------------------'
])
)
except:
pass | python | def default_profiler(f, _type, _value):
''' inspects an input frame and pretty prints the following:
<src-path>:<src-line> -> <function-name>
<source-code>
<local-variables>
----------------------------------------
'''
try:
profile_print(
'\n'.join([
get_frame_src(f),
get_locals(f),
'----------------------------------------'
])
)
except:
pass | ['def', 'default_profiler', '(', 'f', ',', '_type', ',', '_value', ')', ':', 'try', ':', 'profile_print', '(', "'\\n'", '.', 'join', '(', '[', 'get_frame_src', '(', 'f', ')', ',', 'get_locals', '(', 'f', ')', ',', "'----------------------------------------'", ']', ')', ')', 'except', ':', 'pass'] | inspects an input frame and pretty prints the following:
<src-path>:<src-line> -> <function-name>
<source-code>
<local-variables>
---------------------------------------- | ['inspects', 'an', 'input', 'frame', 'and', 'pretty', 'prints', 'the', 'following', ':'] | train | https://github.com/CodyKochmann/strict_functions/blob/adaf78084c66929552d80c95f980e7e0c4331478/strict_functions/trace2.py#L63-L80 |
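
The `(f, _type, _value)` parameters mirror the `(frame, event, arg)` shape of Python's profile-hook callbacks. A minimal self-contained hook wired through `sys.setprofile`, for illustration only; this is not how strict_functions installs its profiler:

```python
import sys

def tiny_profiler(frame, event, arg):
    # Runs on every call/return event once installed via sys.setprofile.
    if event == 'call':
        code = frame.f_code
        print('%s:%d -> %s' % (code.co_filename, frame.f_lineno, code.co_name))

def work():
    return sum(range(3))

sys.setprofile(tiny_profiler)
work()
sys.setprofile(None)   # always unhook, or every later call gets printed
```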
108 | chorsley/python-Wappalyzer | Wappalyzer/Wappalyzer.py | Wappalyzer._has_app | def _has_app(self, app, webpage):
"""
Determine whether the web page matches the app signature.
"""
# Search the easiest things first and save the full-text search of the
# HTML for last
for regex in app['url']:
if regex.search(webpage.url):
return True
for name, regex in app['headers'].items():
if name in webpage.headers:
content = webpage.headers[name]
if regex.search(content):
return True
for regex in app['script']:
for script in webpage.scripts:
if regex.search(script):
return True
for name, regex in app['meta'].items():
if name in webpage.meta:
content = webpage.meta[name]
if regex.search(content):
return True
for regex in app['html']:
if regex.search(webpage.html):
return True | python | def _has_app(self, app, webpage):
"""
Determine whether the web page matches the app signature.
"""
# Search the easiest things first and save the full-text search of the
# HTML for last
for regex in app['url']:
if regex.search(webpage.url):
return True
for name, regex in app['headers'].items():
if name in webpage.headers:
content = webpage.headers[name]
if regex.search(content):
return True
for regex in app['script']:
for script in webpage.scripts:
if regex.search(script):
return True
for name, regex in app['meta'].items():
if name in webpage.meta:
content = webpage.meta[name]
if regex.search(content):
return True
for regex in app['html']:
if regex.search(webpage.html):
return True | ['def', '_has_app', '(', 'self', ',', 'app', ',', 'webpage', ')', ':', '# Search the easiest things first and save the full-text search of the', '# HTML for last', 'for', 'regex', 'in', 'app', '[', "'url'", ']', ':', 'if', 'regex', '.', 'search', '(', 'webpage', '.', 'url', ')', ':', 'return', 'True', 'for', 'name', ',', 'regex', 'in', 'app', '[', "'headers'", ']', '.', 'items', '(', ')', ':', 'if', 'name', 'in', 'webpage', '.', 'headers', ':', 'content', '=', 'webpage', '.', 'headers', '[', 'name', ']', 'if', 'regex', '.', 'search', '(', 'content', ')', ':', 'return', 'True', 'for', 'regex', 'in', 'app', '[', "'script'", ']', ':', 'for', 'script', 'in', 'webpage', '.', 'scripts', ':', 'if', 'regex', '.', 'search', '(', 'script', ')', ':', 'return', 'True', 'for', 'name', ',', 'regex', 'in', 'app', '[', "'meta'", ']', '.', 'items', '(', ')', ':', 'if', 'name', 'in', 'webpage', '.', 'meta', ':', 'content', '=', 'webpage', '.', 'meta', '[', 'name', ']', 'if', 'regex', '.', 'search', '(', 'content', ')', ':', 'return', 'True', 'for', 'regex', 'in', 'app', '[', "'html'", ']', ':', 'if', 'regex', '.', 'search', '(', 'webpage', '.', 'html', ')', ':', 'return', 'True'] | Determine whether the web page matches the app signature. | ['Determine', 'whether', 'the', 'web', 'page', 'matches', 'the', 'app', 'signature', '.'] | train | https://github.com/chorsley/python-Wappalyzer/blob/b785e29f12c8032c54279cfa9ce01ead702a386c/Wappalyzer/Wappalyzer.py#L189-L219 |
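
A stripped-down version of the matching order above: cheap URL and header checks first, the full-text HTML search last. The `app` fingerprint here is made up for illustration:

```python
import re

app = {
    'url': [re.compile(r'/wp-admin/')],
    'headers': {'X-Powered-By': re.compile(r'PHP')},
    'html': [re.compile(r'wp-content')],
}

def has_app(app, url, headers, html):
    if any(rx.search(url) for rx in app['url']):
        return True
    if any(name in headers and rx.search(headers[name])
           for name, rx in app['headers'].items()):
        return True
    return any(rx.search(html) for rx in app['html'])

print(has_app(app, 'https://example.com/', {'X-Powered-By': 'PHP/7.4'},
              '<html></html>'))   # True, via the header check
```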
109 | cqparts/cqparts | src/cqparts_motors/dc.py | _Cup.get_cutout | def get_cutout(self, clearance=0):
" get the cutout for the shaft"
return cq.Workplane('XY', origin=(0, 0, 0)) \
.circle((self.diam / 2) + clearance) \
.extrude(10) | python | def get_cutout(self, clearance=0):
" get the cutout for the shaft"
return cq.Workplane('XY', origin=(0, 0, 0)) \
.circle((self.diam / 2) + clearance) \
.extrude(10) | ['def', 'get_cutout', '(', 'self', ',', 'clearance', '=', '0', ')', ':', 'return', 'cq', '.', 'Workplane', '(', "'XY'", ',', 'origin', '=', '(', '0', ',', '0', ',', '0', ')', ')', '.', 'circle', '(', '(', 'self', '.', 'diam', '/', '2', ')', '+', 'clearance', ')', '.', 'extrude', '(', '10', ')'] | get the cutout for the shaft | ['get', 'the', 'cutout', 'for', 'the', 'shaft'] | train | https://github.com/cqparts/cqparts/blob/018e87e14c2c4d1d40b4bfe6a7e22bcf9baf0a53/src/cqparts_motors/dc.py#L68-L72 |
110 | ncraike/fang | fang/dependency_register.py | DependencyRegister._unwrap_func | def _unwrap_func(cls, decorated_func):
'''
This unwraps a decorated func, returning the inner wrapped func.
This may become unnecessary with Python 3.4's inspect.unwrap().
'''
if click is not None:
# Workaround for click.command() decorator not setting
# __wrapped__
if isinstance(decorated_func, click.Command):
return cls._unwrap_func(decorated_func.callback)
if hasattr(decorated_func, '__wrapped__'):
# Recursion: unwrap more if needed
return cls._unwrap_func(decorated_func.__wrapped__)
else:
# decorated_func isn't actually decorated, no more
# unwrapping to do
return decorated_func | python | def _unwrap_func(cls, decorated_func):
'''
This unwraps a decorated func, returning the inner wrapped func.
This may become unnecessary with Python 3.4's inspect.unwrap().
'''
if click is not None:
# Workaround for click.command() decorator not setting
# __wrapped__
if isinstance(decorated_func, click.Command):
return cls._unwrap_func(decorated_func.callback)
if hasattr(decorated_func, '__wrapped__'):
# Recursion: unwrap more if needed
return cls._unwrap_func(decorated_func.__wrapped__)
else:
# decorated_func isn't actually decorated, no more
# unwrapping to do
return decorated_func | ['def', '_unwrap_func', '(', 'cls', ',', 'decorated_func', ')', ':', 'if', 'click', 'is', 'not', 'None', ':', '# Workaround for click.command() decorator not setting', '# __wrapped__', 'if', 'isinstance', '(', 'decorated_func', ',', 'click', '.', 'Command', ')', ':', 'return', 'cls', '.', '_unwrap_func', '(', 'decorated_func', '.', 'callback', ')', 'if', 'hasattr', '(', 'decorated_func', ',', "'__wrapped__'", ')', ':', '# Recursion: unwrap more if needed', 'return', 'cls', '.', '_unwrap_func', '(', 'decorated_func', '.', '__wrapped__', ')', 'else', ':', "# decorated_func isn't actually decorated, no more", '# unwrapping to do', 'return', 'decorated_func'] | This unwraps a decorated func, returning the inner wrapped func.
This may become unnecessary with Python 3.4's inspect.unwrap(). | ['This', 'unwraps', 'a', 'decorated', 'func', 'returning', 'the', 'inner', 'wrapped', 'func', '.'] | train | https://github.com/ncraike/fang/blob/2d9e1216c866e450059017f83ab775f7716eda7a/fang/dependency_register.py#L21-L39 |
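
The unwrapping works because `functools.wraps` stores the inner function on the wrapper as `__wrapped__`; since Python 3.4, `inspect.unwrap` follows the same chain. A quick demonstration:

```python
import functools
import inspect

def logged(func):
    @functools.wraps(func)          # sets wrapper.__wrapped__ = func
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@logged
@logged
def add(a, b):
    return a + b

inner = inspect.unwrap(add)         # follows __wrapped__ to the bottom
assert inner(1, 2) == 3 and not hasattr(inner, '__wrapped__')
```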
111 | SpriteLink/NIPAP | nipap/nipap/authlib.py | SqliteAuth.add_user | def add_user(self, username, password, full_name=None, trusted=False, readonly=False):
""" Add user to SQLite database.
* `username` [string]
Username of new user.
* `password` [string]
Password of new user.
* `full_name` [string]
Full name of new user.
* `trusted` [boolean]
Whether the new user should be trusted or not.
* `readonly` [boolean]
Whether the new user can only read or not
"""
# generate salt
char_set = string.ascii_letters + string.digits
salt = ''.join(random.choice(char_set) for x in range(8))
sql = '''INSERT INTO user
(username, pwd_salt, pwd_hash, full_name, trusted, readonly)
VALUES
(?, ?, ?, ?, ?, ?)'''
try:
self._db_curs.execute(sql, (username, salt,
self._gen_hash(password, salt), full_name, trusted or False,
readonly or False))
self._db_conn.commit()
except (sqlite3.OperationalError, sqlite3.IntegrityError) as error:
raise AuthError(error) | python | def add_user(self, username, password, full_name=None, trusted=False, readonly=False):
""" Add user to SQLite database.
* `username` [string]
Username of new user.
* `password` [string]
Password of new user.
* `full_name` [string]
Full name of new user.
* `trusted` [boolean]
Whether the new user should be trusted or not.
* `readonly` [boolean]
Whether the new user can only read or not
"""
# generate salt
char_set = string.ascii_letters + string.digits
salt = ''.join(random.choice(char_set) for x in range(8))
sql = '''INSERT INTO user
(username, pwd_salt, pwd_hash, full_name, trusted, readonly)
VALUES
(?, ?, ?, ?, ?, ?)'''
try:
self._db_curs.execute(sql, (username, salt,
self._gen_hash(password, salt), full_name, trusted or False,
readonly or False))
self._db_conn.commit()
except (sqlite3.OperationalError, sqlite3.IntegrityError) as error:
raise AuthError(error) | ['def', 'add_user', '(', 'self', ',', 'username', ',', 'password', ',', 'full_name', '=', 'None', ',', 'trusted', '=', 'False', ',', 'readonly', '=', 'False', ')', ':', '# generate salt', 'char_set', '=', 'string', '.', 'ascii_letters', '+', 'string', '.', 'digits', 'salt', '=', "''", '.', 'join', '(', 'random', '.', 'choice', '(', 'char_set', ')', 'for', 'x', 'in', 'range', '(', '8', ')', ')', 'sql', '=', "'''INSERT INTO user\n (username, pwd_salt, pwd_hash, full_name, trusted, readonly)\n VALUES\n (?, ?, ?, ?, ?, ?)'''", 'try', ':', 'self', '.', '_db_curs', '.', 'execute', '(', 'sql', ',', '(', 'username', ',', 'salt', ',', 'self', '.', '_gen_hash', '(', 'password', ',', 'salt', ')', ',', 'full_name', ',', 'trusted', 'or', 'False', ',', 'readonly', 'or', 'False', ')', ')', 'self', '.', '_db_conn', '.', 'commit', '(', ')', 'except', '(', 'sqlite3', '.', 'OperationalError', ',', 'sqlite3', '.', 'IntegrityError', ')', 'as', 'error', ':', 'raise', 'AuthError', '(', 'error', ')'] | Add user to SQLite database.
* `username` [string]
Username of new user.
* `password` [string]
Password of new user.
* `full_name` [string]
Full name of new user.
* `trusted` [boolean]
Whether the new user should be trusted or not.
* `readonly` [boolean]
Whether the new user can only read or not | ['Add', 'user', 'to', 'SQLite', 'database', '.'] | train | https://github.com/SpriteLink/NIPAP/blob/f96069f11ab952d80b13cab06e0528f2d24b3de9/nipap/nipap/authlib.py#L616-L646 |
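
The generic salt-and-hash pattern this method relies on, with the matching verify step. The hash scheme below (sha1 over salt plus password) is an assumption for illustration; NIPAP's actual `_gen_hash` may differ:

```python
import hashlib
import random
import string

def gen_hash(password, salt):
    # Assumed scheme for illustration only; not necessarily NIPAP's.
    return hashlib.sha1((salt + password).encode()).hexdigest()

def verify(password, salt, stored_hash):
    return gen_hash(password, salt) == stored_hash

salt = ''.join(random.choice(string.ascii_letters + string.digits)
               for _ in range(8))
stored = gen_hash('s3cret', salt)
assert verify('s3cret', salt, stored)
assert not verify('wrong', salt, stored)
```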
112 | williballenthin/python-evtx | Evtx/Views.py | evtx_chunk_xml_view | def evtx_chunk_xml_view(chunk):
"""
Generate XML representations of the records in an EVTX chunk.
Does not include the XML <?xml... header.
Records are ordered by chunk.records()
Args:
chunk (Evtx.Chunk): the chunk to render.
Yields:
tuple[str, Evtx.Record]: the rendered XML document and the raw record.
"""
for record in chunk.records():
record_str = evtx_record_xml_view(record)
yield record_str, record | python | def evtx_chunk_xml_view(chunk):
"""
Generate XML representations of the records in an EVTX chunk.
Does not include the XML <?xml... header.
Records are ordered by chunk.records()
Args:
chunk (Evtx.Chunk): the chunk to render.
Yields:
tuple[str, Evtx.Record]: the rendered XML document and the raw record.
"""
for record in chunk.records():
record_str = evtx_record_xml_view(record)
yield record_str, record | ['def', 'evtx_chunk_xml_view', '(', 'chunk', ')', ':', 'for', 'record', 'in', 'chunk', '.', 'records', '(', ')', ':', 'record_str', '=', 'evtx_record_xml_view', '(', 'record', ')', 'yield', 'record_str', ',', 'record'] | Generate XML representations of the records in an EVTX chunk.
Does not include the XML <?xml... header.
Records are ordered by chunk.records()
Args:
chunk (Evtx.Chunk): the chunk to render.
Yields:
tuple[str, Evtx.Record]: the rendered XML document and the raw record. | ['Generate', 'XML', 'representations', 'of', 'the', 'records', 'in', 'an', 'EVTX', 'chunk', '.'] | train | https://github.com/williballenthin/python-evtx/blob/4e9e29544adde64c79ff9b743269ecb18c677eb4/Evtx/Views.py#L207-L222 |
113 | erik/alexandra | alexandra/session.py | Session.get | def get(self, attr, default=None):
"""Get an attribute defined by this session"""
attrs = self.body.get('attributes') or {}
return attrs.get(attr, default) | python | def get(self, attr, default=None):
"""Get an attribute defined by this session"""
attrs = self.body.get('attributes') or {}
return attrs.get(attr, default) | ['def', 'get', '(', 'self', ',', 'attr', ',', 'default', '=', 'None', ')', ':', 'attrs', '=', 'self', '.', 'body', '.', 'get', '(', "'attributes'", ')', 'or', '{', '}', 'return', 'attrs', '.', 'get', '(', 'attr', ',', 'default', ')'] | Get an attribute defined by this session | ['Get', 'an', 'attribute', 'defined', 'by', 'this', 'session'] | train | https://github.com/erik/alexandra/blob/8bea94efa1af465254a553dc4dfea3fa552b18da/alexandra/session.py#L38-L42 |
114 | ray-project/ray | python/ray/rllib/agents/ars/policies.py | rollout | def rollout(policy, env, timestep_limit=None, add_noise=False, offset=0):
"""Do a rollout.
If add_noise is True, the rollout will take noisy actions with
noise drawn from that stream. Otherwise, no action noise will be added.
Parameters
----------
policy: tf object
policy from which to draw actions
env: GymEnv
environment from which to draw rewards, done, and next state
timestep_limit: int, optional
steps after which to end the rollout
add_noise: bool, optional
indicates whether exploratory action noise should be added
offset: int, optional
value to subtract from the reward. For example, survival bonus
from humanoid
"""
env_timestep_limit = env.spec.max_episode_steps
timestep_limit = (env_timestep_limit if timestep_limit is None else min(
timestep_limit, env_timestep_limit))
rews = []
t = 0
observation = env.reset()
for _ in range(timestep_limit or 999999):
ac = policy.compute(observation, add_noise=add_noise, update=True)[0]
observation, rew, done, _ = env.step(ac)
rew -= np.abs(offset)
rews.append(rew)
t += 1
if done:
break
rews = np.array(rews, dtype=np.float32)
return rews, t | python | def rollout(policy, env, timestep_limit=None, add_noise=False, offset=0):
"""Do a rollout.
If add_noise is True, the rollout will take noisy actions with
noise drawn from that stream. Otherwise, no action noise will be added.
Parameters
----------
policy: tf object
policy from which to draw actions
env: GymEnv
environment from which to draw rewards, done, and next state
timestep_limit: int, optional
steps after which to end the rollout
add_noise: bool, optional
indicates whether exploratory action noise should be added
offset: int, optional
value to subtract from the reward. For example, survival bonus
from humanoid
"""
env_timestep_limit = env.spec.max_episode_steps
timestep_limit = (env_timestep_limit if timestep_limit is None else min(
timestep_limit, env_timestep_limit))
rews = []
t = 0
observation = env.reset()
for _ in range(timestep_limit or 999999):
ac = policy.compute(observation, add_noise=add_noise, update=True)[0]
observation, rew, done, _ = env.step(ac)
rew -= np.abs(offset)
rews.append(rew)
t += 1
if done:
break
rews = np.array(rews, dtype=np.float32)
return rews, t | ['def', 'rollout', '(', 'policy', ',', 'env', ',', 'timestep_limit', '=', 'None', ',', 'add_noise', '=', 'False', ',', 'offset', '=', '0', ')', ':', 'env_timestep_limit', '=', 'env', '.', 'spec', '.', 'max_episode_steps', 'timestep_limit', '=', '(', 'env_timestep_limit', 'if', 'timestep_limit', 'is', 'None', 'else', 'min', '(', 'timestep_limit', ',', 'env_timestep_limit', ')', ')', 'rews', '=', '[', ']', 't', '=', '0', 'observation', '=', 'env', '.', 'reset', '(', ')', 'for', '_', 'in', 'range', '(', 'timestep_limit', 'or', '999999', ')', ':', 'ac', '=', 'policy', '.', 'compute', '(', 'observation', ',', 'add_noise', '=', 'add_noise', ',', 'update', '=', 'True', ')', '[', '0', ']', 'observation', ',', 'rew', ',', 'done', ',', '_', '=', 'env', '.', 'step', '(', 'ac', ')', 'rew', '-=', 'np', '.', 'abs', '(', 'offset', ')', 'rews', '.', 'append', '(', 'rew', ')', 't', '+=', '1', 'if', 'done', ':', 'break', 'rews', '=', 'np', '.', 'array', '(', 'rews', ',', 'dtype', '=', 'np', '.', 'float32', ')', 'return', 'rews', ',', 't'] | Do a rollout.
If add_noise is True, the rollout will take noisy actions with
noise drawn from that stream. Otherwise, no action noise will be added.
Parameters
----------
policy: tf object
policy from which to draw actions
env: GymEnv
environment from which to draw rewards, done, and next state
timestep_limit: int, optional
steps after which to end the rollout
add_noise: bool, optional
indicates whether exploratory action noise should be added
offset: int, optional
value to subtract from the reward. For example, survival bonus
from humanoid | ['Do', 'a', 'rollout', '.'] | train | https://github.com/ray-project/ray/blob/4eade036a0505e244c976f36aaa2d64386b5129b/python/ray/rllib/agents/ars/policies.py#L19-L54 |
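
A self-contained sketch of the loop `rollout()` runs; the real function expects an RLlib policy and a Gym env with `env.spec`, both replaced by stubs here:

```python
import numpy as np

class StubPolicy:
    def compute(self, obs, add_noise=False, update=True):
        return [np.zeros(1)]               # always the same action

class StubEnv:
    def reset(self):
        self.t = 0
        return np.zeros(4)
    def step(self, action):
        self.t += 1
        return np.zeros(4), 1.0, self.t >= 5, {}

env, policy = StubEnv(), StubPolicy()
rews, t = [], 0
obs = env.reset()
for _ in range(10):                        # same shape as rollout()'s loop
    action = policy.compute(obs, add_noise=False, update=True)[0]
    obs, rew, done, _ = env.step(action)
    rews.append(rew)
    t += 1
    if done:
        break
print(np.array(rews, dtype=np.float32).sum(), t)   # 5.0 5
```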
115 | christophertbrown/bioscripts | ctbBio/subset_sam.py | sort_sam | def sort_sam(sam, sort):
"""
sort sam file
"""
tempdir = '%s/' % (os.path.abspath(sam).rsplit('/', 1)[0])
if sort is True:
mapping = '%s.sorted.sam' % (sam.rsplit('.', 1)[0])
if sam != '-':
if os.path.exists(mapping) is False:
os.system("\
sort -k1 --buffer-size=%sG -T %s -o %s %s\
" % (sbuffer, tempdir, mapping, sam))
else:
mapping = 'stdin-sam.sorted.sam'
p = Popen("sort -k1 --buffer-size=%sG -T %s -o %s" \
% (sbuffer, tempdir, mapping), stdin = sys.stdin, shell = True)
p.communicate()
mapping = open(mapping)
else:
if sam == '-':
mapping = sys.stdin
else:
mapping = open(sam)
return mapping | python | def sort_sam(sam, sort):
"""
sort sam file
"""
tempdir = '%s/' % (os.path.abspath(sam).rsplit('/', 1)[0])
if sort is True:
mapping = '%s.sorted.sam' % (sam.rsplit('.', 1)[0])
if sam != '-':
if os.path.exists(mapping) is False:
os.system("\
sort -k1 --buffer-size=%sG -T %s -o %s %s\
" % (sbuffer, tempdir, mapping, sam))
else:
mapping = 'stdin-sam.sorted.sam'
p = Popen("sort -k1 --buffer-size=%sG -T %s -o %s" \
% (sbuffer, tempdir, mapping), stdin = sys.stdin, shell = True)
p.communicate()
mapping = open(mapping)
else:
if sam == '-':
mapping = sys.stdin
else:
mapping = open(sam)
return mapping | ['def', 'sort_sam', '(', 'sam', ',', 'sort', ')', ':', 'tempdir', '=', "'%s/'", '%', '(', 'os', '.', 'path', '.', 'abspath', '(', 'sam', ')', '.', 'rsplit', '(', "'/'", ',', '1', ')', '[', '0', ']', ')', 'if', 'sort', 'is', 'True', ':', 'mapping', '=', "'%s.sorted.sam'", '%', '(', 'sam', '.', 'rsplit', '(', "'.'", ',', '1', ')', '[', '0', ']', ')', 'if', 'sam', '!=', "'-'", ':', 'if', 'os', '.', 'path', '.', 'exists', '(', 'mapping', ')', 'is', 'False', ':', 'os', '.', 'system', '(', '"\\\n sort -k1 --buffer-size=%sG -T %s -o %s %s\\\n "', '%', '(', 'sbuffer', ',', 'tempdir', ',', 'mapping', ',', 'sam', ')', ')', 'else', ':', 'mapping', '=', "'stdin-sam.sorted.sam'", 'p', '=', 'Popen', '(', '"sort -k1 --buffer-size=%sG -T %s -o %s"', '%', '(', 'sbuffer', ',', 'tempdir', ',', 'mapping', ')', ',', 'stdin', '=', 'sys', '.', 'stdin', ',', 'shell', '=', 'True', ')', 'p', '.', 'communicate', '(', ')', 'mapping', '=', 'open', '(', 'mapping', ')', 'else', ':', 'if', 'sam', '==', "'-'", ':', 'mapping', '=', 'sys', '.', 'stdin', 'else', ':', 'mapping', '=', 'open', '(', 'sam', ')', 'return', 'mapping'] | sort sam file | ['sort', 'sam', 'file'] | train | https://github.com/christophertbrown/bioscripts/blob/83b2566b3a5745437ec651cd6cafddd056846240/ctbBio/subset_sam.py#L14-L37 |
116 | jtambasco/modesolverpy | modesolverpy/structure_base.py | _AbstractStructure.y | def y(self):
'''
np.array: The grid points in y.
'''
if None not in (self.y_min, self.y_max, self.y_step) and \
self.y_min != self.y_max:
y = np.arange(self.y_min, self.y_max-self.y_step*0.1, self.y_step)
else:
y = np.array([])
return y | python | def y(self):
'''
np.array: The grid points in y.
'''
if None not in (self.y_min, self.y_max, self.y_step) and \
self.y_min != self.y_max:
y = np.arange(self.y_min, self.y_max-self.y_step*0.1, self.y_step)
else:
y = np.array([])
return y | ['def', 'y', '(', 'self', ')', ':', 'if', 'None', 'not', 'in', '(', 'self', '.', 'y_min', ',', 'self', '.', 'y_max', ',', 'self', '.', 'y_step', ')', 'and', 'self', '.', 'y_min', '!=', 'self', '.', 'y_max', ':', 'y', '=', 'np', '.', 'arange', '(', 'self', '.', 'y_min', ',', 'self', '.', 'y_max', '-', 'self', '.', 'y_step', '*', '0.1', ',', 'self', '.', 'y_step', ')', 'else', ':', 'y', '=', 'np', '.', 'array', '(', '[', ']', ')', 'return', 'y'] | np.array: The grid points in y. | ['np', '.', 'array', ':', 'The', 'grid', 'points', 'in', 'y', '.'] | train | https://github.com/jtambasco/modesolverpy/blob/85254a13b5aed2404187c52ac93b9b3ce99ee3a3/modesolverpy/structure_base.py#L142-L151 |
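
The `- self.y_step*0.1` nudge guards against `np.arange`'s float endpoint leaking in through rounding error. A concrete case where it matters:

```python
import numpy as np

naive = np.arange(1.0, 1.3, 0.1)              # rounding leaks the endpoint
safe  = np.arange(1.0, 1.3 - 0.1 * 0.1, 0.1)  # the nudge excludes it
print(len(naive), len(safe))                  # 4 3
```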
117 | saltstack/salt | salt/modules/postgres.py | language_list | def language_list(
maintenance_db,
user=None,
host=None,
port=None,
password=None,
runas=None):
'''
.. versionadded:: 2016.3.0
Return a list of languages in a database.
CLI Example:
.. code-block:: bash
salt '*' postgres.language_list dbname
maintenance_db
The database to check
user
database username if different from config or default
password
user password if any password for a specified user
host
Database host if different from config or default
port
Database port if different from config or default
runas
System user all operations should be performed on behalf of
'''
ret = {}
query = 'SELECT lanname AS "Name" FROM pg_language'
rows = psql_query(
query,
runas=runas,
host=host,
user=user,
port=port,
maintenance_db=maintenance_db,
password=password)
for row in rows:
ret[row['Name']] = row['Name']
return ret | python | def language_list(
maintenance_db,
user=None,
host=None,
port=None,
password=None,
runas=None):
'''
.. versionadded:: 2016.3.0
Return a list of languages in a database.
CLI Example:
.. code-block:: bash
salt '*' postgres.language_list dbname
maintenance_db
The database to check
user
database username if different from config or default
password
user password if any password for a specified user
host
Database host if different from config or default
port
Database port if different from config or default
runas
System user all operations should be performed on behalf of
'''
ret = {}
query = 'SELECT lanname AS "Name" FROM pg_language'
rows = psql_query(
query,
runas=runas,
host=host,
user=user,
port=port,
maintenance_db=maintenance_db,
password=password)
for row in rows:
ret[row['Name']] = row['Name']
return ret | ['def', 'language_list', '(', 'maintenance_db', ',', 'user', '=', 'None', ',', 'host', '=', 'None', ',', 'port', '=', 'None', ',', 'password', '=', 'None', ',', 'runas', '=', 'None', ')', ':', 'ret', '=', '{', '}', 'query', '=', '\'SELECT lanname AS "Name" FROM pg_language\'', 'rows', '=', 'psql_query', '(', 'query', ',', 'runas', '=', 'runas', ',', 'host', '=', 'host', ',', 'user', '=', 'user', ',', 'port', '=', 'port', ',', 'maintenance_db', '=', 'maintenance_db', ',', 'password', '=', 'password', ')', 'for', 'row', 'in', 'rows', ':', 'ret', '[', 'row', '[', "'Name'", ']', ']', '=', 'row', '[', "'Name'", ']', 'return', 'ret'] | .. versionadded:: 2016.3.0
Return a list of languages in a database.
CLI Example:
.. code-block:: bash
salt '*' postgres.language_list dbname
maintenance_db
The database to check
user
database username if different from config or default
password
user password if any password for a specified user
host
Database host if different from config or default
port
Database port if different from config or default
runas
System user all operations should be performed on behalf of | ['..', 'versionadded', '::', '2016', '.', '3', '.', '0'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/postgres.py#L2201-L2253 |
118 | elastic/elasticsearch-py | elasticsearch/client/tasks.py | TasksClient.get | def get(self, task_id=None, params=None):
"""
Retrieve information for a particular task.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html>`_
:arg task_id: Return the task with specified id (node_id:task_number)
:arg wait_for_completion: Wait for the matching tasks to complete
(default: false)
:arg timeout: Maximum waiting time for `wait_for_completion`
"""
return self.transport.perform_request('GET', _make_path('_tasks',
task_id), params=params) | python | def get(self, task_id=None, params=None):
"""
Retrieve information for a particular task.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html>`_
:arg task_id: Return the task with specified id (node_id:task_number)
:arg wait_for_completion: Wait for the matching tasks to complete
(default: false)
:arg timeout: Maximum waiting time for `wait_for_completion`
"""
return self.transport.perform_request('GET', _make_path('_tasks',
task_id), params=params) | ['def', 'get', '(', 'self', ',', 'task_id', '=', 'None', ',', 'params', '=', 'None', ')', ':', 'return', 'self', '.', 'transport', '.', 'perform_request', '(', "'GET'", ',', '_make_path', '(', "'_tasks'", ',', 'task_id', ')', ',', 'params', '=', 'params', ')'] | Retrieve information for a particular task.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html>`_
:arg task_id: Return the task with specified id (node_id:task_number)
:arg wait_for_completion: Wait for the matching tasks to complete
(default: false)
:arg timeout: Maximum waiting time for `wait_for_completion` | ['Retrieve', 'information', 'for', 'a', 'particular', 'task', '.', '<http', ':', '//', 'www', '.', 'elastic', '.', 'co', '/', 'guide', '/', 'en', '/', 'elasticsearch', '/', 'reference', '/', 'current', '/', 'tasks', '.', 'html', '>', '_'] | train | https://github.com/elastic/elasticsearch-py/blob/2aab285c8f506f3863cbdaba3c90a685c510ba00/elasticsearch/client/tasks.py#L48-L59 |
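
A hypothetical call against the method above; the task id value is made up, and the query parameters go through the `params` dict that `perform_request` forwards:

```python
from elasticsearch import Elasticsearch

es = Elasticsearch()
# task_id format is "node_id:task_number" per the docstring; value invented.
task = es.tasks.get(
    task_id='oTUltX4IQMOUUVeiohTt8A:12345',
    params={'wait_for_completion': 'true', 'timeout': '30s'},
)
```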
119 | fastai/fastai | fastai/metrics.py | top_k_accuracy | def top_k_accuracy(input:Tensor, targs:Tensor, k:int=5)->Rank0Tensor:
"Computes the Top-k accuracy (target is in the top k predictions)."
input = input.topk(k=k, dim=-1)[1]
targs = targs.unsqueeze(dim=-1).expand_as(input)
return (input == targs).max(dim=-1)[0].float().mean() | python | def top_k_accuracy(input:Tensor, targs:Tensor, k:int=5)->Rank0Tensor:
"Computes the Top-k accuracy (target is in the top k predictions)."
input = input.topk(k=k, dim=-1)[1]
targs = targs.unsqueeze(dim=-1).expand_as(input)
return (input == targs).max(dim=-1)[0].float().mean() | ['def', 'top_k_accuracy', '(', 'input', ':', 'Tensor', ',', 'targs', ':', 'Tensor', ',', 'k', ':', 'int', '=', '5', ')', '->', 'Rank0Tensor', ':', 'input', '=', 'input', '.', 'topk', '(', 'k', '=', 'k', ',', 'dim', '=', '-', '1', ')', '[', '1', ']', 'targs', '=', 'targs', '.', 'unsqueeze', '(', 'dim', '=', '-', '1', ')', '.', 'expand_as', '(', 'input', ')', 'return', '(', 'input', '==', 'targs', ')', '.', 'max', '(', 'dim', '=', '-', '1', ')', '[', '0', ']', '.', 'float', '(', ')', '.', 'mean', '(', ')'] | Computes the Top-k accuracy (target is in the top k predictions). | ['Computes', 'the', 'Top', '-', 'k', 'accuracy', '(', 'target', 'is', 'in', 'the', 'top', 'k', 'predictions', ')', '.'] | train | https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/metrics.py#L36-L40 |
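
A worked example of the broadcasting above: two samples, four classes, k=2, where only the first sample's target lands in its top two predictions:

```python
import torch

logits = torch.tensor([[0.1, 0.5, 0.2, 0.9],   # top-2 classes: 3, 1
                       [0.8, 0.1, 0.6, 0.2]])  # top-2 classes: 0, 2
targs = torch.tensor([1, 3])                   # hit for sample 0 only

top2 = logits.topk(k=2, dim=-1)[1]             # indices of the 2 best logits
hits = (top2 == targs.unsqueeze(dim=-1).expand_as(top2)).max(dim=-1)[0]
print(hits.float().mean())                     # tensor(0.5000)
```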
120 | cackharot/suds-py3 | suds/client.py | Client.set_options | def set_options(self, **kwargs):
"""
Set options.
@param kwargs: keyword arguments.
@see: L{Options}
"""
p = Unskin(self.options)
p.update(kwargs) | python | def set_options(self, **kwargs):
"""
Set options.
@param kwargs: keyword arguments.
@see: L{Options}
"""
p = Unskin(self.options)
p.update(kwargs) | ['def', 'set_options', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'p', '=', 'Unskin', '(', 'self', '.', 'options', ')', 'p', '.', 'update', '(', 'kwargs', ')'] | Set options.
@param kwargs: keyword arguments.
@see: L{Options} | ['Set', 'options', '.'] | train | https://github.com/cackharot/suds-py3/blob/7387ec7806e9be29aad0a711bea5cb3c9396469c/suds/client.py#L122-L129 |
121 | palantir/python-jsonrpc-server | pyls_jsonrpc/endpoint.py | Endpoint._cancel_callback | def _cancel_callback(self, request_id):
"""Construct a cancellation callback for the given request ID."""
def callback(future):
if future.cancelled():
self.notify(CANCEL_METHOD, {'id': request_id})
future.set_exception(JsonRpcRequestCancelled())
return callback | python | def _cancel_callback(self, request_id):
"""Construct a cancellation callback for the given request ID."""
def callback(future):
if future.cancelled():
self.notify(CANCEL_METHOD, {'id': request_id})
future.set_exception(JsonRpcRequestCancelled())
return callback | ['def', '_cancel_callback', '(', 'self', ',', 'request_id', ')', ':', 'def', 'callback', '(', 'future', ')', ':', 'if', 'future', '.', 'cancelled', '(', ')', ':', 'self', '.', 'notify', '(', 'CANCEL_METHOD', ',', '{', "'id'", ':', 'request_id', '}', ')', 'future', '.', 'set_exception', '(', 'JsonRpcRequestCancelled', '(', ')', ')', 'return', 'callback'] | Construct a cancellation callback for the given request ID. | ['Construct', 'a', 'cancellation', 'callback', 'for', 'the', 'given', 'request', 'ID', '.'] | train | https://github.com/palantir/python-jsonrpc-server/blob/7021d849901705ab53c141e483a71d0779aff3d2/pyls_jsonrpc/endpoint.py#L86-L92 |
122 | nlm/nagplug | nagplug/__init__.py | Threshold.check | def check(self, value):
"""
check if a value is correct according to threshold
arguments:
value: the value to check
"""
if self._inclusive:
return False if self._min <= value <= self._max else True
else:
return False if value > self._max or value < self._min else True | python | def check(self, value):
"""
check if a value is correct according to threshold
arguments:
value: the value to check
"""
if self._inclusive:
return False if self._min <= value <= self._max else True
else:
return False if value > self._max or value < self._min else True | ['def', 'check', '(', 'self', ',', 'value', ')', ':', 'if', 'self', '.', '_inclusive', ':', 'return', 'False', 'if', 'self', '.', '_min', '<=', 'value', '<=', 'self', '.', '_max', 'else', 'True', 'else', ':', 'return', 'False', 'if', 'value', '>', 'self', '.', '_max', 'or', 'value', '<', 'self', '.', '_min', 'else', 'True'] | check if a value is correct according to threshold
arguments:
value: the value to check | ['check', 'if', 'a', 'value', 'is', 'correct', 'according', 'to', 'threshold'] | train | https://github.com/nlm/nagplug/blob/9de70d8031caffbfa57ab9d8d03567e897e9e119/nagplug/__init__.py#L453-L463 |
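
A minimal stand-in reproducing `check()`'s semantics, assuming a True return means the value passes: plain ranges flag values outside [min, max], inclusive ones flag values inside:

```python
class MiniThreshold:
    # Illustrative mimic of nagplug's Threshold.check() logic.
    def __init__(self, lo, hi, inclusive=False):
        self._min, self._max, self._inclusive = lo, hi, inclusive

    def check(self, value):
        if self._inclusive:
            return not (self._min <= value <= self._max)
        return self._min <= value <= self._max

plain = MiniThreshold(10, 20)                    # flags values outside 10..20
assert plain.check(15) and not plain.check(25)
inside = MiniThreshold(10, 20, inclusive=True)   # flags values inside 10..20
assert inside.check(25) and not inside.check(15)
```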
123 | BerkeleyAutomation/visualization | visualization/visualizer2d.py | Visualizer2D.imshow | def imshow(image, auto_subplot=False, **kwargs):
""" Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd
"""
if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
plt.imshow(image.data, **kwargs)
elif isinstance(image, DepthImage):
plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
elif isinstance(image, RgbdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.color.data, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.color.data, **kwargs)
elif isinstance(image, GdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off') | python | def imshow(image, auto_subplot=False, **kwargs):
""" Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd
"""
if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
plt.imshow(image.data, **kwargs)
elif isinstance(image, DepthImage):
plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
elif isinstance(image, RgbdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.color.data, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.color.data, **kwargs)
elif isinstance(image, GdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off') | ['def', 'imshow', '(', 'image', ',', 'auto_subplot', '=', 'False', ',', '*', '*', 'kwargs', ')', ':', 'if', 'isinstance', '(', 'image', ',', 'BinaryImage', ')', 'or', 'isinstance', '(', 'image', ',', 'GrayscaleImage', ')', ':', 'plt', '.', 'imshow', '(', 'image', '.', 'data', ',', 'cmap', '=', 'plt', '.', 'cm', '.', 'gray', ',', '*', '*', 'kwargs', ')', 'elif', 'isinstance', '(', 'image', ',', 'ColorImage', ')', 'or', 'isinstance', '(', 'image', ',', 'SegmentationImage', ')', ':', 'plt', '.', 'imshow', '(', 'image', '.', 'data', ',', '*', '*', 'kwargs', ')', 'elif', 'isinstance', '(', 'image', ',', 'DepthImage', ')', ':', 'plt', '.', 'imshow', '(', 'image', '.', 'data', ',', 'cmap', '=', 'plt', '.', 'cm', '.', 'gray_r', ',', '*', '*', 'kwargs', ')', 'elif', 'isinstance', '(', 'image', ',', 'RgbdImage', ')', ':', 'if', 'auto_subplot', ':', 'plt', '.', 'subplot', '(', '1', ',', '2', ',', '1', ')', 'plt', '.', 'imshow', '(', 'image', '.', 'color', '.', 'data', ',', '*', '*', 'kwargs', ')', 'plt', '.', 'axis', '(', "'off'", ')', 'plt', '.', 'subplot', '(', '1', ',', '2', ',', '2', ')', 'plt', '.', 'imshow', '(', 'image', '.', 'depth', '.', 'data', ',', 'cmap', '=', 'plt', '.', 'cm', '.', 'gray_r', ',', '*', '*', 'kwargs', ')', 'else', ':', 'plt', '.', 'imshow', '(', 'image', '.', 'color', '.', 'data', ',', '*', '*', 'kwargs', ')', 'elif', 'isinstance', '(', 'image', ',', 'GdImage', ')', ':', 'if', 'auto_subplot', ':', 'plt', '.', 'subplot', '(', '1', ',', '2', ',', '1', ')', 'plt', '.', 'imshow', '(', 'image', '.', 'gray', '.', 'data', ',', 'cmap', '=', 'plt', '.', 'cm', '.', 'gray', ',', '*', '*', 'kwargs', ')', 'plt', '.', 'axis', '(', "'off'", ')', 'plt', '.', 'subplot', '(', '1', ',', '2', ',', '2', ')', 'plt', '.', 'imshow', '(', 'image', '.', 'depth', '.', 'data', ',', 'cmap', '=', 'plt', '.', 'cm', '.', 'gray_r', ',', '*', '*', 'kwargs', ')', 'else', ':', 'plt', '.', 'imshow', '(', 'image', '.', 'gray', '.', 'data', ',', 'cmap', '=', 'plt', '.', 'cm', '.', 'gray', ',', '*', '*', 'kwargs', ')', 'plt', '.', 'axis', '(', "'off'", ')'] | Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd | ['Displays', 'an', 'image', '.', 'Parameters', '----------', 'image', ':', ':', 'obj', ':', 'perception', '.', 'Image', 'image', 'to', 'display', 'auto_subplot', ':', 'bool', 'whether', 'or', 'not', 'to', 'automatically', 'subplot', 'for', 'multi', '-', 'channel', 'images', 'e', '.', 'g', '.', 'rgbd'] | train | https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer2d.py#L118-L151 |
124 | numenta/htmresearch | projects/nik/nik_analysis.py | NIKAnalysis.compute | def compute(self, xt1, yt1, xt, yt, theta1t1, theta2t1, theta1, theta2):
"""
Accumulate the various inputs.
"""
dx = xt - xt1
dy = yt - yt1
if self.numPoints < self.maxPoints:
self.dxValues[self.numPoints,0] = dx
self.dxValues[self.numPoints,1] = dy
self.thetaValues[self.numPoints,0] = theta1
self.thetaValues[self.numPoints,1] = theta2
self.numPoints += 1
# print >>sys.stderr, "Xt's: ", xt1, yt1, xt, yt, "Delta's: ", dx, dy
# print >>sys.stderr, "Theta t-1: ", theta1t1, theta2t1, "t:",theta1, theta2
elif self.numPoints == self.maxPoints:
print >> sys.stderr,"Max points exceeded, analyzing ",self.maxPoints,"points only"
self.numPoints += 1 | python | def compute(self, xt1, yt1, xt, yt, theta1t1, theta2t1, theta1, theta2):
"""
Accumulate the various inputs.
"""
dx = xt - xt1
dy = yt - yt1
if self.numPoints < self.maxPoints:
self.dxValues[self.numPoints,0] = dx
self.dxValues[self.numPoints,1] = dy
self.thetaValues[self.numPoints,0] = theta1
self.thetaValues[self.numPoints,1] = theta2
self.numPoints += 1
# print >>sys.stderr, "Xt's: ", xt1, yt1, xt, yt, "Delta's: ", dx, dy
# print >>sys.stderr, "Theta t-1: ", theta1t1, theta2t1, "t:",theta1, theta2
elif self.numPoints == self.maxPoints:
print >> sys.stderr,"Max points exceeded, analyzing ",self.maxPoints,"points only"
self.numPoints += 1 | ['def', 'compute', '(', 'self', ',', 'xt1', ',', 'yt1', ',', 'xt', ',', 'yt', ',', 'theta1t1', ',', 'theta2t1', ',', 'theta1', ',', 'theta2', ')', ':', 'dx', '=', 'xt', '-', 'xt1', 'dy', '=', 'yt', '-', 'yt1', 'if', 'self', '.', 'numPoints', '<', 'self', '.', 'maxPoints', ':', 'self', '.', 'dxValues', '[', 'self', '.', 'numPoints', ',', '0', ']', '=', 'dx', 'self', '.', 'dxValues', '[', 'self', '.', 'numPoints', ',', '1', ']', '=', 'dy', 'self', '.', 'thetaValues', '[', 'self', '.', 'numPoints', ',', '0', ']', '=', 'theta1', 'self', '.', 'thetaValues', '[', 'self', '.', 'numPoints', ',', '1', ']', '=', 'theta2', 'self', '.', 'numPoints', '+=', '1', '# print >>sys.stderr, "Xt\'s: ", xt1, yt1, xt, yt, "Delta\'s: ", dx, dy', '# print >>sys.stderr, "Theta t-1: ", theta1t1, theta2t1, "t:",theta1, theta2', 'elif', 'self', '.', 'numPoints', '==', 'self', '.', 'maxPoints', ':', 'print', '>>', 'sys', '.', 'stderr', ',', '"Max points exceeded, analyzing "', ',', 'self', '.', 'maxPoints', ',', '"points only"', 'self', '.', 'numPoints', '+=', '1'] | Accumulate the various inputs. | ['Accumulate', 'the', 'various', 'inputs', '.'] | train | https://github.com/numenta/htmresearch/blob/70c096b09a577ea0432c3f3bfff4442d4871b7aa/projects/nik/nik_analysis.py#L41-L62 |
125 | SecurityInnovation/PGPy | pgpy/pgp.py | PGPKey.is_public | def is_public(self):
"""``True`` if this is a public key, otherwise ``False``"""
return isinstance(self._key, Public) and not isinstance(self._key, Private) | python | def is_public(self):
"""``True`` if this is a public key, otherwise ``False``"""
return isinstance(self._key, Public) and not isinstance(self._key, Private) | ['def', 'is_public', '(', 'self', ')', ':', 'return', 'isinstance', '(', 'self', '.', '_key', ',', 'Public', ')', 'and', 'not', 'isinstance', '(', 'self', '.', '_key', ',', 'Private', ')'] | ``True`` if this is a public key, otherwise ``False`` | ['True', 'if', 'this', 'is', 'a', 'public', 'key', 'otherwise', 'False'] | train | https://github.com/SecurityInnovation/PGPy/blob/f1c3d68e32c334f5aa14c34580925e97f17f4fde/pgpy/pgp.py#L1282-L1284 |
126 | biocore/burrito-fillings | bfillings/sumaclust_v1.py | sumaclust_denovo_cluster | def sumaclust_denovo_cluster(seq_path=None,
result_path=None,
shortest_len=True,
similarity=0.97,
threads=1,
exact=False,
HALT_EXEC=False
):
""" Function : launch SumaClust de novo OTU picker
Parameters: seq_path, filepath to reads;
result_path, filepath to output OTU map;
shortest_len, boolean;
similarity, the similarity threshold (between (0,1]);
threads, number of threads to use;
exact, boolean to perform exact matching
Return : clusters, list of lists
"""
# Sequence path is mandatory
if (seq_path is None
or not exists(seq_path)):
raise ValueError("Error: FASTA query sequence filepath is "
"mandatory input.")
# Output directory is mandatory
if (result_path is None
or not isdir(dirname(realpath(result_path)))):
raise ValueError("Error: output directory is mandatory input.")
# Instantiate the object
sumaclust = Sumaclust(HALT_EXEC=HALT_EXEC)
# Set the OTU-map filepath
sumaclust.Parameters['-O'].on(result_path)
# Set the similarity threshold
if similarity is not None:
sumaclust.Parameters['-t'].on(similarity)
# Set the option to perform exact clustering (default: False)
if exact:
sumaclust.Parameters['-e'].on()
# Turn off option for reference sequence length to be the shortest
if not shortest_len:
sumaclust.Parameters['-l'].off()
# Set the number of threads
if threads > 0:
sumaclust.Parameters['-p'].on(threads)
else:
raise ValueError("Number of threads must be positive.")
# Launch SumaClust,
# set the data string to include the read filepath
# (to be passed as final arguments in the sumaclust command)
app_result = sumaclust(seq_path)
# Put clusters into a list of lists
f_otumap = app_result['OtuMap']
clusters = [line.strip().split('\t')[1:] for line in f_otumap]
# Return clusters
return clusters | python | def sumaclust_denovo_cluster(seq_path=None,
result_path=None,
shortest_len=True,
similarity=0.97,
threads=1,
exact=False,
HALT_EXEC=False
):
""" Function : launch SumaClust de novo OTU picker
Parameters: seq_path, filepath to reads;
result_path, filepath to output OTU map;
shortest_len, boolean;
similarity, the similarity threshold (between (0,1]);
threads, number of threads to use;
exact, boolean to perform exact matching
Return : clusters, list of lists
"""
# Sequence path is mandatory
if (seq_path is None
or not exists(seq_path)):
raise ValueError("Error: FASTA query sequence filepath is "
"mandatory input.")
# Output directory is mandatory
if (result_path is None
or not isdir(dirname(realpath(result_path)))):
raise ValueError("Error: output directory is mandatory input.")
# Instantiate the object
sumaclust = Sumaclust(HALT_EXEC=HALT_EXEC)
# Set the OTU-map filepath
sumaclust.Parameters['-O'].on(result_path)
# Set the similarity threshold
if similarity is not None:
sumaclust.Parameters['-t'].on(similarity)
# Set the option to perform exact clustering (default: False)
if exact:
sumaclust.Parameters['-e'].on()
# Turn off option for reference sequence length to be the shortest
if not shortest_len:
sumaclust.Parameters['-l'].off()
# Set the number of threads
if threads > 0:
sumaclust.Parameters['-p'].on(threads)
else:
raise ValueError("Number of threads must be positive.")
# Launch SumaClust,
# set the data string to include the read filepath
# (to be passed as final arguments in the sumaclust command)
app_result = sumaclust(seq_path)
# Put clusters into a list of lists
f_otumap = app_result['OtuMap']
clusters = [line.strip().split('\t')[1:] for line in f_otumap]
# Return clusters
return clusters | ['def', 'sumaclust_denovo_cluster', '(', 'seq_path', '=', 'None', ',', 'result_path', '=', 'None', ',', 'shortest_len', '=', 'True', ',', 'similarity', '=', '0.97', ',', 'threads', '=', '1', ',', 'exact', '=', 'False', ',', 'HALT_EXEC', '=', 'False', ')', ':', '# Sequence path is mandatory', 'if', '(', 'seq_path', 'is', 'None', 'or', 'not', 'exists', '(', 'seq_path', ')', ')', ':', 'raise', 'ValueError', '(', '"Error: FASTA query sequence filepath is "', '"mandatory input."', ')', '# Output directory is mandatory', 'if', '(', 'result_path', 'is', 'None', 'or', 'not', 'isdir', '(', 'dirname', '(', 'realpath', '(', 'result_path', ')', ')', ')', ')', ':', 'raise', 'ValueError', '(', '"Error: output directory is mandatory input."', ')', '# Instantiate the object', 'sumaclust', '=', 'Sumaclust', '(', 'HALT_EXEC', '=', 'HALT_EXEC', ')', '# Set the OTU-map filepath', 'sumaclust', '.', 'Parameters', '[', "'-O'", ']', '.', 'on', '(', 'result_path', ')', '# Set the similarity threshold', 'if', 'similarity', 'is', 'not', 'None', ':', 'sumaclust', '.', 'Parameters', '[', "'-t'", ']', '.', 'on', '(', 'similarity', ')', '# Set the option to perform exact clustering (default: False)', 'if', 'exact', ':', 'sumaclust', '.', 'Parameters', '[', "'-e'", ']', '.', 'on', '(', ')', '# Turn off option for reference sequence length to be the shortest', 'if', 'not', 'shortest_len', ':', 'sumaclust', '.', 'Parameters', '[', "'-l'", ']', '.', 'off', '(', ')', '# Set the number of threads', 'if', 'threads', '>', '0', ':', 'sumaclust', '.', 'Parameters', '[', "'-p'", ']', '.', 'on', '(', 'threads', ')', 'else', ':', 'raise', 'ValueError', '(', '"Number of threads must be positive."', ')', '# Launch SumaClust,', '# set the data string to include the read filepath', '# (to be passed as final arguments in the sumaclust command)', 'app_result', '=', 'sumaclust', '(', 'seq_path', ')', '# Put clusters into a list of lists', 'f_otumap', '=', 'app_result', '[', "'OtuMap'", ']', 'clusters', '=', '[', 'line', '.', 'strip', '(', ')', '.', 'split', '(', "'\\t'", ')', '[', '1', ':', ']', 'for', 'line', 'in', 'f_otumap', ']', '# Return clusters', 'return', 'clusters'] | Function : launch SumaClust de novo OTU picker
Parameters: seq_path, filepath to reads;
result_path, filepath to output OTU map;
shortest_len, boolean;
similarity, the similarity threshold (between (0,1]);
threads, number of threads to use;
exact, boolean to perform exact matching
Return : clusters, list of lists | ['Function', ':', 'launch', 'SumaClust', 'de', 'novo', 'OTU', 'picker'] | train | https://github.com/biocore/burrito-fillings/blob/02ab71a46119b40793bd56a4ae00ca15f6dc3329/bfillings/sumaclust_v1.py#L108-L173 |
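A hedged usage sketch for sumaclust_denovo_cluster(); the file paths below are placeholders and the SumaClust binary must be installed for the wrapped command to run.

from bfillings.sumaclust_v1 import sumaclust_denovo_cluster

clusters = sumaclust_denovo_cluster(
    seq_path='reads.fna',            # hypothetical FASTA input
    result_path='/tmp/otu_map.txt',  # OTU map is written here
    similarity=0.97,
    threads=4)
# clusters is a list of lists, one inner list of sequence labels per OTU
for i, otu in enumerate(clusters[:3]):
    print('OTU %d: %d sequences' % (i, len(otu)))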
127 | materialsproject/pymatgen | pymatgen/analysis/chemenv/utils/coordination_geometry_utils.py | Plane.distances_indices_sorted | def distances_indices_sorted(self, points, sign=False):
"""
Computes the distances from the plane to each of the points. Positive distances are on the side of the
normal of the plane while negative distances are on the other side. Indices sorting the points from closest
to furthest are also computed.
:param points: Points for which distances are computed
:param sign: Whether to add sign information in the indices sorting the points distances
:return: Distances from the plane to the points (positive values on the side of the normal to the plane,
negative values on the other side), as well as indices of the points from closest to furthest. For
the latter, when the sign parameter is True, items of the sorting list are given as tuples of
(index, sign).
"""
distances = [np.dot(self.normal_vector, pp) + self.d for pp in points]
indices = sorted(range(len(distances)), key=lambda k: np.abs(distances[k]))
if sign:
indices = [(ii, int(np.sign(distances[ii]))) for ii in indices]
return distances, indices | python | def distances_indices_sorted(self, points, sign=False):
"""
Computes the distances from the plane to each of the points. Positive distances are on the side of the
normal of the plane while negative distances are on the other side. Indices sorting the points from closest
to furthest are also computed.
:param points: Points for which distances are computed
:param sign: Whether to add sign information in the indices sorting the points distances
:return: Distances from the plane to the points (positive values on the side of the normal to the plane,
negative values on the other side), as well as indices of the points from closest to furthest. For
the latter, when the sign parameter is True, items of the sorting list are given as tuples of
(index, sign).
"""
distances = [np.dot(self.normal_vector, pp) + self.d for pp in points]
indices = sorted(range(len(distances)), key=lambda k: np.abs(distances[k]))
if sign:
indices = [(ii, int(np.sign(distances[ii]))) for ii in indices]
return distances, indices | ['def', 'distances_indices_sorted', '(', 'self', ',', 'points', ',', 'sign', '=', 'False', ')', ':', 'distances', '=', '[', 'np', '.', 'dot', '(', 'self', '.', 'normal_vector', ',', 'pp', ')', '+', 'self', '.', 'd', 'for', 'pp', 'in', 'points', ']', 'indices', '=', 'sorted', '(', 'range', '(', 'len', '(', 'distances', ')', ')', ',', 'key', '=', 'lambda', 'k', ':', 'np', '.', 'abs', '(', 'distances', '[', 'k', ']', ')', ')', 'if', 'sign', ':', 'indices', '=', '[', '(', 'ii', ',', 'int', '(', 'np', '.', 'sign', '(', 'distances', '[', 'ii', ']', ')', ')', ')', 'for', 'ii', 'in', 'indices', ']', 'return', 'distances', ',', 'indices'] | Computes the distances from the plane to each of the points. Positive distances are on the side of the
normal of the plane while negative distances are on the other side. Indices sorting the points from closest
to furthest are also computed.
:param points: Points for which distances are computed
:param sign: Whether to add sign information in the indices sorting the points distances
:return: Distances from the plane to the points (positive values on the side of the normal to the plane,
negative values on the other side), as well as indices of the points from closest to furthest. For
the latter, when the sign parameter is True, items of the sorting list are given as tuples of
(index, sign). | ['Computes', 'the', 'distances', 'from', 'the', 'plane', 'to', 'each', 'of', 'the', 'points', '.', 'Positive', 'distances', 'are', 'on', 'the', 'side', 'of', 'the', 'normal', 'of', 'the', 'plane', 'while', 'negative', 'distances', 'are', 'on', 'the', 'other', 'side', '.', 'Indices', 'sorting', 'the', 'points', 'from', 'closest', 'to', 'furthest', 'is', 'also', 'computed', '.', ':', 'param', 'points', ':', 'Points', 'for', 'which', 'distances', 'are', 'computed', ':', 'param', 'sign', ':', 'Whether', 'to', 'add', 'sign', 'information', 'in', 'the', 'indices', 'sorting', 'the', 'points', 'distances', ':', 'return', ':', 'Distances', 'from', 'the', 'plane', 'to', 'the', 'points', '(', 'positive', 'values', 'on', 'the', 'side', 'of', 'the', 'normal', 'to', 'the', 'plane', 'negative', 'values', 'on', 'the', 'other', 'side', ')', 'as', 'well', 'as', 'indices', 'of', 'the', 'points', 'from', 'closest', 'to', 'furthest', '.', 'For', 'the', 'latter', 'when', 'the', 'sign', 'parameter', 'is', 'True', 'items', 'of', 'the', 'sorting', 'list', 'are', 'given', 'as', 'tuples', 'of', '(', 'index', 'sign', ')', '.'] | train | https://github.com/materialsproject/pymatgen/blob/4ca558cf72f8d5f8a1f21dfdfc0181a971c186da/pymatgen/analysis/chemenv/utils/coordination_geometry_utils.py#L704-L720 |
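The signed distance computed above is just dot(normal_vector, point) + d per point; a self-contained numpy illustration of the same sorting logic (deliberately not constructing a pymatgen Plane, whose setup is outside this row):

import numpy as np

normal = np.array([0.0, 0.0, 1.0])   # unit normal of the plane z = 1, so d = -1
d = -1.0
points = np.array([[0.0, 0.0, 3.0], [0.0, 0.0, 0.5], [0.0, 0.0, -2.0]])

distances = [np.dot(normal, p) + d for p in points]
indices = sorted(range(len(distances)), key=lambda k: abs(distances[k]))
print(distances)   # [2.0, -0.5, -3.0]
print(indices)     # [1, 0, 2]: closest to furthest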
128 | google/grr | grr/server/grr_response_server/databases/mem_paths.py | InMemoryDBPathMixin.MultiWritePathHistory | def MultiWritePathHistory(self, client_path_histories):
"""Writes a collection of hash and stat entries observed for given paths."""
for client_path, client_path_history in iteritems(client_path_histories):
if client_path.client_id not in self.metadatas:
raise db.UnknownClientError(client_path.client_id)
path_info = rdf_objects.PathInfo(
path_type=client_path.path_type, components=client_path.components)
for timestamp, stat_entry in iteritems(client_path_history.stat_entries):
path_record = self._GetPathRecord(
client_path.client_id, path_info, set_default=False)
if path_record is None:
# TODO(hanuszczak): Provide more details about paths that caused that.
raise db.AtLeastOneUnknownPathError([])
path_record.AddStatEntry(stat_entry, timestamp)
for timestamp, hash_entry in iteritems(client_path_history.hash_entries):
path_record = self._GetPathRecord(
client_path.client_id, path_info, set_default=False)
if path_record is None:
# TODO(hanuszczak): Provide more details about paths that caused that.
raise db.AtLeastOneUnknownPathError([])
path_record.AddHashEntry(hash_entry, timestamp) | python | def MultiWritePathHistory(self, client_path_histories):
"""Writes a collection of hash and stat entries observed for given paths."""
for client_path, client_path_history in iteritems(client_path_histories):
if client_path.client_id not in self.metadatas:
raise db.UnknownClientError(client_path.client_id)
path_info = rdf_objects.PathInfo(
path_type=client_path.path_type, components=client_path.components)
for timestamp, stat_entry in iteritems(client_path_history.stat_entries):
path_record = self._GetPathRecord(
client_path.client_id, path_info, set_default=False)
if path_record is None:
# TODO(hanuszczak): Provide more details about paths that caused that.
raise db.AtLeastOneUnknownPathError([])
path_record.AddStatEntry(stat_entry, timestamp)
for timestamp, hash_entry in iteritems(client_path_history.hash_entries):
path_record = self._GetPathRecord(
client_path.client_id, path_info, set_default=False)
if path_record is None:
# TODO(hanuszczak): Provide more details about paths that caused that.
raise db.AtLeastOneUnknownPathError([])
path_record.AddHashEntry(hash_entry, timestamp) | ['def', 'MultiWritePathHistory', '(', 'self', ',', 'client_path_histories', ')', ':', 'for', 'client_path', ',', 'client_path_history', 'in', 'iteritems', '(', 'client_path_histories', ')', ':', 'if', 'client_path', '.', 'client_id', 'not', 'in', 'self', '.', 'metadatas', ':', 'raise', 'db', '.', 'UnknownClientError', '(', 'client_path', '.', 'client_id', ')', 'path_info', '=', 'rdf_objects', '.', 'PathInfo', '(', 'path_type', '=', 'client_path', '.', 'path_type', ',', 'components', '=', 'client_path', '.', 'components', ')', 'for', 'timestamp', ',', 'stat_entry', 'in', 'iteritems', '(', 'client_path_history', '.', 'stat_entries', ')', ':', 'path_record', '=', 'self', '.', '_GetPathRecord', '(', 'client_path', '.', 'client_id', ',', 'path_info', ',', 'set_default', '=', 'False', ')', 'if', 'path_record', 'is', 'None', ':', '# TODO(hanuszczak): Provide more details about paths that caused that.', 'raise', 'db', '.', 'AtLeastOneUnknownPathError', '(', '[', ']', ')', 'path_record', '.', 'AddStatEntry', '(', 'stat_entry', ',', 'timestamp', ')', 'for', 'timestamp', ',', 'hash_entry', 'in', 'iteritems', '(', 'client_path_history', '.', 'hash_entries', ')', ':', 'path_record', '=', 'self', '.', '_GetPathRecord', '(', 'client_path', '.', 'client_id', ',', 'path_info', ',', 'set_default', '=', 'False', ')', 'if', 'path_record', 'is', 'None', ':', '# TODO(hanuszczak): Provide more details about paths that caused that.', 'raise', 'db', '.', 'AtLeastOneUnknownPathError', '(', '[', ']', ')', 'path_record', '.', 'AddHashEntry', '(', 'hash_entry', ',', 'timestamp', ')'] | Writes a collection of hash and stat entries observed for given paths. | ['Writes', 'a', 'collection', 'of', 'hash', 'and', 'stat', 'entries', 'observed', 'for', 'given', 'paths', '.'] | train | https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/databases/mem_paths.py#L329-L354 |
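A sketch of the input shape MultiWritePathHistory() iterates over, using plain stand-in classes rather than the real GRR rdf_objects/db types (which are not shown in this row); nothing here touches an actual datastore.

class FakeClientPath:
    def __init__(self, client_id, path_type, components):
        self.client_id = client_id
        self.path_type = path_type
        self.components = components

class FakeClientPathHistory:
    def __init__(self, stat_entries=None, hash_entries=None):
        self.stat_entries = stat_entries or {}   # timestamp -> stat entry
        self.hash_entries = hash_entries or {}   # timestamp -> hash entry

histories = {
    FakeClientPath('C.1000000000000000', 'OS', ('etc', 'passwd')):
        FakeClientPathHistory(stat_entries={1554000000: object()}),
}
# mixin.MultiWritePathHistory(histories) would then record one stat entry per
# (path, timestamp) pair, raising UnknownClientError for unknown client ids.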
129 | petl-developers/petl | petl/transform/selects.py | selectisinstance | def selectisinstance(table, field, value, complement=False):
"""Select rows where the given field is an instance of the given type."""
return selectop(table, field, value, isinstance, complement=complement) | python | def selectisinstance(table, field, value, complement=False):
"""Select rows where the given field is an instance of the given type."""
return selectop(table, field, value, isinstance, complement=complement) | ['def', 'selectisinstance', '(', 'table', ',', 'field', ',', 'value', ',', 'complement', '=', 'False', ')', ':', 'return', 'selectop', '(', 'table', ',', 'field', ',', 'value', ',', 'isinstance', ',', 'complement', '=', 'complement', ')'] | Select rows where the given field is an instance of the given type. | ['Select', 'rows', 'where', 'the', 'given', 'field', 'is', 'an', 'instance', 'of', 'the', 'given', 'type', '.'] | train | https://github.com/petl-developers/petl/blob/1d33ca055f7e04e0d28a772041c9fd30c8d415d6/petl/transform/selects.py#L274-L277 |
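A small worked example for selectisinstance(); petl tables are row sequences whose first row is the header, and select results are lazy table views.

from petl import selectisinstance

table = [['foo', 'bar'],
         [1, 'a'],
         ['x', 'b'],
         [2.0, 'c']]

ints_only = selectisinstance(table, 'foo', int)
print(list(ints_only))    # header plus the rows where foo is an int
others = selectisinstance(table, 'foo', int, complement=True)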
130 | cloudera/cm_api | python/src/cm_api/endpoints/services.py | ApiService.get_role_config_group | def get_role_config_group(self, name):
"""
Get a role configuration group in the service by name.
@param name: The name of the role config group.
@return: An ApiRoleConfigGroup object.
@since: API v3
"""
return role_config_groups.get_role_config_group(
self._get_resource_root(), self.name, name, self._get_cluster_name()) | python | def get_role_config_group(self, name):
"""
Get a role configuration group in the service by name.
@param name: The name of the role config group.
@return: An ApiRoleConfigGroup object.
@since: API v3
"""
return role_config_groups.get_role_config_group(
self._get_resource_root(), self.name, name, self._get_cluster_name()) | ['def', 'get_role_config_group', '(', 'self', ',', 'name', ')', ':', 'return', 'role_config_groups', '.', 'get_role_config_group', '(', 'self', '.', '_get_resource_root', '(', ')', ',', 'self', '.', 'name', ',', 'name', ',', 'self', '.', '_get_cluster_name', '(', ')', ')'] | Get a role configuration group in the service by name.
@param name: The name of the role config group.
@return: An ApiRoleConfigGroup object.
@since: API v3 | ['Get', 'a', 'role', 'configuration', 'group', 'in', 'the', 'service', 'by', 'name', '.'] | train | https://github.com/cloudera/cm_api/blob/5d2512375bd94684b4da36df9e0d9177865ffcbb/python/src/cm_api/endpoints/services.py#L650-L659 |
131 | auth0/auth0-python | auth0/v3/authentication/get_token.py | GetToken.refresh_token | def refresh_token(self, client_id, client_secret, refresh_token, grant_type='refresh_token'):
"""Calls oauth/token endpoint with refresh token grant type
Use this endpoint to refresh an access token, using the refresh token you got during authorization.
Args:
grant_type (str): Denotes the flow you're using. For refresh token
use refresh_token
client_id (str): your application's client Id
client_secret (str): your application's client Secret
refresh_token (str): The refresh token returned from the initial token request.
Returns:
access_token, id_token
"""
return self.post(
'https://{}/oauth/token'.format(self.domain),
data={
'client_id': client_id,
'client_secret': client_secret,
'refresh_token': refresh_token,
'grant_type': grant_type
},
headers={'Content-Type': 'application/json'}
) | python | def refresh_token(self, client_id, client_secret, refresh_token, grant_type='refresh_token'):
"""Calls oauth/token endpoint with refresh token grant type
Use this endpoint to refresh an access token, using the refresh token you got during authorization.
Args:
grant_type (str): Denotes the flow you're using. For refresh token
use refresh_token
client_id (str): your application's client Id
client_secret (str): your application's client Secret
refresh_token (str): The refresh token returned from the initial token request.
Returns:
access_token, id_token
"""
return self.post(
'https://{}/oauth/token'.format(self.domain),
data={
'client_id': client_id,
'client_secret': client_secret,
'refresh_token': refresh_token,
'grant_type': grant_type
},
headers={'Content-Type': 'application/json'}
) | ['def', 'refresh_token', '(', 'self', ',', 'client_id', ',', 'client_secret', ',', 'refresh_token', ',', 'grant_type', '=', "'refresh_token'", ')', ':', 'return', 'self', '.', 'post', '(', "'https://{}/oauth/token'", '.', 'format', '(', 'self', '.', 'domain', ')', ',', 'data', '=', '{', "'client_id'", ':', 'client_id', ',', "'client_secret'", ':', 'client_secret', ',', "'refresh_token'", ':', 'refresh_token', ',', "'grant_type'", ':', 'grant_type', '}', ',', 'headers', '=', '{', "'Content-Type'", ':', "'application/json'", '}', ')'] | Calls oauth/token endpoint with refresh token grant type
Use this endpoint to refresh an access token, using the refresh token you got during authorization.
Args:
grant_type (str): Denotes the flow you're using. For refresh token
use refresh_token
client_id (str): your application's client Id
client_secret (str): your application's client Secret
refresh_token (str): The refresh token returned from the initial token request.
Returns:
access_token, id_token | ['Calls', 'oauth', '/', 'token', 'endpoint', 'with', 'refresh', 'token', 'grant', 'type'] | train | https://github.com/auth0/auth0-python/blob/34adad3f342226aaaa6071387fa405ab840e5c02/auth0/v3/authentication/get_token.py#L171-L199 |
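A minimal usage sketch; per the self.domain reference in the method, GetToken is constructed with the tenant domain, and every credential below is a placeholder.

from auth0.v3.authentication import GetToken

token = GetToken('mytenant.auth0.com')
result = token.refresh_token(
    client_id='YOUR_CLIENT_ID',
    client_secret='YOUR_CLIENT_SECRET',
    refresh_token='THE_REFRESH_TOKEN')
# result is the parsed oauth/token response, e.g. result['access_token']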
132 | bambinos/bambi | bambi/diagnostics.py | effective_n | def effective_n(mcmc):
"""
Args:
mcmc (MCMCResults): Pre-sliced MCMC samples to compute diagnostics for.
"""
if mcmc.n_chains < 2:
raise ValueError(
'Calculation of effective sample size requires multiple chains '
'of the same length.')
def get_neff(x):
"""Compute the effective sample size for a 2D array."""
trace_value = x.T
nchain, n_samples = trace_value.shape
acov = np.asarray([autocov(trace_value[chain]) for chain in range(nchain)])
chain_mean = trace_value.mean(axis=1)
chain_var = acov[:, 0] * n_samples / (n_samples - 1.)
acov_t = acov[:, 1] * n_samples / (n_samples - 1.)
mean_var = np.mean(chain_var)
var_plus = mean_var * (n_samples - 1.) / n_samples
var_plus += np.var(chain_mean, ddof=1)
rho_hat_t = np.zeros(n_samples)
rho_hat_even = 1.
rho_hat_t[0] = rho_hat_even
rho_hat_odd = 1. - (mean_var - np.mean(acov_t)) / var_plus
rho_hat_t[1] = rho_hat_odd
# Geyer's initial positive sequence
max_t = 1
t = 1
while t < (n_samples - 2) and (rho_hat_even + rho_hat_odd) >= 0.:
rho_hat_even = 1. - (mean_var - np.mean(acov[:, t + 1])) / var_plus
rho_hat_odd = 1. - (mean_var - np.mean(acov[:, t + 2])) / var_plus
if (rho_hat_even + rho_hat_odd) >= 0:
rho_hat_t[t + 1] = rho_hat_even
rho_hat_t[t + 2] = rho_hat_odd
max_t = t + 2
t += 2
# Geyer's initial monotone sequence
t = 3
while t <= max_t - 2:
if (rho_hat_t[t + 1] + rho_hat_t[t + 2]) > (rho_hat_t[t - 1] + rho_hat_t[t]):
rho_hat_t[t + 1] = (rho_hat_t[t - 1] + rho_hat_t[t]) / 2.
rho_hat_t[t + 2] = rho_hat_t[t + 1]
t += 2
ess = nchain * n_samples
ess = ess / (-1. + 2. * np.sum(rho_hat_t))
return ess
nvar = mcmc.data.shape[-1]
n_eff = [get_neff(mcmc.data[:, :, i]) for i in range(nvar)]
return pd.DataFrame({'effective_n': n_eff}, index=mcmc.levels) | python | def effective_n(mcmc):
"""
Args:
mcmc (MCMCResults): Pre-sliced MCMC samples to compute diagnostics for.
"""
if mcmc.n_chains < 2:
raise ValueError(
'Calculation of effective sample size requires multiple chains '
'of the same length.')
def get_neff(x):
"""Compute the effective sample size for a 2D array."""
trace_value = x.T
nchain, n_samples = trace_value.shape
acov = np.asarray([autocov(trace_value[chain]) for chain in range(nchain)])
chain_mean = trace_value.mean(axis=1)
chain_var = acov[:, 0] * n_samples / (n_samples - 1.)
acov_t = acov[:, 1] * n_samples / (n_samples - 1.)
mean_var = np.mean(chain_var)
var_plus = mean_var * (n_samples - 1.) / n_samples
var_plus += np.var(chain_mean, ddof=1)
rho_hat_t = np.zeros(n_samples)
rho_hat_even = 1.
rho_hat_t[0] = rho_hat_even
rho_hat_odd = 1. - (mean_var - np.mean(acov_t)) / var_plus
rho_hat_t[1] = rho_hat_odd
# Geyer's initial positive sequence
max_t = 1
t = 1
while t < (n_samples - 2) and (rho_hat_even + rho_hat_odd) >= 0.:
rho_hat_even = 1. - (mean_var - np.mean(acov[:, t + 1])) / var_plus
rho_hat_odd = 1. - (mean_var - np.mean(acov[:, t + 2])) / var_plus
if (rho_hat_even + rho_hat_odd) >= 0:
rho_hat_t[t + 1] = rho_hat_even
rho_hat_t[t + 2] = rho_hat_odd
max_t = t + 2
t += 2
# Geyer's initial monotone sequence
t = 3
while t <= max_t - 2:
if (rho_hat_t[t + 1] + rho_hat_t[t + 2]) > (rho_hat_t[t - 1] + rho_hat_t[t]):
rho_hat_t[t + 1] = (rho_hat_t[t - 1] + rho_hat_t[t]) / 2.
rho_hat_t[t + 2] = rho_hat_t[t + 1]
t += 2
ess = nchain * n_samples
ess = ess / (-1. + 2. * np.sum(rho_hat_t))
return ess
nvar = mcmc.data.shape[-1]
n_eff = [get_neff(mcmc.data[:, :, i]) for i in range(nvar)]
return pd.DataFrame({'effective_n': n_eff}, index=mcmc.levels) | ['def', 'effective_n', '(', 'mcmc', ')', ':', 'if', 'mcmc', '.', 'n_chains', '<', '2', ':', 'raise', 'ValueError', '(', "'Calculation of effective sample size requires multiple chains '", "'of the same length.'", ')', 'def', 'get_neff', '(', 'x', ')', ':', '"""Compute the effective sample size for a 2D array."""', 'trace_value', '=', 'x', '.', 'T', 'nchain', ',', 'n_samples', '=', 'trace_value', '.', 'shape', 'acov', '=', 'np', '.', 'asarray', '(', '[', 'autocov', '(', 'trace_value', '[', 'chain', ']', ')', 'for', 'chain', 'in', 'range', '(', 'nchain', ')', ']', ')', 'chain_mean', '=', 'trace_value', '.', 'mean', '(', 'axis', '=', '1', ')', 'chain_var', '=', 'acov', '[', ':', ',', '0', ']', '*', 'n_samples', '/', '(', 'n_samples', '-', '1.', ')', 'acov_t', '=', 'acov', '[', ':', ',', '1', ']', '*', 'n_samples', '/', '(', 'n_samples', '-', '1.', ')', 'mean_var', '=', 'np', '.', 'mean', '(', 'chain_var', ')', 'var_plus', '=', 'mean_var', '*', '(', 'n_samples', '-', '1.', ')', '/', 'n_samples', 'var_plus', '+=', 'np', '.', 'var', '(', 'chain_mean', ',', 'ddof', '=', '1', ')', 'rho_hat_t', '=', 'np', '.', 'zeros', '(', 'n_samples', ')', 'rho_hat_even', '=', '1.', 'rho_hat_t', '[', '0', ']', '=', 'rho_hat_even', 'rho_hat_odd', '=', '1.', '-', '(', 'mean_var', '-', 'np', '.', 'mean', '(', 'acov_t', ')', ')', '/', 'var_plus', 'rho_hat_t', '[', '1', ']', '=', 'rho_hat_odd', "# Geyer's initial positive sequence", 'max_t', '=', '1', 't', '=', '1', 'while', 't', '<', '(', 'n_samples', '-', '2', ')', 'and', '(', 'rho_hat_even', '+', 'rho_hat_odd', ')', '>=', '0.', ':', 'rho_hat_even', '=', '1.', '-', '(', 'mean_var', '-', 'np', '.', 'mean', '(', 'acov', '[', ':', ',', 't', '+', '1', ']', ')', ')', '/', 'var_plus', 'rho_hat_odd', '=', '1.', '-', '(', 'mean_var', '-', 'np', '.', 'mean', '(', 'acov', '[', ':', ',', 't', '+', '2', ']', ')', ')', '/', 'var_plus', 'if', '(', 'rho_hat_even', '+', 'rho_hat_odd', ')', '>=', '0', ':', 'rho_hat_t', '[', 't', '+', '1', ']', '=', 'rho_hat_even', 'rho_hat_t', '[', 't', '+', '2', ']', '=', 'rho_hat_odd', 'max_t', '=', 't', '+', '2', 't', '+=', '2', "# Geyer's initial monotone sequence", 't', '=', '3', 'while', 't', '<=', 'max_t', '-', '2', ':', 'if', '(', 'rho_hat_t', '[', 't', '+', '1', ']', '+', 'rho_hat_t', '[', 't', '+', '2', ']', ')', '>', '(', 'rho_hat_t', '[', 't', '-', '1', ']', '+', 'rho_hat_t', '[', 't', ']', ')', ':', 'rho_hat_t', '[', 't', '+', '1', ']', '=', '(', 'rho_hat_t', '[', 't', '-', '1', ']', '+', 'rho_hat_t', '[', 't', ']', ')', '/', '2.', 'rho_hat_t', '[', 't', '+', '2', ']', '=', 'rho_hat_t', '[', 't', '+', '1', ']', 't', '+=', '2', 'ess', '=', 'nchain', '*', 'n_samples', 'ess', '=', 'ess', '/', '(', '-', '1.', '+', '2.', '*', 'np', '.', 'sum', '(', 'rho_hat_t', ')', ')', 'return', 'ess', 'nvar', '=', 'mcmc', '.', 'data', '.', 'shape', '[', '-', '1', ']', 'n_eff', '=', '[', 'get_neff', '(', 'mcmc', '.', 'data', '[', ':', ',', ':', ',', 'i', ']', ')', 'for', 'i', 'in', 'range', '(', 'nvar', ')', ']', 'return', 'pd', '.', 'DataFrame', '(', '{', "'effective_n'", ':', 'n_eff', '}', ',', 'index', '=', 'mcmc', '.', 'levels', ')'] | Args:
mcmc (MCMCResults): Pre-sliced MCMC samples to compute diagnostics for. | ['Args', ':', 'mcmc', '(', 'MCMCResults', ')', ':', 'Pre', '-', 'sliced', 'MCMC', 'samples', 'to', 'compute', 'diagnostics', 'for', '.'] | train | https://github.com/bambinos/bambi/blob/b4a0ced917968bb99ca20915317417d708387946/bambi/diagnostics.py#L78-L134 |
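A hedged usage sketch; how the MCMCResults object is produced is assumed here (bambi's model-fitting API is not shown in this row), and at least two chains of equal length are required.

from bambi.diagnostics import effective_n

# results = model.fit('y ~ x', chains=4)   # hypothetical bambi fit call
neff = effective_n(results)    # pandas DataFrame indexed by parameter level
print(neff.sort_values('effective_n'))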
133 | damnit/pymite | pymite/adapters.py | TimeEntries.delete | def delete(self, id):
""" delete a time entry. """
path = partial(_path, self.adapter)
path = path(id)
return self._delete(path) | python | def delete(self, id):
""" delete a time entry. """
path = partial(_path, self.adapter)
path = path(id)
return self._delete(path) | ['def', 'delete', '(', 'self', ',', 'id', ')', ':', 'path', '=', 'partial', '(', '_path', ',', 'self', '.', 'adapter', ')', 'path', '=', 'path', '(', 'id', ')', 'return', 'self', '.', '_delete', '(', 'path', ')'] | delete a time entry. | ['delete', 'a', 'time', 'entry', '.'] | train | https://github.com/damnit/pymite/blob/1e9b9bf6aef790af2d8781f9f77c098c54ca0342/pymite/adapters.py#L212-L216 |
134 | ethan92429/onshapepy | onshapepy/core/client.py | Client.copy_workspace | def copy_workspace(self, uri, new_name):
'''
Copy the current workspace.
Args:
- uri (dict): the uri of the workspace being copied. Needs to have a did and wvm key.
- new_name (str): the new name of the copied workspace.
Returns:
- requests.Response: Onshape response data
'''
payload = {
'isPublic': True,
'newName': new_name
}
return self._api.request('post', '/api/documents/' + uri['did'] + '/workspaces/' + uri['wvm'] + '/copy', body=payload) | python | def copy_workspace(self, uri, new_name):
'''
Copy the current workspace.
Args:
- uri (dict): the uri of the workspace being copied. Needs to have a did and wvm key.
- new_name (str): the new name of the copied workspace.
Returns:
- requests.Response: Onshape response data
'''
payload = {
'isPublic': True,
'newName': new_name
}
return self._api.request('post', '/api/documents/' + uri['did'] + '/workspaces/' + uri['wvm'] + '/copy', body=payload) | ['def', 'copy_workspace', '(', 'self', ',', 'uri', ',', 'new_name', ')', ':', 'payload', '=', '{', "'isPublic'", ':', 'True', ',', "'newName'", ':', 'new_name', '}', 'return', 'self', '.', '_api', '.', 'request', '(', "'post'", ',', "'/api/documents/'", '+', 'uri', '[', "'did'", ']', '+', "'/workspaces/'", '+', 'uri', '[', "'wvm'", ']', '+', "'/copy'", ',', 'body', '=', 'payload', ')'] | Copy the current workspace.
Args:
- uri (dict): the uri of the workspace being copied. Needs to have a did and wvm key.
- new_name (str): the new name of the copied workspace.
Returns:
- requests.Response: Onshape response data | ['Copy', 'the', 'current', 'workspace', '.'] | train | https://github.com/ethan92429/onshapepy/blob/61dc7ccbdc6095fa6cc3b4a414e2f72d03d1c9df/onshapepy/core/client.py#L152-L169 |
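A hedged usage sketch; the Client construction is assumed (its configuration is not shown in this row), and the uri dict needs 'did' and 'wvm' keys, matching what the method indexes.

c = Client()   # hypothetical: assumes a default-configured onshapepy client
uri = {'did': 'DOCUMENT_ID', 'wvm': 'WORKSPACE_ID'}
resp = c.copy_workspace(uri, 'my copied workspace')
print(resp.status_code)   # requests.Response per the docstring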
135 | tensorflow/tensorboard | tensorboard/backend/event_processing/plugin_event_multiplexer.py | EventMultiplexer.Tensors | def Tensors(self, run, tag):
"""Retrieve the tensor events associated with a run and tag.
Args:
run: A string name of the run for which values are retrieved.
tag: A string name of the tag for which values are retrieved.
Raises:
KeyError: If the run is not found, or the tag is not available for
the given run.
Returns:
An array of `event_accumulator.TensorEvent`s.
"""
accumulator = self.GetAccumulator(run)
return accumulator.Tensors(tag) | python | def Tensors(self, run, tag):
"""Retrieve the tensor events associated with a run and tag.
Args:
run: A string name of the run for which values are retrieved.
tag: A string name of the tag for which values are retrieved.
Raises:
KeyError: If the run is not found, or the tag is not available for
the given run.
Returns:
An array of `event_accumulator.TensorEvent`s.
"""
accumulator = self.GetAccumulator(run)
return accumulator.Tensors(tag) | ['def', 'Tensors', '(', 'self', ',', 'run', ',', 'tag', ')', ':', 'accumulator', '=', 'self', '.', 'GetAccumulator', '(', 'run', ')', 'return', 'accumulator', '.', 'Tensors', '(', 'tag', ')'] | Retrieve the tensor events associated with a run and tag.
Args:
run: A string name of the run for which values are retrieved.
tag: A string name of the tag for which values are retrieved.
Raises:
KeyError: If the run is not found, or the tag is not available for
the given run.
Returns:
An array of `event_accumulator.TensorEvent`s. | ['Retrieve', 'the', 'tensor', 'events', 'associated', 'with', 'a', 'run', 'and', 'tag', '.'] | train | https://github.com/tensorflow/tensorboard/blob/8e5f497b48e40f2a774f85416b8a35ac0693c35e/tensorboard/backend/event_processing/plugin_event_multiplexer.py#L385-L400 |
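A usage sketch; AddRunsFromDirectory() and Reload() are the standard loading steps on this class, and the log directory, run, and tag names below are placeholders.

from tensorboard.backend.event_processing.plugin_event_multiplexer import (
    EventMultiplexer)

multiplexer = EventMultiplexer()
multiplexer.AddRunsFromDirectory('/tmp/logdir')   # hypothetical log directory
multiplexer.Reload()
events = multiplexer.Tensors('run1', 'loss')      # raises KeyError if absent
for ev in events[:3]:
    print(ev.step, ev.wall_time)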
136 | ninuxorg/nodeshot | nodeshot/core/layers/views.py | LayerNodeListMixin.get | def get(self, request, *args, **kwargs):
""" Retrieve list of nodes of the specified layer """
self.get_layer()
# get nodes of layer
nodes = self.get_nodes(request, *args, **kwargs)
return Response(nodes) | python | def get(self, request, *args, **kwargs):
""" Retrieve list of nodes of the specified layer """
self.get_layer()
# get nodes of layer
nodes = self.get_nodes(request, *args, **kwargs)
return Response(nodes) | ['def', 'get', '(', 'self', ',', 'request', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'self', '.', 'get_layer', '(', ')', '# get nodes of layer', 'nodes', '=', 'self', '.', 'get_nodes', '(', 'request', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', 'return', 'Response', '(', 'nodes', ')'] | Retrieve list of nodes of the specified layer | ['Retrieve', 'list', 'of', 'nodes', 'of', 'the', 'specified', 'layer'] | train | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/layers/views.py#L75-L80 |
137 | guaix-ucm/numina | numina/core/validator.py | as_list | def as_list(callable):
"""Convert a scalar validator in a list validator"""
@wraps(callable)
def wrapper(value_iter):
return [callable(value) for value in value_iter]
return wrapper | python | def as_list(callable):
"""Convert a scalar validator in a list validator"""
@wraps(callable)
def wrapper(value_iter):
return [callable(value) for value in value_iter]
return wrapper | ['def', 'as_list', '(', 'callable', ')', ':', '@', 'wraps', '(', 'callable', ')', 'def', 'wrapper', '(', 'value_iter', ')', ':', 'return', '[', 'callable', '(', 'value', ')', 'for', 'value', 'in', 'value_iter', ']', 'return', 'wrapper'] | Convert a scalar validator in a list validator | ['Convert', 'a', 'scalar', 'validator', 'in', 'a', 'list', 'validator'] | train | https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/core/validator.py#L41-L47 |
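A small worked example for as_list(); it lifts a scalar validator into one that maps over an iterable, propagating the first failure.

from numina.core.validator import as_list

@as_list
def positive(value):
    if value <= 0:
        raise ValueError('must be positive')
    return value

print(positive([1, 2, 3]))     # [1, 2, 3]
# positive([1, -2, 3]) raises ValueError at the second element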
138 | log2timeline/dfvfs | dfvfs/helpers/file_system_searcher.py | FileSystemSearcher.GetRelativePath | def GetRelativePath(self, path_spec):
"""Returns the relative path based on a resolved path specification.
The relative path is the location of the upper most path specification.
The location of the mount point is stripped off if relevant.
Args:
path_spec (PathSpec): path specification.
Returns:
str: corresponding relative path or None if the relative path could not
be determined.
Raises:
PathSpecError: if the path specification is incorrect.
"""
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
self._file_system.type_indicator):
if not location.startswith(self._mount_point.location):
raise errors.PathSpecError(
'Path specification does not contain mount point.')
else:
if not hasattr(path_spec, 'parent'):
raise errors.PathSpecError('Path specification missing parent.')
if path_spec.parent != self._mount_point:
raise errors.PathSpecError(
'Path specification does not contain mount point.')
path_segments = self._file_system.SplitPath(location)
if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
self._file_system.type_indicator):
mount_point_path_segments = self._file_system.SplitPath(
self._mount_point.location)
path_segments = path_segments[len(mount_point_path_segments):]
return '{0:s}{1:s}'.format(
self._file_system.PATH_SEPARATOR,
self._file_system.PATH_SEPARATOR.join(path_segments)) | python | def GetRelativePath(self, path_spec):
"""Returns the relative path based on a resolved path specification.
The relative path is the location of the upper most path specification.
The location of the mount point is stripped off if relevant.
Args:
path_spec (PathSpec): path specification.
Returns:
str: corresponding relative path or None if the relative path could not
be determined.
Raises:
PathSpecError: if the path specification is incorrect.
"""
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
self._file_system.type_indicator):
if not location.startswith(self._mount_point.location):
raise errors.PathSpecError(
'Path specification does not contain mount point.')
else:
if not hasattr(path_spec, 'parent'):
raise errors.PathSpecError('Path specification missing parent.')
if path_spec.parent != self._mount_point:
raise errors.PathSpecError(
'Path specification does not contain mount point.')
path_segments = self._file_system.SplitPath(location)
if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
self._file_system.type_indicator):
mount_point_path_segments = self._file_system.SplitPath(
self._mount_point.location)
path_segments = path_segments[len(mount_point_path_segments):]
return '{0:s}{1:s}'.format(
self._file_system.PATH_SEPARATOR,
self._file_system.PATH_SEPARATOR.join(path_segments)) | ['def', 'GetRelativePath', '(', 'self', ',', 'path_spec', ')', ':', 'location', '=', 'getattr', '(', 'path_spec', ',', "'location'", ',', 'None', ')', 'if', 'location', 'is', 'None', ':', 'raise', 'errors', '.', 'PathSpecError', '(', "'Path specification missing location.'", ')', 'if', 'path_spec_factory', '.', 'Factory', '.', 'IsSystemLevelTypeIndicator', '(', 'self', '.', '_file_system', '.', 'type_indicator', ')', ':', 'if', 'not', 'location', '.', 'startswith', '(', 'self', '.', '_mount_point', '.', 'location', ')', ':', 'raise', 'errors', '.', 'PathSpecError', '(', "'Path specification does not contain mount point.'", ')', 'else', ':', 'if', 'not', 'hasattr', '(', 'path_spec', ',', "'parent'", ')', ':', 'raise', 'errors', '.', 'PathSpecError', '(', "'Path specification missing parent.'", ')', 'if', 'path_spec', '.', 'parent', '!=', 'self', '.', '_mount_point', ':', 'raise', 'errors', '.', 'PathSpecError', '(', "'Path specification does not contain mount point.'", ')', 'path_segments', '=', 'self', '.', '_file_system', '.', 'SplitPath', '(', 'location', ')', 'if', 'path_spec_factory', '.', 'Factory', '.', 'IsSystemLevelTypeIndicator', '(', 'self', '.', '_file_system', '.', 'type_indicator', ')', ':', 'mount_point_path_segments', '=', 'self', '.', '_file_system', '.', 'SplitPath', '(', 'self', '.', '_mount_point', '.', 'location', ')', 'path_segments', '=', 'path_segments', '[', 'len', '(', 'mount_point_path_segments', ')', ':', ']', 'return', "'{0:s}{1:s}'", '.', 'format', '(', 'self', '.', '_file_system', '.', 'PATH_SEPARATOR', ',', 'self', '.', '_file_system', '.', 'PATH_SEPARATOR', '.', 'join', '(', 'path_segments', ')', ')'] | Returns the relative path based on a resolved path specification.
The relative path is the location of the upper most path specification.
The location of the mount point is stripped off if relevant.
Args:
path_spec (PathSpec): path specification.
Returns:
str: corresponding relative path or None if the relative path could not
be determined.
Raises:
PathSpecError: if the path specification is incorrect. | ['Returns', 'the', 'relative', 'path', 'based', 'on', 'a', 'resolved', 'path', 'specification', '.'] | train | https://github.com/log2timeline/dfvfs/blob/2b3ccd115f9901d89f383397d4a1376a873c83c4/dfvfs/helpers/file_system_searcher.py#L484-L527 |
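The mount-point stripping at the end of GetRelativePath() is plain path-segment arithmetic; a standalone illustration that uses no dfvfs types (paths are placeholders):

SEP = '/'
location = '/mnt/image/Users/alice/notes.txt'
mount_point = '/mnt/image'

segments = location.split(SEP)
mount_segments = mount_point.split(SEP)
relative = SEP + SEP.join(segments[len(mount_segments):])
print(relative)   # /Users/alice/notes.txt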
139 | brocade/pynos | pynos/versions/ver_6/ver_6_0_1/yang/brocade_span.py | brocade_span.monitor_session_span_command_dest_tengigabitethernet | def monitor_session_span_command_dest_tengigabitethernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
monitor = ET.SubElement(config, "monitor", xmlns="urn:brocade.com:mgmt:brocade-span")
session = ET.SubElement(monitor, "session")
session_number_key = ET.SubElement(session, "session-number")
session_number_key.text = kwargs.pop('session_number')
span_command = ET.SubElement(session, "span-command")
dest_tengigabitethernet = ET.SubElement(span_command, "dest-tengigabitethernet")
dest_tengigabitethernet.text = kwargs.pop('dest_tengigabitethernet')
callback = kwargs.pop('callback', self._callback)
return callback(config) | python | def monitor_session_span_command_dest_tengigabitethernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
monitor = ET.SubElement(config, "monitor", xmlns="urn:brocade.com:mgmt:brocade-span")
session = ET.SubElement(monitor, "session")
session_number_key = ET.SubElement(session, "session-number")
session_number_key.text = kwargs.pop('session_number')
span_command = ET.SubElement(session, "span-command")
dest_tengigabitethernet = ET.SubElement(span_command, "dest-tengigabitethernet")
dest_tengigabitethernet.text = kwargs.pop('dest_tengigabitethernet')
callback = kwargs.pop('callback', self._callback)
return callback(config) | ['def', 'monitor_session_span_command_dest_tengigabitethernet', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'config', '=', 'ET', '.', 'Element', '(', '"config"', ')', 'monitor', '=', 'ET', '.', 'SubElement', '(', 'config', ',', '"monitor"', ',', 'xmlns', '=', '"urn:brocade.com:mgmt:brocade-span"', ')', 'session', '=', 'ET', '.', 'SubElement', '(', 'monitor', ',', '"session"', ')', 'session_number_key', '=', 'ET', '.', 'SubElement', '(', 'session', ',', '"session-number"', ')', 'session_number_key', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'session_number'", ')', 'span_command', '=', 'ET', '.', 'SubElement', '(', 'session', ',', '"span-command"', ')', 'dest_tengigabitethernet', '=', 'ET', '.', 'SubElement', '(', 'span_command', ',', '"dest-tengigabitethernet"', ')', 'dest_tengigabitethernet', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'dest_tengigabitethernet'", ')', 'callback', '=', 'kwargs', '.', 'pop', '(', "'callback'", ',', 'self', '.', '_callback', ')', 'return', 'callback', '(', 'config', ')'] | Auto Generated Code | ['Auto', 'Generated', 'Code'] | train | https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/ver_6/ver_6_0_1/yang/brocade_span.py#L98-L111 |
140 | gwastro/pycbc | pycbc/sensitivity.py | volume_montecarlo | def volume_montecarlo(found_d, missed_d, found_mchirp, missed_mchirp,
distribution_param, distribution, limits_param,
min_param=None, max_param=None):
"""
Compute sensitive volume and standard error via direct Monte Carlo integral
Injections should be made over a range of distances such that sensitive
volume due to signals closer than D_min is negligible, and efficiency at
distances above D_max is negligible
TODO : Replace this function by Collin's formula given in Usman et al .. ?
OR get that coded as a new function?
Parameters
-----------
found_d: numpy.ndarray
The distances of found injections
missed_d: numpy.ndarray
The distances of missed injections
found_mchirp: numpy.ndarray
Chirp mass of found injections
missed_mchirp: numpy.ndarray
Chirp mass of missed injections
distribution_param: string
Parameter D of the injections used to generate a distribution over
distance, may be 'distance', 'chirp_distance'.
distribution: string
form of the distribution over the parameter, may be
'log' (uniform in log D)
'uniform' (uniform in D)
'distancesquared' (uniform in D**2)
'volume' (uniform in D***3)
limits_param: string
Parameter Dlim specifying limits inside which injections were made
may be 'distance', 'chirp distance'
min_param: float
minimum value of Dlim at which injections were made; only used for
log distribution, then if None the minimum actually injected value
will be used
max_param: float
maximum value of Dlim out to which injections were made; if None
the maximum actually injected value will be used
Returns
--------
volume: float
Volume estimate
volume_error: float
The standard error in the volume
"""
d_power = {
'log' : 3.,
'uniform' : 2.,
'distancesquared' : 1.,
'volume' : 0.
}[distribution]
mchirp_power = {
'log' : 0.,
'uniform' : 5. / 6.,
'distancesquared' : 5. / 3.,
'volume' : 15. / 6.
}[distribution]
# establish maximum physical distance: first for chirp distance distribution
if limits_param == 'chirp_distance':
mchirp_standard_bns = 1.4 * 2.**(-1. / 5.)
all_mchirp = numpy.concatenate((found_mchirp, missed_mchirp))
max_mchirp = all_mchirp.max()
if max_param is not None:
# use largest actually injected mchirp for conversion
max_distance = max_param * \
(max_mchirp / mchirp_standard_bns)**(5. / 6.)
else:
max_distance = max(found_d.max(), missed_d.max())
elif limits_param == 'distance':
if max_param is not None:
max_distance = max_param
else:
# if no max distance given, use max distance actually injected
max_distance = max(found_d.max(), missed_d.max())
else:
raise NotImplementedError("%s is not a recognized parameter"
% limits_param)
# volume of sphere
montecarlo_vtot = (4. / 3.) * numpy.pi * max_distance**3.
# arrays of weights for the MC integral
if distribution_param == 'distance':
found_weights = found_d ** d_power
missed_weights = missed_d ** d_power
elif distribution_param == 'chirp_distance':
# weight by a power of mchirp to rescale injection density to the
# target mass distribution
found_weights = found_d ** d_power * \
found_mchirp ** mchirp_power
missed_weights = missed_d ** d_power * \
missed_mchirp ** mchirp_power
else:
raise NotImplementedError("%s is not a recognized distance parameter"
% distribution_param)
all_weights = numpy.concatenate((found_weights, missed_weights))
# measured weighted efficiency is w_i for a found inj and 0 for missed
# MC integral is volume of sphere * (sum of found weights)/(sum of all weights)
# over injections covering the sphere
mc_weight_samples = numpy.concatenate((found_weights, 0 * missed_weights))
mc_sum = sum(mc_weight_samples)
if limits_param == 'distance':
mc_norm = sum(all_weights)
elif limits_param == 'chirp_distance':
# if injections are made up to a maximum chirp distance, account for
# extra missed injections that would occur when injecting up to
# maximum physical distance : this works out to a 'chirp volume' factor
mc_norm = sum(all_weights * (max_mchirp / all_mchirp) ** (5. / 2.))
# take out a constant factor
mc_prefactor = montecarlo_vtot / mc_norm
# count the samples
if limits_param == 'distance':
Ninj = len(mc_weight_samples)
elif limits_param == 'chirp_distance':
# find the total expected number after extending from maximum chirp
# dist up to maximum physical distance
if distribution == 'log':
# only need minimum distance in this one case
if min_param is not None:
min_distance = min_param * \
(numpy.min(all_mchirp) / mchirp_standard_bns) ** (5. / 6.)
else:
min_distance = min(numpy.min(found_d), numpy.min(missed_d))
logrange = numpy.log(max_distance / min_distance)
Ninj = len(mc_weight_samples) + (5. / 6.) * \
sum(numpy.log(max_mchirp / all_mchirp) / logrange)
else:
Ninj = sum((max_mchirp / all_mchirp) ** mchirp_power)
# sample variance of efficiency: mean of the square - square of the mean
mc_sample_variance = sum(mc_weight_samples ** 2.) / Ninj - \
(mc_sum / Ninj) ** 2.
# return MC integral and its standard deviation; variance of mc_sum scales
# relative to sample variance by Ninj (Bienayme' rule)
vol = mc_prefactor * mc_sum
vol_err = mc_prefactor * (Ninj * mc_sample_variance) ** 0.5
return vol, vol_err | python | def volume_montecarlo(found_d, missed_d, found_mchirp, missed_mchirp,
distribution_param, distribution, limits_param,
min_param=None, max_param=None):
"""
Compute sensitive volume and standard error via direct Monte Carlo integral
Injections should be made over a range of distances such that sensitive
volume due to signals closer than D_min is negligible, and efficiency at
distances above D_max is negligible
TODO : Replace this function by Collin's formula given in Usman et al .. ?
OR get that coded as a new function?
Parameters
-----------
found_d: numpy.ndarray
The distances of found injections
missed_d: numpy.ndarray
The distances of missed injections
found_mchirp: numpy.ndarray
Chirp mass of found injections
missed_mchirp: numpy.ndarray
Chirp mass of missed injections
distribution_param: string
Parameter D of the injections used to generate a distribution over
distance, may be 'distance', 'chirp_distance'.
distribution: string
form of the distribution over the parameter, may be
'log' (uniform in log D)
'uniform' (uniform in D)
'distancesquared' (uniform in D**2)
'volume' (uniform in D***3)
limits_param: string
Parameter Dlim specifying limits inside which injections were made
may be 'distance', 'chirp distance'
min_param: float
minimum value of Dlim at which injections were made; only used for
log distribution, then if None the minimum actually injected value
will be used
max_param: float
maximum value of Dlim out to which injections were made; if None
the maximum actually injected value will be used
Returns
--------
volume: float
Volume estimate
volume_error: float
The standard error in the volume
"""
d_power = {
'log' : 3.,
'uniform' : 2.,
'distancesquared' : 1.,
'volume' : 0.
}[distribution]
mchirp_power = {
'log' : 0.,
'uniform' : 5. / 6.,
'distancesquared' : 5. / 3.,
'volume' : 15. / 6.
}[distribution]
# establish maximum physical distance: first for chirp distance distribution
if limits_param == 'chirp_distance':
mchirp_standard_bns = 1.4 * 2.**(-1. / 5.)
all_mchirp = numpy.concatenate((found_mchirp, missed_mchirp))
max_mchirp = all_mchirp.max()
if max_param is not None:
# use largest actually injected mchirp for conversion
max_distance = max_param * \
(max_mchirp / mchirp_standard_bns)**(5. / 6.)
else:
max_distance = max(found_d.max(), missed_d.max())
elif limits_param == 'distance':
if max_param is not None:
max_distance = max_param
else:
# if no max distance given, use max distance actually injected
max_distance = max(found_d.max(), missed_d.max())
else:
raise NotImplementedError("%s is not a recognized parameter"
% limits_param)
# volume of sphere
montecarlo_vtot = (4. / 3.) * numpy.pi * max_distance**3.
# arrays of weights for the MC integral
if distribution_param == 'distance':
found_weights = found_d ** d_power
missed_weights = missed_d ** d_power
elif distribution_param == 'chirp_distance':
# weight by a power of mchirp to rescale injection density to the
# target mass distribution
found_weights = found_d ** d_power * \
found_mchirp ** mchirp_power
missed_weights = missed_d ** d_power * \
missed_mchirp ** mchirp_power
else:
raise NotImplementedError("%s is not a recognized distance parameter"
% distribution_param)
all_weights = numpy.concatenate((found_weights, missed_weights))
# measured weighted efficiency is w_i for a found inj and 0 for missed
# MC integral is volume of sphere * (sum of found weights)/(sum of all weights)
# over injections covering the sphere
mc_weight_samples = numpy.concatenate((found_weights, 0 * missed_weights))
mc_sum = sum(mc_weight_samples)
if limits_param == 'distance':
mc_norm = sum(all_weights)
elif limits_param == 'chirp_distance':
# if injections are made up to a maximum chirp distance, account for
# extra missed injections that would occur when injecting up to
# maximum physical distance : this works out to a 'chirp volume' factor
mc_norm = sum(all_weights * (max_mchirp / all_mchirp) ** (5. / 2.))
# take out a constant factor
mc_prefactor = montecarlo_vtot / mc_norm
# count the samples
if limits_param == 'distance':
Ninj = len(mc_weight_samples)
elif limits_param == 'chirp_distance':
# find the total expected number after extending from maximum chirp
# dist up to maximum physical distance
if distribution == 'log':
# only need minimum distance in this one case
if min_param is not None:
min_distance = min_param * \
(numpy.min(all_mchirp) / mchirp_standard_bns) ** (5. / 6.)
else:
min_distance = min(numpy.min(found_d), numpy.min(missed_d))
logrange = numpy.log(max_distance / min_distance)
Ninj = len(mc_weight_samples) + (5. / 6.) * \
sum(numpy.log(max_mchirp / all_mchirp) / logrange)
else:
Ninj = sum((max_mchirp / all_mchirp) ** mchirp_power)
# sample variance of efficiency: mean of the square - square of the mean
mc_sample_variance = sum(mc_weight_samples ** 2.) / Ninj - \
(mc_sum / Ninj) ** 2.
# return MC integral and its standard deviation; variance of mc_sum scales
# relative to sample variance by Ninj (Bienayme' rule)
vol = mc_prefactor * mc_sum
vol_err = mc_prefactor * (Ninj * mc_sample_variance) ** 0.5
return vol, vol_err | ['def', 'volume_montecarlo', '(', 'found_d', ',', 'missed_d', ',', 'found_mchirp', ',', 'missed_mchirp', ',', 'distribution_param', ',', 'distribution', ',', 'limits_param', ',', 'min_param', '=', 'None', ',', 'max_param', '=', 'None', ')', ':', 'd_power', '=', '{', "'log'", ':', '3.', ',', "'uniform'", ':', '2.', ',', "'distancesquared'", ':', '1.', ',', "'volume'", ':', '0.', '}', '[', 'distribution', ']', 'mchirp_power', '=', '{', "'log'", ':', '0.', ',', "'uniform'", ':', '5.', '/', '6.', ',', "'distancesquared'", ':', '5.', '/', '3.', ',', "'volume'", ':', '15.', '/', '6.', '}', '[', 'distribution', ']', '# establish maximum physical distance: first for chirp distance distribution', 'if', 'limits_param', '==', "'chirp_distance'", ':', 'mchirp_standard_bns', '=', '1.4', '*', '2.', '**', '(', '-', '1.', '/', '5.', ')', 'all_mchirp', '=', 'numpy', '.', 'concatenate', '(', '(', 'found_mchirp', ',', 'missed_mchirp', ')', ')', 'max_mchirp', '=', 'all_mchirp', '.', 'max', '(', ')', 'if', 'max_param', 'is', 'not', 'None', ':', '# use largest actually injected mchirp for conversion', 'max_distance', '=', 'max_param', '*', '(', 'max_mchirp', '/', 'mchirp_standard_bns', ')', '**', '(', '5.', '/', '6.', ')', 'else', ':', 'max_distance', '=', 'max', '(', 'found_d', '.', 'max', '(', ')', ',', 'missed_d', '.', 'max', '(', ')', ')', 'elif', 'limits_param', '==', "'distance'", ':', 'if', 'max_param', 'is', 'not', 'None', ':', 'max_distance', '=', 'max_param', 'else', ':', '# if no max distance given, use max distance actually injected', 'max_distance', '=', 'max', '(', 'found_d', '.', 'max', '(', ')', ',', 'missed_d', '.', 'max', '(', ')', ')', 'else', ':', 'raise', 'NotImplementedError', '(', '"%s is not a recognized parameter"', '%', 'limits_param', ')', '# volume of sphere', 'montecarlo_vtot', '=', '(', '4.', '/', '3.', ')', '*', 'numpy', '.', 'pi', '*', 'max_distance', '**', '3.', '# arrays of weights for the MC integral', 'if', 'distribution_param', '==', "'distance'", ':', 'found_weights', '=', 'found_d', '**', 'd_power', 'missed_weights', '=', 'missed_d', '**', 'd_power', 'elif', 'distribution_param', '==', "'chirp_distance'", ':', '# weight by a power of mchirp to rescale injection density to the', '# target mass distribution', 'found_weights', '=', 'found_d', '**', 'd_power', '*', 'found_mchirp', '**', 'mchirp_power', 'missed_weights', '=', 'missed_d', '**', 'd_power', '*', 'missed_mchirp', '**', 'mchirp_power', 'else', ':', 'raise', 'NotImplementedError', '(', '"%s is not a recognized distance parameter"', '%', 'distribution_param', ')', 'all_weights', '=', 'numpy', '.', 'concatenate', '(', '(', 'found_weights', ',', 'missed_weights', ')', ')', '# measured weighted efficiency is w_i for a found inj and 0 for missed', '# MC integral is volume of sphere * (sum of found weights)/(sum of all weights)', '# over injections covering the sphere', 'mc_weight_samples', '=', 'numpy', '.', 'concatenate', '(', '(', 'found_weights', ',', '0', '*', 'missed_weights', ')', ')', 'mc_sum', '=', 'sum', '(', 'mc_weight_samples', ')', 'if', 'limits_param', '==', "'distance'", ':', 'mc_norm', '=', 'sum', '(', 'all_weights', ')', 'elif', 'limits_param', '==', "'chirp_distance'", ':', '# if injections are made up to a maximum chirp distance, account for', '# extra missed injections that would occur when injecting up to', "# maximum physical distance : this works out to a 'chirp volume' factor", 'mc_norm', '=', 'sum', '(', 'all_weights', '*', '(', 'max_mchirp', '/', 'all_mchirp', ')', '**', '(', '5.', '/', '2.', ')', ')', '# take out a constant factor', 'mc_prefactor', '=', 'montecarlo_vtot', '/', 'mc_norm', '# count the samples', 'if', 'limits_param', '==', "'distance'", ':', 'Ninj', '=', 'len', '(', 'mc_weight_samples', ')', 'elif', 'limits_param', '==', "'chirp_distance'", ':', '# find the total expected number after extending from maximum chirp', '# dist up to maximum physical distance', 'if', 'distribution', '==', "'log'", ':', '# only need minimum distance in this one case', 'if', 'min_param', 'is', 'not', 'None', ':', 'min_distance', '=', 'min_param', '*', '(', 'numpy', '.', 'min', '(', 'all_mchirp', ')', '/', 'mchirp_standard_bns', ')', '**', '(', '5.', '/', '6.', ')', 'else', ':', 'min_distance', '=', 'min', '(', 'numpy', '.', 'min', '(', 'found_d', ')', ',', 'numpy', '.', 'min', '(', 'missed_d', ')', ')', 'logrange', '=', 'numpy', '.', 'log', '(', 'max_distance', '/', 'min_distance', ')', 'Ninj', '=', 'len', '(', 'mc_weight_samples', ')', '+', '(', '5.', '/', '6.', ')', '*', 'sum', '(', 'numpy', '.', 'log', '(', 'max_mchirp', '/', 'all_mchirp', ')', '/', 'logrange', ')', 'else', ':', 'Ninj', '=', 'sum', '(', '(', 'max_mchirp', '/', 'all_mchirp', ')', '**', 'mchirp_power', ')', '# sample variance of efficiency: mean of the square - square of the mean', 'mc_sample_variance', '=', 'sum', '(', 'mc_weight_samples', '**', '2.', ')', '/', 'Ninj', '-', '(', 'mc_sum', '/', 'Ninj', ')', '**', '2.', '# return MC integral and its standard deviation; variance of mc_sum scales', "# relative to sample variance by Ninj (Bienayme' rule)", 'vol', '=', 'mc_prefactor', '*', 'mc_sum', 'vol_err', '=', 'mc_prefactor', '*', '(', 'Ninj', '*', 'mc_sample_variance', ')', '**', '0.5', 'return', 'vol', ',', 'vol_err'] | Compute sensitive volume and standard error via direct Monte Carlo integral
Injections should be made over a range of distances such that sensitive
volume due to signals closer than D_min is negligible, and efficiency at
distances above D_max is negligible.
TODO: Replace this function with Collin's formula given in Usman et al.,
or get that coded as a new function?
Parameters
-----------
found_d: numpy.ndarray
The distances of found injections
missed_d: numpy.ndarray
The distances of missed injections
found_mchirp: numpy.ndarray
Chirp mass of found injections
missed_mchirp: numpy.ndarray
Chirp mass of missed injections
distribution_param: string
Parameter D of the injections used to generate a distribution over
distance, may be 'distance', 'chirp_distance'.
distribution: string
form of the distribution over the parameter, may be
'log' (uniform in log D)
'uniform' (uniform in D)
'distancesquared' (uniform in D**2)
'volume' (uniform in D**3)
limits_param: string
Parameter Dlim specifying limits inside which injections were made
may be 'distance', 'chirp_distance'
min_param: float
minimum value of Dlim at which injections were made; only used for
log distribution, then if None the minimum actually injected value
will be used
max_param: float
maximum value of Dlim out to which injections were made; if None
the maximum actually injected value will be used
Returns
--------
volume: float
Volume estimate
volume_error: float
The standard error in the volume | ['Compute', 'sensitive', 'volume', 'and', 'standard', 'error', 'via', 'direct', 'Monte', 'Carlo', 'integral'] | train | https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/sensitivity.py#L85-L232 |
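A minimal numeric sketch of the Monte Carlo identity used above, for the simplest 'volume' (uniform in D**3) case where every weight is 1 and the estimate reduces to the found fraction times the sphere volume; the numbers and the detection cut are hypothetical, and plain numpy stands in for pycbc:
import numpy as np

rng = np.random.default_rng(0)
max_distance = 100.0                                       # hypothetical injection limit
d = max_distance * rng.uniform(0, 1, 10000) ** (1. / 3.)   # uniform in volume
weights = np.where(d < 60.0, 1.0, 0.0)                     # w_i = 1 if found, 0 if missed

vtot = (4. / 3.) * np.pi * max_distance ** 3.              # volume of sphere
ninj = len(weights)
vol = vtot * weights.sum() / ninj                          # mc_prefactor * mc_sum
var = (weights ** 2.).sum() / ninj - (weights.sum() / ninj) ** 2.
vol_err = (vtot / ninj) * (ninj * var) ** 0.5              # Bienayme' scaling
print(vol, vol_err)                                        # vol is close to (4/3)*pi*60**3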
141 | wonambi-python/wonambi | wonambi/trans/montage.py | montage | def montage(data, ref_chan=None, ref_to_avg=False, bipolar=None,
method='average'):
"""Apply linear transformation to the channels.
Parameters
----------
data : instance of DataRaw
the data to filter
ref_chan : list of str
list of channels used as reference
ref_to_avg : bool
whether to re-reference to the average or not
bipolar : float
distance in mm to consider two channels as neighbors and then compute
the bipolar montage between them.
method : str
'average' or 'regression'. 'average' takes the
average across the channels selected as reference (it can be all) and
subtracts it from each channel. 'regression' keeps the residuals after
regressing out the mean across channels.
Returns
-------
filtered_data : instance of DataRaw
filtered data
Notes
-----
If you don't change anything, it returns the same instance of data.
"""
if ref_to_avg and ref_chan is not None:
raise TypeError('You cannot specify reference to the average and '
'the channels to use as reference')
if ref_chan is not None:
if (not isinstance(ref_chan, (list, tuple)) or
not all(isinstance(x, str) for x in ref_chan)):
raise TypeError('chan should be a list of strings')
if ref_chan is None:
ref_chan = [] # TODO: check bool for ref_chan
if bipolar:
if not data.attr['chan']:
raise ValueError('Data should have Chan information in attr')
_assert_equal_channels(data.axis['chan'])
chan_in_data = data.axis['chan'][0]
chan = data.attr['chan']
chan = chan(lambda x: x.label in chan_in_data)
chan, trans = create_bipolar_chan(chan, bipolar)
data.attr['chan'] = chan
if ref_to_avg or ref_chan or bipolar:
mdata = data._copy()
idx_chan = mdata.index_of('chan')
for i in range(mdata.number_of('trial')):
if ref_to_avg or ref_chan:
if ref_to_avg:
ref_chan = data.axis['chan'][i]
ref_data = data(trial=i, chan=ref_chan)
if method == 'average':
mdata.data[i] = (data(trial=i) - mean(ref_data, axis=idx_chan))
elif method == 'regression':
mdata.data[i] = compute_average_regress(data(trial=i), idx_chan)
elif bipolar:
if not data.index_of('chan') == 0:
raise ValueError('For matrix multiplication to work, '
'the first dimension should be chan')
mdata.data[i] = dot(trans, data(trial=i))
mdata.axis['chan'][i] = asarray(chan.return_label(),
dtype='U')
else:
mdata = data
return mdata | python | def montage(data, ref_chan=None, ref_to_avg=False, bipolar=None,
method='average'):
"""Apply linear transformation to the channels.
Parameters
----------
data : instance of DataRaw
the data to filter
ref_chan : list of str
list of channels used as reference
ref_to_avg : bool
whether to re-reference to the average or not
bipolar : float
distance in mm to consider two channels as neighbors and then compute
the bipolar montage between them.
method : str
'average' or 'regression'. 'average' takes the
average across the channels selected as reference (it can be all) and
subtracts it from each channel. 'regression' keeps the residuals after
regressing out the mean across channels.
Returns
-------
filtered_data : instance of DataRaw
filtered data
Notes
-----
If you don't change anything, it returns the same instance of data.
"""
if ref_to_avg and ref_chan is not None:
raise TypeError('You cannot specify reference to the average and '
'the channels to use as reference')
if ref_chan is not None:
if (not isinstance(ref_chan, (list, tuple)) or
not all(isinstance(x, str) for x in ref_chan)):
raise TypeError('chan should be a list of strings')
if ref_chan is None:
ref_chan = [] # TODO: check bool for ref_chan
if bipolar:
if not data.attr['chan']:
raise ValueError('Data should have Chan information in attr')
_assert_equal_channels(data.axis['chan'])
chan_in_data = data.axis['chan'][0]
chan = data.attr['chan']
chan = chan(lambda x: x.label in chan_in_data)
chan, trans = create_bipolar_chan(chan, bipolar)
data.attr['chan'] = chan
if ref_to_avg or ref_chan or bipolar:
mdata = data._copy()
idx_chan = mdata.index_of('chan')
for i in range(mdata.number_of('trial')):
if ref_to_avg or ref_chan:
if ref_to_avg:
ref_chan = data.axis['chan'][i]
ref_data = data(trial=i, chan=ref_chan)
if method == 'average':
mdata.data[i] = (data(trial=i) - mean(ref_data, axis=idx_chan))
elif method == 'regression':
mdata.data[i] = compute_average_regress(data(trial=i), idx_chan)
elif bipolar:
if not data.index_of('chan') == 0:
raise ValueError('For matrix multiplication to work, '
'the first dimension should be chan')
mdata.data[i] = dot(trans, data(trial=i))
mdata.axis['chan'][i] = asarray(chan.return_label(),
dtype='U')
else:
mdata = data
return mdata | ['def', 'montage', '(', 'data', ',', 'ref_chan', '=', 'None', ',', 'ref_to_avg', '=', 'False', ',', 'bipolar', '=', 'None', ',', 'method', '=', "'average'", ')', ':', 'if', 'ref_to_avg', 'and', 'ref_chan', 'is', 'not', 'None', ':', 'raise', 'TypeError', '(', "'You cannot specify reference to the average and '", "'the channels to use as reference'", ')', 'if', 'ref_chan', 'is', 'not', 'None', ':', 'if', '(', 'not', 'isinstance', '(', 'ref_chan', ',', '(', 'list', ',', 'tuple', ')', ')', 'or', 'not', 'all', '(', 'isinstance', '(', 'x', ',', 'str', ')', 'for', 'x', 'in', 'ref_chan', ')', ')', ':', 'raise', 'TypeError', '(', "'chan should be a list of strings'", ')', 'if', 'ref_chan', 'is', 'None', ':', 'ref_chan', '=', '[', ']', '# TODO: check bool for ref_chan', 'if', 'bipolar', ':', 'if', 'not', 'data', '.', 'attr', '[', "'chan'", ']', ':', 'raise', 'ValueError', '(', "'Data should have Chan information in attr'", ')', '_assert_equal_channels', '(', 'data', '.', 'axis', '[', "'chan'", ']', ')', 'chan_in_data', '=', 'data', '.', 'axis', '[', "'chan'", ']', '[', '0', ']', 'chan', '=', 'data', '.', 'attr', '[', "'chan'", ']', 'chan', '=', 'chan', '(', 'lambda', 'x', ':', 'x', '.', 'label', 'in', 'chan_in_data', ')', 'chan', ',', 'trans', '=', 'create_bipolar_chan', '(', 'chan', ',', 'bipolar', ')', 'data', '.', 'attr', '[', "'chan'", ']', '=', 'chan', 'if', 'ref_to_avg', 'or', 'ref_chan', 'or', 'bipolar', ':', 'mdata', '=', 'data', '.', '_copy', '(', ')', 'idx_chan', '=', 'mdata', '.', 'index_of', '(', "'chan'", ')', 'for', 'i', 'in', 'range', '(', 'mdata', '.', 'number_of', '(', "'trial'", ')', ')', ':', 'if', 'ref_to_avg', 'or', 'ref_chan', ':', 'if', 'ref_to_avg', ':', 'ref_chan', '=', 'data', '.', 'axis', '[', "'chan'", ']', '[', 'i', ']', 'ref_data', '=', 'data', '(', 'trial', '=', 'i', ',', 'chan', '=', 'ref_chan', ')', 'if', 'method', '==', "'average'", ':', 'mdata', '.', 'data', '[', 'i', ']', '=', '(', 'data', '(', 'trial', '=', 'i', ')', '-', 'mean', '(', 'ref_data', ',', 'axis', '=', 'idx_chan', ')', ')', 'elif', 'method', '==', "'regression'", ':', 'mdata', '.', 'data', '[', 'i', ']', '=', 'compute_average_regress', '(', 'data', '(', 'trial', '=', 'i', ')', ',', 'idx_chan', ')', 'elif', 'bipolar', ':', 'if', 'not', 'data', '.', 'index_of', '(', "'chan'", ')', '==', '0', ':', 'raise', 'ValueError', '(', "'For matrix multiplication to work, '", "'the first dimension should be chan'", ')', 'mdata', '.', 'data', '[', 'i', ']', '=', 'dot', '(', 'trans', ',', 'data', '(', 'trial', '=', 'i', ')', ')', 'mdata', '.', 'axis', '[', "'chan'", ']', '[', 'i', ']', '=', 'asarray', '(', 'chan', '.', 'return_label', '(', ')', ',', 'dtype', '=', "'U'", ')', 'else', ':', 'mdata', '=', 'data', 'return', 'mdata'] | Apply linear transformation to the channels.
Parameters
----------
data : instance of DataRaw
the data to filter
ref_chan : list of str
list of channels used as reference
ref_to_avg : bool
whether to re-reference to the average or not
bipolar : float
distance in mm to consider two channels as neighbors and then compute
the bipolar montage between them.
method : str
'average' or 'regression'. 'average' takes the
average across the channels selected as reference (it can be all) and
subtracts it from each channel. 'regression' keeps the residuals after
regressing out the mean across channels.
Returns
-------
filtered_data : instance of DataRaw
filtered data
Notes
-----
If you don't change anything, it returns the same instance of data. | ['Apply', 'linear', 'transformation', 'to', 'the', 'channels', '.'] | train | https://github.com/wonambi-python/wonambi/blob/1d8e3d7e53df8017c199f703bcab582914676e76/wonambi/trans/montage.py#L18-L99 |
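A standalone sketch of the 'average' re-referencing step performed above, on a plain chan x time numpy array rather than a wonambi DataRaw instance (the data here are random stand-ins):
import numpy as np

data = np.random.randn(3, 100)            # chan x time
ref = data.mean(axis=0)                   # mean across the reference channels
rereferenced = data - ref                 # subtracted from every channel
assert np.allclose(rereferenced.mean(axis=0), 0)  # common average removed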
142 | ttinoco/OPTALG | optalg/opt_solver/problem.py | cast_problem | def cast_problem(problem):
"""
Casts problem object with known interface as OptProblem.
Parameters
----------
problem : Object
"""
# Optproblem
if isinstance(problem,OptProblem):
return problem
# Other
else:
# Type Base
if (not hasattr(problem,'G') or
(problem.G.shape[0] == problem.G.shape[1] and
problem.G.shape[0] == problem.G.nnz and
np.all(problem.G.row == problem.G.col) and
np.all(problem.G.data == 1.))):
return create_problem_from_type_base(problem)
# Type A
else:
return create_problem_from_type_A(problem) | python | def cast_problem(problem):
"""
Casts problem object with known interface as OptProblem.
Parameters
----------
problem : Object
"""
# Optproblem
if isinstance(problem,OptProblem):
return problem
# Other
else:
# Type Base
if (not hasattr(problem,'G') or
(problem.G.shape[0] == problem.G.shape[1] and
problem.G.shape[0] == problem.G.nnz and
np.all(problem.G.row == problem.G.col) and
np.all(problem.G.data == 1.))):
return create_problem_from_type_base(problem)
# Type A
else:
return create_problem_from_type_A(problem) | ['def', 'cast_problem', '(', 'problem', ')', ':', '# Optproblem', 'if', 'isinstance', '(', 'problem', ',', 'OptProblem', ')', ':', 'return', 'problem', '# Other', 'else', ':', '# Type Base', 'if', '(', 'not', 'hasattr', '(', 'problem', ',', "'G'", ')', 'or', '(', 'problem', '.', 'G', '.', 'shape', '[', '0', ']', '==', 'problem', '.', 'G', '.', 'shape', '[', '1', ']', 'and', 'problem', '.', 'G', '.', 'shape', '[', '0', ']', '==', 'problem', '.', 'G', '.', 'nnz', 'and', 'np', '.', 'all', '(', 'problem', '.', 'G', '.', 'row', '==', 'problem', '.', 'G', '.', 'col', ')', 'and', 'np', '.', 'all', '(', 'problem', '.', 'G', '.', 'data', '==', '1.', ')', ')', ')', ':', 'return', 'create_problem_from_type_base', '(', 'problem', ')', '# Type A', 'else', ':', 'return', 'create_problem_from_type_A', '(', 'problem', ')'] | Casts problem object with known interface as OptProblem.
Parameters
----------
problem : Object | ['Casts', 'problem', 'object', 'with', 'known', 'interface', 'as', 'OptProblem', '.'] | train | https://github.com/ttinoco/OPTALG/blob/d4f141292f281eea4faa71473258139e7f433001/optalg/opt_solver/problem.py#L185-L211 |
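The "Type Base" branch above tests whether G is an identity matrix without densifying it; a small sketch of that test on a scipy COO matrix (scipy is an assumed dependency here, since the attributes used are COO-specific):
import numpy as np
from scipy.sparse import coo_matrix

G = coo_matrix(np.eye(4))                 # 4x4 identity, 4 stored entries
is_identity = (G.shape[0] == G.shape[1] and
               G.shape[0] == G.nnz and    # exactly n nonzeros...
               np.all(G.row == G.col) and # ...all on the diagonal...
               np.all(G.data == 1.))      # ...and all equal to one
print(is_identity)                        # True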
143 | Erotemic/utool | utool/Preferences.py | Pref.iteritems | def iteritems(self):
"""
Wow this class is messed up. I had to overwrite items when
moving to python3, just because I hadn't called it yet
"""
for (key, val) in six.iteritems(self.__dict__):
if key in self._printable_exclude:
continue
yield (key, val) | python | def iteritems(self):
"""
Wow this class is messed up. I had to overwrite items when
moving to python3, just because I hadn't called it yet
"""
for (key, val) in six.iteritems(self.__dict__):
if key in self._printable_exclude:
continue
yield (key, val) | ['def', 'iteritems', '(', 'self', ')', ':', 'for', '(', 'key', ',', 'val', ')', 'in', 'six', '.', 'iteritems', '(', 'self', '.', '__dict__', ')', ':', 'if', 'key', 'in', 'self', '.', '_printable_exclude', ':', 'continue', 'yield', '(', 'key', ',', 'val', ')'] | Wow this class is messed up. I had to overwrite items when
moving to python3, just because I hadn't called it yet | ['Wow', 'this', 'class', 'is', 'messed', 'up', '.', 'I', 'had', 'to', 'overwrite', 'items', 'when', 'moving', 'to', 'python3', 'just', 'because', 'I', 'hadn', 't', 'called', 'it', 'yet'] | train | https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L320-L328 |
144 | klahnakoski/pyLibrary | pyLibrary/env/elasticsearch.py | retro_schema | def retro_schema(schema):
"""
CONVERT SCHEMA FROM 5.x to 1.x
:param schema:
:return:
"""
output = wrap({
"mappings":{
typename: {
"dynamic_templates": [
retro_dynamic_template(*(t.items()[0]))
for t in details.dynamic_templates
],
"properties": retro_properties(details.properties)
}
for typename, details in schema.mappings.items()
},
"settings": schema.settings
})
return output | python | def retro_schema(schema):
"""
CONVERT SCHEMA FROM 5.x to 1.x
:param schema:
:return:
"""
output = wrap({
"mappings":{
typename: {
"dynamic_templates": [
retro_dynamic_template(*(t.items()[0]))
for t in details.dynamic_templates
],
"properties": retro_properties(details.properties)
}
for typename, details in schema.mappings.items()
},
"settings": schema.settings
})
return output | ['def', 'retro_schema', '(', 'schema', ')', ':', 'output', '=', 'wrap', '(', '{', '"mappings"', ':', '{', 'typename', ':', '{', '"dynamic_templates"', ':', '[', 'retro_dynamic_template', '(', '*', '(', 't', '.', 'items', '(', ')', '[', '0', ']', ')', ')', 'for', 't', 'in', 'details', '.', 'dynamic_templates', ']', ',', '"properties"', ':', 'retro_properties', '(', 'details', '.', 'properties', ')', '}', 'for', 'typename', ',', 'details', 'in', 'schema', '.', 'mappings', '.', 'items', '(', ')', '}', ',', '"settings"', ':', 'schema', '.', 'settings', '}', ')', 'return', 'output'] | CONVERT SCHEMA FROM 5.x to 1.x
:param schema:
:return: | ['CONVERT', 'SCHEMA', 'FROM', '5', '.', 'x', 'to', '1', '.', 'x', ':', 'param', 'schema', ':', ':', 'return', ':'] | train | https://github.com/klahnakoski/pyLibrary/blob/fa2dcbc48fda8d26999baef400e9a98149e0b982/pyLibrary/env/elasticsearch.py#L1425-L1444 |
145 | unbit/sftpclone | sftpclone/sftpclone.py | SFTPClone.file_upload | def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st) | python | def file_upload(self, local_path, remote_path, l_st):
"""Upload local_path to remote_path and set permission and mtime."""
self.sftp.put(local_path, remote_path)
self._match_modes(remote_path, l_st) | ['def', 'file_upload', '(', 'self', ',', 'local_path', ',', 'remote_path', ',', 'l_st', ')', ':', 'self', '.', 'sftp', '.', 'put', '(', 'local_path', ',', 'remote_path', ')', 'self', '.', '_match_modes', '(', 'remote_path', ',', 'l_st', ')'] | Upload local_path to remote_path and set permission and mtime. | ['Upload', 'local_path', 'to', 'remote_path', 'and', 'set', 'permission', 'and', 'mtime', '.'] | train | https://github.com/unbit/sftpclone/blob/1cc89478e680fc4e0d12b1a15b5bafd0390d05da/sftpclone/sftpclone.py#L392-L395 |
146 | pytroll/trollimage | trollimage/image.py | Image._add_channel | def _add_channel(self, chn, color_min, color_max):
"""Adds a channel to the image object
"""
if isinstance(chn, np.ma.core.MaskedArray):
chn_data = chn.data
chn_mask = chn.mask
else:
chn_data = np.array(chn)
chn_mask = False
scaled = ((chn_data - color_min) *
1.0 / (color_max - color_min))
self.channels.append(np.ma.array(scaled, mask=chn_mask)) | python | def _add_channel(self, chn, color_min, color_max):
"""Adds a channel to the image object
"""
if isinstance(chn, np.ma.core.MaskedArray):
chn_data = chn.data
chn_mask = chn.mask
else:
chn_data = np.array(chn)
chn_mask = False
scaled = ((chn_data - color_min) *
1.0 / (color_max - color_min))
self.channels.append(np.ma.array(scaled, mask=chn_mask)) | ['def', '_add_channel', '(', 'self', ',', 'chn', ',', 'color_min', ',', 'color_max', ')', ':', 'if', 'isinstance', '(', 'chn', ',', 'np', '.', 'ma', '.', 'core', '.', 'MaskedArray', ')', ':', 'chn_data', '=', 'chn', '.', 'data', 'chn_mask', '=', 'chn', '.', 'mask', 'else', ':', 'chn_data', '=', 'np', '.', 'array', '(', 'chn', ')', 'chn_mask', '=', 'False', 'scaled', '=', '(', '(', 'chn_data', '-', 'color_min', ')', '*', '1.0', '/', '(', 'color_max', '-', 'color_min', ')', ')', 'self', '.', 'channels', '.', 'append', '(', 'np', '.', 'ma', '.', 'array', '(', 'scaled', ',', 'mask', '=', 'chn_mask', ')', ')'] | Adds a channel to the image object | ['Adds', 'a', 'channel', 'to', 'the', 'image', 'object'] | train | https://github.com/pytroll/trollimage/blob/d35a7665ad475ff230e457085523e21f2cd3f454/trollimage/image.py#L221-L232 |
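The scaling above is a plain min-max rescale with the mask re-attached afterwards; a worked example with hypothetical limits color_min=0 and color_max=255:
import numpy as np

chn = np.ma.array([0., 127.5, 255.], mask=[False, False, True])
scaled = (chn.data - 0.) * 1.0 / (255. - 0.)   # rescale the raw values
channel = np.ma.array(scaled, mask=chn.mask)   # re-attach the original mask
print(channel)                                 # [0.0 0.5 --]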
147 | autokey/autokey | lib/autokey/scripting.py | GtkClipboard.get_selection | def get_selection(self):
"""
Read text from the X selection
Usage: C{clipboard.get_selection()}
@return: text contents of the mouse selection
@rtype: C{str}
@raise Exception: if no text was found in the selection
"""
Gdk.threads_enter()
text = self.selection.wait_for_text()
Gdk.threads_leave()
if text is not None:
return text
else:
raise Exception("No text found in X selection") | python | def get_selection(self):
"""
Read text from the X selection
Usage: C{clipboard.get_selection()}
@return: text contents of the mouse selection
@rtype: C{str}
@raise Exception: if no text was found in the selection
"""
Gdk.threads_enter()
text = self.selection.wait_for_text()
Gdk.threads_leave()
if text is not None:
return text
else:
raise Exception("No text found in X selection") | ['def', 'get_selection', '(', 'self', ')', ':', 'Gdk', '.', 'threads_enter', '(', ')', 'text', '=', 'self', '.', 'selection', '.', 'wait_for_text', '(', ')', 'Gdk', '.', 'threads_leave', '(', ')', 'if', 'text', 'is', 'not', 'None', ':', 'return', 'text', 'else', ':', 'raise', 'Exception', '(', '"No text found in X selection"', ')'] | Read text from the X selection
Usage: C{clipboard.get_selection()}
@return: text contents of the mouse selection
@rtype: C{str}
@raise Exception: if no text was found in the selection | ['Read', 'text', 'from', 'the', 'X', 'selection', 'Usage', ':', 'C', '{', 'clipboard', '.', 'get_selection', '()', '}'] | train | https://github.com/autokey/autokey/blob/35decb72f286ce68cd2a1f09ace8891a520b58d1/lib/autokey/scripting.py#L841-L857 |
148 | juju/charm-helpers | charmhelpers/core/host.py | mounts | def mounts():
"""Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
with open('/proc/mounts') as f:
# [['/mount/point','/dev/path'],[...]]
system_mounts = [m[1::-1] for m in [l.strip().split()
for l in f.readlines()]]
return system_mounts | python | def mounts():
"""Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
with open('/proc/mounts') as f:
# [['/mount/point','/dev/path'],[...]]
system_mounts = [m[1::-1] for m in [l.strip().split()
for l in f.readlines()]]
return system_mounts | ['def', 'mounts', '(', ')', ':', 'with', 'open', '(', "'/proc/mounts'", ')', 'as', 'f', ':', "# [['/mount/point','/dev/path'],[...]]", 'system_mounts', '=', '[', 'm', '[', '1', ':', ':', '-', '1', ']', 'for', 'm', 'in', '[', 'l', '.', 'strip', '(', ')', '.', 'split', '(', ')', 'for', 'l', 'in', 'f', '.', 'readlines', '(', ')', ']', ']', 'return', 'system_mounts'] | Get a list of all mounted volumes as [[mountpoint,device],[...]] | ['Get', 'a', 'list', 'of', 'all', 'mounted', 'volumes', 'as', '[[', 'mountpoint', 'device', ']', '[', '...', ']]'] | train | https://github.com/juju/charm-helpers/blob/aa785c40c3b7a8c69dbfbc7921d6b9f30142e171/charmhelpers/core/host.py#L620-L626 |
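The slice m[1::-1] does the real work above: it keeps the first two whitespace-separated fields of a /proc/mounts line and reverses them into [mountpoint, device]. A worked example on a typical line:
line = "/dev/sda1 / ext4 rw,relatime 0 0"
m = line.strip().split()   # ['/dev/sda1', '/', 'ext4', ...]
print(m[1::-1])            # ['/', '/dev/sda1']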
149 | allenai/allennlp | allennlp/data/dataset_readers/dataset_reader.py | DatasetReader.read | def read(self, file_path: str) -> Iterable[Instance]:
"""
Returns an ``Iterable`` containing all the instances
in the specified dataset.
If ``self.lazy`` is False, this calls ``self._read()``,
ensures that the result is a list, then returns the resulting list.
If ``self.lazy`` is True, this returns an object whose
``__iter__`` method calls ``self._read()`` each iteration.
In this case your implementation of ``_read()`` must also be lazy
(that is, not load all instances into memory at once), otherwise
you will get a ``ConfigurationError``.
In either case, the returned ``Iterable`` can be iterated
over multiple times. It's unlikely you want to override this function,
but if you do your result should likewise be repeatedly iterable.
"""
lazy = getattr(self, 'lazy', None)
if lazy is None:
logger.warning("DatasetReader.lazy is not set, "
"did you forget to call the superclass constructor?")
if self._cache_directory:
cache_file = self._get_cache_location_for_file_path(file_path)
else:
cache_file = None
if lazy:
return _LazyInstances(lambda: self._read(file_path),
cache_file,
self.deserialize_instance,
self.serialize_instance)
else:
# First we read the instances, either from a cache or from the original file.
if cache_file and os.path.exists(cache_file):
instances = self._instances_from_cache_file(cache_file)
else:
instances = self._read(file_path)
# Then some validation.
if not isinstance(instances, list):
instances = [instance for instance in Tqdm.tqdm(instances)]
if not instances:
raise ConfigurationError("No instances were read from the given filepath {}. "
"Is the path correct?".format(file_path))
# And finally we write to the cache if we need to.
if cache_file and not os.path.exists(cache_file):
logger.info(f"Caching instances to {cache_file}")
with open(cache_file, 'w') as cache:
for instance in Tqdm.tqdm(instances):
cache.write(self.serialize_instance(instance) + '\n')
return instances | python | def read(self, file_path: str) -> Iterable[Instance]:
"""
Returns an ``Iterable`` containing all the instances
in the specified dataset.
If ``self.lazy`` is False, this calls ``self._read()``,
ensures that the result is a list, then returns the resulting list.
If ``self.lazy`` is True, this returns an object whose
``__iter__`` method calls ``self._read()`` each iteration.
In this case your implementation of ``_read()`` must also be lazy
(that is, not load all instances into memory at once), otherwise
you will get a ``ConfigurationError``.
In either case, the returned ``Iterable`` can be iterated
over multiple times. It's unlikely you want to override this function,
but if you do your result should likewise be repeatedly iterable.
"""
lazy = getattr(self, 'lazy', None)
if lazy is None:
logger.warning("DatasetReader.lazy is not set, "
"did you forget to call the superclass constructor?")
if self._cache_directory:
cache_file = self._get_cache_location_for_file_path(file_path)
else:
cache_file = None
if lazy:
return _LazyInstances(lambda: self._read(file_path),
cache_file,
self.deserialize_instance,
self.serialize_instance)
else:
# First we read the instances, either from a cache or from the original file.
if cache_file and os.path.exists(cache_file):
instances = self._instances_from_cache_file(cache_file)
else:
instances = self._read(file_path)
# Then some validation.
if not isinstance(instances, list):
instances = [instance for instance in Tqdm.tqdm(instances)]
if not instances:
raise ConfigurationError("No instances were read from the given filepath {}. "
"Is the path correct?".format(file_path))
# And finally we write to the cache if we need to.
if cache_file and not os.path.exists(cache_file):
logger.info(f"Caching instances to {cache_file}")
with open(cache_file, 'w') as cache:
for instance in Tqdm.tqdm(instances):
cache.write(self.serialize_instance(instance) + '\n')
return instances | ['def', 'read', '(', 'self', ',', 'file_path', ':', 'str', ')', '->', 'Iterable', '[', 'Instance', ']', ':', 'lazy', '=', 'getattr', '(', 'self', ',', "'lazy'", ',', 'None', ')', 'if', 'lazy', 'is', 'None', ':', 'logger', '.', 'warning', '(', '"DatasetReader.lazy is not set, "', '"did you forget to call the superclass constructor?"', ')', 'if', 'self', '.', '_cache_directory', ':', 'cache_file', '=', 'self', '.', '_get_cache_location_for_file_path', '(', 'file_path', ')', 'else', ':', 'cache_file', '=', 'None', 'if', 'lazy', ':', 'return', '_LazyInstances', '(', 'lambda', ':', 'self', '.', '_read', '(', 'file_path', ')', ',', 'cache_file', ',', 'self', '.', 'deserialize_instance', ',', 'self', '.', 'serialize_instance', ')', 'else', ':', '# First we read the instances, either from a cache or from the original file.', 'if', 'cache_file', 'and', 'os', '.', 'path', '.', 'exists', '(', 'cache_file', ')', ':', 'instances', '=', 'self', '.', '_instances_from_cache_file', '(', 'cache_file', ')', 'else', ':', 'instances', '=', 'self', '.', '_read', '(', 'file_path', ')', '# Then some validation.', 'if', 'not', 'isinstance', '(', 'instances', ',', 'list', ')', ':', 'instances', '=', '[', 'instance', 'for', 'instance', 'in', 'Tqdm', '.', 'tqdm', '(', 'instances', ')', ']', 'if', 'not', 'instances', ':', 'raise', 'ConfigurationError', '(', '"No instances were read from the given filepath {}. "', '"Is the path correct?"', '.', 'format', '(', 'file_path', ')', ')', '# And finally we write to the cache if we need to.', 'if', 'cache_file', 'and', 'not', 'os', '.', 'path', '.', 'exists', '(', 'cache_file', ')', ':', 'logger', '.', 'info', '(', 'f"Caching instances to {cache_file}"', ')', 'with', 'open', '(', 'cache_file', ',', "'w'", ')', 'as', 'cache', ':', 'for', 'instance', 'in', 'Tqdm', '.', 'tqdm', '(', 'instances', ')', ':', 'cache', '.', 'write', '(', 'self', '.', 'serialize_instance', '(', 'instance', ')', '+', "'\\n'", ')', 'return', 'instances'] | Returns an ``Iterable`` containing all the instances
in the specified dataset.
If ``self.lazy`` is False, this calls ``self._read()``,
ensures that the result is a list, then returns the resulting list.
If ``self.lazy`` is True, this returns an object whose
``__iter__`` method calls ``self._read()`` each iteration.
In this case your implementation of ``_read()`` must also be lazy
(that is, not load all instances into memory at once), otherwise
you will get a ``ConfigurationError``.
In either case, the returned ``Iterable`` can be iterated
over multiple times. It's unlikely you want to override this function,
but if you do your result should likewise be repeatedly iterable. | ['Returns', 'an', 'Iterable', 'containing', 'all', 'the', 'instances', 'in', 'the', 'specified', 'dataset', '.'] | train | https://github.com/allenai/allennlp/blob/648a36f77db7e45784c047176074f98534c76636/allennlp/data/dataset_readers/dataset_reader.py#L91-L145 |
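A hypothetical usage sketch of the lazy/eager split above; MyDatasetReader is an assumed subclass that implements _read(), and the file path is illustrative:
eager_reader = MyDatasetReader(lazy=False)
instances = eager_reader.read('data/train.jsonl')  # a list, read (or cached) once

lazy_reader = MyDatasetReader(lazy=True)
instances = lazy_reader.read('data/train.jsonl')   # _LazyInstances, no I/O yet
for instance in instances:                         # each full pass calls _read()
    pass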
150 | eight04/pyAPNG | apng/__init__.py | make_chunk | def make_chunk(chunk_type, chunk_data):
"""Create a raw chunk by composing chunk type and data. It
calculates chunk length and CRC for you.
:arg str chunk_type: PNG chunk type.
:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.
:rtype: bytes
"""
out = struct.pack("!I", len(chunk_data))
chunk_data = chunk_type.encode("latin-1") + chunk_data
out += chunk_data + struct.pack("!I", binascii.crc32(chunk_data) & 0xffffffff)
return out | python | def make_chunk(chunk_type, chunk_data):
"""Create a raw chunk by composing chunk type and data. It
calculates chunk length and CRC for you.
:arg str chunk_type: PNG chunk type.
:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.
:rtype: bytes
"""
out = struct.pack("!I", len(chunk_data))
chunk_data = chunk_type.encode("latin-1") + chunk_data
out += chunk_data + struct.pack("!I", binascii.crc32(chunk_data) & 0xffffffff)
return out | ['def', 'make_chunk', '(', 'chunk_type', ',', 'chunk_data', ')', ':', 'out', '=', 'struct', '.', 'pack', '(', '"!I"', ',', 'len', '(', 'chunk_data', ')', ')', 'chunk_data', '=', 'chunk_type', '.', 'encode', '(', '"latin-1"', ')', '+', 'chunk_data', 'out', '+=', 'chunk_data', '+', 'struct', '.', 'pack', '(', '"!I"', ',', 'binascii', '.', 'crc32', '(', 'chunk_data', ')', '&', '0xffffffff', ')', 'return', 'out'] | Create a raw chunk by composing chunk type and data. It
calculates chunk length and CRC for you.
:arg str chunk_type: PNG chunk type.
:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.
:rtype: bytes | ['Create', 'a', 'raw', 'chunk', 'by', 'composing', 'chunk', 'type', 'and', 'data', '.', 'It', 'calculates', 'chunk', 'length', 'and', 'CRC', 'for', 'you', '.'] | train | https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L43-L54 |
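A worked example: the IEND chunk carries no data, so its raw form is four zero length bytes, the type, and the CRC computed over b"IEND" alone; the result matches the fixed IEND chunk found at the end of every PNG file:
import binascii
import struct

chunk_type, chunk_data = "IEND", b""
out = struct.pack("!I", len(chunk_data))          # b'\x00\x00\x00\x00'
body = chunk_type.encode("latin-1") + chunk_data  # b'IEND'
out += body + struct.pack("!I", binascii.crc32(body) & 0xffffffff)
print(out.hex())  # 0000000049454e44ae426082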
151 | qacafe/cdrouter.py | cdrouter/packages.py | PackagesService.get | def get(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Get a package.
:param id: Package ID as an int.
:return: :class:`packages.Package <packages.Package>` object
:rtype: packages.Package
"""
schema = PackageSchema()
resp = self.service.get_id(self.base, id)
return self.service.decode(schema, resp) | python | def get(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Get a package.
:param id: Package ID as an int.
:return: :class:`packages.Package <packages.Package>` object
:rtype: packages.Package
"""
schema = PackageSchema()
resp = self.service.get_id(self.base, id)
return self.service.decode(schema, resp) | ['def', 'get', '(', 'self', ',', 'id', ')', ':', '# pylint: disable=invalid-name,redefined-builtin', 'schema', '=', 'PackageSchema', '(', ')', 'resp', '=', 'self', '.', 'service', '.', 'get_id', '(', 'self', '.', 'base', ',', 'id', ')', 'return', 'self', '.', 'service', '.', 'decode', '(', 'schema', ',', 'resp', ')'] | Get a package.
:param id: Package ID as an int.
:return: :class:`packages.Package <packages.Package>` object
:rtype: packages.Package | ['Get', 'a', 'package', '.'] | train | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/packages.py#L218-L227 |
152 | RIPE-NCC/ripe-atlas-cousteau | ripe/atlas/cousteau/source.py | AtlasChangeSource.set_type | def set_type(self, value):
"""Setter for type attribute"""
if self.action == "remove" and value != "probes":
log = "Sources field 'type' when action is remove should always be 'probes'."
raise MalFormattedSource(log)
self._type = value | python | def set_type(self, value):
"""Setter for type attribute"""
if self.action == "remove" and value != "probes":
log = "Sources field 'type' when action is remove should always be 'probes'."
raise MalFormattedSource(log)
self._type = value | ['def', 'set_type', '(', 'self', ',', 'value', ')', ':', 'if', 'self', '.', 'action', '==', '"remove"', 'and', 'value', '!=', '"probes"', ':', 'log', '=', '"Sources field \'type\' when action is remove should always be \'probes\'."', 'raise', 'MalFormattedSource', '(', 'log', ')', 'self', '.', '_type', '=', 'value'] | Setter for type attribute | ['Setter', 'for', 'type', 'attribute'] | train | https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L173-L178 |
153 | poppy-project/pypot | pypot/vrep/remoteApiBindings/vrep.py | simxGetDistanceHandle | def simxGetDistanceHandle(clientID, distanceObjectName, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
handle = ct.c_int()
if (sys.version_info[0] == 3) and (type(distanceObjectName) is str):
distanceObjectName=distanceObjectName.encode('utf-8')
return c_GetDistanceHandle(clientID, distanceObjectName, ct.byref(handle), operationMode), handle.value | python | def simxGetDistanceHandle(clientID, distanceObjectName, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
handle = ct.c_int()
if (sys.version_info[0] == 3) and (type(distanceObjectName) is str):
distanceObjectName=distanceObjectName.encode('utf-8')
return c_GetDistanceHandle(clientID, distanceObjectName, ct.byref(handle), operationMode), handle.value | ['def', 'simxGetDistanceHandle', '(', 'clientID', ',', 'distanceObjectName', ',', 'operationMode', ')', ':', 'handle', '=', 'ct', '.', 'c_int', '(', ')', 'if', '(', 'sys', '.', 'version_info', '[', '0', ']', '==', '3', ')', 'and', '(', 'type', '(', 'distanceObjectName', ')', 'is', 'str', ')', ':', 'distanceObjectName', '=', 'distanceObjectName', '.', 'encode', '(', "'utf-8'", ')', 'return', 'c_GetDistanceHandle', '(', 'clientID', ',', 'distanceObjectName', ',', 'ct', '.', 'byref', '(', 'handle', ')', ',', 'operationMode', ')', ',', 'handle', '.', 'value'] | Please have a look at the function description/documentation in the V-REP user manual | ['Please', 'have', 'a', 'look', 'at', 'the', 'function', 'description', '/', 'documentation', 'in', 'the', 'V', '-', 'REP', 'user', 'manual'] | train | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L725-L733 |
154 | eqcorrscan/EQcorrscan | eqcorrscan/utils/correlate.py | _get_registerd_func | def _get_registerd_func(name_or_func):
""" get a xcorr function from a str or callable. """
# get the function or register callable
if callable(name_or_func):
func = register_array_xcorr(name_or_func)
else:
func = XCOR_FUNCS[name_or_func or 'default']
assert callable(func), 'func is not callable'
# ensure func has the added methods
if not hasattr(func, 'registered'):
func = register_array_xcorr(func)
return func | python | def _get_registerd_func(name_or_func):
""" get a xcorr function from a str or callable. """
# get the function or register callable
if callable(name_or_func):
func = register_array_xcorr(name_or_func)
else:
func = XCOR_FUNCS[name_or_func or 'default']
assert callable(func), 'func is not callable'
# ensure func has the added methods
if not hasattr(func, 'registered'):
func = register_array_xcorr(func)
return func | ['def', '_get_registerd_func', '(', 'name_or_func', ')', ':', '# get the function or register callable', 'if', 'callable', '(', 'name_or_func', ')', ':', 'func', '=', 'register_array_xcorr', '(', 'name_or_func', ')', 'else', ':', 'func', '=', 'XCOR_FUNCS', '[', 'name_or_func', 'or', "'default'", ']', 'assert', 'callable', '(', 'func', ')', ',', "'func is not callable'", '# ensure func has the added methods', 'if', 'not', 'hasattr', '(', 'func', ',', "'registered'", ')', ':', 'func', '=', 'register_array_xcorr', '(', 'func', ')', 'return', 'func'] | get a xcorr function from a str or callable. | ['get', 'a', 'xcorr', 'function', 'from', 'a', 'str', 'or', 'callable', '.'] | train | https://github.com/eqcorrscan/EQcorrscan/blob/3121b4aca801ee5d38f56ca297ce1c0f9515d9ff/eqcorrscan/utils/correlate.py#L318-L329 |
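A simplified, generic sketch of the lookup-or-register pattern above (REGISTRY, register and get_func are stand-in names, not EQcorrscan API): accept either a registry key or a bare callable, and always return a function that carries the registered marker:
REGISTRY = {}

def register(func):
    func.registered = True          # mark and remember the callable
    REGISTRY[func.__name__] = func
    return func

def get_func(name_or_func):
    if callable(name_or_func):
        func = register(name_or_func)
    else:
        func = REGISTRY[name_or_func]
    if not hasattr(func, 'registered'):
        func = register(func)       # ensure the added attributes exist
    return func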
155 | ktdreyer/txkoji | txkoji/build.py | Build.task | def task(self):
"""
Find the task for this build.
Wraps the getTaskInfo RPC.
:returns: deferred that when fired returns the Task object, or None if
we could not determine the task for this build.
"""
# If we have no .task_id, this is a no-op to return None.
if not self.task_id:
return defer.succeed(None)
return self.connection.getTaskInfo(self.task_id) | python | def task(self):
"""
Find the task for this build.
Wraps the getTaskInfo RPC.
:returns: deferred that when fired returns the Task object, or None if
we could not determine the task for this build.
"""
# If we have no .task_id, this is a no-op to return None.
if not self.task_id:
return defer.succeed(None)
return self.connection.getTaskInfo(self.task_id) | ['def', 'task', '(', 'self', ')', ':', '# If we have no .task_id, this is a no-op to return None.', 'if', 'not', 'self', '.', 'task_id', ':', 'return', 'defer', '.', 'succeed', '(', 'None', ')', 'return', 'self', '.', 'connection', '.', 'getTaskInfo', '(', 'self', '.', 'task_id', ')'] | Find the task for this build.
Wraps the getTaskInfo RPC.
:returns: deferred that when fired returns the Task object, or None if
we could not determine the task for this build. | ['Find', 'the', 'task', 'for', 'this', 'build', '.'] | train | https://github.com/ktdreyer/txkoji/blob/a7de380f29f745bf11730b27217208f6d4da7733/txkoji/build.py#L132-L144 |
156 | KnorrFG/pyparadigm | pyparadigm/surface_composition.py | _inner_func_anot | def _inner_func_anot(func):
"""must be applied to all inner functions that return contexts.
Wraps all instances of pygame.Surface in the input in Surface"""
@wraps(func)
def new_func(*args):
return func(*_lmap(_wrap_surface, args))
return new_func | python | def _inner_func_anot(func):
"""must be applied to all inner functions that return contexts.
Wraps all instances of pygame.Surface in the input in Surface"""
@wraps(func)
def new_func(*args):
return func(*_lmap(_wrap_surface, args))
return new_func | ['def', '_inner_func_anot', '(', 'func', ')', ':', '@', 'wraps', '(', 'func', ')', 'def', 'new_func', '(', '*', 'args', ')', ':', 'return', 'func', '(', '*', '_lmap', '(', '_wrap_surface', ',', 'args', ')', ')', 'return', 'new_func'] | must be applied to all inner functions that return contexts.
Wraps all instances of pygame.Surface in the input in Surface | ['must', 'be', 'applied', 'to', 'all', 'inner', 'functions', 'that', 'return', 'contexts', '.', 'Wraps', 'all', 'instances', 'of', 'pygame', '.', 'Surface', 'in', 'the', 'input', 'in', 'Surface'] | train | https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/surface_composition.py#L23-L30 |
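The same decorator idea in a self-contained sketch: rewrite every positional argument before the wrapped function sees it (wrap_value is a stand-in for _wrap_surface):
from functools import wraps

def wrap_value(x):
    return x if isinstance(x, list) else [x]

def inner_func_anot(func):
    @wraps(func)
    def new_func(*args):
        return func(*[wrap_value(a) for a in args])
    return new_func

@inner_func_anot
def head(items):
    return items[0]

print(head(42))  # 42 - the bare argument was wrapped into a list first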
157 | raiden-network/raiden | raiden/transfer/mediated_transfer/mediator.py | events_for_balanceproof | def events_for_balanceproof(
channelidentifiers_to_channels: ChannelMap,
transfers_pair: List[MediationPairState],
pseudo_random_generator: random.Random,
block_number: BlockNumber,
secret: Secret,
secrethash: SecretHash,
) -> List[Event]:
""" While it's safe do the off-chain unlock. """
events: List[Event] = list()
for pair in reversed(transfers_pair):
payee_knows_secret = pair.payee_state in STATE_SECRET_KNOWN
payee_payed = pair.payee_state in STATE_TRANSFER_PAID
payee_channel = get_payee_channel(channelidentifiers_to_channels, pair)
payee_channel_open = (
payee_channel and channel.get_status(payee_channel) == CHANNEL_STATE_OPENED
)
payer_channel = get_payer_channel(channelidentifiers_to_channels, pair)
# The mediator must not send to the payee a balance proof if the lock
# is in the danger zone, because the payer may not do the same and the
# on-chain unlock may fail. If the lock is nearing it's expiration
# block, then on-chain unlock should be done, and if successful it can
# be unlocked off-chain.
is_safe_to_send_balanceproof = False
if payer_channel:
is_safe_to_send_balanceproof, _ = is_safe_to_wait(
pair.payer_transfer.lock.expiration,
payer_channel.reveal_timeout,
block_number,
)
should_send_balanceproof_to_payee = (
payee_channel_open and
payee_knows_secret and
not payee_payed and
is_safe_to_send_balanceproof
)
if should_send_balanceproof_to_payee:
# At this point we are sure that payee_channel exists due to the
# payee_channel_open check above. So let mypy know about this
assert payee_channel
payee_channel = cast(NettingChannelState, payee_channel)
pair.payee_state = 'payee_balance_proof'
message_identifier = message_identifier_from_prng(pseudo_random_generator)
unlock_lock = channel.send_unlock(
channel_state=payee_channel,
message_identifier=message_identifier,
payment_identifier=pair.payee_transfer.payment_identifier,
secret=secret,
secrethash=secrethash,
)
unlock_success = EventUnlockSuccess(
pair.payer_transfer.payment_identifier,
pair.payer_transfer.lock.secrethash,
)
events.append(unlock_lock)
events.append(unlock_success)
return events | python | def events_for_balanceproof(
channelidentifiers_to_channels: ChannelMap,
transfers_pair: List[MediationPairState],
pseudo_random_generator: random.Random,
block_number: BlockNumber,
secret: Secret,
secrethash: SecretHash,
) -> List[Event]:
""" While it's safe do the off-chain unlock. """
events: List[Event] = list()
for pair in reversed(transfers_pair):
payee_knows_secret = pair.payee_state in STATE_SECRET_KNOWN
payee_payed = pair.payee_state in STATE_TRANSFER_PAID
payee_channel = get_payee_channel(channelidentifiers_to_channels, pair)
payee_channel_open = (
payee_channel and channel.get_status(payee_channel) == CHANNEL_STATE_OPENED
)
payer_channel = get_payer_channel(channelidentifiers_to_channels, pair)
# The mediator must not send to the payee a balance proof if the lock
# is in the danger zone, because the payer may not do the same and the
# on-chain unlock may fail. If the lock is nearing it's expiration
# block, then on-chain unlock should be done, and if successful it can
# be unlocked off-chain.
is_safe_to_send_balanceproof = False
if payer_channel:
is_safe_to_send_balanceproof, _ = is_safe_to_wait(
pair.payer_transfer.lock.expiration,
payer_channel.reveal_timeout,
block_number,
)
should_send_balanceproof_to_payee = (
payee_channel_open and
payee_knows_secret and
not payee_payed and
is_safe_to_send_balanceproof
)
if should_send_balanceproof_to_payee:
# At this point we are sure that payee_channel exists due to the
# payee_channel_open check above. So let mypy know about this
assert payee_channel
payee_channel = cast(NettingChannelState, payee_channel)
pair.payee_state = 'payee_balance_proof'
message_identifier = message_identifier_from_prng(pseudo_random_generator)
unlock_lock = channel.send_unlock(
channel_state=payee_channel,
message_identifier=message_identifier,
payment_identifier=pair.payee_transfer.payment_identifier,
secret=secret,
secrethash=secrethash,
)
unlock_success = EventUnlockSuccess(
pair.payer_transfer.payment_identifier,
pair.payer_transfer.lock.secrethash,
)
events.append(unlock_lock)
events.append(unlock_success)
return events | ['def', 'events_for_balanceproof', '(', 'channelidentifiers_to_channels', ':', 'ChannelMap', ',', 'transfers_pair', ':', 'List', '[', 'MediationPairState', ']', ',', 'pseudo_random_generator', ':', 'random', '.', 'Random', ',', 'block_number', ':', 'BlockNumber', ',', 'secret', ':', 'Secret', ',', 'secrethash', ':', 'SecretHash', ',', ')', '->', 'List', '[', 'Event', ']', ':', 'events', ':', 'List', '[', 'Event', ']', '=', 'list', '(', ')', 'for', 'pair', 'in', 'reversed', '(', 'transfers_pair', ')', ':', 'payee_knows_secret', '=', 'pair', '.', 'payee_state', 'in', 'STATE_SECRET_KNOWN', 'payee_payed', '=', 'pair', '.', 'payee_state', 'in', 'STATE_TRANSFER_PAID', 'payee_channel', '=', 'get_payee_channel', '(', 'channelidentifiers_to_channels', ',', 'pair', ')', 'payee_channel_open', '=', '(', 'payee_channel', 'and', 'channel', '.', 'get_status', '(', 'payee_channel', ')', '==', 'CHANNEL_STATE_OPENED', ')', 'payer_channel', '=', 'get_payer_channel', '(', 'channelidentifiers_to_channels', ',', 'pair', ')', '# The mediator must not send to the payee a balance proof if the lock', '# is in the danger zone, because the payer may not do the same and the', "# on-chain unlock may fail. If the lock is nearing it's expiration", '# block, then on-chain unlock should be done, and if successful it can', '# be unlocked off-chain.', 'is_safe_to_send_balanceproof', '=', 'False', 'if', 'payer_channel', ':', 'is_safe_to_send_balanceproof', ',', '_', '=', 'is_safe_to_wait', '(', 'pair', '.', 'payer_transfer', '.', 'lock', '.', 'expiration', ',', 'payer_channel', '.', 'reveal_timeout', ',', 'block_number', ',', ')', 'should_send_balanceproof_to_payee', '=', '(', 'payee_channel_open', 'and', 'payee_knows_secret', 'and', 'not', 'payee_payed', 'and', 'is_safe_to_send_balanceproof', ')', 'if', 'should_send_balanceproof_to_payee', ':', '# At this point we are sure that payee_channel exists due to the', '# payee_channel_open check above. So let mypy know about this', 'assert', 'payee_channel', 'payee_channel', '=', 'cast', '(', 'NettingChannelState', ',', 'payee_channel', ')', 'pair', '.', 'payee_state', '=', "'payee_balance_proof'", 'message_identifier', '=', 'message_identifier_from_prng', '(', 'pseudo_random_generator', ')', 'unlock_lock', '=', 'channel', '.', 'send_unlock', '(', 'channel_state', '=', 'payee_channel', ',', 'message_identifier', '=', 'message_identifier', ',', 'payment_identifier', '=', 'pair', '.', 'payee_transfer', '.', 'payment_identifier', ',', 'secret', '=', 'secret', ',', 'secrethash', '=', 'secrethash', ',', ')', 'unlock_success', '=', 'EventUnlockSuccess', '(', 'pair', '.', 'payer_transfer', '.', 'payment_identifier', ',', 'pair', '.', 'payer_transfer', '.', 'lock', '.', 'secrethash', ',', ')', 'events', '.', 'append', '(', 'unlock_lock', ')', 'events', '.', 'append', '(', 'unlock_success', ')', 'return', 'events'] | While it's safe do the off-chain unlock. | ['While', 'it', 's', 'safe', 'do', 'the', 'off', '-', 'chain', 'unlock', '.'] | train | https://github.com/raiden-network/raiden/blob/407ba15c72074e9de88771d6b9661ff4dc36bef5/raiden/transfer/mediated_transfer/mediator.py#L797-L862 |
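The "danger zone" gate above reduces to simple block arithmetic; a sketch with hypothetical numbers (the real test lives in raiden's is_safe_to_wait, which also returns an explanatory message):
lock_expiration = 150   # block at which the lock expires
reveal_timeout = 10     # blocks needed to unlock on-chain if things go wrong

def is_safe_to_send(block_number):
    return block_number < lock_expiration - reveal_timeout

print(is_safe_to_send(120))  # True: 30 blocks of margin remain
print(is_safe_to_send(145))  # False: inside the danger zone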
158 | d0c-s4vage/pfp | pfp/fields.py | Field._pfp__width | def _pfp__width(self):
"""Return the width of the field (sizeof)
"""
raw_output = six.BytesIO()
output = bitwrap.BitwrappedStream(raw_output)
self._pfp__build(output)
output.flush()
return len(raw_output.getvalue()) | python | def _pfp__width(self):
"""Return the width of the field (sizeof)
"""
raw_output = six.BytesIO()
output = bitwrap.BitwrappedStream(raw_output)
self._pfp__build(output)
output.flush()
return len(raw_output.getvalue()) | ['def', '_pfp__width', '(', 'self', ')', ':', 'raw_output', '=', 'six', '.', 'BytesIO', '(', ')', 'output', '=', 'bitwrap', '.', 'BitwrappedStream', '(', 'raw_output', ')', 'self', '.', '_pfp__build', '(', 'output', ')', 'output', '.', 'flush', '(', ')', 'return', 'len', '(', 'raw_output', '.', 'getvalue', '(', ')', ')'] | Return the width of the field (sizeof) | ['Return', 'the', 'width', 'of', 'the', 'field', '(', 'sizeof', ')'] | train | https://github.com/d0c-s4vage/pfp/blob/32f2d34fdec1c70019fa83c7006d5e3be0f92fcd/pfp/fields.py#L423-L430 |
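The same serialize-to-measure idea in plain Python: an object's width is just the byte length of whatever its build step writes, so no separate size bookkeeping is needed (struct stands in for _pfp__build here):
import io
import struct

buf = io.BytesIO()
buf.write(struct.pack("<IH", 7, 3))  # stand-in for self._pfp__build(output)
print(len(buf.getvalue()))           # 6: four bytes for 'I', two for 'H'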
159 | JdeRobot/base | src/drivers/MAVLinkServer/MAVProxy/modules/mavproxy_map/__init__.py | MapModule.drawing_update | def drawing_update(self):
'''update line drawing'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if self.draw_callback is None:
return
self.draw_line.append(self.click_position)
if len(self.draw_line) > 1:
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('drawing', self.draw_line,
layer='Drawing', linewidth=2, colour=(128,128,255))) | python | def drawing_update(self):
'''update line drawing'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if self.draw_callback is None:
return
self.draw_line.append(self.click_position)
if len(self.draw_line) > 1:
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('drawing', self.draw_line,
layer='Drawing', linewidth=2, colour=(128,128,255))) | ['def', 'drawing_update', '(', 'self', ')', ':', 'from', 'MAVProxy', '.', 'modules', '.', 'mavproxy_map', 'import', 'mp_slipmap', 'if', 'self', '.', 'draw_callback', 'is', 'None', ':', 'return', 'self', '.', 'draw_line', '.', 'append', '(', 'self', '.', 'click_position', ')', 'if', 'len', '(', 'self', '.', 'draw_line', ')', '>', '1', ':', 'self', '.', 'mpstate', '.', 'map', '.', 'add_object', '(', 'mp_slipmap', '.', 'SlipPolygon', '(', "'drawing'", ',', 'self', '.', 'draw_line', ',', 'layer', '=', "'Drawing'", ',', 'linewidth', '=', '2', ',', 'colour', '=', '(', '128', ',', '128', ',', '255', ')', ')', ')'] | update line drawing | ['update', 'line', 'drawing'] | train | https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/modules/mavproxy_map/__init__.py#L350-L358 |
160 | google/grr | grr/core/grr_response_core/lib/config_lib.py | GrrConfigManager.DEFINE_integer_list | def DEFINE_integer_list(self, name, default, help, constant=False):
"""A helper for defining lists of integer options."""
self.AddOption(
type_info.List(
name=name,
default=default,
description=help,
validator=type_info.Integer()),
constant=constant) | python | def DEFINE_integer_list(self, name, default, help, constant=False):
"""A helper for defining lists of integer options."""
self.AddOption(
type_info.List(
name=name,
default=default,
description=help,
validator=type_info.Integer()),
constant=constant) | ['def', 'DEFINE_integer_list', '(', 'self', ',', 'name', ',', 'default', ',', 'help', ',', 'constant', '=', 'False', ')', ':', 'self', '.', 'AddOption', '(', 'type_info', '.', 'List', '(', 'name', '=', 'name', ',', 'default', '=', 'default', ',', 'description', '=', 'help', ',', 'validator', '=', 'type_info', '.', 'Integer', '(', ')', ')', ',', 'constant', '=', 'constant', ')'] | A helper for defining lists of integer options. | ['A', 'helper', 'for', 'defining', 'lists', 'of', 'integer', 'options', '.'] | train | https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/core/grr_response_core/lib/config_lib.py#L1523-L1531 |
161 | bennyrowland/suspect | suspect/processing/_apodize.py | gaussian_window | def gaussian_window(t, params):
"""
Calculates a Gaussian window function in the time domain which will broaden
peaks in the frequency domain by params["line_broadening"] Hertz.
:param t:
:param params:
:return:
"""
window = suspect.basis.gaussian(t, 0, 0, params["line_broadening"])
# the above gaussian function returns an area 1 fid, for a windowing
# function we need to be area preserving (first point must be 1)
return window / window[0] | python | def gaussian_window(t, params):
"""
Calculates a Gaussian window function in the time domain which will broaden
peaks in the frequency domain by params["line_broadening"] Hertz.
:param t:
:param params:
:return:
"""
window = suspect.basis.gaussian(t, 0, 0, params["line_broadening"])
# the above gaussian function returns an area 1 fid, for a windowing
# function we need to be area preserving (first point must be 1)
return window / window[0] | ['def', 'gaussian_window', '(', 't', ',', 'params', ')', ':', 'window', '=', 'suspect', '.', 'basis', '.', 'gaussian', '(', 't', ',', '0', ',', '0', ',', 'params', '[', '"line_broadening"', ']', ')', '# the above gaussian function returns an area 1 fid, for a windowing', '# function we need to be area preserving (first point must be 1)', 'return', 'window', '/', 'window', '[', '0', ']'] | Calculates a Gaussian window function in the time domain which will broaden
peaks in the frequency domain by params["line_broadening"] Hertz.
:param t:
:param params:
:return: | ['Calculates', 'a', 'Gaussian', 'window', 'function', 'in', 'the', 'time', 'domain', 'which', 'will', 'broaden', 'peaks', 'in', 'the', 'frequency', 'domain', 'by', 'params', '[', 'line_broadening', ']', 'Hertz', '.', ':', 'param', 't', ':', ':', 'param', 'params', ':', ':', 'return', ':'] | train | https://github.com/bennyrowland/suspect/blob/c09ab0a5013c5a199218214cdd791659243d7e41/suspect/processing/_apodize.py#L6-L18 |
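Why the division by window[0]: a unit-area FID does not start at 1, while an apodization window must leave the first time-domain point unchanged. A sketch with a hypothetical exponential stand-in for suspect.basis.gaussian:
import numpy as np

t = np.linspace(0, 1, 5)
window = 0.25 * np.exp(-4.0 * t)  # hypothetical area-scaled decay, window[0] != 1
window = window / window[0]       # rescale so the first point is exactly 1
print(window[0])                  # 1.0 - area preserving, peak shape unchanged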
162 | sorgerlab/indra | indra/tools/assemble_corpus.py | merge_groundings | def merge_groundings(stmts_in):
"""Gather and merge original grounding information from evidences.
Each Statement's evidences are traversed to find original grounding
information. These groundings are then merged into an overall consensus
grounding dict with as much detail as possible.
The current implementation is only applicable to Statements whose
concept/agent roles are fixed. Complexes, Associations and Conversions
cannot be handled correctly.
Parameters
----------
stmts_in : list[indra.statements.Statement]
A list of INDRA Statements whose groundings should be merged. These
Statements are meant to have been preassembled and potentially have
multiple pieces of evidence.
Returns
-------
stmts_out : list[indra.statements.Statement]
The list of Statements now with groundings merged at the Statement
level.
"""
def surface_grounding(stmt):
# Find the "best" grounding for a given concept and its evidences
# and surface that
for idx, concept in enumerate(stmt.agent_list()):
if concept is None:
continue
aggregate_groundings = {}
for ev in stmt.evidence:
if 'agents' in ev.annotations:
groundings = ev.annotations['agents']['raw_grounding'][idx]
for ns, value in groundings.items():
if ns not in aggregate_groundings:
aggregate_groundings[ns] = []
if isinstance(value, list):
aggregate_groundings[ns] += value
else:
aggregate_groundings[ns].append(value)
best_groundings = get_best_groundings(aggregate_groundings)
concept.db_refs = best_groundings
def get_best_groundings(aggregate_groundings):
best_groundings = {}
for ns, values in aggregate_groundings.items():
# There are 3 possibilities here
# 1. All the entries in the list are scored in which case we
# get unique entries and sort them by score
if all([isinstance(v, (tuple, list)) for v in values]):
best_groundings[ns] = []
for unique_value in {v[0] for v in values}:
scores = [v[1] for v in values if v[0] == unique_value]
best_groundings[ns].append((unique_value, max(scores)))
best_groundings[ns] = \
sorted(best_groundings[ns], key=lambda x: x[1],
reverse=True)
# 2. All the entries in the list are unscored in which case we
# get the highest frequency entry
elif all([not isinstance(v, (tuple, list)) for v in values]):
best_groundings[ns] = max(set(values), key=values.count)
# 3. There is a mixture, which can happen when some entries were
# mapped with scores and others had no scores to begin with.
# In this case, we again pick the highest frequency non-scored
# entry assuming that the unmapped version is more reliable.
else:
unscored_vals = [v for v in values
if not isinstance(v, (tuple, list))]
best_groundings[ns] = max(set(unscored_vals),
key=unscored_vals.count)
return best_groundings
stmts_out = []
for stmt in stmts_in:
if not isinstance(stmt, (Complex, Conversion)):
surface_grounding(stmt)
stmts_out.append(stmt)
return stmts_out | python | def merge_groundings(stmts_in):
"""Gather and merge original grounding information from evidences.
Each Statement's evidences are traversed to find original grounding
information. These groundings are then merged into an overall consensus
grounding dict with as much detail as possible.
The current implementation is only applicable to Statements whose
concept/agent roles are fixed. Complexes, Associations and Conversions
cannot be handled correctly.
Parameters
----------
stmts_in : list[indra.statements.Statement]
A list of INDRA Statements whose groundings should be merged. These
Statements are meant to have been preassembled and potentially have
multiple pieces of evidence.
Returns
-------
stmts_out : list[indra.statements.Statement]
The list of Statements now with groundings merged at the Statement
level.
"""
def surface_grounding(stmt):
# Find the "best" grounding for a given concept and its evidences
# and surface that
for idx, concept in enumerate(stmt.agent_list()):
if concept is None:
continue
aggregate_groundings = {}
for ev in stmt.evidence:
if 'agents' in ev.annotations:
groundings = ev.annotations['agents']['raw_grounding'][idx]
for ns, value in groundings.items():
if ns not in aggregate_groundings:
aggregate_groundings[ns] = []
if isinstance(value, list):
aggregate_groundings[ns] += value
else:
aggregate_groundings[ns].append(value)
best_groundings = get_best_groundings(aggregate_groundings)
concept.db_refs = best_groundings
def get_best_groundings(aggregate_groundings):
best_groundings = {}
for ns, values in aggregate_groundings.items():
# There are 3 possibilities here
# 1. All the entries in the list are scored in which case we
# get unique entries and sort them by score
if all([isinstance(v, (tuple, list)) for v in values]):
best_groundings[ns] = []
for unique_value in {v[0] for v in values}:
scores = [v[1] for v in values if v[0] == unique_value]
best_groundings[ns].append((unique_value, max(scores)))
best_groundings[ns] = \
sorted(best_groundings[ns], key=lambda x: x[1],
reverse=True)
# 2. All the entries in the list are unscored in which case we
# get the highest frequency entry
elif all([not isinstance(v, (tuple, list)) for v in values]):
best_groundings[ns] = max(set(values), key=values.count)
# 3. There is a mixture, which can happen when some entries were
# mapped with scores and others had no scores to begin with.
# In this case, we again pick the highest frequency non-scored
# entry assuming that the unmapped version is more reliable.
else:
unscored_vals = [v for v in values
if not isinstance(v, (tuple, list))]
best_groundings[ns] = max(set(unscored_vals),
key=unscored_vals.count)
return best_groundings
stmts_out = []
for stmt in stmts_in:
if not isinstance(stmt, (Complex, Conversion)):
surface_grounding(stmt)
stmts_out.append(stmt)
return stmts_out | ['def', 'merge_groundings', '(', 'stmts_in', ')', ':', 'def', 'surface_grounding', '(', 'stmt', ')', ':', '# Find the "best" grounding for a given concept and its evidences', '# and surface that', 'for', 'idx', ',', 'concept', 'in', 'enumerate', '(', 'stmt', '.', 'agent_list', '(', ')', ')', ':', 'if', 'concept', 'is', 'None', ':', 'continue', 'aggregate_groundings', '=', '{', '}', 'for', 'ev', 'in', 'stmt', '.', 'evidence', ':', 'if', "'agents'", 'in', 'ev', '.', 'annotations', ':', 'groundings', '=', 'ev', '.', 'annotations', '[', "'agents'", ']', '[', "'raw_grounding'", ']', '[', 'idx', ']', 'for', 'ns', ',', 'value', 'in', 'groundings', '.', 'items', '(', ')', ':', 'if', 'ns', 'not', 'in', 'aggregate_groundings', ':', 'aggregate_groundings', '[', 'ns', ']', '=', '[', ']', 'if', 'isinstance', '(', 'value', ',', 'list', ')', ':', 'aggregate_groundings', '[', 'ns', ']', '+=', 'value', 'else', ':', 'aggregate_groundings', '[', 'ns', ']', '.', 'append', '(', 'value', ')', 'best_groundings', '=', 'get_best_groundings', '(', 'aggregate_groundings', ')', 'concept', '.', 'db_refs', '=', 'best_groundings', 'def', 'get_best_groundings', '(', 'aggregate_groundings', ')', ':', 'best_groundings', '=', '{', '}', 'for', 'ns', ',', 'values', 'in', 'aggregate_groundings', '.', 'items', '(', ')', ':', '# There are 3 possibilities here', '# 1. All the entries in the list are scored in which case we', '# get unique entries and sort them by score', 'if', 'all', '(', '[', 'isinstance', '(', 'v', ',', '(', 'tuple', ',', 'list', ')', ')', 'for', 'v', 'in', 'values', ']', ')', ':', 'best_groundings', '[', 'ns', ']', '=', '[', ']', 'for', 'unique_value', 'in', '{', 'v', '[', '0', ']', 'for', 'v', 'in', 'values', '}', ':', 'scores', '=', '[', 'v', '[', '1', ']', 'for', 'v', 'in', 'values', 'if', 'v', '[', '0', ']', '==', 'unique_value', ']', 'best_groundings', '[', 'ns', ']', '.', 'append', '(', '(', 'unique_value', ',', 'max', '(', 'scores', ')', ')', ')', 'best_groundings', '[', 'ns', ']', '=', 'sorted', '(', 'best_groundings', '[', 'ns', ']', ',', 'key', '=', 'lambda', 'x', ':', 'x', '[', '1', ']', ',', 'reverse', '=', 'True', ')', '# 2. All the entries in the list are unscored in which case we', '# get the highest frequency entry', 'elif', 'all', '(', '[', 'not', 'isinstance', '(', 'v', ',', '(', 'tuple', ',', 'list', ')', ')', 'for', 'v', 'in', 'values', ']', ')', ':', 'best_groundings', '[', 'ns', ']', '=', 'max', '(', 'set', '(', 'values', ')', ',', 'key', '=', 'values', '.', 'count', ')', '# 3. There is a mixture, which can happen when some entries were', '# mapped with scores and others had no scores to begin with.', '# In this case, we again pick the highest frequency non-scored', '# entry assuming that the unmapped version is more reliable.', 'else', ':', 'unscored_vals', '=', '[', 'v', 'for', 'v', 'in', 'values', 'if', 'not', 'isinstance', '(', 'v', ',', '(', 'tuple', ',', 'list', ')', ')', ']', 'best_groundings', '[', 'ns', ']', '=', 'max', '(', 'set', '(', 'unscored_vals', ')', ',', 'key', '=', 'unscored_vals', '.', 'count', ')', 'return', 'best_groundings', 'stmts_out', '=', '[', ']', 'for', 'stmt', 'in', 'stmts_in', ':', 'if', 'not', 'isinstance', '(', 'stmt', ',', '(', 'Complex', ',', 'Conversion', ')', ')', ':', 'surface_grounding', '(', 'stmt', ')', 'stmts_out', '.', 'append', '(', 'stmt', ')', 'return', 'stmts_out'] | Gather and merge original grounding information from evidences.
Each Statement's evidences are traversed to find original grounding
information. These groundings are then merged into an overall consensus
grounding dict with as much detail as possible.
The current implementation is only applicable to Statements whose
concept/agent roles are fixed. Complexes, Associations and Conversions
cannot be handled correctly.
Parameters
----------
stmts_in : list[indra.statements.Statement]
A list of INDRA Statements whose groundings should be merged. These
Statements are meant to have been preassembled and potentially have
multiple pieces of evidence.
Returns
-------
stmts_out : list[indra.statements.Statement]
The list of Statements now with groundings merged at the Statement
level. | ['Gather', 'and', 'merge', 'original', 'grounding', 'information', 'from', 'evidences', '.'] | train | https://github.com/sorgerlab/indra/blob/79a70415832c5702d7a820c7c9ccc8e25010124b/indra/tools/assemble_corpus.py#L122-L201 |
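To see how the three aggregation branches behave in isolation, here is a minimal self-contained sketch that mirrors the get_best_groundings logic on plain lists; the namespace values are invented for illustration.

def best_grounding(values):
    # Case 1: every entry is (entry, score) -> unique entries by max score.
    if all(isinstance(v, (tuple, list)) for v in values):
        best = [(u, max(s for e, s in values if e == u))
                for u in {e for e, _ in values}]
        return sorted(best, key=lambda x: x[1], reverse=True)
    # Cases 2 and 3: fall back to the most frequent unscored entry.
    unscored = [v for v in values if not isinstance(v, (tuple, list))]
    return max(set(unscored), key=unscored.count)

print(best_grounding([('x', 0.9), ('y', 0.4), ('x', 0.7)]))  # [('x', 0.9), ('y', 0.4)]
print(best_grounding(['x', 'y', 'x']))                       # 'x'
print(best_grounding([('x', 0.9), 'y', 'y']))                # 'y'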
163 | openstack/networking-cisco | networking_cisco/apps/saf/server/services/firewall/native/fw_mgr.py | FwMgr.network_sub_create_notif | def network_sub_create_notif(self, tenant_id, tenant_name, cidr):
"""Network create notification. """
if not self.fw_init:
return
self.network_create_notif(tenant_id, tenant_name, cidr) | python | def network_sub_create_notif(self, tenant_id, tenant_name, cidr):
"""Network create notification. """
if not self.fw_init:
return
self.network_create_notif(tenant_id, tenant_name, cidr) | ['def', 'network_sub_create_notif', '(', 'self', ',', 'tenant_id', ',', 'tenant_name', ',', 'cidr', ')', ':', 'if', 'not', 'self', '.', 'fw_init', ':', 'return', 'self', '.', 'network_create_notif', '(', 'tenant_id', ',', 'tenant_name', ',', 'cidr', ')'] | Network create notification. | ['Network', 'create', 'notification', '.'] | train | https://github.com/openstack/networking-cisco/blob/aa58a30aec25b86f9aa5952b0863045975debfa9/networking_cisco/apps/saf/server/services/firewall/native/fw_mgr.py#L342-L346 |
164 | tjvr/kurt | kurt/plugin.py | Kurt.block_by_command | def block_by_command(cls, command):
"""Return the block with the given :attr:`command`.
Returns None if the block is not found.
"""
for block in cls.blocks:
if block.has_command(command):
return block | python | def block_by_command(cls, command):
"""Return the block with the given :attr:`command`.
Returns None if the block is not found.
"""
for block in cls.blocks:
if block.has_command(command):
return block | ['def', 'block_by_command', '(', 'cls', ',', 'command', ')', ':', 'for', 'block', 'in', 'cls', '.', 'blocks', ':', 'if', 'block', '.', 'has_command', '(', 'command', ')', ':', 'return', 'block'] | Return the block with the given :attr:`command`.
Returns None if the block is not found. | ['Return', 'the', 'block', 'with', 'the', 'given', ':', 'attr', ':', 'command', '.'] | train | https://github.com/tjvr/kurt/blob/fcccd80cae11dc233f6dd02b40ec9a388c62f259/kurt/plugin.py#L226-L234 |
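A hedged usage sketch; the import path is assumed from the file location above, and 'doIf' is a made-up command string used only for illustration.

from kurt.plugin import Kurt  # import path assumed from kurt/plugin.py

block = Kurt.block_by_command('doIf')  # 'doIf' is an example command
print(block if block is not None else 'no block with that command')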
165 | gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extract dictionaries of coefficients specific to required
# intensity measure type and for PGA
C = self.COEFFS[imt]
C_PGA = self.COEFFS[PGA()]
# compute median pga on rock (vs30=1100), needed for site response
# term calculation
# For spectral accelerations at periods between 0.0 and 0.25 s, Sa (T)
# cannot be less than PGA on soil, therefore if the IMT is in this
# period range it is necessary to calculate PGA on soil
if imt.name == 'SA' and imt.period > 0.0 and imt.period < 0.25:
get_pga_site = True
else:
get_pga_site = False
pga1100, pga_site = self._compute_imt1100(C_PGA,
sites,
rup,
dists,
get_pga_site)
# Get the median ground motion
mean = (self._compute_magnitude_term(C, rup.mag) +
self._compute_distance_term(C, rup, dists) +
self._compute_style_of_faulting_term(C, rup) +
self._compute_hanging_wall_term(C, rup, dists) +
self._compute_shallow_site_response(C, sites, pga1100) +
self._compute_basin_response_term(C, sites.z2pt5))
# If it is necessary to ensure that Sa(T) >= PGA (see previous comment)
if get_pga_site:
idx = mean < np.log(pga_site)
mean[idx] = np.log(pga_site[idx])
stddevs = self._get_stddevs(C,
sites,
pga1100,
C_PGA['s_lny'],
stddev_types)
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extract dictionaries of coefficients specific to required
# intensity measure type and for PGA
C = self.COEFFS[imt]
C_PGA = self.COEFFS[PGA()]
# compute median pga on rock (vs30=1100), needed for site response
# term calculation
# For spectral accelerations at periods between 0.0 and 0.25 s, Sa (T)
# cannot be less than PGA on soil, therefore if the IMT is in this
# period range it is necessary to calculate PGA on soil
if imt.name == 'SA' and imt.period > 0.0 and imt.period < 0.25:
get_pga_site = True
else:
get_pga_site = False
pga1100, pga_site = self._compute_imt1100(C_PGA,
sites,
rup,
dists,
get_pga_site)
# Get the median ground motion
mean = (self._compute_magnitude_term(C, rup.mag) +
self._compute_distance_term(C, rup, dists) +
self._compute_style_of_faulting_term(C, rup) +
self._compute_hanging_wall_term(C, rup, dists) +
self._compute_shallow_site_response(C, sites, pga1100) +
self._compute_basin_response_term(C, sites.z2pt5))
# If it is necessary to ensure that Sa(T) >= PGA (see previous comment)
if get_pga_site:
idx = mean < np.log(pga_site)
mean[idx] = np.log(pga_site[idx])
stddevs = self._get_stddevs(C,
sites,
pga1100,
C_PGA['s_lny'],
stddev_types)
return mean, stddevs | ['def', 'get_mean_and_stddevs', '(', 'self', ',', 'sites', ',', 'rup', ',', 'dists', ',', 'imt', ',', 'stddev_types', ')', ':', '# extract dictionaries of coefficients specific to required', '# intensity measure type and for PGA', 'C', '=', 'self', '.', 'COEFFS', '[', 'imt', ']', 'C_PGA', '=', 'self', '.', 'COEFFS', '[', 'PGA', '(', ')', ']', '# compute median pga on rock (vs30=1100), needed for site response', '# term calculation', '# For spectral accelerations at periods between 0.0 and 0.25 s, Sa (T)', '# cannot be less than PGA on soil, therefore if the IMT is in this', '# period range it is necessary to calculate PGA on soil', 'if', 'imt', '.', 'name', '==', "'SA'", 'and', 'imt', '.', 'period', '>', '0.0', 'and', 'imt', '.', 'period', '<', '0.25', ':', 'get_pga_site', '=', 'True', 'else', ':', 'get_pga_site', '=', 'False', 'pga1100', ',', 'pga_site', '=', 'self', '.', '_compute_imt1100', '(', 'C_PGA', ',', 'sites', ',', 'rup', ',', 'dists', ',', 'get_pga_site', ')', '# Get the median ground motion', 'mean', '=', '(', 'self', '.', '_compute_magnitude_term', '(', 'C', ',', 'rup', '.', 'mag', ')', '+', 'self', '.', '_compute_distance_term', '(', 'C', ',', 'rup', ',', 'dists', ')', '+', 'self', '.', '_compute_style_of_faulting_term', '(', 'C', ',', 'rup', ')', '+', 'self', '.', '_compute_hanging_wall_term', '(', 'C', ',', 'rup', ',', 'dists', ')', '+', 'self', '.', '_compute_shallow_site_response', '(', 'C', ',', 'sites', ',', 'pga1100', ')', '+', 'self', '.', '_compute_basin_response_term', '(', 'C', ',', 'sites', '.', 'z2pt5', ')', ')', '# If it is necessary to ensure that Sa(T) >= PGA (see previous comment)', 'if', 'get_pga_site', ':', 'idx', '=', 'mean', '<', 'np', '.', 'log', '(', 'pga_site', ')', 'mean', '[', 'idx', ']', '=', 'np', '.', 'log', '(', 'pga_site', '[', 'idx', ']', ')', 'stddevs', '=', 'self', '.', '_get_stddevs', '(', 'C', ',', 'sites', ',', 'pga1100', ',', 'C_PGA', '[', "'s_lny'", ']', ',', 'stddev_types', ')', 'return', 'mean', ',', 'stddevs'] | See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | ['See', ':', 'meth', ':', 'superclass', 'method', '<', '.', 'base', '.', 'GroundShakingIntensityModel', '.', 'get_mean_and_stddevs', '>', 'for', 'spec', 'of', 'input', 'and', 'result', 'values', '.'] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L83-L127 |
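The final clamp (Sa(T) must not drop below PGA on soil at short periods) can be shown in isolation with plain numpy; the values below are invented.

import numpy as np

mean = np.log(np.array([0.05, 0.20, 0.12]))  # invented log median Sa(T)
pga_site = np.array([0.08, 0.10, 0.15])      # invented PGA on soil
idx = mean < np.log(pga_site)
mean[idx] = np.log(pga_site[idx])            # clamp, as in the code above
print(np.exp(mean))                          # [0.08 0.2  0.15]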
166 | elastic/elasticsearch-py | elasticsearch/client/xpack/security.py | SecurityClient.put_user | def put_user(self, username, body, params=None):
"""
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html>`_
:arg username: The username of the User
:arg body: The user to add
:arg refresh: If `true` (the default) then refresh the affected shards
to make this operation visible to search, if `wait_for` then wait
for a refresh to make this operation visible to search, if `false`
then do nothing with refreshes., valid choices are: 'true', 'false',
'wait_for'
"""
for param in (username, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request(
"PUT", _make_path("_security", "user", username), params=params, body=body
) | python | def put_user(self, username, body, params=None):
"""
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html>`_
:arg username: The username of the User
:arg body: The user to add
:arg refresh: If `true` (the default) then refresh the affected shards
to make this operation visible to search, if `wait_for` then wait
for a refresh to make this operation visible to search, if `false`
then do nothing with refreshes., valid choices are: 'true', 'false',
'wait_for'
"""
for param in (username, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request(
"PUT", _make_path("_security", "user", username), params=params, body=body
) | ['def', 'put_user', '(', 'self', ',', 'username', ',', 'body', ',', 'params', '=', 'None', ')', ':', 'for', 'param', 'in', '(', 'username', ',', 'body', ')', ':', 'if', 'param', 'in', 'SKIP_IN_PATH', ':', 'raise', 'ValueError', '(', '"Empty value passed for a required argument."', ')', 'return', 'self', '.', 'transport', '.', 'perform_request', '(', '"PUT"', ',', '_make_path', '(', '"_security"', ',', '"user"', ',', 'username', ')', ',', 'params', '=', 'params', ',', 'body', '=', 'body', ')'] | `<https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html>`_
:arg username: The username of the User
:arg body: The user to add
:arg refresh: If `true` (the default) then refresh the affected shards
to make this operation visible to search, if `wait_for` then wait
for a refresh to make this operation visible to search, if `false`
then do nothing with refreshes., valid choices are: 'true', 'false',
'wait_for' | ['<https', ':', '//', 'www', '.', 'elastic', '.', 'co', '/', 'guide', '/', 'en', '/', 'elasticsearch', '/', 'reference', '/', 'current', '/', 'security', '-', 'api', '-', 'put', '-', 'user', '.', 'html', '>', '_'] | train | https://github.com/elastic/elasticsearch-py/blob/2aab285c8f506f3863cbdaba3c90a685c510ba00/elasticsearch/client/xpack/security.py#L386-L403 |
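A usage sketch, assuming a 7.x-style client that exposes this API as es.security; the endpoint, username, password and role below are all placeholders.

from elasticsearch import Elasticsearch

es = Elasticsearch(['http://localhost:9200'])  # placeholder endpoint
es.security.put_user('jane', body={            # placeholder user details
    'password': 'change-me',
    'roles': ['superuser'],
    'full_name': 'Jane Doe',
}, params={'refresh': 'true'})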
167 | bhmm/bhmm | bhmm/_external/sklearn/mixture/gmm.py | _covar_mstep_spherical | def _covar_mstep_spherical(*args):
"""Performing the covariance M step for spherical cases"""
cv = _covar_mstep_diag(*args)
return np.tile(cv.mean(axis=1)[:, np.newaxis], (1, cv.shape[1])) | python | def _covar_mstep_spherical(*args):
"""Performing the covariance M step for spherical cases"""
cv = _covar_mstep_diag(*args)
return np.tile(cv.mean(axis=1)[:, np.newaxis], (1, cv.shape[1])) | ['def', '_covar_mstep_spherical', '(', '*', 'args', ')', ':', 'cv', '=', '_covar_mstep_diag', '(', '*', 'args', ')', 'return', 'np', '.', 'tile', '(', 'cv', '.', 'mean', '(', 'axis', '=', '1', ')', '[', ':', ',', 'np', '.', 'newaxis', ']', ',', '(', '1', ',', 'cv', '.', 'shape', '[', '1', ']', ')', ')'] | Performing the covariance M step for spherical cases | ['Performing', 'the', 'covariance', 'M', 'step', 'for', 'spherical', 'cases'] | train | https://github.com/bhmm/bhmm/blob/9804d18c2ddb684fb4d90b544cc209617a89ca9a/bhmm/_external/sklearn/mixture/gmm.py#L693-L696 |
168 | juju/charm-helpers | charmhelpers/core/kernel.py | modprobe | def modprobe(module, persist=True):
"""Load a kernel module and configure for auto-load on reboot."""
cmd = ['modprobe', module]
log('Loading kernel module %s' % module, level=INFO)
subprocess.check_call(cmd)
if persist:
persistent_modprobe(module) | python | def modprobe(module, persist=True):
"""Load a kernel module and configure for auto-load on reboot."""
cmd = ['modprobe', module]
log('Loading kernel module %s' % module, level=INFO)
subprocess.check_call(cmd)
if persist:
persistent_modprobe(module) | ['def', 'modprobe', '(', 'module', ',', 'persist', '=', 'True', ')', ':', 'cmd', '=', '[', "'modprobe'", ',', 'module', ']', 'log', '(', "'Loading kernel module %s'", '%', 'module', ',', 'level', '=', 'INFO', ')', 'subprocess', '.', 'check_call', '(', 'cmd', ')', 'if', 'persist', ':', 'persistent_modprobe', '(', 'module', ')'] | Load a kernel module and configure for auto-load on reboot. | ['Load', 'a', 'kernel', 'module', 'and', 'configure', 'for', 'auto', '-', 'load', 'on', 'reboot', '.'] | train | https://github.com/juju/charm-helpers/blob/aa785c40c3b7a8c69dbfbc7921d6b9f30142e171/charmhelpers/core/kernel.py#L42-L50 |
169 | saltstack/salt | salt/modules/kubernetesmod.py | create_namespace | def create_namespace(
name,
**kwargs):
'''
Creates a namespace with the specified name.
CLI Example:
salt '*' kubernetes.create_namespace salt
salt '*' kubernetes.create_namespace name=salt
'''
meta_obj = kubernetes.client.V1ObjectMeta(name=name)
body = kubernetes.client.V1Namespace(metadata=meta_obj)
body.metadata.name = name
cfg = _setup_conn(**kwargs)
try:
api_instance = kubernetes.client.CoreV1Api()
api_response = api_instance.create_namespace(body)
return api_response.to_dict()
except (ApiException, HTTPError) as exc:
if isinstance(exc, ApiException) and exc.status == 404:
return None
else:
log.exception(
'Exception when calling '
'CoreV1Api->create_namespace'
)
raise CommandExecutionError(exc)
finally:
_cleanup(**cfg) | python | def create_namespace(
name,
**kwargs):
'''
Creates a namespace with the specified name.
CLI Example:
salt '*' kubernetes.create_namespace salt
salt '*' kubernetes.create_namespace name=salt
'''
meta_obj = kubernetes.client.V1ObjectMeta(name=name)
body = kubernetes.client.V1Namespace(metadata=meta_obj)
body.metadata.name = name
cfg = _setup_conn(**kwargs)
try:
api_instance = kubernetes.client.CoreV1Api()
api_response = api_instance.create_namespace(body)
return api_response.to_dict()
except (ApiException, HTTPError) as exc:
if isinstance(exc, ApiException) and exc.status == 404:
return None
else:
log.exception(
'Exception when calling '
'CoreV1Api->create_namespace'
)
raise CommandExecutionError(exc)
finally:
_cleanup(**cfg) | ['def', 'create_namespace', '(', 'name', ',', '*', '*', 'kwargs', ')', ':', 'meta_obj', '=', 'kubernetes', '.', 'client', '.', 'V1ObjectMeta', '(', 'name', '=', 'name', ')', 'body', '=', 'kubernetes', '.', 'client', '.', 'V1Namespace', '(', 'metadata', '=', 'meta_obj', ')', 'body', '.', 'metadata', '.', 'name', '=', 'name', 'cfg', '=', '_setup_conn', '(', '*', '*', 'kwargs', ')', 'try', ':', 'api_instance', '=', 'kubernetes', '.', 'client', '.', 'CoreV1Api', '(', ')', 'api_response', '=', 'api_instance', '.', 'create_namespace', '(', 'body', ')', 'return', 'api_response', '.', 'to_dict', '(', ')', 'except', '(', 'ApiException', ',', 'HTTPError', ')', 'as', 'exc', ':', 'if', 'isinstance', '(', 'exc', ',', 'ApiException', ')', 'and', 'exc', '.', 'status', '==', '404', ':', 'return', 'None', 'else', ':', 'log', '.', 'exception', '(', "'Exception when calling '", "'CoreV1Api->create_namespace'", ')', 'raise', 'CommandExecutionError', '(', 'exc', ')', 'finally', ':', '_cleanup', '(', '*', '*', 'cfg', ')'] | Creates a namespace with the specified name.
CLI Example:
salt '*' kubernetes.create_namespace salt
salt '*' kubernetes.create_namespace name=salt | ['Creates', 'a', 'namespace', 'with', 'the', 'specified', 'name', '.'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/kubernetesmod.py#L1199-L1231 |
170 | chrisjsewell/jsonextended | jsonextended/edict.py | split_lists | def split_lists(d, split_keys, new_name='split',
check_length=True, deepcopy=True):
"""split_lists key:list pairs into dicts for each item in the lists
NB: will only split if all split_keys are present
Parameters
----------
d : dict
split_keys : list
keys to split
new_name : str
top level key for split items
check_length : bool
if true, raise error if any lists are of a different length
deepcopy: bool
deepcopy values
Examples
--------
>>> from pprint import pprint
>>> d = {'path_key':{'x':[1,2],'y':[3,4],'a':1}}
>>> new_d = split_lists(d,['x','y'])
>>> pprint(new_d)
{'path_key': {'a': 1, 'split': [{'x': 1, 'y': 3}, {'x': 2, 'y': 4}]}}
>>> split_lists(d,['x','a'])
Traceback (most recent call last):
...
ValueError: "a" data at the following path is not a list ('path_key',)
>>> d2 = {'path_key':{'x':[1,7],'y':[3,4,5]}}
>>> split_lists(d2,['x','y'])
Traceback (most recent call last):
...
ValueError: lists at the following path do not have the same size ('path_key',)
""" # noqa: E501
flattened = flatten2d(d)
new_d = {}
for key, value in flattened.items():
if set(split_keys).issubset(value.keys()):
# combine_d = {}
combine_d = []
sub_d = {}
length = None
for subkey, subvalue in value.items():
if subkey in split_keys:
if not isinstance(subvalue, list):
raise ValueError(
'"{0}" data at the following path is not a list '
'{1}'.format(subkey, key))
if check_length and length is not None:
if len(subvalue) != length:
raise ValueError(
'lists at the following path '
'do not have the same size {0}'.format(key))
if length is None:
combine_d = [{subkey: v} for v in subvalue]
else:
for item, val in zip(combine_d, subvalue):
item[subkey] = val
length = len(subvalue)
# new_combine = {k:{subkey:v}
# for k,v in enumerate(subvalue)}
# combine_d = merge([combine_d,new_combine])
else:
sub_d[subkey] = subvalue
try:
new_d[key] = merge([sub_d, {new_name: combine_d}])
except ValueError:
raise ValueError(
'split data key: {0}, already exists at '
'this level for {1}'.format(new_name, key))
else:
new_d[key] = value
return unflatten(new_d, deepcopy=deepcopy) | python | def split_lists(d, split_keys, new_name='split',
check_length=True, deepcopy=True):
"""split_lists key:list pairs into dicts for each item in the lists
NB: will only split if all split_keys are present
Parameters
----------
d : dict
split_keys : list
keys to split
new_name : str
top level key for split items
check_length : bool
if true, raise error if any lists are of a different length
deepcopy: bool
deepcopy values
Examples
--------
>>> from pprint import pprint
>>> d = {'path_key':{'x':[1,2],'y':[3,4],'a':1}}
>>> new_d = split_lists(d,['x','y'])
>>> pprint(new_d)
{'path_key': {'a': 1, 'split': [{'x': 1, 'y': 3}, {'x': 2, 'y': 4}]}}
>>> split_lists(d,['x','a'])
Traceback (most recent call last):
...
ValueError: "a" data at the following path is not a list ('path_key',)
>>> d2 = {'path_key':{'x':[1,7],'y':[3,4,5]}}
>>> split_lists(d2,['x','y'])
Traceback (most recent call last):
...
ValueError: lists at the following path do not have the same size ('path_key',)
""" # noqa: E501
flattened = flatten2d(d)
new_d = {}
for key, value in flattened.items():
if set(split_keys).issubset(value.keys()):
# combine_d = {}
combine_d = []
sub_d = {}
length = None
for subkey, subvalue in value.items():
if subkey in split_keys:
if not isinstance(subvalue, list):
raise ValueError(
'"{0}" data at the following path is not a list '
'{1}'.format(subkey, key))
if check_length and length is not None:
if len(subvalue) != length:
raise ValueError(
'lists at the following path '
'do not have the same size {0}'.format(key))
if length is None:
combine_d = [{subkey: v} for v in subvalue]
else:
for item, val in zip(combine_d, subvalue):
item[subkey] = val
length = len(subvalue)
# new_combine = {k:{subkey:v}
# for k,v in enumerate(subvalue)}
# combine_d = merge([combine_d,new_combine])
else:
sub_d[subkey] = subvalue
try:
new_d[key] = merge([sub_d, {new_name: combine_d}])
except ValueError:
raise ValueError(
'split data key: {0}, already exists at '
'this level for {1}'.format(new_name, key))
else:
new_d[key] = value
return unflatten(new_d, deepcopy=deepcopy) | ['def', 'split_lists', '(', 'd', ',', 'split_keys', ',', 'new_name', '=', "'split'", ',', 'check_length', '=', 'True', ',', 'deepcopy', '=', 'True', ')', ':', '# noqa: E501', 'flattened', '=', 'flatten2d', '(', 'd', ')', 'new_d', '=', '{', '}', 'for', 'key', ',', 'value', 'in', 'flattened', '.', 'items', '(', ')', ':', 'if', 'set', '(', 'split_keys', ')', '.', 'issubset', '(', 'value', '.', 'keys', '(', ')', ')', ':', '# combine_d = {}', 'combine_d', '=', '[', ']', 'sub_d', '=', '{', '}', 'length', '=', 'None', 'for', 'subkey', ',', 'subvalue', 'in', 'value', '.', 'items', '(', ')', ':', 'if', 'subkey', 'in', 'split_keys', ':', 'if', 'not', 'isinstance', '(', 'subvalue', ',', 'list', ')', ':', 'raise', 'ValueError', '(', '\'"{0}" data at the following path is not a list \'', "'{1}'", '.', 'format', '(', 'subkey', ',', 'key', ')', ')', 'if', 'check_length', 'and', 'length', 'is', 'not', 'None', ':', 'if', 'len', '(', 'subvalue', ')', '!=', 'length', ':', 'raise', 'ValueError', '(', "'lists at the following path '", "'do not have the same size {0}'", '.', 'format', '(', 'key', ')', ')', 'if', 'length', 'is', 'None', ':', 'combine_d', '=', '[', '{', 'subkey', ':', 'v', '}', 'for', 'v', 'in', 'subvalue', ']', 'else', ':', 'for', 'item', ',', 'val', 'in', 'zip', '(', 'combine_d', ',', 'subvalue', ')', ':', 'item', '[', 'subkey', ']', '=', 'val', 'length', '=', 'len', '(', 'subvalue', ')', '# new_combine = {k:{subkey:v}', '# for k,v in enumerate(subvalue)}', '# combine_d = merge([combine_d,new_combine])', 'else', ':', 'sub_d', '[', 'subkey', ']', '=', 'subvalue', 'try', ':', 'new_d', '[', 'key', ']', '=', 'merge', '(', '[', 'sub_d', ',', '{', 'new_name', ':', 'combine_d', '}', ']', ')', 'except', 'ValueError', ':', 'raise', 'ValueError', '(', "'split data key: {0}, already exists at '", "'this level for {1}'", '.', 'format', '(', 'new_name', ',', 'key', ')', ')', 'else', ':', 'new_d', '[', 'key', ']', '=', 'value', 'return', 'unflatten', '(', 'new_d', ',', 'deepcopy', '=', 'deepcopy', ')'] | split_lists key:list pairs into dicts for each item in the lists
NB: will only split if all split_keys are present
Parameters
----------
d : dict
split_keys : list
keys to split
new_name : str
top level key for split items
check_length : bool
if true, raise error if any lists are of a different length
deepcopy: bool
deepcopy values
Examples
--------
>>> from pprint import pprint
>>> d = {'path_key':{'x':[1,2],'y':[3,4],'a':1}}
>>> new_d = split_lists(d,['x','y'])
>>> pprint(new_d)
{'path_key': {'a': 1, 'split': [{'x': 1, 'y': 3}, {'x': 2, 'y': 4}]}}
>>> split_lists(d,['x','a'])
Traceback (most recent call last):
...
ValueError: "a" data at the following path is not a list ('path_key',)
>>> d2 = {'path_key':{'x':[1,7],'y':[3,4,5]}}
>>> split_lists(d2,['x','y'])
Traceback (most recent call last):
...
ValueError: lists at the following path do not have the same size ('path_key',) | ['split_lists', 'key', ':', 'list', 'pairs', 'into', 'dicts', 'for', 'each', 'item', 'in', 'the', 'lists', 'NB', ':', 'will', 'only', 'split', 'if', 'all', 'split_keys', 'are', 'present'] | train | https://github.com/chrisjsewell/jsonextended/blob/c3a7a880cc09789b3c61204265dcbb127be76c8a/jsonextended/edict.py#L1588-L1670 |
171 | mitsei/dlkit | dlkit/handcar/utilities.py | BankHierarchyUrls.children | def children(self, alias, bank_id):
"""
URL for getting or setting child relationships for the specified bank
:param alias:
:param bank_id:
:return:
"""
return self._root + self._safe_alias(alias) + '/child/ids/' + str(bank_id) | python | def children(self, alias, bank_id):
"""
URL for getting or setting child relationships for the specified bank
:param alias:
:param bank_id:
:return:
"""
return self._root + self._safe_alias(alias) + '/child/ids/' + str(bank_id) | ['def', 'children', '(', 'self', ',', 'alias', ',', 'bank_id', ')', ':', 'return', 'self', '.', '_root', '+', 'self', '.', '_safe_alias', '(', 'alias', ')', '+', "'/child/ids/'", '+', 'str', '(', 'bank_id', ')'] | URL for getting or setting child relationships for the specified bank
:param alias:
:param bank_id:
:return: | ['URL', 'for', 'getting', 'or', 'setting', 'child', 'relationships', 'for', 'the', 'specified', 'bank', ':', 'param', 'alias', ':', ':', 'param', 'bank_id', ':', ':', 'return', ':'] | train | https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/handcar/utilities.py#L47-L54 |
172 | leancloud/python-sdk | leancloud/user.py | User.unlink_from | def unlink_from(self, provider):
'''
Unlink from the specified third-party platform
'''
if type(provider) != str:
raise TypeError('input should be a string')
self.link_with(provider, None)
# self._sync_auth_data(provider)
return self | python | def unlink_from(self, provider):
'''
Unlink from the specified third-party platform
'''
if type(provider) != str:
raise TypeError('input should be a string')
self.link_with(provider, None)
# self._sync_auth_data(provider)
return self | ['def', 'unlink_from', '(', 'self', ',', 'provider', ')', ':', 'if', 'type', '(', 'provider', ')', '!=', 'str', ':', 'raise', 'TypeError', '(', "'input should be a string'", ')', 'self', '.', 'link_with', '(', 'provider', ',', 'None', ')', '# self._sync_auth_data(provider)', 'return', 'self'] | Unlink from the specified third-party platform | ['Unlink from the specified third-party platform'] | train | https://github.com/leancloud/python-sdk/blob/fea3240257ce65e6a32c7312a5cee1f94a51a587/leancloud/user.py#L210-L218
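A usage sketch, assuming the SDK is initialised and a user is logged in; 'weibo' is an example provider name.

import leancloud

user = leancloud.User.get_current()  # assumes a logged-in user
user.unlink_from('weibo')            # 'weibo' is an example provider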
173 | google/grr | grr/core/grr_response_core/lib/fingerprint.py | Fingerprinter.EvalGeneric | def EvalGeneric(self, hashers=None):
"""Causes the entire file to be hashed by the given hash functions.
This sets up a 'finger' for fingerprinting, where the entire file
is passed through a pre-defined (or user defined) set of hash functions.
Args:
hashers: An iterable of hash classes (e.g. out of hashlib) which will
be instantiated for use. If hashers is not provided, or is
provided as 'None', the default hashers will get used. To
invoke this without hashers, provide an empty list.
Returns:
Always True, as all files are 'generic' files.
"""
if hashers is None:
hashers = Fingerprinter.GENERIC_HASH_CLASSES
hashfuncs = [x() for x in hashers]
finger = Finger(hashfuncs, [Range(0, self.filelength)], {'name': 'generic'})
self.fingers.append(finger)
return True | python | def EvalGeneric(self, hashers=None):
"""Causes the entire file to be hashed by the given hash functions.
This sets up a 'finger' for fingerprinting, where the entire file
is passed through a pre-defined (or user defined) set of hash functions.
Args:
hashers: An iterable of hash classes (e.g. out of hashlib) which will
be instantiated for use. If hashers is not provided, or is
provided as 'None', the default hashers will get used. To
invoke this without hashers, provide an empty list.
Returns:
Always True, as all files are 'generic' files.
"""
if hashers is None:
hashers = Fingerprinter.GENERIC_HASH_CLASSES
hashfuncs = [x() for x in hashers]
finger = Finger(hashfuncs, [Range(0, self.filelength)], {'name': 'generic'})
self.fingers.append(finger)
return True | ['def', 'EvalGeneric', '(', 'self', ',', 'hashers', '=', 'None', ')', ':', 'if', 'hashers', 'is', 'None', ':', 'hashers', '=', 'Fingerprinter', '.', 'GENERIC_HASH_CLASSES', 'hashfuncs', '=', '[', 'x', '(', ')', 'for', 'x', 'in', 'hashers', ']', 'finger', '=', 'Finger', '(', 'hashfuncs', ',', '[', 'Range', '(', '0', ',', 'self', '.', 'filelength', ')', ']', ',', '{', "'name'", ':', "'generic'", '}', ')', 'self', '.', 'fingers', '.', 'append', '(', 'finger', ')', 'return', 'True'] | Causes the entire file to be hashed by the given hash functions.
This sets up a 'finger' for fingerprinting, where the entire file
is passed through a pre-defined (or user defined) set of hash functions.
Args:
hashers: An iterable of hash classes (e.g. out of hashlib) which will
be instantiated for use. If hashers is not provided, or is
provided as 'None', the default hashers will get used. To
invoke this without hashers, provide an empty list.
Returns:
Always True, as all files are 'generic' files. | ['Causes', 'the', 'entire', 'file', 'to', 'be', 'hashed', 'by', 'the', 'given', 'hash', 'functions', '.'] | train | https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/core/grr_response_core/lib/fingerprint.py#L241-L261 |
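A whole-file fingerprint sketch, assuming the usual companions in this module (a Fingerprinter constructor taking a seekable file object and a HashIt() that evaluates the registered fingers); the import path is inferred from the file location above.

import hashlib

from grr_response_core.lib.fingerprint import Fingerprinter  # path assumed

with open('/bin/ls', 'rb') as f:
    fp = Fingerprinter(f)
    fp.EvalGeneric(hashers=[hashlib.sha256])  # one SHA-256 over the file
    print(fp.HashIt())                        # list of finger results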
174 | dropbox/pyannotate | pyannotate_tools/fixes/fix_annotate_json.py | count_args | def count_args(node, results):
# type: (Node, Dict[str, Base]) -> Tuple[int, bool, bool, bool]
"""Count arguments and check for self and *args, **kwds.
Return (count, selfish, star, starstar) where:
- count is total number of args (including *args, **kwds)
- selfish is True if the initial arg is named 'self' or 'cls'
- star is True iff *args is found
- starstar is True iff **kwds is found
"""
count = 0
selfish = False
star = False
starstar = False
args = results.get('args')
if isinstance(args, Node):
children = args.children
elif isinstance(args, Leaf):
children = [args]
else:
children = []
# Interpret children according to the following grammar:
# (('*'|'**')? NAME ['=' expr] ','?)*
skip = False
previous_token_is_star = False
for child in children:
if skip:
skip = False
elif isinstance(child, Leaf):
# A single '*' indicates the rest of the arguments are keyword only
# and shouldn't be counted as a `*`.
if child.type == token.STAR:
previous_token_is_star = True
elif child.type == token.DOUBLESTAR:
starstar = True
elif child.type == token.NAME:
if count == 0:
if child.value in ('self', 'cls'):
selfish = True
count += 1
if previous_token_is_star:
star = True
elif child.type == token.EQUAL:
skip = True
if child.type != token.STAR:
previous_token_is_star = False
return count, selfish, star, starstar | python | def count_args(node, results):
# type: (Node, Dict[str, Base]) -> Tuple[int, bool, bool, bool]
"""Count arguments and check for self and *args, **kwds.
Return (count, selfish, star, starstar) where:
- count is total number of args (including *args, **kwds)
- selfish is True if the initial arg is named 'self' or 'cls'
- star is True iff *args is found
- starstar is True iff **kwds is found
"""
count = 0
selfish = False
star = False
starstar = False
args = results.get('args')
if isinstance(args, Node):
children = args.children
elif isinstance(args, Leaf):
children = [args]
else:
children = []
# Interpret children according to the following grammar:
# (('*'|'**')? NAME ['=' expr] ','?)*
skip = False
previous_token_is_star = False
for child in children:
if skip:
skip = False
elif isinstance(child, Leaf):
# A single '*' indicates the rest of the arguments are keyword only
# and shouldn't be counted as a `*`.
if child.type == token.STAR:
previous_token_is_star = True
elif child.type == token.DOUBLESTAR:
starstar = True
elif child.type == token.NAME:
if count == 0:
if child.value in ('self', 'cls'):
selfish = True
count += 1
if previous_token_is_star:
star = True
elif child.type == token.EQUAL:
skip = True
if child.type != token.STAR:
previous_token_is_star = False
return count, selfish, star, starstar | ['def', 'count_args', '(', 'node', ',', 'results', ')', ':', '# type: (Node, Dict[str, Base]) -> Tuple[int, bool, bool, bool]', 'count', '=', '0', 'selfish', '=', 'False', 'star', '=', 'False', 'starstar', '=', 'False', 'args', '=', 'results', '.', 'get', '(', "'args'", ')', 'if', 'isinstance', '(', 'args', ',', 'Node', ')', ':', 'children', '=', 'args', '.', 'children', 'elif', 'isinstance', '(', 'args', ',', 'Leaf', ')', ':', 'children', '=', '[', 'args', ']', 'else', ':', 'children', '=', '[', ']', '# Interpret children according to the following grammar:', "# (('*'|'**')? NAME ['=' expr] ','?)*", 'skip', '=', 'False', 'previous_token_is_star', '=', 'False', 'for', 'child', 'in', 'children', ':', 'if', 'skip', ':', 'skip', '=', 'False', 'elif', 'isinstance', '(', 'child', ',', 'Leaf', ')', ':', "# A single '*' indicates the rest of the arguments are keyword only", "# and shouldn't be counted as a `*`.", 'if', 'child', '.', 'type', '==', 'token', '.', 'STAR', ':', 'previous_token_is_star', '=', 'True', 'elif', 'child', '.', 'type', '==', 'token', '.', 'DOUBLESTAR', ':', 'starstar', '=', 'True', 'elif', 'child', '.', 'type', '==', 'token', '.', 'NAME', ':', 'if', 'count', '==', '0', ':', 'if', 'child', '.', 'value', 'in', '(', "'self'", ',', "'cls'", ')', ':', 'selfish', '=', 'True', 'count', '+=', '1', 'if', 'previous_token_is_star', ':', 'star', '=', 'True', 'elif', 'child', '.', 'type', '==', 'token', '.', 'EQUAL', ':', 'skip', '=', 'True', 'if', 'child', '.', 'type', '!=', 'token', '.', 'STAR', ':', 'previous_token_is_star', '=', 'False', 'return', 'count', ',', 'selfish', ',', 'star', ',', 'starstar'] | Count arguments and check for self and *args, **kwds.
Return (count, selfish, star, starstar) where:
- count is total number of args (including *args, **kwds)
- selfish is True if the initial arg is named 'self' or 'cls'
- star is True iff *args is found
- starstar is True iff **kwds is found | ['Count', 'arguments', 'and', 'check', 'for', 'self', 'and', '*', 'args', '**', 'kwds', '.'] | train | https://github.com/dropbox/pyannotate/blob/d128c76b8a86f208e5c78716f2a917003650cebc/pyannotate_tools/fixes/fix_annotate_json.py#L101-L147 |
175 | alexmojaki/outdated | outdated/utils.py | cache_file | def cache_file(package, mode):
"""
Yields a file-like object for the purpose of writing to or
reading from the cache.
The code:
with cache_file(...) as f:
# do stuff with f
is guaranteed to convert any exceptions to warnings (*),
both in the cache_file(...) call and the 'do stuff with f'
block.
The file is automatically closed upon exiting the with block.
If getting an actual file fails, yields a DummyFile.
:param package: the name of the package being checked as a string
:param mode: the mode to open the file in, either 'r' or 'w'
"""
f = DummyFile()
# We have to wrap the whole function body in this block to guarantee
# catching all exceptions. In particular the yield needs to be inside
# to catch exceptions coming from the with block.
with exception_to_warning('use cache while checking for outdated package',
OutdatedCacheFailedWarning):
try:
cache_path = os.path.join(tempfile.gettempdir(),
get_cache_filename(package))
if mode == 'w' or os.path.exists(cache_path):
f = open(cache_path, mode)
finally:
# Putting the yield in the finally section ensures that exactly
# one thing is yielded once, otherwise @contextmanager would
# raise an exception.
with f: # closes the file afterwards
yield f | python | def cache_file(package, mode):
"""
Yields a file-like object for the purpose of writing to or
reading from the cache.
The code:
with cache_file(...) as f:
# do stuff with f
is guaranteed to convert any exceptions to warnings (*),
both in the cache_file(...) call and the 'do stuff with f'
block.
The file is automatically closed upon exiting the with block.
If getting an actual file fails, yields a DummyFile.
:param package: the name of the package being checked as a string
:param mode: the mode to open the file in, either 'r' or 'w'
"""
f = DummyFile()
# We have to wrap the whole function body in this block to guarantee
# catching all exceptions. In particular the yield needs to be inside
# to catch exceptions coming from the with block.
with exception_to_warning('use cache while checking for outdated package',
OutdatedCacheFailedWarning):
try:
cache_path = os.path.join(tempfile.gettempdir(),
get_cache_filename(package))
if mode == 'w' or os.path.exists(cache_path):
f = open(cache_path, mode)
finally:
# Putting the yield in the finally section ensures that exactly
# one thing is yielded once, otherwise @contextmanager would
# raise an exception.
with f: # closes the file afterwards
yield f | ['def', 'cache_file', '(', 'package', ',', 'mode', ')', ':', 'f', '=', 'DummyFile', '(', ')', '# We have to wrap the whole function body in this block to guarantee', '# catching all exceptions. In particular the yield needs to be inside', '# to catch exceptions coming from the with block.', 'with', 'exception_to_warning', '(', "'use cache while checking for outdated package'", ',', 'OutdatedCacheFailedWarning', ')', ':', 'try', ':', 'cache_path', '=', 'os', '.', 'path', '.', 'join', '(', 'tempfile', '.', 'gettempdir', '(', ')', ',', 'get_cache_filename', '(', 'package', ')', ')', 'if', 'mode', '==', "'w'", 'or', 'os', '.', 'path', '.', 'exists', '(', 'cache_path', ')', ':', 'f', '=', 'open', '(', 'cache_path', ',', 'mode', ')', 'finally', ':', '# Putting the yield in the finally section ensures that exactly', '# one thing is yielded once, otherwise @contextmanager would', '# raise an exception.', 'with', 'f', ':', '# closes the file afterards', 'yield', 'f'] | Yields a file-like object for the purpose of writing to or
reading from the cache.
The code:
with cache_file(...) as f:
# do stuff with f
is guaranteed to convert any exceptions to warnings (*),
both in the cache_file(...) call and the 'do stuff with f'
block.
The file is automatically closed upon exiting the with block.
If getting an actual file fails, yields a DummyFile.
:param package: the name of the package being checked as a string
:param mode: the mode to open the file in, either 'r' or 'w' | ['Yields', 'a', 'file', '-', 'like', 'object', 'for', 'the', 'purpose', 'of', 'writing', 'to', 'or', 'reading', 'from', 'the', 'cache', '.'] | train | https://github.com/alexmojaki/outdated/blob/565bb3fe1adc30da5e50249912cd2ac494662659/outdated/utils.py#L37-L76 |
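Both directions degrade to a DummyFile and a warning instead of raising, so callers can use the context manager unconditionally; the payload string below is only a stand-in, not the real cache format.

with cache_file('requests', 'r') as f:
    cached = f.read()            # '' if there is no usable cache

with cache_file('requests', 'w') as f:
    f.write('stand-in payload')  # real cache format not shown here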
176 | shaypal5/utilitime | utilitime/timestamp/timestamp.py | timestamp_to_local_time_str | def timestamp_to_local_time_str(
timestamp, timezone_name, fmt="yyyy-MM-dd HH:mm:ss"):
"""Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string.
"""
localized_d = timestamp_to_local_time(timestamp, timezone_name)
localized_datetime_str = localized_d.format_datetime(fmt)
return localized_datetime_str | python | def timestamp_to_local_time_str(
timestamp, timezone_name, fmt="yyyy-MM-dd HH:mm:ss"):
"""Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string.
"""
localized_d = timestamp_to_local_time(timestamp, timezone_name)
localized_datetime_str = localized_d.format_datetime(fmt)
return localized_datetime_str | ['def', 'timestamp_to_local_time_str', '(', 'timestamp', ',', 'timezone_name', ',', 'fmt', '=', '"yyyy-MM-dd HH:mm:ss"', ')', ':', 'localized_d', '=', 'timestamp_to_local_time', '(', 'timestamp', ',', 'timezone_name', ')', 'localized_datetime_str', '=', 'localized_d', '.', 'format_datetime', '(', 'fmt', ')', 'return', 'localized_datetime_str'] | Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string. | ['Convert', 'epoch', 'timestamp', 'to', 'a', 'localized', 'datetime', 'string', '.'] | train | https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/timestamp/timestamp.py#L35-L55 |
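A usage sketch; the epoch value corresponds to 2018-01-01 00:00 UTC and 'Europe/Paris' is an arbitrary zone choice.

s = timestamp_to_local_time_str(1514764800, 'Europe/Paris')
print(s)  # expected '2018-01-01 01:00:00' (CET is UTC+1 in January)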
177 | django-leonardo/django-leonardo | leonardo/utils/compress_patch.py | output | def output(self, mode='file', forced=False, context=None):
"""
The general output method, override in subclass if you need to do
any custom modification. Calls other mode specific methods or simply
returns the content directly.
"""
output = '\n'.join(self.filter_input(forced, context=context))
if not output:
return ''
if settings.COMPRESS_ENABLED or forced:
filtered_output = self.filter_output(output)
return self.handle_output(mode, filtered_output, forced)
return output | python | def output(self, mode='file', forced=False, context=None):
"""
The general output method, override in subclass if you need to do
any custom modification. Calls other mode specific methods or simply
returns the content directly.
"""
output = '\n'.join(self.filter_input(forced, context=context))
if not output:
return ''
if settings.COMPRESS_ENABLED or forced:
filtered_output = self.filter_output(output)
return self.handle_output(mode, filtered_output, forced)
return output | ['def', 'output', '(', 'self', ',', 'mode', '=', "'file'", ',', 'forced', '=', 'False', ',', 'context', '=', 'None', ')', ':', 'output', '=', "'\\n'", '.', 'join', '(', 'self', '.', 'filter_input', '(', 'forced', ',', 'context', '=', 'context', ')', ')', 'if', 'not', 'output', ':', 'return', "''", 'if', 'settings', '.', 'COMPRESS_ENABLED', 'or', 'forced', ':', 'filtered_output', '=', 'self', '.', 'filter_output', '(', 'output', ')', 'return', 'self', '.', 'handle_output', '(', 'mode', ',', 'filtered_output', ',', 'forced', ')', 'return', 'output'] | The general output method, override in subclass if you need to do
any custom modification. Calls other mode specific methods or simply
returns the content directly. | ['The', 'general', 'output', 'method', 'override', 'in', 'subclass', 'if', 'you', 'need', 'to', 'do', 'any', 'custom', 'modification', '.', 'Calls', 'other', 'mode', 'specific', 'methods', 'or', 'simply', 'returns', 'the', 'content', 'directly', '.'] | train | https://github.com/django-leonardo/django-leonardo/blob/4b933e1792221a13b4028753d5f1d3499b0816d4/leonardo/utils/compress_patch.py#L165-L180 |
178 | shi-cong/PYSTUDY | PYSTUDY/html_parserlib.py | ReParser.replace | def replace(self, re_text, replace_str, text):
"""
Replace matches of a regular expression
:param re_text: regular expression
:param replace_str: replacement string
:param text: text to search
:return: the string after replacement
"""
return re.sub(re_text, replace_str, text) | python | def replace(self, re_text, replace_str, text):
"""
Replace matches of a regular expression
:param re_text: regular expression
:param replace_str: replacement string
:param text: text to search
:return: the string after replacement
"""
return re.sub(re_text, replace_str, text) | ['def', 'replace', '(', 'self', ',', 're_text', ',', 'replace_str', ',', 'text', ')', ':', 'return', 're', '.', 'sub', '(', 're_text', ',', 'replace_str', ',', 'text', ')'] | Replace matches of a regular expression
:param re_text: regular expression
:param replace_str: replacement string
:param text: text to search
:return: the string after replacement | ['Replace matches of a regular expression', ':', 'param', 're_text', ':', 'regular expression', ':', 'param', 'replace_str', ':', 'replacement string', ':', 'param', 'text', ':', 'text to search', ':', 'return', ':', 'the string after replacement'] | train | https://github.com/shi-cong/PYSTUDY/blob/c8da7128ea18ecaa5849f2066d321e70d6f97f70/PYSTUDY/html_parserlib.py#L53-L61
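A quick check of the wrapper, assuming ReParser takes no constructor arguments; the pattern and inputs are arbitrary, and the call is equivalent to re.sub directly.

parser = ReParser()
print(parser.replace(r'\d+', 'N', 'a1b22c'))  # -> aNbNc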
179 | esterhui/pypu | scripts/build_json_from_gps.py | parsePositionFile | def parsePositionFile(filename):
"""
Parses Android GPS logger csv file and returns list of dictionaries
"""
l=[]
with open( filename, "rb" ) as theFile:
reader = csv.DictReader( theFile )
for line in reader:
# Convert the time string to something
# a bit more human readable
mytime=dateparser.parse(line['time'])
line['strtime']=mytime.strftime("%d %b %Y, %H:%M UTC")
l.append(line)
return l | python | def parsePositionFile(filename):
"""
Parses Android GPS logger csv file and returns list of dictionaries
"""
l=[]
with open( filename, "rb" ) as theFile:
reader = csv.DictReader( theFile )
for line in reader:
# Convert the time string to something
# a bit more human readable
mytime=dateparser.parse(line['time'])
line['strtime']=mytime.strftime("%d %b %Y, %H:%M UTC")
l.append(line)
return l | ['def', 'parsePositionFile', '(', 'filename', ')', ':', 'l', '=', '[', ']', 'with', 'open', '(', 'filename', ',', '"rb"', ')', 'as', 'theFile', ':', 'reader', '=', 'csv', '.', 'DictReader', '(', 'theFile', ')', 'for', 'line', 'in', 'reader', ':', '# Convert the time string to something', '# a bit more human readable', 'mytime', '=', 'dateparser', '.', 'parse', '(', 'line', '[', "'time'", ']', ')', 'line', '[', "'strtime'", ']', '=', 'mytime', '.', 'strftime', '(', '"%d %b %Y, %H:%M UTC"', ')', 'l', '.', 'append', '(', 'line', ')', 'return', 'l'] | Parses Android GPS logger csv file and returns list of dictionaries | ['Parses', 'Android', 'GPS', 'logger', 'csv', 'file', 'and', 'returns', 'list', 'of', 'dictionaries'] | train | https://github.com/esterhui/pypu/blob/cc3e259d59f024c2c4c0fbb9c8a1547e51de75ec/scripts/build_json_from_gps.py#L154-L167 |
180 | pkgw/pwkit | pwkit/fk10.py | Calculator.set_trapezoidal_integration | def set_trapezoidal_integration(self, n):
"""Set the code to use trapezoidal integration.
**Call signature**
*n*
Use this many nodes
Returns
*self* for convenience in chaining.
"""
if not (n >= 2):
raise ValueError('must have n >= 2; got %r' % (n,))
self.in_vals[IN_VAL_INTEG_METH] = n + 1
return self | python | def set_trapezoidal_integration(self, n):
"""Set the code to use trapezoidal integration.
**Call signature**
*n*
Use this many nodes
Returns
*self* for convenience in chaining.
"""
if not (n >= 2):
raise ValueError('must have n >= 2; got %r' % (n,))
self.in_vals[IN_VAL_INTEG_METH] = n + 1
return self | ['def', 'set_trapezoidal_integration', '(', 'self', ',', 'n', ')', ':', 'if', 'not', '(', 'n', '>=', '2', ')', ':', 'raise', 'ValueError', '(', "'must have n >= 2; got %r'", '%', '(', 'n', ',', ')', ')', 'self', '.', 'in_vals', '[', 'IN_VAL_INTEG_METH', ']', '=', 'n', '+', '1', 'return', 'self'] | Set the code to use trapezoidal integration.
**Call signature**
*n*
Use this many nodes
Returns
*self* for convenience in chaining. | ['Set', 'the', 'code', 'to', 'use', 'trapezoidal', 'integration', '.'] | train | https://github.com/pkgw/pwkit/blob/d40957a1c3d2ea34e7ceac2267ee9635135f2793/pwkit/fk10.py#L672-L687 |
181 | svenevs/exhale | exhale/graph.py | ExhaleRoot.generateNamespaceChildrenString | def generateNamespaceChildrenString(self, nspace):
'''
Helper method for :func:`~exhale.graph.ExhaleRoot.generateSingleNamespace`, and
:func:`~exhale.graph.ExhaleRoot.generateFileNodeDocuments`. Builds the
body text for the namespace node document that links to all of the child
namespaces, structs, classes, functions, typedefs, unions, and variables
associated with this namespace.
:Parameters:
``nspace`` (ExhaleNode)
The namespace node we are generating the body text for.
:Return (str):
The string to be written to the namespace node's reStructuredText document.
'''
# sort the children
nsp_namespaces = []
nsp_nested_class_like = []
nsp_enums = []
nsp_functions = []
nsp_typedefs = []
nsp_unions = []
nsp_variables = []
for child in nspace.children:
# Skip children whose names were requested to be explicitly ignored.
should_exclude = False
for exclude in configs._compiled_listing_exclude:
if exclude.match(child.name):
should_exclude = True
if should_exclude:
continue
if child.kind == "namespace":
nsp_namespaces.append(child)
elif child.kind == "struct" or child.kind == "class":
child.findNestedClassLike(nsp_nested_class_like)
child.findNestedEnums(nsp_enums)
child.findNestedUnions(nsp_unions)
elif child.kind == "enum":
nsp_enums.append(child)
elif child.kind == "function":
nsp_functions.append(child)
elif child.kind == "typedef":
nsp_typedefs.append(child)
elif child.kind == "union":
nsp_unions.append(child)
elif child.kind == "variable":
nsp_variables.append(child)
# generate their headings if they exist (no Defines...that's not a C++ thing...)
children_stream = StringIO()
self.generateSortedChildListString(children_stream, "Namespaces", nsp_namespaces)
self.generateSortedChildListString(children_stream, "Classes", nsp_nested_class_like)
self.generateSortedChildListString(children_stream, "Enums", nsp_enums)
self.generateSortedChildListString(children_stream, "Functions", nsp_functions)
self.generateSortedChildListString(children_stream, "Typedefs", nsp_typedefs)
self.generateSortedChildListString(children_stream, "Unions", nsp_unions)
self.generateSortedChildListString(children_stream, "Variables", nsp_variables)
# read out the buffer contents, close it and return the desired string
children_string = children_stream.getvalue()
children_stream.close()
return children_string | python | def generateNamespaceChildrenString(self, nspace):
'''
Helper method for :func:`~exhale.graph.ExhaleRoot.generateSingleNamespace`, and
:func:`~exhale.graph.ExhaleRoot.generateFileNodeDocuments`. Builds the
body text for the namespace node document that links to all of the child
namespaces, structs, classes, functions, typedefs, unions, and variables
associated with this namespace.
:Parameters:
``nspace`` (ExhaleNode)
The namespace node we are generating the body text for.
:Return (str):
The string to be written to the namespace node's reStructuredText document.
'''
# sort the children
nsp_namespaces = []
nsp_nested_class_like = []
nsp_enums = []
nsp_functions = []
nsp_typedefs = []
nsp_unions = []
nsp_variables = []
for child in nspace.children:
# Skip children whose names were requested to be explicitly ignored.
should_exclude = False
for exclude in configs._compiled_listing_exclude:
if exclude.match(child.name):
should_exclude = True
if should_exclude:
continue
if child.kind == "namespace":
nsp_namespaces.append(child)
elif child.kind == "struct" or child.kind == "class":
child.findNestedClassLike(nsp_nested_class_like)
child.findNestedEnums(nsp_enums)
child.findNestedUnions(nsp_unions)
elif child.kind == "enum":
nsp_enums.append(child)
elif child.kind == "function":
nsp_functions.append(child)
elif child.kind == "typedef":
nsp_typedefs.append(child)
elif child.kind == "union":
nsp_unions.append(child)
elif child.kind == "variable":
nsp_variables.append(child)
# generate their headings if they exist (no Defines...that's not a C++ thing...)
children_stream = StringIO()
self.generateSortedChildListString(children_stream, "Namespaces", nsp_namespaces)
self.generateSortedChildListString(children_stream, "Classes", nsp_nested_class_like)
self.generateSortedChildListString(children_stream, "Enums", nsp_enums)
self.generateSortedChildListString(children_stream, "Functions", nsp_functions)
self.generateSortedChildListString(children_stream, "Typedefs", nsp_typedefs)
self.generateSortedChildListString(children_stream, "Unions", nsp_unions)
self.generateSortedChildListString(children_stream, "Variables", nsp_variables)
# read out the buffer contents, close it and return the desired string
children_string = children_stream.getvalue()
children_stream.close()
return children_string | ['def', 'generateNamespaceChildrenString', '(', 'self', ',', 'nspace', ')', ':', '# sort the children', 'nsp_namespaces', '=', '[', ']', 'nsp_nested_class_like', '=', '[', ']', 'nsp_enums', '=', '[', ']', 'nsp_functions', '=', '[', ']', 'nsp_typedefs', '=', '[', ']', 'nsp_unions', '=', '[', ']', 'nsp_variables', '=', '[', ']', 'for', 'child', 'in', 'nspace', '.', 'children', ':', '# Skip children whose names were requested to be explicitly ignored.', 'should_exclude', '=', 'False', 'for', 'exclude', 'in', 'configs', '.', '_compiled_listing_exclude', ':', 'if', 'exclude', '.', 'match', '(', 'child', '.', 'name', ')', ':', 'should_exclude', '=', 'True', 'if', 'should_exclude', ':', 'continue', 'if', 'child', '.', 'kind', '==', '"namespace"', ':', 'nsp_namespaces', '.', 'append', '(', 'child', ')', 'elif', 'child', '.', 'kind', '==', '"struct"', 'or', 'child', '.', 'kind', '==', '"class"', ':', 'child', '.', 'findNestedClassLike', '(', 'nsp_nested_class_like', ')', 'child', '.', 'findNestedEnums', '(', 'nsp_enums', ')', 'child', '.', 'findNestedUnions', '(', 'nsp_unions', ')', 'elif', 'child', '.', 'kind', '==', '"enum"', ':', 'nsp_enums', '.', 'append', '(', 'child', ')', 'elif', 'child', '.', 'kind', '==', '"function"', ':', 'nsp_functions', '.', 'append', '(', 'child', ')', 'elif', 'child', '.', 'kind', '==', '"typedef"', ':', 'nsp_typedefs', '.', 'append', '(', 'child', ')', 'elif', 'child', '.', 'kind', '==', '"union"', ':', 'nsp_unions', '.', 'append', '(', 'child', ')', 'elif', 'child', '.', 'kind', '==', '"variable"', ':', 'nsp_variables', '.', 'append', '(', 'child', ')', "# generate their headings if they exist (no Defines...that's not a C++ thing...)", 'children_stream', '=', 'StringIO', '(', ')', 'self', '.', 'generateSortedChildListString', '(', 'children_stream', ',', '"Namespaces"', ',', 'nsp_namespaces', ')', 'self', '.', 'generateSortedChildListString', '(', 'children_stream', ',', '"Classes"', ',', 'nsp_nested_class_like', ')', 'self', '.', 'generateSortedChildListString', '(', 'children_stream', ',', '"Enums"', ',', 'nsp_enums', ')', 'self', '.', 'generateSortedChildListString', '(', 'children_stream', ',', '"Functions"', ',', 'nsp_functions', ')', 'self', '.', 'generateSortedChildListString', '(', 'children_stream', ',', '"Typedefs"', ',', 'nsp_typedefs', ')', 'self', '.', 'generateSortedChildListString', '(', 'children_stream', ',', '"Unions"', ',', 'nsp_unions', ')', 'self', '.', 'generateSortedChildListString', '(', 'children_stream', ',', '"Variables"', ',', 'nsp_variables', ')', '# read out the buffer contents, close it and return the desired string', 'children_string', '=', 'children_stream', '.', 'getvalue', '(', ')', 'children_stream', '.', 'close', '(', ')', 'return', 'children_string'] | Helper method for :func:`~exhale.graph.ExhaleRoot.generateSingleNamespace`, and
:func:`~exhale.graph.ExhaleRoot.generateFileNodeDocuments`. Builds the
body text for the namespace node document that links to all of the child
namespaces, structs, classes, functions, typedefs, unions, and variables
associated with this namespace.
:Parameters:
``nspace`` (ExhaleNode)
The namespace node we are generating the body text for.
:Return (str):
The string to be written to the namespace node's reStructuredText document. | ['Helper', 'method', 'for', ':', 'func', ':', '~exhale', '.', 'graph', '.', 'ExhaleRoot', '.', 'generateSingleNamespace', 'and', ':', 'func', ':', '~exhale', '.', 'graph', '.', 'ExhaleRoot', '.', 'generateFileNodeDocuments', '.', 'Builds', 'the', 'body', 'text', 'for', 'the', 'namespace', 'node', 'document', 'that', 'links', 'to', 'all', 'of', 'the', 'child', 'namespaces', 'structs', 'classes', 'functions', 'typedefs', 'unions', 'and', 'variables', 'associated', 'with', 'this', 'namespace', '.'] | train | https://github.com/svenevs/exhale/blob/fe7644829057af622e467bb529db6c03a830da99/exhale/graph.py#L2730-L2791 |
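The helper above buckets namespace children by Doxygen kind and then renders one sorted heading per non-empty bucket into a StringIO buffer. A minimal standalone sketch of that pattern, assuming only objects with ``kind`` and ``name`` attributes (the node type and kind set here are stand-ins, not exhale's actual API):

from io import StringIO

def children_body(children):
    # Bucket children by kind; kinds without a heading are silently skipped.
    buckets = {"Namespaces": [], "Classes": [], "Functions": []}
    heading_for = {"namespace": "Namespaces", "class": "Classes",
                   "struct": "Classes", "function": "Functions"}
    for child in children:
        heading = heading_for.get(child.kind)
        if heading is not None:
            buckets[heading].append(child)
    # Emit a heading only when its bucket is non-empty, mirroring
    # generateSortedChildListString, then read the buffer out and close it.
    stream = StringIO()
    for heading, nodes in buckets.items():
        if nodes:
            stream.write(heading + "\n")
            for node in sorted(nodes, key=lambda n: n.name):
                stream.write("- " + node.name + "\n")
    body = stream.getvalue()
    stream.close()
    return body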
182 | pkgw/pwkit | pwkit/ndshow_gtk3.py | cycle | def cycle(arrays, descs=None, cadence=0.6, toworlds=None,
drawoverlay=None, yflip=False, tostatuses=None, run_main=True,
save_after_viewing=None):
"""Interactively display a series of 2D data arrays.
arrays
An iterable of 2D arrays (a 3D array works).
descs
An iterable of text descriptions, one for each array
cadence
The time delay before the next array is shown, in seconds.
tostatuses
An iterable of functions that convert cursor positions to a textual
status output corresponding to that position. FIXME details needed.
toworlds
An iterable of functions that convert cursor positions to a
latitude/longitude pair that is displayed in the status output.
The `tostatuses` keyword is a more generic version of this.
FIXME details needed.
drawoverlay
An optional function that draws an overlay on the display after
the underlying data image is presented. FIXME details needed.
yflip
If true, have the numerical *y* coordinates have 0 refer to the
bottom of the image. Note that the data array is still drawn such
that its first row appears at the top!
run_main
If true, run the Gtk mainloop explicitly so that the function does
not return until the window is closed. If false, no mainloop is run.
If the application happens to already be running a mainloop in the
background, the window will appear and the user will be able to
interact with it while this thread continues executing.
save_after_viewing
If set to a string containing an integer percent-formatting specifier,
the data will be written to a series of PNG files after the window is
closed.
"""
n = len(arrays)
amin = amax = h = w = None
if toworlds is not None and tostatuses is not None:
raise ValueError('only one of "toworlds" and "tostatuses" may be given')
if descs is None:
descs = [''] * n
for array in arrays:
thish, thisw = array.shape
thismin, thismax = array.min(), array.max()
if not np.isfinite(thismin):
thismin = array[np.ma.where(np.isfinite(array))].min()
if not np.isfinite(thismax):
thismax = array[np.ma.where(np.isfinite(array))].max()
if amin is None:
w, h, amin, amax = thisw, thish, thismin, thismax
else:
if thisw != w:
raise ValueError('array widths not all equal')
if thish != h:
raise ValueError('array heights not all equal')
amin = min(amin, thismin)
amax = max(amax, thismax)
stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32, w)
assert stride % 4 == 0 # stride is in bytes
imgdata = np.empty((n, h, stride // 4), dtype=np.uint32)
fixed = np.empty((n, h, w), dtype=np.int32)
antimask = np.empty((n, h, w), dtype=np.bool_)
surfaces = [None] * n
imgdata.fill(0xFF000000)
for i, array in enumerate(arrays):
surfaces[i] = cairo.ImageSurface.create_for_data(imgdata[i], cairo.FORMAT_ARGB32,
w, h, stride)
if np.ma.is_masked(array):
filled = array.filled(amin)
antimask[i] = ~array.mask
else:
filled = array
antimask[i].fill(True)
fixed[i] = (filled - amin) * (0x0FFFFFF0 / (amax - amin))
def getn():
return n
def getshapei(i):
return w, h
def getdesci(i):
return descs[i]
clipped = np.zeros((h, w), dtype=np.int32) # scratch arrays -- two needed
clipped2 = np.zeros((h, w), dtype=np.uint32) # to make numpy ufunc casting happy
def settuningi(i, tunerx, tunery):
np.bitwise_and(imgdata[i], 0xFF000000, imgdata[i])
fmin = int(0x0FFFFFF0 * tunerx)
fmax = int(0x0FFFFFF0 * tunery)
if fmin == fmax:
np.add(imgdata[i], 255 * (fixed[i] > fmin).astype(np.uint32), imgdata[i])
else:
np.clip(fixed[i], fmin, fmax, clipped)
np.subtract(clipped, fmin, clipped)
np.multiply(clipped, 255. / (fmax - fmin), clipped2, casting='unsafe')
np.add(imgdata[i], clipped2, imgdata[i])
np.multiply(imgdata[i], antimask[i], imgdata[i])
def getsurfacei(i, xoffset, yoffset, width, height):
return surfaces[i], xoffset, yoffset
# see comment in view()
nomasks = [not np.ma.is_masked(a) or a.mask is np.ma.nomask
for a in arrays]
if tostatuses is None:
if toworlds is None:
tostatuses = [None] * n
else:
from .astutil import fmthours, fmtdeglat
def make_status_func(toworld):
def status(y_and_x):
lat, lon = toworld(y_and_x)
return 'lat=%s lon=%s' % (fmtdeglat(lat),
fmthours(lon))
return status
tostatuses = [make_status_func(toworlds[i]) for i in range(n)]
def fmtstatusi(i, x, y):
s = ''
row = int(np.floor(y + 0.5))
col = int(np.floor(x + 0.5))
if row >= 0 and col >= 0 and row < h and col < w:
if nomasks[i] or not arrays[i].mask[row,col]:
s += '%g ' % arrays[i][row,col]
if yflip:
y = h - 1 - y
row = h - 1 - row
s += '[%d,%d] x=%.1f y=%.1f' % (row, col, x, y)
if tostatuses[i] is not None:
s += ' ' + tostatuses[i](np.array([y, x]))
return s
cycler = Cycler()
cycler.set_n_getter(getn)
cycler.set_shape_getter(getshapei)
cycler.set_desc_getter(getdesci)
cycler.set_tuning_setter(settuningi)
cycler.set_surface_getter(getsurfacei)
cycler.set_status_formatter(fmtstatusi)
cycler.set_overlay_drawer(drawoverlay)
cycler.win.show_all()
if run_main:
cycler.win.connect('destroy', Gtk.main_quit)
Gtk.main()
else:
cycler.win.connect('destroy', lambda e: cycler.win.destroy())
if save_after_viewing is not None:
for i in range(n):
filename = save_after_viewing % (i,)
settuningi(i, cycler.last_tunerx, cycler.last_tunery)
surface, xoffset, yoffset = getsurfacei(i, 0, 0, w, h)
surface.write_to_png(filename) | python | def cycle(arrays, descs=None, cadence=0.6, toworlds=None,
drawoverlay=None, yflip=False, tostatuses=None, run_main=True,
save_after_viewing=None):
"""Interactively display a series of 2D data arrays.
arrays
An iterable of 2D arrays (a 3D array works).
descs
An iterable of text descriptions, one for each array
cadence
The time delay before the next array is shown, in seconds.
tostatuses
An iterable of functions that convert cursor positions to a textual
status output corresponding to that position. FIXME details needed.
toworlds
An iterable of functions that convert cursor positions to a
latitude/longitude pair that is displayed in the status output.
The `tostatuses` keyword is a more generic version of this.
FIXME details needed.
drawoverlay
An optional function that draws an overlay on the display after
the underlying data image is presented. FIXME details needed.
yflip
If true, have the numerical *y* coordinates have 0 refer to the
bottom of the image. Note that the data array is still drawn such
that its first row appears at the top!
run_main
If true, run the Gtk mainloop explicitly so that the function does
not return until the window is closed. If false, no mainloop is run.
If the application happens to already be running a mainloop in the
background, the window will appear and the user will be able to
interact with it while this thread continues executing.
save_after_viewing
If set to a string containing an integer percent-formatting specifier,
the data will be written to a series of PNG files after the window is
closed.
"""
n = len(arrays)
amin = amax = h = w = None
if toworlds is not None and tostatuses is not None:
raise ValueError('only one of "toworlds" and "tostatuses" may be given')
if descs is None:
descs = [''] * n
for array in arrays:
thish, thisw = array.shape
thismin, thismax = array.min(), array.max()
if not np.isfinite(thismin):
thismin = array[np.ma.where(np.isfinite(array))].min()
if not np.isfinite(thismax):
thismax = array[np.ma.where(np.isfinite(array))].max()
if amin is None:
w, h, amin, amax = thisw, thish, thismin, thismax
else:
if thisw != w:
raise ValueError('array widths not all equal')
if thish != h:
raise ValueError('array heights not all equal')
amin = min(amin, thismin)
amax = max(amax, thismax)
stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32, w)
assert stride % 4 == 0 # stride is in bytes
imgdata = np.empty((n, h, stride // 4), dtype=np.uint32)
fixed = np.empty((n, h, w), dtype=np.int32)
antimask = np.empty((n, h, w), dtype=np.bool_)
surfaces = [None] * n
imgdata.fill(0xFF000000)
for i, array in enumerate(arrays):
surfaces[i] = cairo.ImageSurface.create_for_data(imgdata[i], cairo.FORMAT_ARGB32,
w, h, stride)
if np.ma.is_masked(array):
filled = array.filled(amin)
antimask[i] = ~array.mask
else:
filled = array
antimask[i].fill(True)
fixed[i] = (filled - amin) * (0x0FFFFFF0 / (amax - amin))
def getn():
return n
def getshapei(i):
return w, h
def getdesci(i):
return descs[i]
clipped = np.zeros((h, w), dtype=np.int32) # scratch arrays -- two needed
clipped2 = np.zeros((h, w), dtype=np.uint32) # to make numpy ufunc casting happy
def settuningi(i, tunerx, tunery):
np.bitwise_and(imgdata[i], 0xFF000000, imgdata[i])
fmin = int(0x0FFFFFF0 * tunerx)
fmax = int(0x0FFFFFF0 * tunery)
if fmin == fmax:
np.add(imgdata[i], 255 * (fixed[i] > fmin).astype(np.uint32), imgdata[i])
else:
np.clip(fixed[i], fmin, fmax, clipped)
np.subtract(clipped, fmin, clipped)
np.multiply(clipped, 255. / (fmax - fmin), clipped2, casting='unsafe')
np.add(imgdata[i], clipped2, imgdata[i])
np.multiply(imgdata[i], antimask[i], imgdata[i])
def getsurfacei(i, xoffset, yoffset, width, height):
return surfaces[i], xoffset, yoffset
# see comment in view()
nomasks = [not np.ma.is_masked(a) or a.mask is np.ma.nomask
for a in arrays]
if tostatuses is None:
if toworlds is None:
tostatuses = [None] * n
else:
from .astutil import fmthours, fmtdeglat
def make_status_func(toworld):
def status(y_and_x):
lat, lon = toworld(y_and_x)
return 'lat=%s lon=%s' % (fmtdeglat(lat),
fmthours(lon))
return status
tostatuses = [make_status_func(toworlds[i]) for i in range(n)]
def fmtstatusi(i, x, y):
s = ''
row = int(np.floor(y + 0.5))
col = int(np.floor(x + 0.5))
if row >= 0 and col >= 0 and row < h and col < w:
if nomasks[i] or not arrays[i].mask[row,col]:
s += '%g ' % arrays[i][row,col]
if yflip:
y = h - 1 - y
row = h - 1 - row
s += '[%d,%d] x=%.1f y=%.1f' % (row, col, x, y)
if tostatuses[i] is not None:
s += ' ' + tostatuses[i](np.array([y, x]))
return s
cycler = Cycler()
cycler.set_n_getter(getn)
cycler.set_shape_getter(getshapei)
cycler.set_desc_getter(getdesci)
cycler.set_tuning_setter(settuningi)
cycler.set_surface_getter(getsurfacei)
cycler.set_status_formatter(fmtstatusi)
cycler.set_overlay_drawer(drawoverlay)
cycler.win.show_all()
if run_main:
cycler.win.connect('destroy', Gtk.main_quit)
Gtk.main()
else:
cycler.win.connect('destroy', lambda e: cycler.win.destroy())
if save_after_viewing is not None:
for i in range(n):
filename = save_after_viewing % (i,)
settuningi(i, cycler.last_tunerx, cycler.last_tunery)
surface, xoffset, yoffset = getsurfacei(i, 0, 0, w, h)
surface.write_to_png(filename) | ['def', 'cycle', '(', 'arrays', ',', 'descs', '=', 'None', ',', 'cadence', '=', '0.6', ',', 'toworlds', '=', 'None', ',', 'drawoverlay', '=', 'None', ',', 'yflip', '=', 'False', ',', 'tostatuses', '=', 'None', ',', 'run_main', '=', 'True', ',', 'save_after_viewing', '=', 'None', ')', ':', 'n', '=', 'len', '(', 'arrays', ')', 'amin', '=', 'amax', '=', 'h', '=', 'w', '=', 'None', 'if', 'toworlds', 'is', 'not', 'None', 'and', 'tostatuses', 'is', 'not', 'None', ':', 'raise', 'ValueError', '(', '\'only one of "toworlds" and "tostatuses" may be given\'', ')', 'if', 'descs', 'is', 'None', ':', 'descs', '=', '[', "''", ']', '*', 'n', 'for', 'array', 'in', 'arrays', ':', 'thish', ',', 'thisw', '=', 'array', '.', 'shape', 'thismin', ',', 'thismax', '=', 'array', '.', 'min', '(', ')', ',', 'array', '.', 'max', '(', ')', 'if', 'not', 'np', '.', 'isfinite', '(', 'thismin', ')', ':', 'thismin', '=', 'array', '[', 'np', '.', 'ma', '.', 'where', '(', 'np', '.', 'isfinite', '(', 'array', ')', ')', ']', '.', 'min', '(', ')', 'if', 'not', 'np', '.', 'isfinite', '(', 'thismax', ')', ':', 'thismax', '=', 'array', '[', 'np', '.', 'ma', '.', 'where', '(', 'np', '.', 'isfinite', '(', 'array', ')', ')', ']', '.', 'max', '(', ')', 'if', 'amin', 'is', 'None', ':', 'w', ',', 'h', ',', 'amin', ',', 'amax', '=', 'thisw', ',', 'thish', ',', 'thismin', ',', 'thismax', 'else', ':', 'if', 'thisw', '!=', 'w', ':', 'raise', 'ValueError', '(', "'array widths not all equal'", ')', 'if', 'thish', '!=', 'h', ':', 'raise', 'ValueError', '(', "'array heights not all equal'", ')', 'amin', '=', 'min', '(', 'amin', ',', 'thismin', ')', 'amax', '=', 'max', '(', 'amax', ',', 'thismax', ')', 'stride', '=', 'cairo', '.', 'ImageSurface', '.', 'format_stride_for_width', '(', 'cairo', '.', 'FORMAT_ARGB32', ',', 'w', ')', 'assert', 'stride', '%', '4', '==', '0', '# stride is in bytes', 'imgdata', '=', 'np', '.', 'empty', '(', '(', 'n', ',', 'h', ',', 'stride', '//', '4', ')', ',', 'dtype', '=', 'np', '.', 'uint32', ')', 'fixed', '=', 'np', '.', 'empty', '(', '(', 'n', ',', 'h', ',', 'w', ')', ',', 'dtype', '=', 'np', '.', 'int32', ')', 'antimask', '=', 'np', '.', 'empty', '(', '(', 'n', ',', 'h', ',', 'w', ')', ',', 'dtype', '=', 'np', '.', 'bool_', ')', 'surfaces', '=', '[', 'None', ']', '*', 'n', 'imgdata', '.', 'fill', '(', '0xFF000000', ')', 'for', 'i', ',', 'array', 'in', 'enumerate', '(', 'arrays', ')', ':', 'surfaces', '[', 'i', ']', '=', 'cairo', '.', 'ImageSurface', '.', 'create_for_data', '(', 'imgdata', '[', 'i', ']', ',', 'cairo', '.', 'FORMAT_ARGB32', ',', 'w', ',', 'h', ',', 'stride', ')', 'if', 'np', '.', 'ma', '.', 'is_masked', '(', 'array', ')', ':', 'filled', '=', 'array', '.', 'filled', '(', 'amin', ')', 'antimask', '[', 'i', ']', '=', '~', 'array', '.', 'mask', 'else', ':', 'filled', '=', 'array', 'antimask', '[', 'i', ']', '.', 'fill', '(', 'True', ')', 'fixed', '[', 'i', ']', '=', '(', 'filled', '-', 'amin', ')', '*', '(', '0x0FFFFFF0', '/', '(', 'amax', '-', 'amin', ')', ')', 'def', 'getn', '(', ')', ':', 'return', 'n', 'def', 'getshapei', '(', 'i', ')', ':', 'return', 'w', ',', 'h', 'def', 'getdesci', '(', 'i', ')', ':', 'return', 'descs', '[', 'i', ']', 'clipped', '=', 'np', '.', 'zeros', '(', '(', 'h', ',', 'w', ')', ',', 'dtype', '=', 'np', '.', 'int32', ')', '# scratch arrays -- two needed', 'clipped2', '=', 'np', '.', 'zeros', '(', '(', 'h', ',', 'w', ')', ',', 'dtype', '=', 'np', '.', 'uint32', ')', '# to make numpy ufunc casting happy', 'def', 'settuningi', '(', 'i', ',', 
'tunerx', ',', 'tunery', ')', ':', 'np', '.', 'bitwise_and', '(', 'imgdata', '[', 'i', ']', ',', '0xFF000000', ',', 'imgdata', '[', 'i', ']', ')', 'fmin', '=', 'int', '(', '0x0FFFFFF0', '*', 'tunerx', ')', 'fmax', '=', 'int', '(', '0x0FFFFFF0', '*', 'tunery', ')', 'if', 'fmin', '==', 'fmax', ':', 'np', '.', 'add', '(', 'imgdata', '[', 'i', ']', ',', '255', '*', '(', 'fixed', '[', 'i', ']', '>', 'fmin', ')', '.', 'astype', '(', 'np', '.', 'uint32', ')', ',', 'imgdata', '[', 'i', ']', ')', 'else', ':', 'np', '.', 'clip', '(', 'fixed', '[', 'i', ']', ',', 'fmin', ',', 'fmax', ',', 'clipped', ')', 'np', '.', 'subtract', '(', 'clipped', ',', 'fmin', ',', 'clipped', ')', 'np', '.', 'multiply', '(', 'clipped', ',', '255.', '/', '(', 'fmax', '-', 'fmin', ')', ',', 'clipped2', ',', 'casting', '=', "'unsafe'", ')', 'np', '.', 'add', '(', 'imgdata', '[', 'i', ']', ',', 'clipped2', ',', 'imgdata', '[', 'i', ']', ')', 'np', '.', 'multiply', '(', 'imgdata', '[', 'i', ']', ',', 'antimask', '[', 'i', ']', ',', 'imgdata', '[', 'i', ']', ')', 'def', 'getsurfacei', '(', 'i', ',', 'xoffset', ',', 'yoffset', ',', 'width', ',', 'height', ')', ':', 'return', 'surfaces', '[', 'i', ']', ',', 'xoffset', ',', 'yoffset', '# see comment in view()', 'nomasks', '=', '[', 'not', 'np', '.', 'ma', '.', 'is_masked', '(', 'a', ')', 'or', 'a', '.', 'mask', 'is', 'np', '.', 'ma', '.', 'nomask', 'for', 'a', 'in', 'arrays', ']', 'if', 'tostatuses', 'is', 'None', ':', 'if', 'toworlds', 'is', 'None', ':', 'tostatuses', '=', '[', 'None', ']', '*', 'n', 'else', ':', 'from', '.', 'astutil', 'import', 'fmthours', ',', 'fmtdeglat', 'def', 'make_status_func', '(', 'toworld', ')', ':', 'def', 'status', '(', 'y_and_x', ')', ':', 'lat', ',', 'lon', '=', 'toworld', '(', 'y_and_x', ')', 'return', "'lat=%s lon=%s'", '%', '(', 'fmtdeglat', '(', 'lat', ')', ',', 'fmthours', '(', 'lon', ')', ')', 'tostatuses', '=', '[', 'make_status_func', '(', 'toworlds', '[', 'i', ']', ')', 'for', 'i', 'in', 'range', '(', 'n', ')', ']', 'def', 'fmtstatusi', '(', 'i', ',', 'x', ',', 'y', ')', ':', 's', '=', "''", 'row', '=', 'int', '(', 'np', '.', 'floor', '(', 'y', '+', '0.5', ')', ')', 'col', '=', 'int', '(', 'np', '.', 'floor', '(', 'x', '+', '0.5', ')', ')', 'if', 'row', '>=', '0', 'and', 'col', '>=', '0', 'and', 'row', '<', 'h', 'and', 'col', '<', 'w', ':', 'if', 'nomasks', '[', 'i', ']', 'or', 'not', 'arrays', '[', 'i', ']', '.', 'mask', '[', 'row', ',', 'col', ']', ':', 's', '+=', "'%g '", '%', 'arrays', '[', 'i', ']', '[', 'row', ',', 'col', ']', 'if', 'yflip', ':', 'y', '=', 'h', '-', '1', '-', 'y', 'row', '=', 'h', '-', '1', '-', 'row', 's', '+=', "'[%d,%d] x=%.1f y=%.1f'", '%', '(', 'row', ',', 'col', ',', 'x', ',', 'y', ')', 'if', 'tostatuses', '[', 'i', ']', 'is', 'not', 'None', ':', 's', '+=', "' '", '+', 'tostatuses', '[', 'i', ']', '(', 'np', '.', 'array', '(', '[', 'y', ',', 'x', ']', ')', ')', 'return', 's', 'cycler', '=', 'Cycler', '(', ')', 'cycler', '.', 'set_n_getter', '(', 'getn', ')', 'cycler', '.', 'set_shape_getter', '(', 'getshapei', ')', 'cycler', '.', 'set_desc_getter', '(', 'getdesci', ')', 'cycler', '.', 'set_tuning_setter', '(', 'settuningi', ')', 'cycler', '.', 'set_surface_getter', '(', 'getsurfacei', ')', 'cycler', '.', 'set_status_formatter', '(', 'fmtstatusi', ')', 'cycler', '.', 'set_overlay_drawer', '(', 'drawoverlay', ')', 'cycler', '.', 'win', '.', 'show_all', '(', ')', 'if', 'run_main', ':', 'cycler', '.', 'win', '.', 'connect', '(', "'destroy'", ',', 'Gtk', '.', 'main_quit', ')', 'Gtk', '.', 'main', '(', ')', 'else', 
':', 'cycler', '.', 'win', '.', 'connect', '(', "'destroy'", ',', 'lambda', 'e', ':', 'cycler', '.', 'win', '.', 'destroy', '(', ')', ')', 'if', 'save_after_viewing', 'is', 'not', 'None', ':', 'for', 'i', 'in', 'range', '(', 'n', ')', ':', 'filename', '=', 'save_after_viewing', '%', '(', 'i', ',', ')', 'settuningi', '(', 'i', ',', 'cycler', '.', 'last_tunerx', ',', 'cycler', '.', 'last_tunery', ')', 'surface', ',', 'xoffset', ',', 'yoffset', '=', 'getsurfacei', '(', 'i', ',', '0', ',', '0', ',', 'w', ',', 'h', ')', 'surface', '.', 'write_to_png', '(', 'filename', ')'] | Interactively display a series of 2D data arrays.
arrays
An iterable of 2D arrays (a 3D array works).
descs
An iterable of text descriptions, one for each array
cadence
The time delay before the next array is shown, in seconds.
tostatuses
An iterable of functions that convert cursor positions to a textual
status output corresponding to that position. FIXME details needed.
toworlds
An iterable of functions that convert cursor positions to a
latitude/longitude pair that is displayed in the status output.
The `tostatuses` keyword is a more generic version of this.
FIXME details needed.
drawoverlay
An optional function that draws an overlay on the display after
the underlying data image is presented. FIXME details needed.
yflip
If true, have the numerical *y* coordinates have 0 refer to the
bottom of the image. Note that the data array is still drawn such
that its first row appears at the top!
run_main
If true, run the Gtk mainloop explicitly so that the function does
not return until the window is closed. If false, no mainloop is run.
If the application happens to already be running a mainloop in the
background, the window will appear and the user will be able to
interact with it while this thread continues executing.
save_after_viewing
If set to a string containing an integer percent-formatting specifier,
the data will be written to a series of PNG files after the window is
closed. | ['Interactively', 'display', 'a', 'series', 'of', '2D', 'data', 'arrays', '.'] | train | https://github.com/pkgw/pwkit/blob/d40957a1c3d2ea34e7ceac2267ee9635135f2793/pwkit/ndshow_gtk3.py#L800-L974 |
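A hypothetical call of cycle() under the assumptions in its docstring (equally shaped 2D arrays and a working Gtk 3 display); the array contents below are invented for illustration:

import numpy as np

# Two equally shaped frames cycled at half-second cadence.
a = np.random.rand(64, 64)
b = a ** 2
cycle([a, b], descs=['frame', 'frame squared'], cadence=0.5, yflip=True)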
183 | romanz/trezor-agent | libagent/device/keepkey.py | KeepKey.pubkey | def pubkey(self, identity, ecdh=False):
"""Return public key."""
_verify_support(identity, ecdh)
return trezor.Trezor.pubkey(self, identity=identity, ecdh=ecdh) | python | def pubkey(self, identity, ecdh=False):
"""Return public key."""
_verify_support(identity, ecdh)
return trezor.Trezor.pubkey(self, identity=identity, ecdh=ecdh) | ['def', 'pubkey', '(', 'self', ',', 'identity', ',', 'ecdh', '=', 'False', ')', ':', '_verify_support', '(', 'identity', ',', 'ecdh', ')', 'return', 'trezor', '.', 'Trezor', '.', 'pubkey', '(', 'self', ',', 'identity', '=', 'identity', ',', 'ecdh', '=', 'ecdh', ')'] | Return public key. | ['Return', 'public', 'key', '.'] | train | https://github.com/romanz/trezor-agent/blob/513b1259c4d7aca5f88cd958edc11828d0712f1b/libagent/device/keepkey.py#L38-L41 |
184 | matthiask/django-authlib | authlib/email.py | send_registration_mail | def send_registration_mail(email, *, request, **kwargs):
"""send_registration_mail(email, *, request, **kwargs)
Sends the registration mail
* ``email``: The email address where the registration link should be
sent to.
* ``request``: A HTTP request instance, used to construct the complete
URL (including protocol and domain) for the registration link.
* Additional keyword arguments for ``get_confirmation_url`` respectively
``get_confirmation_code``.
The mail is rendered using the following two templates:
* ``registration/email_registration_email.txt``: The first line of this
template will be the subject, the third to the last line the body of the
email.
* ``registration/email_registration_email.html``: The body of the HTML
version of the mail. This template is **NOT** available by default and
is not required either.
"""
render_to_mail(
"registration/email_registration_email",
{"url": get_confirmation_url(email, request, **kwargs)},
to=[email],
).send() | python | def send_registration_mail(email, *, request, **kwargs):
"""send_registration_mail(email, *, request, **kwargs)
Sends the registration mail
* ``email``: The email address where the registration link should be
sent to.
* ``request``: A HTTP request instance, used to construct the complete
URL (including protocol and domain) for the registration link.
* Additional keyword arguments for ``get_confirmation_url`` respectively
``get_confirmation_code``.
The mail is rendered using the following two templates:
* ``registration/email_registration_email.txt``: The first line of this
template will be the subject, the third to the last line the body of the
email.
* ``registration/email_registration_email.html``: The body of the HTML
version of the mail. This template is **NOT** available by default and
is not required either.
"""
render_to_mail(
"registration/email_registration_email",
{"url": get_confirmation_url(email, request, **kwargs)},
to=[email],
).send() | ['def', 'send_registration_mail', '(', 'email', ',', '*', ',', 'request', ',', '*', '*', 'kwargs', ')', ':', 'render_to_mail', '(', '"registration/email_registration_email"', ',', '{', '"url"', ':', 'get_confirmation_url', '(', 'email', ',', 'request', ',', '*', '*', 'kwargs', ')', '}', ',', 'to', '=', '[', 'email', ']', ',', ')', '.', 'send', '(', ')'] | send_registration_mail(email, *, request, **kwargs)
Sends the registration mail
* ``email``: The email address where the registration link should be
sent to.
* ``request``: A HTTP request instance, used to construct the complete
URL (including protocol and domain) for the registration link.
* Additional keyword arguments for ``get_confirmation_url`` respectively
``get_confirmation_code``.
The mail is rendered using the following two templates:
* ``registration/email_registration_email.txt``: The first line of this
template will be the subject, the third to the last line the body of the
email.
* ``registration/email_registration_email.html``: The body of the HTML
version of the mail. This template is **NOT** available by default and
is not required either. | ['send_registration_mail', '(', 'email', '*', 'request', '**', 'kwargs', ')', 'Sends', 'the', 'registration', 'mail'] | train | https://github.com/matthiask/django-authlib/blob/a142da7e27fe9d30f34a84b12f24f686f9d2c8e1/authlib/email.py#L90-L115 |
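A hypothetical view wiring for send_registration_mail(); the template path, POST field, and URL name below are assumptions for illustration, not part of django-authlib itself:

from django.shortcuts import redirect, render

def register(request):
    if request.method == 'POST':
        # The request object lets authlib build an absolute confirmation URL.
        send_registration_mail(request.POST['email'], request=request)
        return redirect('registration-sent')
    return render(request, 'registration/register.html')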
185 | DataBiosphere/toil | src/toil/batchSystems/abstractBatchSystem.py | BatchSystemSupport.setEnv | def setEnv(self, name, value=None):
"""
Set an environment variable for the worker process before it is launched. The worker
process will typically inherit the environment of the machine it is running on but this
method makes it possible to override specific variables in that inherited environment
before the worker is launched. Note that this mechanism is different to the one used by
the worker internally to set up the environment of a job. A call to this method affects
all jobs issued after this method returns. Note to implementors: This means that you
would typically need to copy the variables before enqueuing a job.
If no value is provided it will be looked up from the current environment.
NB: Only the Mesos and single-machine batch systems support passing environment
variables. On other batch systems, this method has no effect. See
https://github.com/BD2KGenomics/toil/issues/547.
:param str name: the environment variable to be set on the worker.
:param str value: if given, the environment variable given by name will be set to this value.
if None, the variable's current value will be used as the value on the worker
:raise RuntimeError: if value is None and the name cannot be found in the environment
"""
if value is None:
try:
value = os.environ[name]
except KeyError:
raise RuntimeError("%s does not exist in current environment", name)
self.environment[name] = value | python | def setEnv(self, name, value=None):
"""
Set an environment variable for the worker process before it is launched. The worker
process will typically inherit the environment of the machine it is running on but this
method makes it possible to override specific variables in that inherited environment
before the worker is launched. Note that this mechanism is different to the one used by
the worker internally to set up the environment of a job. A call to this method affects
all jobs issued after this method returns. Note to implementors: This means that you
would typically need to copy the variables before enqueuing a job.
If no value is provided it will be looked up from the current environment.
NB: Only the Mesos and single-machine batch systems support passing environment
variables. On other batch systems, this method has no effect. See
https://github.com/BD2KGenomics/toil/issues/547.
:param str name: the environment variable to be set on the worker.
:param str value: if given, the environment variable given by name will be set to this value.
if None, the variable's current value will be used as the value on the worker
:raise RuntimeError: if value is None and the name cannot be found in the environment
"""
if value is None:
try:
value = os.environ[name]
except KeyError:
raise RuntimeError("%s does not exist in current environment", name)
self.environment[name] = value | ['def', 'setEnv', '(', 'self', ',', 'name', ',', 'value', '=', 'None', ')', ':', 'if', 'value', 'is', 'None', ':', 'try', ':', 'value', '=', 'os', '.', 'environ', '[', 'name', ']', 'except', 'KeyError', ':', 'raise', 'RuntimeError', '(', '"%s does not exist in current environment"', ',', 'name', ')', 'self', '.', 'environment', '[', 'name', ']', '=', 'value'] | Set an environment variable for the worker process before it is launched. The worker
process will typically inherit the environment of the machine it is running on but this
method makes it possible to override specific variables in that inherited environment
before the worker is launched. Note that this mechanism is different to the one used by
the worker internally to set up the environment of a job. A call to this method affects
all jobs issued after this method returns. Note to implementors: This means that you
would typically need to copy the variables before enqueuing a job.
If no value is provided it will be looked up from the current environment.
NB: Only the Mesos and single-machine batch systems support passing environment
variables. On other batch systems, this method has no effect. See
https://github.com/BD2KGenomics/toil/issues/547.
:param str name: the environment variable to be set on the worker.
:param str value: if given, the environment variable given by name will be set to this value.
if None, the variable's current value will be used as the value on the worker
:raise RuntimeError: if value is None and the name cannot be found in the environment | ['Set', 'an', 'environment', 'variable', 'for', 'the', 'worker', 'process', 'before', 'it', 'is', 'launched', '.', 'The', 'worker', 'process', 'will', 'typically', 'inherit', 'the', 'environment', 'of', 'the', 'machine', 'it', 'is', 'running', 'on', 'but', 'this', 'method', 'makes', 'it', 'possible', 'to', 'override', 'specific', 'variables', 'in', 'that', 'inherited', 'environment', 'before', 'the', 'worker', 'is', 'launched', '.', 'Note', 'that', 'this', 'mechanism', 'is', 'different', 'to', 'the', 'one', 'used', 'by', 'the', 'worker', 'internally', 'to', 'set', 'up', 'the', 'environment', 'of', 'a', 'job', '.', 'A', 'call', 'to', 'this', 'method', 'affects', 'all', 'jobs', 'issued', 'after', 'this', 'method', 'returns', '.', 'Note', 'to', 'implementors', ':', 'This', 'means', 'that', 'you', 'would', 'typically', 'need', 'to', 'copy', 'the', 'variables', 'before', 'enqueuing', 'a', 'job', '.'] | train | https://github.com/DataBiosphere/toil/blob/a8252277ff814e7bee0971139c2344f88e44b644/src/toil/batchSystems/abstractBatchSystem.py#L252-L280 |
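Sketch of the two documented call forms, assuming ``batch`` is an instance of a BatchSystemSupport subclass; values set here apply to jobs issued afterwards:

import os

os.environ['SCRATCH'] = '/tmp/scratch'
batch.setEnv('SCRATCH')               # value looked up from os.environ now
batch.setEnv('OMP_NUM_THREADS', '4')  # explicit value for all later jobs
# Per the note to implementors, a concrete batch system should snapshot the
# mapping when enqueuing, e.g. job_env = dict(batch.environment).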
186 | JarryShaw/PyPCAPKit | src/protocols/internet/hip.py | HIP._read_para_cert | def _read_para_cert(self, code, cbit, clen, *, desc, length, version):
"""Read HIP CERT parameter.
Structure of HIP CERT parameter [RFC 7401]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Type | Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| CERT group | CERT count | CERT ID | CERT type |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Certificate /
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/ | Padding (variable length) |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Octets Bits Name Description
0 0 cert.type Parameter Type
1 15 cert.critical Critical Bit
2 16 cert.length Length of Contents
4 32 cert.group CERT Group
5 40 cert.count CERT Count
6 48 cert.id CERT ID
7 56 cert.cert_type CERT Type
8 64 cert.certificate Certificate
? ? - Padding
"""
_ctgp = self._read_unpack(1)
_ctct = self._read_unpack(1)
_ctid = self._read_unpack(1)
_cttp = self._read_unpack(1)
_ctdt = self._read_fileng(clen-4)
cert = dict(
type=desc,
critical=cbit,
length=clen,
group=_GROUP_ID.get(_ctgp, 'Unassigned'),
count=_ctct,
id=_ctid,
cert_type=_CERT_TYPE.get(_cttp, 'Unassigned'),
certificate=_ctdt,
)
_plen = length - clen
if _plen:
self._read_fileng(_plen)
return cert | python | def _read_para_cert(self, code, cbit, clen, *, desc, length, version):
"""Read HIP CERT parameter.
Structure of HIP CERT parameter [RFC 7401]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Type | Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| CERT group | CERT count | CERT ID | CERT type |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Certificate /
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/ | Padding (variable length) |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Octets Bits Name Description
0 0 cert.type Parameter Type
1 15 cert.critical Critical Bit
2 16 cert.length Length of Contents
4 32 cert.group CERT Group
5 40 cert.count CERT Count
6 48 cert.id CERT ID
7 56 cert.cert_type CERT Type
8 64 cert.certificate Certificate
? ? - Padding
"""
_ctgp = self._read_unpack(1)
_ctct = self._read_unpack(1)
_ctid = self._read_unpack(1)
_cttp = self._read_unpack(1)
_ctdt = self._read_fileng(clen-4)
cert = dict(
type=desc,
critical=cbit,
length=clen,
group=_GROUP_ID.get(_ctgp, 'Unassigned'),
count=_ctct,
id=_ctid,
cert_type=_CERT_TYPE.get(_cttp, 'Unassigned'),
certificate=_ctdt,
)
_plen = length - clen
if _plen:
self._read_fileng(_plen)
return cert | ['def', '_read_para_cert', '(', 'self', ',', 'code', ',', 'cbit', ',', 'clen', ',', '*', ',', 'desc', ',', 'length', ',', 'version', ')', ':', '_ctgp', '=', 'self', '.', '_read_unpack', '(', '1', ')', '_ctct', '=', 'self', '.', '_read_unpack', '(', '1', ')', '_ctid', '=', 'self', '.', '_read_unpack', '(', '1', ')', '_cttp', '=', 'self', '.', '_read_unpack', '(', '1', ')', '_ctdt', '=', 'self', '.', '_read_fileng', '(', 'clen', '-', '4', ')', 'cert', '=', 'dict', '(', 'type', '=', 'desc', ',', 'critical', '=', 'cbit', ',', 'length', '=', 'clen', ',', 'group', '=', '_GROUP_ID', '.', 'get', '(', '_ctgp', ',', "'Unassigned'", ')', ',', 'count', '=', '_ctct', ',', 'id', '=', '_ctid', ',', 'cert_type', '=', '_CERT_TYPE', '.', 'get', '(', '_cttp', ',', "'Unassigned'", ')', ',', 'certificate', '=', '_ctdt', ',', ')', '_plen', '=', 'length', '-', 'clen', 'if', '_plen', ':', 'self', '.', '_read_fileng', '(', '_plen', ')', 'return', 'cert'] | Read HIP CERT parameter.
Structure of HIP CERT parameter [RFC 7401]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Type | Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| CERT group | CERT count | CERT ID | CERT type |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Certificate /
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/ | Padding (variable length) |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Octets Bits Name Description
0 0 cert.type Parameter Type
1 15 cert.critical Critical Bit
2 16 cert.length Length of Contents
4 32 cert.group CERT Group
5 40 cert.count CERT Count
6 48 cert.id CERT ID
7 56 cert.cert_type CERT Type
8 64 cert.certificate Certificate
? ? - Padding | ['Read', 'HIP', 'CERT', 'parameter', '.'] | train | https://github.com/JarryShaw/PyPCAPKit/blob/c7f0da9aebc2cf210bf8f8b912f7d3cbb98ca10e/src/protocols/internet/hip.py#L1100-L1149 |
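The fixed part of the CERT body is four single-octet fields ahead of the variable-length certificate; a self-contained parse of just that layout, independent of the stream-reader helpers used above:

import struct

def parse_cert_body(payload, content_length):
    # '!BBBB' = four network-order unsigned octets: group, count, ID, type.
    group, count, cert_id, cert_type = struct.unpack('!BBBB', payload[:4])
    certificate = payload[4:content_length]
    return dict(group=group, count=count, id=cert_id,
                cert_type=cert_type, certificate=certificate)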
187 | koreyou/word_embedding_loader | word_embedding_loader/loader/vocab.py | load_vocab | def load_vocab(fin):
"""
Load vocabulary from vocab file created by word2vec with
``-save-vocab <file>`` option.
Args:
fin (File): File-like object to read from.
encoding (bytes): Encoding of the input file as defined in ``codecs``
module of Python standard library.
errors (bytes): Set the error handling scheme. The default error
handler is 'strict' meaning that encoding errors raise ValueError.
Refer to ``codecs`` module for more information.
Returns:
OrderedDict: Mapping from a word (``bytes``) to the number of
appearance in the original text (``int``). Order is preserved from
the original vocab file.
"""
vocab = OrderedDict()
for line in fin:
v, c = line.strip().split()
vocab[v] = int(c)
return vocab | python | def load_vocab(fin):
"""
Load vocabulary from vocab file created by word2vec with
``-save-vocab <file>`` option.
Args:
fin (File): File-like object to read from.
encoding (bytes): Encoding of the input file as defined in ``codecs``
module of Python standard library.
errors (bytes): Set the error handling scheme. The default error
handler is 'strict' meaning that encoding errors raise ValueError.
Refer to ``codecs`` module for more information.
Returns:
OrderedDict: Mapping from a word (``bytes``) to the number of
appearance in the original text (``int``). Order is preserved from
the original vocab file.
"""
vocab = OrderedDict()
for line in fin:
v, c = line.strip().split()
vocab[v] = int(c)
return vocab | ['def', 'load_vocab', '(', 'fin', ')', ':', 'vocab', '=', 'OrderedDict', '(', ')', 'for', 'line', 'in', 'fin', ':', 'v', ',', 'c', '=', 'line', '.', 'strip', '(', ')', '.', 'split', '(', ')', 'vocab', '[', 'v', ']', '=', 'int', '(', 'c', ')', 'return', 'vocab'] | Load vocabulary from vocab file created by word2vec with
``-save-vocab <file>`` option.
Args:
fin (File): File-like object to read from.
encoding (bytes): Encoding of the input file as defined in ``codecs``
module of Python standard library.
errors (bytes): Set the error handling scheme. The default error
handler is 'strict' meaning that encoding errors raise ValueError.
Refer to ``codecs`` module for more information.
Returns:
OrderedDict: Mapping from a word (``bytes``) to the number of
appearance in the original text (``int``). Order is preserved from
the original vocab file. | ['Load', 'vocabulary', 'from', 'vocab', 'file', 'created', 'by', 'word2vec', 'with', '-', 'save', '-', 'vocab', '<file', '>', 'option', '.'] | train | https://github.com/koreyou/word_embedding_loader/blob/1bc123f1a8bea12646576dcd768dae3ecea39c06/word_embedding_loader/loader/vocab.py#L8-L30 |
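Hypothetical round trip: word2vec's ``-save-vocab`` emits one ``word count`` pair per line, and load_vocab() returns them in file order. The file is opened in binary mode below because the function applies no decoding, so keys come back as ``bytes``:

with open('vocab.txt', 'rb') as fin:
    vocab = load_vocab(fin)
for word, count in list(vocab.items())[:3]:
    print(word, count)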
188 | aburrell/apexpy | src/apexpy/helpers.py | subsol | def subsol(datetime):
"""Finds subsolar geocentric latitude and longitude.
Parameters
==========
datetime : :class:`datetime.datetime`
Returns
=======
sbsllat : float
Latitude of subsolar point
sbsllon : float
Longitude of subsolar point
Notes
=====
Based on formulas in Astronomical Almanac for the year 1996, p. C24.
(U.S. Government Printing Office, 1994). Usable for years 1601-2100,
inclusive. According to the Almanac, results are good to at least 0.01
degree latitude and 0.025 degrees longitude between years 1950 and 2050.
Accuracy for other years has not been tested. Every day is assumed to have
exactly 86400 seconds; thus leap seconds that sometimes occur on December
31 are ignored (their effect is below the accuracy threshold of the
algorithm).
After Fortran code by A. D. Richmond, NCAR. Translated from IDL
by K. Laundal.
"""
# convert to year, day of year and seconds since midnight
year = datetime.year
doy = datetime.timetuple().tm_yday
ut = datetime.hour * 3600 + datetime.minute * 60 + datetime.second
if not 1601 <= year <= 2100:
raise ValueError('Year must be in [1601, 2100]')
yr = year - 2000
nleap = int(np.floor((year - 1601.0) / 4.0))
nleap -= 99
if year <= 1900:
ncent = int(np.floor((year - 1601.0) / 100.0))
ncent = 3 - ncent
nleap = nleap + ncent
l0 = -79.549 + (-0.238699 * (yr - 4.0 * nleap) + 3.08514e-2 * nleap)
g0 = -2.472 + (-0.2558905 * (yr - 4.0 * nleap) - 3.79617e-2 * nleap)
# Days (including fraction) since 12 UT on January 1 of IYR:
df = (ut / 86400.0 - 1.5) + doy
# Mean longitude of Sun:
lmean = l0 + 0.9856474 * df
# Mean anomaly in radians:
grad = np.radians(g0 + 0.9856003 * df)
# Ecliptic longitude:
lmrad = np.radians(lmean + 1.915 * np.sin(grad)
+ 0.020 * np.sin(2.0 * grad))
sinlm = np.sin(lmrad)
# Obliquity of ecliptic in radians:
epsrad = np.radians(23.439 - 4e-7 * (df + 365 * yr + nleap))
# Right ascension:
alpha = np.degrees(np.arctan2(np.cos(epsrad) * sinlm, np.cos(lmrad)))
# Declination, which is also the subsolar latitude:
sslat = np.degrees(np.arcsin(np.sin(epsrad) * sinlm))
# Equation of time (degrees):
etdeg = lmean - alpha
nrot = round(etdeg / 360.0)
etdeg = etdeg - 360.0 * nrot
# Subsolar longitude:
sslon = 180.0 - (ut / 240.0 + etdeg) # Earth rotates one degree every 240 s.
nrot = round(sslon / 360.0)
sslon = sslon - 360.0 * nrot
return sslat, sslon | python | def subsol(datetime):
"""Finds subsolar geocentric latitude and longitude.
Parameters
==========
datetime : :class:`datetime.datetime`
Returns
=======
sbsllat : float
Latitude of subsolar point
sbsllon : float
Longitude of subsolar point
Notes
=====
Based on formulas in Astronomical Almanac for the year 1996, p. C24.
(U.S. Government Printing Office, 1994). Usable for years 1601-2100,
inclusive. According to the Almanac, results are good to at least 0.01
degree latitude and 0.025 degrees longitude between years 1950 and 2050.
Accuracy for other years has not been tested. Every day is assumed to have
exactly 86400 seconds; thus leap seconds that sometimes occur on December
31 are ignored (their effect is below the accuracy threshold of the
algorithm).
After Fortran code by A. D. Richmond, NCAR. Translated from IDL
by K. Laundal.
"""
# convert to year, day of year and seconds since midnight
year = datetime.year
doy = datetime.timetuple().tm_yday
ut = datetime.hour * 3600 + datetime.minute * 60 + datetime.second
if not 1601 <= year <= 2100:
raise ValueError('Year must be in [1601, 2100]')
yr = year - 2000
nleap = int(np.floor((year - 1601.0) / 4.0))
nleap -= 99
if year <= 1900:
ncent = int(np.floor((year - 1601.0) / 100.0))
ncent = 3 - ncent
nleap = nleap + ncent
l0 = -79.549 + (-0.238699 * (yr - 4.0 * nleap) + 3.08514e-2 * nleap)
g0 = -2.472 + (-0.2558905 * (yr - 4.0 * nleap) - 3.79617e-2 * nleap)
# Days (including fraction) since 12 UT on January 1 of IYR:
df = (ut / 86400.0 - 1.5) + doy
# Mean longitude of Sun:
lmean = l0 + 0.9856474 * df
# Mean anomaly in radians:
grad = np.radians(g0 + 0.9856003 * df)
# Ecliptic longitude:
lmrad = np.radians(lmean + 1.915 * np.sin(grad)
+ 0.020 * np.sin(2.0 * grad))
sinlm = np.sin(lmrad)
# Obliquity of ecliptic in radians:
epsrad = np.radians(23.439 - 4e-7 * (df + 365 * yr + nleap))
# Right ascension:
alpha = np.degrees(np.arctan2(np.cos(epsrad) * sinlm, np.cos(lmrad)))
# Declination, which is also the subsolar latitude:
sslat = np.degrees(np.arcsin(np.sin(epsrad) * sinlm))
# Equation of time (degrees):
etdeg = lmean - alpha
nrot = round(etdeg / 360.0)
etdeg = etdeg - 360.0 * nrot
# Subsolar longitude:
sslon = 180.0 - (ut / 240.0 + etdeg) # Earth rotates one degree every 240 s.
nrot = round(sslon / 360.0)
sslon = sslon - 360.0 * nrot
return sslat, sslon | ['def', 'subsol', '(', 'datetime', ')', ':', '# convert to year, day of year and seconds since midnight', 'year', '=', 'datetime', '.', 'year', 'doy', '=', 'datetime', '.', 'timetuple', '(', ')', '.', 'tm_yday', 'ut', '=', 'datetime', '.', 'hour', '*', '3600', '+', 'datetime', '.', 'minute', '*', '60', '+', 'datetime', '.', 'second', 'if', 'not', '1601', '<=', 'year', '<=', '2100', ':', 'raise', 'ValueError', '(', "'Year must be in [1601, 2100]'", ')', 'yr', '=', 'year', '-', '2000', 'nleap', '=', 'int', '(', 'np', '.', 'floor', '(', '(', 'year', '-', '1601.0', ')', '/', '4.0', ')', ')', 'nleap', '-=', '99', 'if', 'year', '<=', '1900', ':', 'ncent', '=', 'int', '(', 'np', '.', 'floor', '(', '(', 'year', '-', '1601.0', ')', '/', '100.0', ')', ')', 'ncent', '=', '3', '-', 'ncent', 'nleap', '=', 'nleap', '+', 'ncent', 'l0', '=', '-', '79.549', '+', '(', '-', '0.238699', '*', '(', 'yr', '-', '4.0', '*', 'nleap', ')', '+', '3.08514e-2', '*', 'nleap', ')', 'g0', '=', '-', '2.472', '+', '(', '-', '0.2558905', '*', '(', 'yr', '-', '4.0', '*', 'nleap', ')', '-', '3.79617e-2', '*', 'nleap', ')', '# Days (including fraction) since 12 UT on January 1 of IYR:', 'df', '=', '(', 'ut', '/', '86400.0', '-', '1.5', ')', '+', 'doy', '# Mean longitude of Sun:', 'lmean', '=', 'l0', '+', '0.9856474', '*', 'df', '# Mean anomaly in radians:', 'grad', '=', 'np', '.', 'radians', '(', 'g0', '+', '0.9856003', '*', 'df', ')', '# Ecliptic longitude:', 'lmrad', '=', 'np', '.', 'radians', '(', 'lmean', '+', '1.915', '*', 'np', '.', 'sin', '(', 'grad', ')', '+', '0.020', '*', 'np', '.', 'sin', '(', '2.0', '*', 'grad', ')', ')', 'sinlm', '=', 'np', '.', 'sin', '(', 'lmrad', ')', '# Obliquity of ecliptic in radians:', 'epsrad', '=', 'np', '.', 'radians', '(', '23.439', '-', '4e-7', '*', '(', 'df', '+', '365', '*', 'yr', '+', 'nleap', ')', ')', '# Right ascension:', 'alpha', '=', 'np', '.', 'degrees', '(', 'np', '.', 'arctan2', '(', 'np', '.', 'cos', '(', 'epsrad', ')', '*', 'sinlm', ',', 'np', '.', 'cos', '(', 'lmrad', ')', ')', ')', '# Declination, which is also the subsolar latitude:', 'sslat', '=', 'np', '.', 'degrees', '(', 'np', '.', 'arcsin', '(', 'np', '.', 'sin', '(', 'epsrad', ')', '*', 'sinlm', ')', ')', '# Equation of time (degrees):', 'etdeg', '=', 'lmean', '-', 'alpha', 'nrot', '=', 'round', '(', 'etdeg', '/', '360.0', ')', 'etdeg', '=', 'etdeg', '-', '360.0', '*', 'nrot', '# Subsolar longitude:', 'sslon', '=', '180.0', '-', '(', 'ut', '/', '240.0', '+', 'etdeg', ')', '# Earth rotates one degree every 240 s.', 'nrot', '=', 'round', '(', 'sslon', '/', '360.0', ')', 'sslon', '=', 'sslon', '-', '360.0', '*', 'nrot', 'return', 'sslat', ',', 'sslon'] | Finds subsolar geocentric latitude and longitude.
Parameters
==========
datetime : :class:`datetime.datetime`
Returns
=======
sbsllat : float
Latitude of subsolar point
sbsllon : float
Longitude of subsolar point
Notes
=====
Based on formulas in Astronomical Almanac for the year 1996, p. C24.
(U.S. Government Printing Office, 1994). Usable for years 1601-2100,
inclusive. According to the Almanac, results are good to at least 0.01
degree latitude and 0.025 degrees longitude between years 1950 and 2050.
Accuracy for other years has not been tested. Every day is assumed to have
exactly 86400 seconds; thus leap seconds that sometimes occur on December
31 are ignored (their effect is below the accuracy threshold of the
algorithm).
After Fortran code by A. D. Richmond, NCAR. Translated from IDL
by K. Laundal. | ['Finds', 'subsolar', 'geocentric', 'latitude', 'and', 'longitude', '.'] | train | https://github.com/aburrell/apexpy/blob/a2e919fd9ea9a65d49c4c22c9eb030c8ccf48386/src/apexpy/helpers.py#L144-L226 |
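Quick sanity check of subsol(): at the June solstice the subsolar latitude should sit near the obliquity of the ecliptic (about +23.4 degrees), and near 12 UT the subsolar longitude should be close to 0, offset only by the equation of time:

import datetime

lat, lon = subsol(datetime.datetime(2000, 6, 21, 12, 0, 0))
print('subsolar point: lat=%.2f lon=%.2f' % (lat, lon))  # lat ~ +23.4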
189 | ericmjl/nxviz | nxviz/geometry.py | circos_radius | def circos_radius(n_nodes, node_r):
"""
Automatically computes the origin-to-node centre radius of the Circos plot
using the triangle equality sine rule.
a / sin(A) = b / sin(B) = c / sin(C)
:param n_nodes: the number of nodes in the plot.
:type n_nodes: int
:param node_r: the radius of each node.
:type node_r: float
:returns: Origin-to-node centre radius.
"""
A = 2 * np.pi / n_nodes # noqa
B = (np.pi - A) / 2 # noqa
a = 2 * node_r
return a * np.sin(B) / np.sin(A) | python | def circos_radius(n_nodes, node_r):
"""
Automatically computes the origin-to-node centre radius of the Circos plot
using the triangle equality sine rule.
a / sin(A) = b / sin(B) = c / sin(C)
:param n_nodes: the number of nodes in the plot.
:type n_nodes: int
:param node_r: the radius of each node.
:type node_r: float
:returns: Origin-to-node centre radius.
"""
A = 2 * np.pi / n_nodes # noqa
B = (np.pi - A) / 2 # noqa
a = 2 * node_r
return a * np.sin(B) / np.sin(A) | ['def', 'circos_radius', '(', 'n_nodes', ',', 'node_r', ')', ':', 'A', '=', '2', '*', 'np', '.', 'pi', '/', 'n_nodes', '# noqa', 'B', '=', '(', 'np', '.', 'pi', '-', 'A', ')', '/', '2', '# noqa', 'a', '=', '2', '*', 'node_r', 'return', 'a', '*', 'np', '.', 'sin', '(', 'B', ')', '/', 'np', '.', 'sin', '(', 'A', ')'] | Automatically computes the origin-to-node centre radius of the Circos plot
using the triangle equality sine rule.
a / sin(A) = b / sin(B) = c / sin(C)
:param n_nodes: the number of nodes in the plot.
:type n_nodes: int
:param node_r: the radius of each node.
:type node_r: float
:returns: Origin-to-node centre radius. | ['Automatically', 'computes', 'the', 'origin', '-', 'to', '-', 'node', 'centre', 'radius', 'of', 'the', 'Circos', 'plot', 'using', 'the', 'triangle', 'equality', 'sine', 'rule', '.'] | train | https://github.com/ericmjl/nxviz/blob/6ea5823a8030a686f165fbe37d7a04d0f037ecc9/nxviz/geometry.py#L101-L117 |
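The sine rule here solves the isosceles triangle formed by the origin and two adjacent node centres: the apex angle is A = 2*pi/n and the opposite side is the chord a = 2*node_r, so neighbouring nodes just touch. For six unit-radius nodes the layout is a regular hexagon, whose circumradius equals its side length:

import numpy as np

r = circos_radius(n_nodes=6, node_r=1.0)
assert np.isclose(r, 2.0)  # hexagon: circumradius == side == 2 * node_r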
190 | aiogram/aiogram | aiogram/types/message.py | Message.send_contact | async def send_contact(self, phone_number: base.String,
first_name: base.String, last_name: typing.Union[base.String, None] = None,
disable_notification: typing.Union[base.Boolean, None] = None,
reply_markup=None,
reply=True) -> Message:
"""
Use this method to send phone contacts.
Source: https://core.telegram.org/bots/api#sendcontact
:param phone_number: Contact's phone number
:type phone_number: :obj:`base.String`
:param first_name: Contact's first name
:type first_name: :obj:`base.String`
:param last_name: Contact's last name
:type last_name: :obj:`typing.Union[base.String, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound.
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_markup: Additional interface options.
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:param reply: fill 'reply_to_message_id'
:return: On success, the sent Message is returned.
:rtype: :obj:`types.Message`
"""
warn_deprecated('"Message.send_contact" method will be removed in 2.2 version.\n'
'Use "Message.reply_contact" instead.',
stacklevel=8)
return await self.bot.send_contact(chat_id=self.chat.id,
phone_number=phone_number,
first_name=first_name, last_name=last_name,
disable_notification=disable_notification,
reply_to_message_id=self.message_id if reply else None,
reply_markup=reply_markup) | python | async def send_contact(self, phone_number: base.String,
first_name: base.String, last_name: typing.Union[base.String, None] = None,
disable_notification: typing.Union[base.Boolean, None] = None,
reply_markup=None,
reply=True) -> Message:
"""
Use this method to send phone contacts.
Source: https://core.telegram.org/bots/api#sendcontact
:param phone_number: Contact's phone number
:type phone_number: :obj:`base.String`
:param first_name: Contact's first name
:type first_name: :obj:`base.String`
:param last_name: Contact's last name
:type last_name: :obj:`typing.Union[base.String, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound.
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_markup: Additional interface options.
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:param reply: fill 'reply_to_message_id'
:return: On success, the sent Message is returned.
:rtype: :obj:`types.Message`
"""
warn_deprecated('"Message.send_contact" method will be removed in 2.2 version.\n'
'Use "Message.reply_contact" instead.',
stacklevel=8)
return await self.bot.send_contact(chat_id=self.chat.id,
phone_number=phone_number,
first_name=first_name, last_name=last_name,
disable_notification=disable_notification,
reply_to_message_id=self.message_id if reply else None,
reply_markup=reply_markup) | ['async', 'def', 'send_contact', '(', 'self', ',', 'phone_number', ':', 'base', '.', 'String', ',', 'first_name', ':', 'base', '.', 'String', ',', 'last_name', ':', 'typing', '.', 'Union', '[', 'base', '.', 'String', ',', 'None', ']', '=', 'None', ',', 'disable_notification', ':', 'typing', '.', 'Union', '[', 'base', '.', 'Boolean', ',', 'None', ']', '=', 'None', ',', 'reply_markup', '=', 'None', ',', 'reply', '=', 'True', ')', '->', 'Message', ':', 'warn_deprecated', '(', '\'"Message.send_contact" method will be removed in 2.2 version.\\n\'', '\'Use "Message.reply_contact" instead.\'', ',', 'stacklevel', '=', '8', ')', 'return', 'await', 'self', '.', 'bot', '.', 'send_contact', '(', 'chat_id', '=', 'self', '.', 'chat', '.', 'id', ',', 'phone_number', '=', 'phone_number', ',', 'first_name', '=', 'first_name', ',', 'last_name', '=', 'last_name', ',', 'disable_notification', '=', 'disable_notification', ',', 'reply_to_message_id', '=', 'self', '.', 'message_id', 'if', 'reply', 'else', 'None', ',', 'reply_markup', '=', 'reply_markup', ')'] | Use this method to send phone contacts.
Source: https://core.telegram.org/bots/api#sendcontact
:param phone_number: Contact's phone number
:type phone_number: :obj:`base.String`
:param first_name: Contact's first name
:type first_name: :obj:`base.String`
:param last_name: Contact's last name
:type last_name: :obj:`typing.Union[base.String, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound.
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_markup: Additional interface options.
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:param reply: fill 'reply_to_message_id'
:return: On success, the sent Message is returned.
:rtype: :obj:`types.Message` | ['Use', 'this', 'method', 'to', 'send', 'phone', 'contacts', '.'] | train | https://github.com/aiogram/aiogram/blob/2af930149ce2482547721e2c8755c10307295e48/aiogram/types/message.py#L1230-L1264 |
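Per the deprecation warning in this record, reply_contact() is the preferred spelling; a hypothetical aiogram 2.x handler sketch, where ``dp`` is assumed to be a Dispatcher instance and the phone number is invented:

from aiogram import types

@dp.message_handler(commands=['contact'])
async def share_contact(message: types.Message):
    await message.reply_contact(phone_number='+10000000000',
                                first_name='Support')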
191 | gem/oq-engine | openquake/commonlib/source.py | CompositionInfo.rlzs | def rlzs(self):
"""
:returns: an array of realizations
"""
tups = [(r.ordinal, r.uid, r.weight['weight'])
for r in self.get_rlzs_assoc().realizations]
return numpy.array(tups, rlz_dt) | python | def rlzs(self):
"""
:returns: an array of realizations
"""
tups = [(r.ordinal, r.uid, r.weight['weight'])
for r in self.get_rlzs_assoc().realizations]
return numpy.array(tups, rlz_dt) | ['def', 'rlzs', '(', 'self', ')', ':', 'tups', '=', '[', '(', 'r', '.', 'ordinal', ',', 'r', '.', 'uid', ',', 'r', '.', 'weight', '[', "'weight'", ']', ')', 'for', 'r', 'in', 'self', '.', 'get_rlzs_assoc', '(', ')', '.', 'realizations', ']', 'return', 'numpy', '.', 'array', '(', 'tups', ',', 'rlz_dt', ')'] | :returns: an array of realizations | [':', 'returns', ':', 'an', 'array', 'of', 'realizations'] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/source.py#L257-L263 |
192 | crocs-muni/roca | roca/detect.py | RocaFingerprinter.init_parser | def init_parser(self):
"""
Init command line parser
:return:
"""
parser = argparse.ArgumentParser(description='ROCA Fingerprinter')
parser.add_argument('--tmp', dest='tmp_dir', default='.',
help='Temporary dir for subprocessing (e.g. APK parsing scratch)')
parser.add_argument('--debug', dest='debug', default=False, action='store_const', const=True,
help='Debugging logging')
parser.add_argument('--dump', dest='dump', default=False, action='store_const', const=True,
help='Dump all processed info')
parser.add_argument('--flatten', dest='flatten', default=False, action='store_const', const=True,
help='Flatten the dump')
parser.add_argument('--indent', dest='indent', default=False, action='store_const', const=True,
help='Indent the dump')
parser.add_argument('--old', dest='old', default=False, action='store_const', const=True,
help='Old fingerprinting algorithm - moduli detector')
parser.add_argument('--base64-stdin', dest='base64stdin', default=False, action='store_const', const=True,
help='Decode STDIN as base64')
parser.add_argument('--file-pem', dest='file_pem', default=False, action='store_const', const=True,
help='Force read as PEM encoded file')
parser.add_argument('--file-der', dest='file_der', default=False, action='store_const', const=True,
help='Force read as DER encoded file')
parser.add_argument('--file-pgp', dest='file_pgp', default=False, action='store_const', const=True,
help='Force read as PGP ASC encoded file')
parser.add_argument('--file-ssh', dest='file_ssh', default=False, action='store_const', const=True,
help='Force read as SSH public key file')
parser.add_argument('--file-mod', dest='file_mod', default=False, action='store_const', const=True,
help='Force read as One modulus per line')
parser.add_argument('--file-json', dest='file_json', default=False, action='store_const', const=True,
help='Force read as JSON file')
parser.add_argument('--file-ldiff', dest='file_ldiff', default=False, action='store_const', const=True,
help='Force read as LDIFF file')
parser.add_argument('--file-pkcs7', dest='file_pkcs7', default=False, action='store_const', const=True,
help='Force read as PKCS7 file')
parser.add_argument('--key-fmt-base64', dest='key_fmt_base64', default=False, action='store_const', const=True,
help='Modulus per line, base64 encoded')
parser.add_argument('--key-fmt-hex', dest='key_fmt_hex', default=False, action='store_const', const=True,
help='Modulus per line, hex encoded')
parser.add_argument('--key-fmt-dec', dest='key_fmt_dec', default=False, action='store_const', const=True,
help='Modulus per line, dec encoded')
parser.add_argument('--jks-pass-file', dest='jks_pass_file', default=None,
help='Password file for JKS, one per line')
parser.add_argument('files', nargs=argparse.ZERO_OR_MORE, default=[],
help='files to process')
return parser | python | def init_parser(self):
"""
Init command line parser
:return:
"""
parser = argparse.ArgumentParser(description='ROCA Fingerprinter')
parser.add_argument('--tmp', dest='tmp_dir', default='.',
help='Temporary dir for subprocessing (e.g. APK parsing scratch)')
parser.add_argument('--debug', dest='debug', default=False, action='store_const', const=True,
help='Debugging logging')
parser.add_argument('--dump', dest='dump', default=False, action='store_const', const=True,
help='Dump all processed info')
parser.add_argument('--flatten', dest='flatten', default=False, action='store_const', const=True,
help='Flatten the dump')
parser.add_argument('--indent', dest='indent', default=False, action='store_const', const=True,
help='Indent the dump')
parser.add_argument('--old', dest='old', default=False, action='store_const', const=True,
help='Old fingerprinting algorithm - moduli detector')
parser.add_argument('--base64-stdin', dest='base64stdin', default=False, action='store_const', const=True,
help='Decode STDIN as base64')
parser.add_argument('--file-pem', dest='file_pem', default=False, action='store_const', const=True,
help='Force read as PEM encoded file')
parser.add_argument('--file-der', dest='file_der', default=False, action='store_const', const=True,
help='Force read as DER encoded file')
parser.add_argument('--file-pgp', dest='file_pgp', default=False, action='store_const', const=True,
help='Force read as PGP ASC encoded file')
parser.add_argument('--file-ssh', dest='file_ssh', default=False, action='store_const', const=True,
help='Force read as SSH public key file')
parser.add_argument('--file-mod', dest='file_mod', default=False, action='store_const', const=True,
help='Force read as One modulus per line')
parser.add_argument('--file-json', dest='file_json', default=False, action='store_const', const=True,
help='Force read as JSON file')
parser.add_argument('--file-ldiff', dest='file_ldiff', default=False, action='store_const', const=True,
help='Force read as LDIFF file')
parser.add_argument('--file-pkcs7', dest='file_pkcs7', default=False, action='store_const', const=True,
help='Force read as PKCS7 file')
parser.add_argument('--key-fmt-base64', dest='key_fmt_base64', default=False, action='store_const', const=True,
help='Modulus per line, base64 encoded')
parser.add_argument('--key-fmt-hex', dest='key_fmt_hex', default=False, action='store_const', const=True,
help='Modulus per line, hex encoded')
parser.add_argument('--key-fmt-dec', dest='key_fmt_dec', default=False, action='store_const', const=True,
help='Modulus per line, dec encoded')
parser.add_argument('--jks-pass-file', dest='jks_pass_file', default=None,
help='Password file for JKS, one per line')
parser.add_argument('files', nargs=argparse.ZERO_OR_MORE, default=[],
help='files to process')
        return parser | ['def', 'init_parser', '(', 'self', ')', ':', 'parser', '=', 'argparse', '.', 'ArgumentParser', '(', 'description', '=', "'ROCA Fingerprinter'", ')', 'parser', '.', 'add_argument', '(', "'--tmp'", ',', 'dest', '=', "'tmp_dir'", ',', 'default', '=', "'.'", ',', 'help', '=', "'Temporary dir for subprocessing (e.g. APK parsing scratch)'", ')', 'parser', '.', 'add_argument', '(', "'--debug'", ',', 'dest', '=', "'debug'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Debugging logging'", ')', 'parser', '.', 'add_argument', '(', "'--dump'", ',', 'dest', '=', "'dump'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Dump all processed info'", ')', 'parser', '.', 'add_argument', '(', "'--flatten'", ',', 'dest', '=', "'flatten'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Flatten the dump'", ')', 'parser', '.', 'add_argument', '(', "'--indent'", ',', 'dest', '=', "'indent'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Indent the dump'", ')', 'parser', '.', 'add_argument', '(', "'--old'", ',', 'dest', '=', "'old'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Old fingerprinting algorithm - moduli detector'", ')', 'parser', '.', 'add_argument', '(', "'--base64-stdin'", ',', 'dest', '=', "'base64stdin'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Decode STDIN as base64'", ')', 'parser', '.', 'add_argument', '(', "'--file-pem'", ',', 'dest', '=', "'file_pem'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as PEM encoded file'", ')', 'parser', '.', 'add_argument', '(', "'--file-der'", ',', 'dest', '=', "'file_der'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as DER encoded file'", ')', 'parser', '.', 'add_argument', '(', "'--file-pgp'", ',', 'dest', '=', "'file_pgp'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as PGP ASC encoded file'", ')', 'parser', '.', 'add_argument', '(', "'--file-ssh'", ',', 'dest', '=', "'file_ssh'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as SSH public key file'", ')', 'parser', '.', 'add_argument', '(', "'--file-mod'", ',', 'dest', '=', "'file_mod'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as One modulus per line'", ')', 'parser', '.', 'add_argument', '(', "'--file-json'", ',', 'dest', '=', "'file_json'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as JSON file'", ')', 'parser', '.', 'add_argument', '(', "'--file-ldiff'", ',', 'dest', '=', "'file_ldiff'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as LDIFF file'", ')', 'parser', '.', 'add_argument', '(', "'--file-pkcs7'", ',', 'dest', '=', "'file_pkcs7'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Force read as PKCS7 file'", ')', 'parser', '.', 'add_argument', '(', "'--key-fmt-base64'", ',', 'dest', '=', "'key_fmt_base64'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Modulus per line, base64 encoded'", ')', 'parser', '.', 'add_argument', '(', "'--key-fmt-hex'", ',', 'dest', '=', "'key_fmt_hex'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Modulus per line, hex encoded'", ')', 'parser', '.', 'add_argument', '(', "'--key-fmt-dec'", ',', 'dest', '=', "'key_fmt_dec'", ',', 'default', '=', 'False', ',', 'action', '=', "'store_const'", ',', 'const', '=', 'True', ',', 'help', '=', "'Modulus per line, dec encoded'", ')', 'parser', '.', 'add_argument', '(', "'--jks-pass-file'", ',', 'dest', '=', "'jks_pass_file'", ',', 'default', '=', 'None', ',', 'help', '=', "'Password file for JKS, one per line'", ')', 'parser', '.', 'add_argument', '(', "'files'", ',', 'nargs', '=', 'argparse', '.', 'ZERO_OR_MORE', ',', 'default', '=', '[', ']', ',', 'help', '=', "'files to process'", ')', 'return', 'parser'] | Init command line parser
:return: | ['Init', 'command', 'line', 'parser', ':', 'return', ':'] | train | https://github.com/crocs-muni/roca/blob/74ad6ce63c428d83dcffce9c5e26ef7b9e30faa5/roca/detect.py#L2169-L2235 |
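Sketch for row 192: every boolean flag there is registered with action='store_const', const=True, default=False; argparse's store_true action is the idiomatic shorthand for exactly that combination. A small self-contained example:

    import argparse

    parser = argparse.ArgumentParser(description="flag demo")
    # store_true == store_const with const=True and default=False.
    parser.add_argument("--debug", action="store_true", help="Debugging logging")
    parser.add_argument("files", nargs="*", default=[])  # "*" == argparse.ZERO_OR_MORE
    args = parser.parse_args(["--debug", "key.pem"])
    print(args.debug, args.files)  # -> True ['key.pem']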
193 | saltstack/salt | salt/utils/hashutils.py | sha1_digest | def sha1_digest(instr):
'''
Generate an sha1 hash of a given string.
'''
if six.PY3:
b = salt.utils.stringutils.to_bytes(instr)
return hashlib.sha1(b).hexdigest()
return hashlib.sha1(instr).hexdigest() | python | def sha1_digest(instr):
'''
Generate an sha1 hash of a given string.
'''
if six.PY3:
b = salt.utils.stringutils.to_bytes(instr)
return hashlib.sha1(b).hexdigest()
return hashlib.sha1(instr).hexdigest() | ['def', 'sha1_digest', '(', 'instr', ')', ':', 'if', 'six', '.', 'PY3', ':', 'b', '=', 'salt', '.', 'utils', '.', 'stringutils', '.', 'to_bytes', '(', 'instr', ')', 'return', 'hashlib', '.', 'sha1', '(', 'b', ')', '.', 'hexdigest', '(', ')', 'return', 'hashlib', '.', 'sha1', '(', 'instr', ')', '.', 'hexdigest', '(', ')'] | Generate an sha1 hash of a given string. | ['Generate', 'an', 'sha1', 'hash', 'of', 'a', 'given', 'string', '.'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/utils/hashutils.py#L96-L103 |
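Sketch for row 193: the function guards the bytes conversion behind a Python 2/3 check; targeting Python 3 alone, the same digest reduces to one encode-and-hash line (UTF-8 encoding assumed):

    import hashlib

    def sha1_digest(instr: str) -> str:
        # hashlib operates on bytes, so encode the str input first.
        return hashlib.sha1(instr.encode("utf-8")).hexdigest()

    print(sha1_digest("salt"))  # 40-character hex digest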
194 | wonambi-python/wonambi | wonambi/widgets/utils.py | FormList.get_value | def get_value(self, default=None):
"""Get int from widget.
Parameters
----------
default : list
list with widgets
Returns
-------
list
list that might contain int or str or float etc
"""
if default is None:
default = []
try:
text = literal_eval(self.text())
if not isinstance(text, list):
pass
# raise ValueError
except ValueError:
lg.debug('Cannot convert "' + str(text) + '" to list. ' +
'Using default ' + str(default))
text = default
self.set_value(text)
return text | python | def get_value(self, default=None):
"""Get int from widget.
Parameters
----------
default : list
list with widgets
Returns
-------
list
list that might contain int or str or float etc
"""
if default is None:
default = []
try:
text = literal_eval(self.text())
if not isinstance(text, list):
pass
# raise ValueError
except ValueError:
lg.debug('Cannot convert "' + str(text) + '" to list. ' +
'Using default ' + str(default))
text = default
self.set_value(text)
return text | ['def', 'get_value', '(', 'self', ',', 'default', '=', 'None', ')', ':', 'if', 'default', 'is', 'None', ':', 'default', '=', '[', ']', 'try', ':', 'text', '=', 'literal_eval', '(', 'self', '.', 'text', '(', ')', ')', 'if', 'not', 'isinstance', '(', 'text', ',', 'list', ')', ':', 'pass', '# raise ValueError', 'except', 'ValueError', ':', 'lg', '.', 'debug', '(', '\'Cannot convert "\'', '+', 'str', '(', 'text', ')', '+', '\'" to list. \'', '+', "'Using default '", '+', 'str', '(', 'default', ')', ')', 'text', '=', 'default', 'self', '.', 'set_value', '(', 'text', ')', 'return', 'text'] | Get int from widget.
Parameters
----------
default : list
list with widgets
Returns
-------
list
list that might contain int or str or float etc | ['Get', 'int', 'from', 'widget', '.'] | train | https://github.com/wonambi-python/wonambi/blob/1d8e3d7e53df8017c199f703bcab582914676e76/wonambi/widgets/utils.py#L390-L419 |
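Sketch for row 194: if literal_eval raises before text is assigned, the except block there logs an unbound name. A standalone version of the same parse-with-fallback logic that avoids that (raw stands in for self.text(); SyntaxError is also caught, since literal_eval can raise it on malformed input):

    from ast import literal_eval

    def parse_list(raw, default=None):
        if default is None:
            default = []
        try:
            value = literal_eval(raw)
            if not isinstance(value, list):
                raise ValueError
        except (ValueError, SyntaxError):
            # Log the raw input, which is always bound, not the parsed value.
            print('Cannot convert "%s" to list; using default %s' % (raw, default))
            value = default
        return value

    print(parse_list("[1, 2]"), parse_list("oops"))  # -> [1, 2] []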
195 | jkehler/redisqueue | redisqueue/__init__.py | RedisQueue.qsize | def qsize(self):
"""
Returns the number of items currently in the queue
:return: Integer containing size of the queue
:exception: ConnectionError if queue is not connected
"""
if not self.connected:
raise QueueNotConnectedError("Queue is not Connected")
try:
size = self.__db.llen(self._key)
except redis.ConnectionError as e:
raise redis.ConnectionError(repr(e))
return size | python | def qsize(self):
"""
Returns the number of items currently in the queue
:return: Integer containing size of the queue
:exception: ConnectionError if queue is not connected
"""
if not self.connected:
raise QueueNotConnectedError("Queue is not Connected")
try:
size = self.__db.llen(self._key)
except redis.ConnectionError as e:
raise redis.ConnectionError(repr(e))
return size | ['def', 'qsize', '(', 'self', ')', ':', 'if', 'not', 'self', '.', 'connected', ':', 'raise', 'QueueNotConnectedError', '(', '"Queue is not Connected"', ')', 'try', ':', 'size', '=', 'self', '.', '__db', '.', 'llen', '(', 'self', '.', '_key', ')', 'except', 'redis', '.', 'ConnectionError', 'as', 'e', ':', 'raise', 'redis', '.', 'ConnectionError', '(', 'repr', '(', 'e', ')', ')', 'return', 'size'] | Returns the number of items currently in the queue
:return: Integer containing size of the queue
:exception: ConnectionError if queue is not connected | ['Returns', 'the', 'number', 'of', 'items', 'currently', 'in', 'the', 'queue'] | train | https://github.com/jkehler/redisqueue/blob/feac4dfc30837e0ab1a55a8479443ea74b2793f2/redisqueue/__init__.py#L89-L103 |
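Sketch for row 195: qsize is a thin wrapper over Redis LLEN. The same check with a plain redis-py client, assuming a local Redis server on the default port and an initially empty key:

    import redis

    r = redis.Redis(host="localhost", port=6379)
    r.rpush("demo:queue", "job-1", "job-2")  # enqueue two items
    print(r.llen("demo:queue"))              # -> 2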
196 | pyGrowler/Growler | growler/core/router.py | routerify | def routerify(obj):
"""
Scan through attributes of object parameter looking for any which
match a route signature.
A router will be created and added to the object with parameter.
Args:
obj (object): The object (with attributes) from which to
setup a router
Returns:
Router: The router created from attributes in the object.
"""
router = Router()
for info in get_routing_attributes(obj):
router.add_route(*info)
obj.__growler_router = router
return router | python | def routerify(obj):
"""
Scan through attributes of object parameter looking for any which
match a route signature.
A router will be created and added to the object with parameter.
Args:
obj (object): The object (with attributes) from which to
setup a router
Returns:
Router: The router created from attributes in the object.
"""
router = Router()
for info in get_routing_attributes(obj):
router.add_route(*info)
obj.__growler_router = router
return router | ['def', 'routerify', '(', 'obj', ')', ':', 'router', '=', 'Router', '(', ')', 'for', 'info', 'in', 'get_routing_attributes', '(', 'obj', ')', ':', 'router', '.', 'add_route', '(', '*', 'info', ')', 'obj', '.', '__growler_router', '=', 'router', 'return', 'router'] | Scan through attributes of object parameter looking for any which
match a route signature.
A router will be created and added to the object with parameter.
Args:
obj (object): The object (with attributes) from which to
setup a router
Returns:
Router: The router created from attributes in the object. | ['Scan', 'through', 'attributes', 'of', 'object', 'parameter', 'looking', 'for', 'any', 'which', 'match', 'a', 'route', 'signature', '.', 'A', 'router', 'will', 'be', 'created', 'and', 'added', 'to', 'the', 'object', 'with', 'parameter', '.'] | train | https://github.com/pyGrowler/Growler/blob/90c923ff204f28b86a01d741224987a22f69540f/growler/core/router.py#L281-L298 |
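Sketch for row 196: the function scans an object for attributes that carry routing metadata and registers each one. Growler's real get_routing_attributes is more involved; this is a generic sketch of the marker-and-scan pattern it relies on, with a hypothetical decorator:

    def get(path):
        # Hypothetical marker decorator: tags a method with routing info.
        def mark(fn):
            fn._route = ("GET", path)
            return fn
        return mark

    class Pages:
        @get("/hello")
        def hello(self, req, res):
            return "hi"

    def get_routing_attributes(obj):
        # Yields (method, path, handler) for every marked attribute.
        for name in dir(obj):
            attr = getattr(obj, name)
            if hasattr(attr, "_route"):
                yield attr._route + (attr,)

    print(next(get_routing_attributes(Pages()))[:2])  # -> ('GET', '/hello')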
197 | nerdvegas/rez | src/rezgui/dialogs/WriteGraphDialog.py | view_graph | def view_graph(graph_str, parent=None, prune_to=None):
"""View a graph."""
from rezgui.dialogs.ImageViewerDialog import ImageViewerDialog
from rez.config import config
# check for already written tempfile
h = hash((graph_str, prune_to))
filepath = graph_file_lookup.get(h)
if filepath and not os.path.exists(filepath):
filepath = None
# write graph to tempfile
if filepath is None:
suffix = ".%s" % config.dot_image_format
fd, filepath = tempfile.mkstemp(suffix=suffix, prefix="rez-graph-")
os.close(fd)
dlg = WriteGraphDialog(graph_str, filepath, parent, prune_to=prune_to)
if not dlg.write_graph():
return
# display graph
graph_file_lookup[h] = filepath
dlg = ImageViewerDialog(filepath, parent)
dlg.exec_() | python | def view_graph(graph_str, parent=None, prune_to=None):
"""View a graph."""
from rezgui.dialogs.ImageViewerDialog import ImageViewerDialog
from rez.config import config
# check for already written tempfile
h = hash((graph_str, prune_to))
filepath = graph_file_lookup.get(h)
if filepath and not os.path.exists(filepath):
filepath = None
# write graph to tempfile
if filepath is None:
suffix = ".%s" % config.dot_image_format
fd, filepath = tempfile.mkstemp(suffix=suffix, prefix="rez-graph-")
os.close(fd)
dlg = WriteGraphDialog(graph_str, filepath, parent, prune_to=prune_to)
if not dlg.write_graph():
return
# display graph
graph_file_lookup[h] = filepath
dlg = ImageViewerDialog(filepath, parent)
dlg.exec_() | ['def', 'view_graph', '(', 'graph_str', ',', 'parent', '=', 'None', ',', 'prune_to', '=', 'None', ')', ':', 'from', 'rezgui', '.', 'dialogs', '.', 'ImageViewerDialog', 'import', 'ImageViewerDialog', 'from', 'rez', '.', 'config', 'import', 'config', '# check for already written tempfile', 'h', '=', 'hash', '(', '(', 'graph_str', ',', 'prune_to', ')', ')', 'filepath', '=', 'graph_file_lookup', '.', 'get', '(', 'h', ')', 'if', 'filepath', 'and', 'not', 'os', '.', 'path', '.', 'exists', '(', 'filepath', ')', ':', 'filepath', '=', 'None', '# write graph to tempfile', 'if', 'filepath', 'is', 'None', ':', 'suffix', '=', '".%s"', '%', 'config', '.', 'dot_image_format', 'fd', ',', 'filepath', '=', 'tempfile', '.', 'mkstemp', '(', 'suffix', '=', 'suffix', ',', 'prefix', '=', '"rez-graph-"', ')', 'os', '.', 'close', '(', 'fd', ')', 'dlg', '=', 'WriteGraphDialog', '(', 'graph_str', ',', 'filepath', ',', 'parent', ',', 'prune_to', '=', 'prune_to', ')', 'if', 'not', 'dlg', '.', 'write_graph', '(', ')', ':', 'return', '# display graph', 'graph_file_lookup', '[', 'h', ']', '=', 'filepath', 'dlg', '=', 'ImageViewerDialog', '(', 'filepath', ',', 'parent', ')', 'dlg', '.', 'exec_', '(', ')'] | View a graph. | ['View', 'a', 'graph', '.'] | train | https://github.com/nerdvegas/rez/blob/1d3b846d53b5b5404edfe8ddb9083f9ceec8c5e7/src/rezgui/dialogs/WriteGraphDialog.py#L109-L133 |
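Sketch for row 197: rendered graphs are memoized in a module-level dict keyed by hash((graph_str, prune_to)), and a graph is re-rendered only when the cached tempfile has vanished. The caching skeleton, minus the Qt dialogs:

    import os
    import tempfile

    _cache = {}  # content key -> path of previously rendered file

    def render_once(graph_str, prune_to=None):
        key = hash((graph_str, prune_to))
        path = _cache.get(key)
        if path and os.path.exists(path):
            return path                       # reuse the earlier render
        fd, path = tempfile.mkstemp(suffix=".png", prefix="rez-graph-")
        os.close(fd)
        # ... write the rendered image to 'path' here ...
        _cache[key] = path
        return path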
198 | materialsproject/pymatgen | pymatgen/apps/battery/battery_abc.py | AbstractElectrode.get_capacity_vol | def get_capacity_vol(self, min_voltage=None, max_voltage=None,
use_overall_normalization=True):
"""
Get the volumetric capacity of the electrode.
Args:
min_voltage (float): The minimum allowable voltage for a given
step.
            max_voltage (float): The maximum allowable voltage for a
                given step.
            use_overall_normalization (bool): If False, normalize by the
                discharged state of only the voltage pairs matching the voltage
                criteria. If True, use default normalization of the full
electrode path.
Returns:
Volumetric capacity in mAh/cc across the insertion path (a subset
of the path can be chosen by the optional arguments)
"""
pairs_in_range = self._select_in_voltage_range(min_voltage,
max_voltage)
normalization_vol = self.normalization_volume \
if use_overall_normalization or len(pairs_in_range) == 0 \
else pairs_in_range[-1].vol_discharge
return sum([pair.mAh for pair in pairs_in_range]) / normalization_vol \
* 1e24 / N_A | python | def get_capacity_vol(self, min_voltage=None, max_voltage=None,
use_overall_normalization=True):
"""
Get the volumetric capacity of the electrode.
Args:
min_voltage (float): The minimum allowable voltage for a given
step.
            max_voltage (float): The maximum allowable voltage for a
                given step.
            use_overall_normalization (bool): If False, normalize by the
                discharged state of only the voltage pairs matching the voltage
                criteria. If True, use default normalization of the full
electrode path.
Returns:
Volumetric capacity in mAh/cc across the insertion path (a subset
of the path can be chosen by the optional arguments)
"""
pairs_in_range = self._select_in_voltage_range(min_voltage,
max_voltage)
normalization_vol = self.normalization_volume \
if use_overall_normalization or len(pairs_in_range) == 0 \
else pairs_in_range[-1].vol_discharge
return sum([pair.mAh for pair in pairs_in_range]) / normalization_vol \
* 1e24 / N_A | ['def', 'get_capacity_vol', '(', 'self', ',', 'min_voltage', '=', 'None', ',', 'max_voltage', '=', 'None', ',', 'use_overall_normalization', '=', 'True', ')', ':', 'pairs_in_range', '=', 'self', '.', '_select_in_voltage_range', '(', 'min_voltage', ',', 'max_voltage', ')', 'normalization_vol', '=', 'self', '.', 'normalization_volume', 'if', 'use_overall_normalization', 'or', 'len', '(', 'pairs_in_range', ')', '==', '0', 'else', 'pairs_in_range', '[', '-', '1', ']', '.', 'vol_discharge', 'return', 'sum', '(', '[', 'pair', '.', 'mAh', 'for', 'pair', 'in', 'pairs_in_range', ']', ')', '/', 'normalization_vol', '*', '1e24', '/', 'N_A'] | Get the volumetric capacity of the electrode.
Args:
min_voltage (float): The minimum allowable voltage for a given
step.
            max_voltage (float): The maximum allowable voltage for a
                given step.
            use_overall_normalization (bool): If False, normalize by the
                discharged state of only the voltage pairs matching the voltage
                criteria. If True, use default normalization of the full
electrode path.
Returns:
Volumetric capacity in mAh/cc across the insertion path (a subset
of the path can be chosen by the optional arguments) | ['Get', 'the', 'volumetric', 'capacity', 'of', 'the', 'electrode', '.'] | train | https://github.com/materialsproject/pymatgen/blob/4ca558cf72f8d5f8a1f21dfdfc0181a971c186da/pymatgen/apps/battery/battery_abc.py#L257-L282 |
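Sketch for row 198: the closing expression divides the summed capacity by a volume and multiplies by 1e24 / N_A; the 1e24 converts cubic angstroms to cubic centimetres (1 cm^3 = 1e24 A^3), and the N_A factor appears to undo an Avogadro-scale normalization carried by the per-pair mAh upstream (an assumption here, not confirmed by this row). With placeholder numbers:

    # Illustrative values only; real pairs come from the electrode path.
    N_A = 6.02214076e23
    pair_mAh = [120.0, 80.0]       # hypothetical per-pair capacities
    normalization_vol = 75.0       # hypothetical volume, cubic angstroms
    print(sum(pair_mAh) / normalization_vol * 1e24 / N_A)  # mAh per cc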
199 | rigetti/quantumflow | quantumflow/ops.py | Channel.partial_trace | def partial_trace(self, qubits: Qubits) -> 'Channel':
"""Return the partial trace over the specified qubits"""
vec = self.vec.partial_trace(qubits)
return Channel(vec.tensor, vec.qubits) | python | def partial_trace(self, qubits: Qubits) -> 'Channel':
"""Return the partial trace over the specified qubits"""
vec = self.vec.partial_trace(qubits)
return Channel(vec.tensor, vec.qubits) | ['def', 'partial_trace', '(', 'self', ',', 'qubits', ':', 'Qubits', ')', '->', "'Channel'", ':', 'vec', '=', 'self', '.', 'vec', '.', 'partial_trace', '(', 'qubits', ')', 'return', 'Channel', '(', 'vec', '.', 'tensor', ',', 'vec', '.', 'qubits', ')'] | Return the partial trace over the specified qubits | ['Return', 'the', 'partial', 'trace', 'over', 'the', 'specified', 'qubits'] | train | https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/ops.py#L407-L410 |