| text (string, lengths 75–104k) | code_tokens (sequence) | avg_line_len (float64, 7.91–980) | score (float64, 0–0.18) | texts (sequence) | scores (sequence) | num_lines (int64, 3–2.77k) | avg_score (float64, 0–0.37) |
---|---|---|---|---|---|---|---|
def update_opdocs(self, checksum, opdocs, revision=None):
"""
Modifies the internal state based on a change to the content and returns
the sets of words added and removed.
:Parameters:
checksum : `hashable`
A checksum generated from the text of a revision
opdocs : `iterable` ( `dict` )
A sequence of operations that represent the diff of this new
revision
revision : `mixed`
Revision metadata
:Returns:
A triple of lists:
current_tokens : `list` ( :class:`~mwpersistence.Token` )
A sequence of Tokens representing the revision that was just
processed.
tokens_added : `list` ( :class:`~mwpersistence.Token` )
Tokens that were added while updating state.
tokens_removed : `list` ( :class:`~mwpersistence.Token` )
Tokens that were removed while updating state.
"""
return self._update(checksum=checksum, opdocs=opdocs,
revision=revision) | [
"def",
"update_opdocs",
"(",
"self",
",",
"checksum",
",",
"opdocs",
",",
"revision",
"=",
"None",
")",
":",
"return",
"self",
".",
"_update",
"(",
"checksum",
"=",
"checksum",
",",
"opdocs",
"=",
"opdocs",
",",
"revision",
"=",
"revision",
")"
] | 40.925926 | 0.001768 | [
"def update_opdocs(self, checksum, opdocs, revision=None):\n",
" \"\"\"\n",
" Modifies the internal state based a change to the content and returns\n",
" the sets of words added and removed.\n",
"\n",
" :Parameters:\n",
" checksum : `hashable`\n",
" A checksum generated from the text of a revision\n",
" opdocs : `iterable` ( `dict` )\n",
" A sequence of operations that represent the diff of this new\n",
" revision\n",
" revision : `mixed`\n",
" Revision metadata\n",
"\n",
" :Returns:\n",
" A triple of lists:\n",
"\n",
" current_tokens : `list` ( :class:`~mwpersistence.Token` )\n",
" A sequence of Tokens representing the revision that was just\n",
" processed.\n",
" tokens_added : `list` ( :class:`~mwpersistence.Token` )\n",
" Tokens that were added while updating state.\n",
" tokens_removed : `list` ( :class:`~mwpersistence.Token` )\n",
" Tokens that were removed while updating state.\n",
" \"\"\"\n",
" return self._update(checksum=checksum, opdocs=opdocs,\n",
" revision=revision)"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.021739130434782608
] | 27 | 0.003892 |
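Below is a minimal, hypothetical driver for the `update_opdocs` sample above. The `FakeState` class, the checksum construction, and the opdoc shape are stand-ins invented for the sketch, since the row shows only the public wrapper and not the class it belongs to:

```python
# Hypothetical driver for the update_opdocs() sample above; FakeState
# fakes _update(), which the row does not show.
import hashlib

class FakeState:
    def update_opdocs(self, checksum, opdocs, revision=None):
        return self._update(checksum=checksum, opdocs=opdocs, revision=revision)

    def _update(self, checksum, opdocs, revision):
        # Real implementations return (current_tokens, tokens_added, tokens_removed).
        return [], [], []

text = "Some revision text."
checksum = hashlib.sha1(text.encode("utf-8")).hexdigest()
opdocs = [{"name": "insert", "a1": 0, "a2": 0, "b1": 0, "b2": 4}]  # assumed shape
current, added, removed = FakeState().update_opdocs(checksum, opdocs, revision=1)
```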
def get_default_mapping(self, z, cmapper):
"""Create dictionary containing default ColumnDataSource glyph to data
mappings.
"""
map_annular = dict(x=self.max_radius, y=self.max_radius,
inner_radius="inner_radius",
outer_radius="outer_radius",
start_angle="start_angle",
end_angle="end_angle",
fill_color={'field': z, 'transform': cmapper})
map_seg_label = dict(x="x", y="y", text="text",
angle="angle", text_align="center")
map_ann_label = dict(x="x", y="y", text="text",
angle="angle", text_align="center",
text_baseline="bottom")
map_xmarks = dict(xs="xs", ys="ys")
map_ymarks = dict(x= self.max_radius, y=self.max_radius,
start_angle=0, end_angle=2*np.pi, radius="radius")
return {'annular_wedge_1': map_annular,
'text_1': map_seg_label,
'text_2': map_ann_label,
'multi_line_1': map_xmarks,
'arc_1': map_ymarks} | [
"def",
"get_default_mapping",
"(",
"self",
",",
"z",
",",
"cmapper",
")",
":",
"map_annular",
"=",
"dict",
"(",
"x",
"=",
"self",
".",
"max_radius",
",",
"y",
"=",
"self",
".",
"max_radius",
",",
"inner_radius",
"=",
"\"inner_radius\"",
",",
"outer_radius",
"=",
"\"outer_radius\"",
",",
"start_angle",
"=",
"\"start_angle\"",
",",
"end_angle",
"=",
"\"end_angle\"",
",",
"fill_color",
"=",
"{",
"'field'",
":",
"z",
",",
"'transform'",
":",
"cmapper",
"}",
")",
"map_seg_label",
"=",
"dict",
"(",
"x",
"=",
"\"x\"",
",",
"y",
"=",
"\"y\"",
",",
"text",
"=",
"\"text\"",
",",
"angle",
"=",
"\"angle\"",
",",
"text_align",
"=",
"\"center\"",
")",
"map_ann_label",
"=",
"dict",
"(",
"x",
"=",
"\"x\"",
",",
"y",
"=",
"\"y\"",
",",
"text",
"=",
"\"text\"",
",",
"angle",
"=",
"\"angle\"",
",",
"text_align",
"=",
"\"center\"",
",",
"text_baseline",
"=",
"\"bottom\"",
")",
"map_xmarks",
"=",
"dict",
"(",
"xs",
"=",
"\"xs\"",
",",
"ys",
"=",
"\"ys\"",
")",
"map_ymarks",
"=",
"dict",
"(",
"x",
"=",
"self",
".",
"max_radius",
",",
"y",
"=",
"self",
".",
"max_radius",
",",
"start_angle",
"=",
"0",
",",
"end_angle",
"=",
"2",
"*",
"np",
".",
"pi",
",",
"radius",
"=",
"\"radius\"",
")",
"return",
"{",
"'annular_wedge_1'",
":",
"map_annular",
",",
"'text_1'",
":",
"map_seg_label",
",",
"'text_2'",
":",
"map_ann_label",
",",
"'multi_line_1'",
":",
"map_xmarks",
",",
"'arc_1'",
":",
"map_ymarks",
"}"
] | 39.2 | 0.00249 | [
"def get_default_mapping(self, z, cmapper):\n",
" \"\"\"Create dictionary containing default ColumnDataSource glyph to data\n",
" mappings.\n",
"\n",
" \"\"\"\n",
"\n",
" map_annular = dict(x=self.max_radius, y=self.max_radius,\n",
" inner_radius=\"inner_radius\",\n",
" outer_radius=\"outer_radius\",\n",
" start_angle=\"start_angle\",\n",
" end_angle=\"end_angle\",\n",
" fill_color={'field': z, 'transform': cmapper})\n",
"\n",
" map_seg_label = dict(x=\"x\", y=\"y\", text=\"text\",\n",
" angle=\"angle\", text_align=\"center\")\n",
"\n",
" map_ann_label = dict(x=\"x\", y=\"y\", text=\"text\",\n",
" angle=\"angle\", text_align=\"center\",\n",
" text_baseline=\"bottom\")\n",
"\n",
" map_xmarks = dict(xs=\"xs\", ys=\"ys\")\n",
"\n",
" map_ymarks = dict(x= self.max_radius, y=self.max_radius,\n",
" start_angle=0, end_angle=2*np.pi, radius=\"radius\")\n",
"\n",
" return {'annular_wedge_1': map_annular,\n",
" 'text_1': map_seg_label,\n",
" 'text_2': map_ann_label,\n",
" 'multi_line_1': map_xmarks,\n",
" 'arc_1': map_ymarks}"
] | [
0,
0.012658227848101266,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.015384615384615385,
0,
0,
0,
0,
0,
0,
0.027777777777777776
] | 30 | 0.001861 |
def _route(self, attr, args, kwargs, **fkwargs):
"""
Perform routing and return db_nums
"""
return self.cluster.hosts.keys() | [
"def",
"_route",
"(",
"self",
",",
"attr",
",",
"args",
",",
"kwargs",
",",
"*",
"*",
"fkwargs",
")",
":",
"return",
"self",
".",
"cluster",
".",
"hosts",
".",
"keys",
"(",
")"
] | 30.4 | 0.012821 | [
"def _route(self, attr, args, kwargs, **fkwargs):\n",
" \"\"\"\n",
" Perform routing and return db_nums\n",
" \"\"\"\n",
" return self.cluster.hosts.keys()"
] | [
0,
0.08333333333333333,
0,
0,
0.025
] | 5 | 0.021667 |
def _make_policies(self):
"""
Convert the 'scalingPolicies' dictionary into AutoScalePolicy objects.
"""
self.policies = [AutoScalePolicy(self.manager, dct, self)
for dct in self.scalingPolicies] | [
"def",
"_make_policies",
"(",
"self",
")",
":",
"self",
".",
"policies",
"=",
"[",
"AutoScalePolicy",
"(",
"self",
".",
"manager",
",",
"dct",
",",
"self",
")",
"for",
"dct",
"in",
"self",
".",
"scalingPolicies",
"]"
] | 39.666667 | 0.012346 | [
"def _make_policies(self):\n",
" \"\"\"\n",
" Convert the 'scalingPolicies' dictionary into AutoScalePolicy objects.\n",
" \"\"\"\n",
" self.policies = [AutoScalePolicy(self.manager, dct, self)\n",
" for dct in self.scalingPolicies]"
] | [
0,
0.08333333333333333,
0,
0,
0,
0.041666666666666664
] | 6 | 0.020833 |
def _roots_to_targets(self, build_graph, target_roots):
"""Populate the BuildGraph and target list from a set of input TargetRoots."""
with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):
return [
build_graph.get_target(address)
for address
in build_graph.inject_roots_closure(target_roots, self._fail_fast)
] | [
"def",
"_roots_to_targets",
"(",
"self",
",",
"build_graph",
",",
"target_roots",
")",
":",
"with",
"self",
".",
"_run_tracker",
".",
"new_workunit",
"(",
"name",
"=",
"'parse'",
",",
"labels",
"=",
"[",
"WorkUnitLabel",
".",
"SETUP",
"]",
")",
":",
"return",
"[",
"build_graph",
".",
"get_target",
"(",
"address",
")",
"for",
"address",
"in",
"build_graph",
".",
"inject_roots_closure",
"(",
"target_roots",
",",
"self",
".",
"_fail_fast",
")",
"]"
] | 46.75 | 0.010499 | [
"def _roots_to_targets(self, build_graph, target_roots):\n",
" \"\"\"Populate the BuildGraph and target list from a set of input TargetRoots.\"\"\"\n",
" with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):\n",
" return [\n",
" build_graph.get_target(address)\n",
" for address\n",
" in build_graph.inject_roots_closure(target_roots, self._fail_fast)\n",
" ]"
] | [
0,
0.012048192771084338,
0.011764705882352941,
0.06666666666666667,
0,
0,
0,
0.14285714285714285
] | 8 | 0.029167 |
def getipmacarp(self):
"""
Function operates on the IMCDev object and updates the ipmacarp attribute
:return:
"""
self.ipmacarp = get_ip_mac_arp_list(self.auth, self.url, devid = self.devid) | [
"def",
"getipmacarp",
"(",
"self",
")",
":",
"self",
".",
"ipmacarp",
"=",
"get_ip_mac_arp_list",
"(",
"self",
".",
"auth",
",",
"self",
".",
"url",
",",
"devid",
"=",
"self",
".",
"devid",
")"
] | 37.5 | 0.026087 | [
"def getipmacarp(self):\n",
" \"\"\"\n",
" Function operates on the IMCDev object and updates the ipmacarp attribute\n",
" :return:\n",
" \"\"\"\n",
" self.ipmacarp = get_ip_mac_arp_list(self.auth, self.url, devid = self.devid)"
] | [
0,
0.08333333333333333,
0.012195121951219513,
0,
0,
0.047619047619047616
] | 6 | 0.023858 |
def check_class(self, id_, class_, lineno, scope=None, show_error=True):
""" Check the id is either undefined or defined with
the given class.
- If the identifier (e.g. variable) does not exist, it means
it's undeclared, and returns True (OK).
- If the identifier exists, but its class_ attribute is
unknown yet (None), also returns True. This means the
identifier has been referenced in advance and it's undeclared.
Otherwise fails, returning False.
"""
assert CLASS.is_valid(class_)
entry = self.get_entry(id_, scope)
if entry is None or entry.class_ == CLASS.unknown: # Undeclared yet
return True
if entry.class_ != class_:
if show_error:
if entry.class_ == CLASS.array:
a1 = 'n'
else:
a1 = ''
if class_ == CLASS.array:
a2 = 'n'
else:
a2 = ''
syntax_error(lineno, "identifier '%s' is a%s %s, not a%s %s" %
(id_, a1, entry.class_, a2, class_))
return False
return True | [
"def",
"check_class",
"(",
"self",
",",
"id_",
",",
"class_",
",",
"lineno",
",",
"scope",
"=",
"None",
",",
"show_error",
"=",
"True",
")",
":",
"assert",
"CLASS",
".",
"is_valid",
"(",
"class_",
")",
"entry",
"=",
"self",
".",
"get_entry",
"(",
"id_",
",",
"scope",
")",
"if",
"entry",
"is",
"None",
"or",
"entry",
".",
"class_",
"==",
"CLASS",
".",
"unknown",
":",
"# Undeclared yet",
"return",
"True",
"if",
"entry",
".",
"class_",
"!=",
"class_",
":",
"if",
"show_error",
":",
"if",
"entry",
".",
"class_",
"==",
"CLASS",
".",
"array",
":",
"a1",
"=",
"'n'",
"else",
":",
"a1",
"=",
"''",
"if",
"class_",
"==",
"CLASS",
".",
"array",
":",
"a2",
"=",
"'n'",
"else",
":",
"a2",
"=",
"''",
"syntax_error",
"(",
"lineno",
",",
"\"identifier '%s' is a%s %s, not a%s %s\"",
"%",
"(",
"id_",
",",
"a1",
",",
"entry",
".",
"class_",
",",
"a2",
",",
"class_",
")",
")",
"return",
"False",
"return",
"True"
] | 36.8125 | 0.001654 | [
"def check_class(self, id_, class_, lineno, scope=None, show_error=True):\n",
" \"\"\" Check the id is either undefined or defined with\n",
" the given class.\n",
"\n",
" - If the identifier (e.g. variable) does not exists means\n",
" it's undeclared, and returns True (OK).\n",
" - If the identifier exists, but its class_ attribute is\n",
" unknown yet (None), returns also True. This means the\n",
" identifier has been referenced in advanced and it's undeclared.\n",
"\n",
" Otherwise fails returning False.\n",
" \"\"\"\n",
" assert CLASS.is_valid(class_)\n",
" entry = self.get_entry(id_, scope)\n",
" if entry is None or entry.class_ == CLASS.unknown: # Undeclared yet\n",
" return True\n",
"\n",
" if entry.class_ != class_:\n",
" if show_error:\n",
" if entry.class_ == CLASS.array:\n",
" a1 = 'n'\n",
" else:\n",
" a1 = ''\n",
" if class_ == CLASS.array:\n",
" a2 = 'n'\n",
" else:\n",
" a2 = ''\n",
" syntax_error(lineno, \"identifier '%s' is a%s %s, not a%s %s\" %\n",
" (id_, a1, entry.class_, a2, class_))\n",
" return False\n",
"\n",
" return True"
] | [
0,
0.01639344262295082,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05263157894736842
] | 32 | 0.002157 |
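The following self-contained sketch isolates the error-message construction from the `check_class` sample above; `class_mismatch_message` is a made-up name, not part of the source:

```python
# Standalone sketch of the message-building logic in check_class() above:
# entries of class 'array' get the article "an" via the a1/a2 suffixes.
def class_mismatch_message(id_, found, expected):
    a1 = 'n' if found == 'array' else ''
    a2 = 'n' if expected == 'array' else ''
    return "identifier '%s' is a%s %s, not a%s %s" % (id_, a1, found, a2, expected)

print(class_mismatch_message('foo', 'array', 'function'))
# identifier 'foo' is an array, not a function
```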
def can_run_c_extension(name=None):
"""
Determine whether the given Python C extension loads correctly.
If ``name`` is ``None``, tests all Python C extensions,
and returns ``True`` if and only if all load correctly.
:param string name: the name of the Python C extension to test
:rtype: bool
"""
def can_run_cdtw():
""" Python C extension for computing DTW """
try:
import aeneas.cdtw.cdtw
return True
except ImportError:
return False
def can_run_cmfcc():
""" Python C extension for computing MFCC """
try:
import aeneas.cmfcc.cmfcc
return True
except ImportError:
return False
def can_run_cew():
""" Python C extension for synthesizing with eSpeak """
try:
import aeneas.cew.cew
return True
except ImportError:
return False
def can_run_cfw():
""" Python C extension for synthesizing with Festival """
try:
import aeneas.cfw.cfw
return True
except ImportError:
return False
if name == "cdtw":
return can_run_cdtw()
elif name == "cmfcc":
return can_run_cmfcc()
elif name == "cew":
return can_run_cew()
elif name == "cfw":
return can_run_cfw()
else:
# NOTE cfw is still experimental!
return can_run_cdtw() and can_run_cmfcc() and can_run_cew() | [
"def",
"can_run_c_extension",
"(",
"name",
"=",
"None",
")",
":",
"def",
"can_run_cdtw",
"(",
")",
":",
"\"\"\" Python C extension for computing DTW \"\"\"",
"try",
":",
"import",
"aeneas",
".",
"cdtw",
".",
"cdtw",
"return",
"True",
"except",
"ImportError",
":",
"return",
"False",
"def",
"can_run_cmfcc",
"(",
")",
":",
"\"\"\" Python C extension for computing MFCC \"\"\"",
"try",
":",
"import",
"aeneas",
".",
"cmfcc",
".",
"cmfcc",
"return",
"True",
"except",
"ImportError",
":",
"return",
"False",
"def",
"can_run_cew",
"(",
")",
":",
"\"\"\" Python C extension for synthesizing with eSpeak \"\"\"",
"try",
":",
"import",
"aeneas",
".",
"cew",
".",
"cew",
"return",
"True",
"except",
"ImportError",
":",
"return",
"False",
"def",
"can_run_cfw",
"(",
")",
":",
"\"\"\" Python C extension for synthesizing with Festival \"\"\"",
"try",
":",
"import",
"aeneas",
".",
"cfw",
".",
"cfw",
"return",
"True",
"except",
"ImportError",
":",
"return",
"False",
"if",
"name",
"==",
"\"cdtw\"",
":",
"return",
"can_run_cdtw",
"(",
")",
"elif",
"name",
"==",
"\"cmfcc\"",
":",
"return",
"can_run_cmfcc",
"(",
")",
"elif",
"name",
"==",
"\"cew\"",
":",
"return",
"can_run_cew",
"(",
")",
"elif",
"name",
"==",
"\"cfw\"",
":",
"return",
"can_run_cfw",
"(",
")",
"else",
":",
"# NOTE cfw is still experimental!",
"return",
"can_run_cdtw",
"(",
")",
"and",
"can_run_cmfcc",
"(",
")",
"and",
"can_run_cew",
"(",
")"
] | 27.283019 | 0.000668 | [
"def can_run_c_extension(name=None):\n",
" \"\"\"\n",
" Determine whether the given Python C extension loads correctly.\n",
"\n",
" If ``name`` is ``None``, tests all Python C extensions,\n",
" and return ``True`` if and only if all load correctly.\n",
"\n",
" :param string name: the name of the Python C extension to test\n",
" :rtype: bool\n",
" \"\"\"\n",
" def can_run_cdtw():\n",
" \"\"\" Python C extension for computing DTW \"\"\"\n",
" try:\n",
" import aeneas.cdtw.cdtw\n",
" return True\n",
" except ImportError:\n",
" return False\n",
"\n",
" def can_run_cmfcc():\n",
" \"\"\" Python C extension for computing MFCC \"\"\"\n",
" try:\n",
" import aeneas.cmfcc.cmfcc\n",
" return True\n",
" except ImportError:\n",
" return False\n",
"\n",
" def can_run_cew():\n",
" \"\"\" Python C extension for synthesizing with eSpeak \"\"\"\n",
" try:\n",
" import aeneas.cew.cew\n",
" return True\n",
" except ImportError:\n",
" return False\n",
"\n",
" def can_run_cfw():\n",
" \"\"\" Python C extension for synthesizing with Festival \"\"\"\n",
" try:\n",
" import aeneas.cfw.cfw\n",
" return True\n",
" except ImportError:\n",
" return False\n",
"\n",
" if name == \"cdtw\":\n",
" return can_run_cdtw()\n",
" elif name == \"cmfcc\":\n",
" return can_run_cmfcc()\n",
" elif name == \"cew\":\n",
" return can_run_cew()\n",
" elif name == \"cfw\":\n",
" return can_run_cfw()\n",
" else:\n",
" # NOTE cfw is still experimental!\n",
" return can_run_cdtw() and can_run_cmfcc() and can_run_cew()"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.014925373134328358
] | 53 | 0.000282 |
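A hedged usage sketch for the `can_run_c_extension` sample above; it runs only where the aeneas package is installed, and the `aeneas.globalfunctions` module path is an assumption about the package layout:

```python
# Hedged usage sketch for can_run_c_extension() above; the import path
# is an assumption, not shown in the sample itself.
from aeneas.globalfunctions import can_run_c_extension

for name in ("cdtw", "cmfcc", "cew", "cfw"):
    print(name, can_run_c_extension(name))
print("all (cfw excluded):", can_run_c_extension())
```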
def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False):
"""
Returns an instance of the :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:param tom:
Temporal occurrence model
:param float mesh_spacing:
Mesh spacing
"""
if not self.mfd:
raise ValueError("Cannot write to hazardlib without MFD")
return SimpleFaultSource(
self.id,
self.name,
self.trt,
self.mfd,
mesh_spacing,
conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults),
conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults),
tom,
self.upper_depth,
self.lower_depth,
self.fault_trace,
self.dip,
self.rake) | [
"def",
"create_oqhazardlib_source",
"(",
"self",
",",
"tom",
",",
"mesh_spacing",
",",
"use_defaults",
"=",
"False",
")",
":",
"if",
"not",
"self",
".",
"mfd",
":",
"raise",
"ValueError",
"(",
"\"Cannot write to hazardlib without MFD\"",
")",
"return",
"SimpleFaultSource",
"(",
"self",
".",
"id",
",",
"self",
".",
"name",
",",
"self",
".",
"trt",
",",
"self",
".",
"mfd",
",",
"mesh_spacing",
",",
"conv",
".",
"mag_scale_rel_to_hazardlib",
"(",
"self",
".",
"mag_scale_rel",
",",
"use_defaults",
")",
",",
"conv",
".",
"render_aspect_ratio",
"(",
"self",
".",
"rupt_aspect_ratio",
",",
"use_defaults",
")",
",",
"tom",
",",
"self",
".",
"upper_depth",
",",
"self",
".",
"lower_depth",
",",
"self",
".",
"fault_trace",
",",
"self",
".",
"dip",
",",
"self",
".",
"rake",
")"
] | 32.923077 | 0.00227 | [
"def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False):\n",
" \"\"\"\n",
" Returns an instance of the :class:\n",
" `openquake.hazardlib.source.simple_fault.SimpleFaultSource`\n",
"\n",
" :param tom:\n",
" Temporal occurrance model\n",
" :param float mesh_spacing:\n",
" Mesh spacing\n",
" \"\"\"\n",
" if not self.mfd:\n",
" raise ValueError(\"Cannot write to hazardlib without MFD\")\n",
" return SimpleFaultSource(\n",
" self.id,\n",
" self.name,\n",
" self.trt,\n",
" self.mfd,\n",
" mesh_spacing,\n",
" conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults),\n",
" conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults),\n",
" tom,\n",
" self.upper_depth,\n",
" self.lower_depth,\n",
" self.fault_trace,\n",
" self.dip,\n",
" self.rake)"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.045454545454545456
] | 26 | 0.004953 |
def add_to_hash(self, filename, hasher):
"""Contribute `filename`'s data to the Md5Hash `hasher`."""
hasher.update(self.executed_lines(filename))
hasher.update(self.executed_arcs(filename)) | [
"def",
"add_to_hash",
"(",
"self",
",",
"filename",
",",
"hasher",
")",
":",
"hasher",
".",
"update",
"(",
"self",
".",
"executed_lines",
"(",
"filename",
")",
")",
"hasher",
".",
"update",
"(",
"self",
".",
"executed_arcs",
"(",
"filename",
")",
")"
] | 52.5 | 0.00939 | [
"def add_to_hash(self, filename, hasher):\n",
" \"\"\"Contribute `filename`'s data to the Md5Hash `hasher`.\"\"\"\n",
" hasher.update(self.executed_lines(filename))\n",
" hasher.update(self.executed_arcs(filename))"
] | [
0,
0.014705882352941176,
0,
0.0196078431372549
] | 4 | 0.008578 |
def template(page=None, layout=None, **kwargs):
"""
Decorator to change the view template and layout.
It works on both View class and view methods
on class
only $layout is applied, everything else will be passed to the kwargs
Used as the first argument, it will be the layout.
:first arg or $layout: The layout to use for that view
:param layout: The layout to use for that view
:param kwargs:
get pass to the TEMPLATE_CONTEXT
** on methods that return a dict
page or layout are optional
:param page: The html page
:param layout: The layout to use for that view
:param kwargs:
get pass to the view as k/V
** on other methods that return other types, it doesn't apply
:return:
"""
pkey = "_template_extends__"
def decorator(f):
if inspect.isclass(f):
layout_ = layout or page
extends = kwargs.pop("extends", None)
if extends and hasattr(extends, pkey):
items = getattr(extends, pkey).items()
if "layout" in items:
layout_ = items.pop("layout")
for k, v in items:
kwargs.setdefault(k, v)
if not layout_:
layout_ = "layout.html"
kwargs.setdefault("brand_name", "")
kwargs["layout"] = layout_
setattr(f, pkey, kwargs)
setattr(f, "base_layout", kwargs.get("layout"))
f.g(TEMPLATE_CONTEXT=kwargs)
return f
else:
@functools.wraps(f)
def wrap(*args2, **kwargs2):
response = f(*args2, **kwargs2)
if isinstance(response, dict) or response is None:
response = response or {}
if page:
response.setdefault("template_", page)
if layout:
response.setdefault("layout_", layout)
for k, v in kwargs.items():
response.setdefault(k, v)
return response
return wrap
return decorator | [
"def",
"template",
"(",
"page",
"=",
"None",
",",
"layout",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"pkey",
"=",
"\"_template_extends__\"",
"def",
"decorator",
"(",
"f",
")",
":",
"if",
"inspect",
".",
"isclass",
"(",
"f",
")",
":",
"layout_",
"=",
"layout",
"or",
"page",
"extends",
"=",
"kwargs",
".",
"pop",
"(",
"\"extends\"",
",",
"None",
")",
"if",
"extends",
"and",
"hasattr",
"(",
"extends",
",",
"pkey",
")",
":",
"items",
"=",
"getattr",
"(",
"extends",
",",
"pkey",
")",
".",
"items",
"(",
")",
"if",
"\"layout\"",
"in",
"items",
":",
"layout_",
"=",
"items",
".",
"pop",
"(",
"\"layout\"",
")",
"for",
"k",
",",
"v",
"in",
"items",
":",
"kwargs",
".",
"setdefault",
"(",
"k",
",",
"v",
")",
"if",
"not",
"layout_",
":",
"layout_",
"=",
"\"layout.html\"",
"kwargs",
".",
"setdefault",
"(",
"\"brand_name\"",
",",
"\"\"",
")",
"kwargs",
"[",
"\"layout\"",
"]",
"=",
"layout_",
"setattr",
"(",
"f",
",",
"pkey",
",",
"kwargs",
")",
"setattr",
"(",
"f",
",",
"\"base_layout\"",
",",
"kwargs",
".",
"get",
"(",
"\"layout\"",
")",
")",
"f",
".",
"g",
"(",
"TEMPLATE_CONTEXT",
"=",
"kwargs",
")",
"return",
"f",
"else",
":",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"wrap",
"(",
"*",
"args2",
",",
"*",
"*",
"kwargs2",
")",
":",
"response",
"=",
"f",
"(",
"*",
"args2",
",",
"*",
"*",
"kwargs2",
")",
"if",
"isinstance",
"(",
"response",
",",
"dict",
")",
"or",
"response",
"is",
"None",
":",
"response",
"=",
"response",
"or",
"{",
"}",
"if",
"page",
":",
"response",
".",
"setdefault",
"(",
"\"template_\"",
",",
"page",
")",
"if",
"layout",
":",
"response",
".",
"setdefault",
"(",
"\"layout_\"",
",",
"layout",
")",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"response",
".",
"setdefault",
"(",
"k",
",",
"v",
")",
"return",
"response",
"return",
"wrap",
"return",
"decorator"
] | 33.09375 | 0.000459 | [
"def template(page=None, layout=None, **kwargs):\n",
" \"\"\"\n",
" Decorator to change the view template and layout.\n",
"\n",
" It works on both View class and view methods\n",
"\n",
" on class\n",
" only $layout is applied, everything else will be passed to the kwargs\n",
" Using as first argument, it will be the layout.\n",
"\n",
" :first arg or $layout: The layout to use for that view\n",
" :param layout: The layout to use for that view\n",
" :param kwargs:\n",
" get pass to the TEMPLATE_CONTEXT\n",
"\n",
" ** on method that return a dict\n",
" page or layout are optional\n",
"\n",
" :param page: The html page\n",
" :param layout: The layout to use for that view\n",
"\n",
" :param kwargs:\n",
" get pass to the view as k/V\n",
"\n",
" ** on other methods that return other type, it doesn't apply\n",
"\n",
" :return:\n",
" \"\"\"\n",
" pkey = \"_template_extends__\"\n",
"\n",
" def decorator(f):\n",
" if inspect.isclass(f):\n",
" layout_ = layout or page\n",
" extends = kwargs.pop(\"extends\", None)\n",
" if extends and hasattr(extends, pkey):\n",
" items = getattr(extends, pkey).items()\n",
" if \"layout\" in items:\n",
" layout_ = items.pop(\"layout\")\n",
" for k, v in items:\n",
" kwargs.setdefault(k, v)\n",
" if not layout_:\n",
" layout_ = \"layout.html\"\n",
" kwargs.setdefault(\"brand_name\", \"\")\n",
" kwargs[\"layout\"] = layout_\n",
"\n",
" setattr(f, pkey, kwargs)\n",
" setattr(f, \"base_layout\", kwargs.get(\"layout\"))\n",
" f.g(TEMPLATE_CONTEXT=kwargs)\n",
" return f\n",
" else:\n",
" @functools.wraps(f)\n",
" def wrap(*args2, **kwargs2):\n",
" response = f(*args2, **kwargs2)\n",
" if isinstance(response, dict) or response is None:\n",
" response = response or {}\n",
" if page:\n",
" response.setdefault(\"template_\", page)\n",
" if layout:\n",
" response.setdefault(\"layout_\", layout)\n",
" for k, v in kwargs.items():\n",
" response.setdefault(k, v)\n",
" return response\n",
" return wrap\n",
" return decorator"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05
] | 64 | 0.000781 |
def filter_create(self, phrase, context, irreversible = False, whole_word = True, expires_in = None):
"""
Creates a new keyword filter. `phrase` is the phrase that should be
filtered out, `context` specifies from where to filter the keywords.
Valid contexts are 'home', 'notifications', 'public' and 'thread'.
Set `irreversible` to True if you want the filter to just delete statuses
server side. This works only for the 'home' and 'notifications' contexts.
Set `whole_word` to False if you want to allow filter matches to
start or end within a word, not only at word boundaries.
Set `expires_in` to specify for how many seconds the filter should be
kept around.
Returns the `filter dict`_ of the newly created filter.
"""
params = self.__generate_params(locals())
for context_val in context:
if not context_val in ['home', 'notifications', 'public', 'thread']:
raise MastodonIllegalArgumentError('Invalid filter context.')
return self.__api_request('POST', '/api/v1/filters', params) | [
"def",
"filter_create",
"(",
"self",
",",
"phrase",
",",
"context",
",",
"irreversible",
"=",
"False",
",",
"whole_word",
"=",
"True",
",",
"expires_in",
"=",
"None",
")",
":",
"params",
"=",
"self",
".",
"__generate_params",
"(",
"locals",
"(",
")",
")",
"for",
"context_val",
"in",
"context",
":",
"if",
"not",
"context_val",
"in",
"[",
"'home'",
",",
"'notifications'",
",",
"'public'",
",",
"'thread'",
"]",
":",
"raise",
"MastodonIllegalArgumentError",
"(",
"'Invalid filter context.'",
")",
"return",
"self",
".",
"__api_request",
"(",
"'POST'",
",",
"'/api/v1/filters'",
",",
"params",
")"
] | 48.5 | 0.016849 | [
"def filter_create(self, phrase, context, irreversible = False, whole_word = True, expires_in = None):\n",
" \"\"\"\n",
" Creates a new keyword filter. `phrase` is the phrase that should be\n",
" filtered out, `context` specifies from where to filter the keywords.\n",
" Valid contexts are 'home', 'notifications', 'public' and 'thread'.\n",
" \n",
" Set `irreversible` to True if you want the filter to just delete statuses\n",
" server side. This works only for the 'home' and 'notifications' contexts.\n",
" \n",
" Set `whole_word` to False if you want to allow filter matches to\n",
" start or end within a word, not only at word boundaries.\n",
" \n",
" Set `expires_in` to specify for how many seconds the filter should be\n",
" kept around.\n",
" \n",
" Returns the `filter dict`_ of the newly created filter. \n",
" \"\"\"\n",
" params = self.__generate_params(locals())\n",
" \n",
" for context_val in context:\n",
" if not context_val in ['home', 'notifications', 'public', 'thread']:\n",
" raise MastodonIllegalArgumentError('Invalid filter context.')\n",
" \n",
" return self.__api_request('POST', '/api/v1/filters', params)"
] | [
0.06862745098039216,
0.08333333333333333,
0,
0,
0,
0.1111111111111111,
0.012195121951219513,
0.012195121951219513,
0.1111111111111111,
0,
0,
0.1111111111111111,
0,
0,
0.1111111111111111,
0.015384615384615385,
0,
0,
0.1111111111111111,
0,
0.024691358024691357,
0,
0.1111111111111111,
0.014705882352941176
] | 24 | 0.037408 |
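A hedged usage sketch for the `filter_create` sample above, following Mastodon.py's documented client constructor; the access token and instance URL are placeholders, not values from the source:

```python
# Hedged usage sketch for filter_create() above; token and URL are
# placeholders and must be replaced with real credentials.
from mastodon import Mastodon

api = Mastodon(access_token="<access token>",
               api_base_url="https://mastodon.example")
new_filter = api.filter_create(
    "spoilers",                  # phrase to filter out
    ["home", "notifications"],   # must be valid contexts (see the check above)
    whole_word=True,
    expires_in=3600,             # keep the filter for one hour
)
print(new_filter)
```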
def get_dual_rmetric( self, invert_h = False, mode_inv = 'svd' ):
"""
Compute the dual Riemannian Metric
This is not satisfactory, because if mdimG<mdimY the shape of H
will not be the same as the shape of G. TODO(maybe): return a (copied)
smaller H with only the rows and columns in G.
"""
if self.H is None:
self.H, self.G, self.Hvv, self.Hsvals, self.Gsvals = riemann_metric(self.Y, self.L, self.mdimG, invert_h = invert_h, mode_inv = mode_inv)
if invert_h:
return self.H, self.G
else:
return self.H | [
"def",
"get_dual_rmetric",
"(",
"self",
",",
"invert_h",
"=",
"False",
",",
"mode_inv",
"=",
"'svd'",
")",
":",
"if",
"self",
".",
"H",
"is",
"None",
":",
"self",
".",
"H",
",",
"self",
".",
"G",
",",
"self",
".",
"Hvv",
",",
"self",
".",
"Hsvals",
",",
"self",
".",
"Gsvals",
"=",
"riemann_metric",
"(",
"self",
".",
"Y",
",",
"self",
".",
"L",
",",
"self",
".",
"mdimG",
",",
"invert_h",
"=",
"invert_h",
",",
"mode_inv",
"=",
"mode_inv",
")",
"if",
"invert_h",
":",
"return",
"self",
".",
"H",
",",
"self",
".",
"G",
"else",
":",
"return",
"self",
".",
"H"
] | 46 | 0.021311 | [
"def get_dual_rmetric( self, invert_h = False, mode_inv = 'svd' ):\n",
" \"\"\"\n",
" Compute the dual Riemannian Metric\n",
" This is not satisfactory, because if mdimG<mdimY the shape of H\n",
" will not be the same as the shape of G. TODO(maybe): return a (copied)\n",
" smaller H with only the rows and columns in G.\n",
" \"\"\"\n",
" if self.H is None:\n",
" self.H, self.G, self.Hvv, self.Hsvals, self.Gsvals = riemann_metric(self.Y, self.L, self.mdimG, invert_h = invert_h, mode_inv = mode_inv)\n",
" if invert_h:\n",
" return self.H, self.G\n",
" else:\n",
" return self.H"
] | [
0.09090909090909091,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0.03333333333333333,
0,
0,
0,
0.04
] | 13 | 0.019044 |
def _load(db_data, db):
"""
Load :class:`mongomock.database.Database` from dict data.
"""
if db.name != db_data["name"]:
raise ValueError("dbname doesn't matches! Maybe wrong database data.")
db.__init__(client=db._client, name=db.name)
for col_name, col_data in iteritems(db_data["_collections"]):
collection = db.get_collection(col_name)
collection._documents = col_data["_documents"]
collection._uniques = col_data["_uniques"]
db._collections[col_name] = collection
return db | [
"def",
"_load",
"(",
"db_data",
",",
"db",
")",
":",
"if",
"db",
".",
"name",
"!=",
"db_data",
"[",
"\"name\"",
"]",
":",
"raise",
"ValueError",
"(",
"\"dbname doesn't matches! Maybe wrong database data.\"",
")",
"db",
".",
"__init__",
"(",
"client",
"=",
"db",
".",
"_client",
",",
"name",
"=",
"db",
".",
"name",
")",
"for",
"col_name",
",",
"col_data",
"in",
"iteritems",
"(",
"db_data",
"[",
"\"_collections\"",
"]",
")",
":",
"collection",
"=",
"db",
".",
"get_collection",
"(",
"col_name",
")",
"collection",
".",
"_documents",
"=",
"col_data",
"[",
"\"_documents\"",
"]",
"collection",
".",
"_uniques",
"=",
"col_data",
"[",
"\"_uniques\"",
"]",
"db",
".",
"_collections",
"[",
"col_name",
"]",
"=",
"collection",
"return",
"db"
] | 35.6 | 0.001825 | [
"def _load(db_data, db):\n",
" \"\"\"\n",
" Load :class:`mongomock.database.Database` from dict data.\n",
" \"\"\"\n",
" if db.name != db_data[\"name\"]:\n",
" raise ValueError(\"dbname doesn't matches! Maybe wrong database data.\")\n",
"\n",
" db.__init__(client=db._client, name=db.name)\n",
" for col_name, col_data in iteritems(db_data[\"_collections\"]):\n",
" collection = db.get_collection(col_name)\n",
" collection._documents = col_data[\"_documents\"]\n",
" collection._uniques = col_data[\"_uniques\"]\n",
" db._collections[col_name] = collection\n",
"\n",
" return db"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.07692307692307693
] | 15 | 0.005128 |
def _upload_resumable_all(self, upload_info, bitmap,
number_of_units, unit_size):
"""Prepare and upload all resumable units and return upload_key
upload_info -- UploadInfo object
bitmap -- bitmap node of upload/check
number_of_units -- number of units requested
unit_size -- size of a single upload unit in bytes
"""
fd = upload_info.fd
upload_key = None
for unit_id in range(number_of_units):
upload_status = decode_resumable_upload_bitmap(
bitmap, number_of_units)
if upload_status[unit_id]:
logger.debug("Skipping unit %d/%d - already uploaded",
unit_id + 1, number_of_units)
continue
logger.debug("Uploading unit %d/%d",
unit_id + 1, number_of_units)
offset = unit_id * unit_size
with SubsetIO(fd, offset, unit_size) as unit_fd:
unit_info = _UploadUnitInfo(
upload_info=upload_info,
hash_=upload_info.hash_info.units[unit_id],
fd=unit_fd,
uid=unit_id)
upload_result = self._upload_resumable_unit(unit_info)
# upload_key is needed for polling
if upload_key is None:
upload_key = upload_result['doupload']['key']
return upload_key | [
"def",
"_upload_resumable_all",
"(",
"self",
",",
"upload_info",
",",
"bitmap",
",",
"number_of_units",
",",
"unit_size",
")",
":",
"fd",
"=",
"upload_info",
".",
"fd",
"upload_key",
"=",
"None",
"for",
"unit_id",
"in",
"range",
"(",
"number_of_units",
")",
":",
"upload_status",
"=",
"decode_resumable_upload_bitmap",
"(",
"bitmap",
",",
"number_of_units",
")",
"if",
"upload_status",
"[",
"unit_id",
"]",
":",
"logger",
".",
"debug",
"(",
"\"Skipping unit %d/%d - already uploaded\"",
",",
"unit_id",
"+",
"1",
",",
"number_of_units",
")",
"continue",
"logger",
".",
"debug",
"(",
"\"Uploading unit %d/%d\"",
",",
"unit_id",
"+",
"1",
",",
"number_of_units",
")",
"offset",
"=",
"unit_id",
"*",
"unit_size",
"with",
"SubsetIO",
"(",
"fd",
",",
"offset",
",",
"unit_size",
")",
"as",
"unit_fd",
":",
"unit_info",
"=",
"_UploadUnitInfo",
"(",
"upload_info",
"=",
"upload_info",
",",
"hash_",
"=",
"upload_info",
".",
"hash_info",
".",
"units",
"[",
"unit_id",
"]",
",",
"fd",
"=",
"unit_fd",
",",
"uid",
"=",
"unit_id",
")",
"upload_result",
"=",
"self",
".",
"_upload_resumable_unit",
"(",
"unit_info",
")",
"# upload_key is needed for polling",
"if",
"upload_key",
"is",
"None",
":",
"upload_key",
"=",
"upload_result",
"[",
"'doupload'",
"]",
"[",
"'key'",
"]",
"return",
"upload_key"
] | 33.44186 | 0.002027 | [
"def _upload_resumable_all(self, upload_info, bitmap,\n",
" number_of_units, unit_size):\n",
" \"\"\"Prepare and upload all resumable units and return upload_key\n",
"\n",
" upload_info -- UploadInfo object\n",
" bitmap -- bitmap node of upload/check\n",
" number_of_units -- number of units requested\n",
" unit_size -- size of a single upload unit in bytes\n",
" \"\"\"\n",
"\n",
" fd = upload_info.fd\n",
"\n",
" upload_key = None\n",
"\n",
" for unit_id in range(number_of_units):\n",
" upload_status = decode_resumable_upload_bitmap(\n",
" bitmap, number_of_units)\n",
"\n",
" if upload_status[unit_id]:\n",
" logger.debug(\"Skipping unit %d/%d - already uploaded\",\n",
" unit_id + 1, number_of_units)\n",
" continue\n",
"\n",
" logger.debug(\"Uploading unit %d/%d\",\n",
" unit_id + 1, number_of_units)\n",
"\n",
" offset = unit_id * unit_size\n",
"\n",
" with SubsetIO(fd, offset, unit_size) as unit_fd:\n",
"\n",
" unit_info = _UploadUnitInfo(\n",
" upload_info=upload_info,\n",
" hash_=upload_info.hash_info.units[unit_id],\n",
" fd=unit_fd,\n",
" uid=unit_id)\n",
"\n",
" upload_result = self._upload_resumable_unit(unit_info)\n",
"\n",
" # upload_key is needed for polling\n",
" if upload_key is None:\n",
" upload_key = upload_result['doupload']['key']\n",
"\n",
" return upload_key"
] | [
0,
0.01694915254237288,
0.013888888888888888,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.04
] | 43 | 0.001647 |
def debug_inspect_node(self, node_msindex):
"""
Get info about the node. See pycut.inspect_node() for details.
Processing is done in temporary shape.
:param node_msindex:
:return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds
"""
return inspect_node(self.nlinks, self.unariesalt2, self.msinds, node_msindex) | [
"def",
"debug_inspect_node",
"(",
"self",
",",
"node_msindex",
")",
":",
"return",
"inspect_node",
"(",
"self",
".",
"nlinks",
",",
"self",
".",
"unariesalt2",
",",
"self",
".",
"msinds",
",",
"node_msindex",
")"
] | 42.111111 | 0.010336 | [
"def debug_inspect_node(self, node_msindex):\n",
" \"\"\"\n",
" Get info about the node. See pycut.inspect_node() for details.\n",
" Processing is done in temporary shape.\n",
"\n",
" :param node_seed:\n",
" :return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds\n",
" \"\"\"\n",
" return inspect_node(self.nlinks, self.unariesalt2, self.msinds, node_msindex)"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0.011235955056179775,
0,
0.023529411764705882
] | 9 | 0.013122 |
def point_in_prism(tri1, tri2, pt):
'''
point_in_prism(tri1, tri2, pt) yields True if the given point is inside the prism that stretches
between triangle 1 and triangle 2. Will automatically thread over extended dimensions. If
multiple triangles are given, then the vertices must be an earlier dimension than the
coordinates; e.g., a 3 x 3 x n array will be assumed to be organized such that element [0,1,k] is
the y coordinate of the first vertex of the k'th triangle.
'''
bcs = prism_barycentric_coordinates(tri1, tri2, pt)
return np.logical_not(np.isclose(np.sum(bcs, axis=0), 0)) | [
"def",
"point_in_prism",
"(",
"tri1",
",",
"tri2",
",",
"pt",
")",
":",
"bcs",
"=",
"prism_barycentric_coordinates",
"(",
"tri1",
",",
"tri2",
",",
"pt",
")",
"return",
"np",
".",
"logical_not",
"(",
"np",
".",
"isclose",
"(",
"np",
".",
"sum",
"(",
"bcs",
",",
"axis",
"=",
"0",
")",
",",
"0",
")",
")"
] | 61.5 | 0.008013 | [
"def point_in_prism(tri1, tri2, pt):\n",
" '''\n",
" point_in_prism(tri1, tri2, pt) yields True if the given point is inside the prism that stretches\n",
" between triangle 1 and triangle 2. Will automatically thread over extended dimensions. If\n",
" multiple triangles are given, then the vertices must be an earlier dimension than the\n",
" coordinates; e.g., a 3 x 3 x n array will be assumed to organized such that element [0,1,k] is\n",
" the y coordinate of the first vertex of the k'th triangle.\n",
" '''\n",
" bcs = prism_barycentric_coordinates(tri1, tri2, pt)\n",
" return np.logical_not(np.isclose(np.sum(bcs, axis=0), 0))"
] | [
0,
0,
0.009900990099009901,
0.010416666666666666,
0.010869565217391304,
0.009900990099009901,
0,
0,
0,
0.01639344262295082
] | 10 | 0.005748 |
def smeft_toarray(wc_name, wc_dict):
"""Construct a numpy array with Wilson coefficient values from a
dictionary of label-value pairs corresponding to the non-redundant
elements."""
shape = smeftutil.C_keys_shape[wc_name]
C = np.zeros(shape, dtype=complex)
for k, v in wc_dict.items():
if k.split('_')[0] != wc_name:
continue
indices = k.split('_')[-1] # e.g. '1213'
indices = tuple(int(s) - 1 for s in indices) # e.g. (1, 2, 1, 3)
C[indices] = v
C = smeftutil.symmetrize({wc_name: C})[wc_name]
return C | [
"def",
"smeft_toarray",
"(",
"wc_name",
",",
"wc_dict",
")",
":",
"shape",
"=",
"smeftutil",
".",
"C_keys_shape",
"[",
"wc_name",
"]",
"C",
"=",
"np",
".",
"zeros",
"(",
"shape",
",",
"dtype",
"=",
"complex",
")",
"for",
"k",
",",
"v",
"in",
"wc_dict",
".",
"items",
"(",
")",
":",
"if",
"k",
".",
"split",
"(",
"'_'",
")",
"[",
"0",
"]",
"!=",
"wc_name",
":",
"continue",
"indices",
"=",
"k",
".",
"split",
"(",
"'_'",
")",
"[",
"-",
"1",
"]",
"# e.g. '1213'",
"indices",
"=",
"tuple",
"(",
"int",
"(",
"s",
")",
"-",
"1",
"for",
"s",
"in",
"indices",
")",
"# e.g. (1, 2, 1, 3)",
"C",
"[",
"indices",
"]",
"=",
"v",
"C",
"=",
"smeftutil",
".",
"symmetrize",
"(",
"{",
"wc_name",
":",
"C",
"}",
")",
"[",
"wc_name",
"]",
"return",
"C"
] | 40.571429 | 0.001721 | [
"def smeft_toarray(wc_name, wc_dict):\n",
" \"\"\"Construct a numpy array with Wilson coefficient values from a\n",
" dictionary of label-value pairs corresponding to the non-redundant\n",
" elements.\"\"\"\n",
" shape = smeftutil.C_keys_shape[wc_name]\n",
" C = np.zeros(shape, dtype=complex)\n",
" for k, v in wc_dict.items():\n",
" if k.split('_')[0] != wc_name:\n",
" continue\n",
" indices = k.split('_')[-1] # e.g. '1213'\n",
" indices = tuple(int(s) - 1 for s in indices) # e.g. (1, 2, 1, 3)\n",
" C[indices] = v\n",
" C = smeftutil.symmetrize({wc_name: C})[wc_name]\n",
" return C"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.08333333333333333
] | 14 | 0.005952 |
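A standalone illustration of the label handling in the `smeft_toarray` sample above: the trailing digits of a key such as `C_1213` become a zero-based numpy index. The array shape and values here are invented for the sketch:

```python
# The trailing index string '1213' maps to the zero-based tuple (0, 1, 0, 2),
# exactly as in smeft_toarray() above.
import numpy as np

C = np.zeros((3, 3, 3, 3), dtype=complex)
key, value = "C_1213", 0.5 + 0.1j
indices = tuple(int(s) - 1 for s in key.split("_")[-1])  # (0, 1, 0, 2)
C[indices] = value
print(C[0, 1, 0, 2])  # (0.5+0.1j)
```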
def validate(bbllines:iter, *, profiling=False):
"""Yield lines of warnings and errors about input bbl lines.
profiling -- yield also info lines about input bbl file.
If bbllines is a valid file name, it will be read.
Else, it should be an iterable of bubble file lines.
"""
if isinstance(bbllines, str):
if os.path.exists(bbllines): # filename containing bubble
bbllines = utils.file_lines(bbllines)
elif '\n' not in bbllines or '\t' not in bbllines:
# probably a bad file name: let's raise the proper error
bbllines = utils.file_lines(bbllines)
else: # bubble itself
bbllines = bbllines.split('\n')
bubble = tuple(bbllines)
data = tuple(utils.line_data(line) for line in bubble)
types = tuple(utils.line_type(line) for line in bubble)
# launch profiling
if profiling:
ltype_counts = Counter(types)
for ltype, count in ltype_counts.items():
yield 'INFO {} lines of type {}'.format(count, ltype)
yield 'INFO {} lines of payload'.format(
ltype_counts['EDGE'] + ltype_counts['IN'] +
ltype_counts['NODE'] + ltype_counts['SET'])
# launch validation
for errline in (l for l, t in zip(bubble, types) if t == 'ERROR'):
yield 'ERROR line is not bubble: "{}"'.format(errline)
tree = BubbleTree.from_bubble_data(data)
cc, subroots = tree.connected_components()
# print('cc:', cc)
# print('subroots:', subroots)
if profiling:
yield 'INFO {} top (power)nodes'.format(len(tree.roots))
yield 'INFO {} connected components'.format(len(cc))
yield 'INFO {} nodes are defined, {} are used'.format(
ltype_counts['NODE'], len(tuple(tree.nodes())))
yield 'INFO {} powernodes are defined, {} are used'.format(
ltype_counts['SET'], len(tuple(tree.powernodes())))
yield from inclusions_validation(tree)
yield from mergeability_validation(tree) | [
"def",
"validate",
"(",
"bbllines",
":",
"iter",
",",
"*",
",",
"profiling",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"bbllines",
",",
"str",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"bbllines",
")",
":",
"# filename containing bubble",
"bbllines",
"=",
"utils",
".",
"file_lines",
"(",
"bbllines",
")",
"elif",
"'\\n'",
"not",
"in",
"bbllines",
"or",
"'\\t'",
"not",
"in",
"bbllines",
":",
"# probably a bad file name: let's rise the proper error",
"bbllines",
"=",
"utils",
".",
"file_lines",
"(",
"bbllines",
")",
"else",
":",
"# bubble itself",
"bbllines",
"=",
"bbllines",
".",
"split",
"(",
"'\\n'",
")",
"bubble",
"=",
"tuple",
"(",
"bbllines",
")",
"data",
"=",
"tuple",
"(",
"utils",
".",
"line_data",
"(",
"line",
")",
"for",
"line",
"in",
"bubble",
")",
"types",
"=",
"tuple",
"(",
"utils",
".",
"line_type",
"(",
"line",
")",
"for",
"line",
"in",
"bubble",
")",
"# launch profiling",
"if",
"profiling",
":",
"ltype_counts",
"=",
"Counter",
"(",
"types",
")",
"for",
"ltype",
",",
"count",
"in",
"ltype_counts",
".",
"items",
"(",
")",
":",
"yield",
"'INFO {} lines of type {}'",
".",
"format",
"(",
"count",
",",
"ltype",
")",
"yield",
"'INFO {} lines of payload'",
".",
"format",
"(",
"ltype_counts",
"[",
"'EDGE'",
"]",
"+",
"ltype_counts",
"[",
"'IN'",
"]",
"+",
"ltype_counts",
"[",
"'NODE'",
"]",
"+",
"ltype_counts",
"[",
"'SET'",
"]",
")",
"# launch validation",
"for",
"errline",
"in",
"(",
"l",
"for",
"l",
",",
"t",
"in",
"zip",
"(",
"bubble",
",",
"types",
")",
"if",
"t",
"==",
"'ERROR'",
")",
":",
"yield",
"'ERROR line is not bubble: \"{}\"'",
".",
"format",
"(",
"errline",
")",
"tree",
"=",
"BubbleTree",
".",
"from_bubble_data",
"(",
"data",
")",
"cc",
",",
"subroots",
"=",
"tree",
".",
"connected_components",
"(",
")",
"# print('cc:', cc)",
"# print('subroots:', subroots)",
"if",
"profiling",
":",
"yield",
"'INFO {} top (power)nodes'",
".",
"format",
"(",
"len",
"(",
"tree",
".",
"roots",
")",
")",
"yield",
"'INFO {} connected components'",
".",
"format",
"(",
"len",
"(",
"cc",
")",
")",
"yield",
"'INFO {} nodes are defined, {} are used'",
".",
"format",
"(",
"ltype_counts",
"[",
"'NODE'",
"]",
",",
"len",
"(",
"tuple",
"(",
"tree",
".",
"nodes",
"(",
")",
")",
")",
")",
"yield",
"'INFO {} powernodes are defined, {} are used'",
".",
"format",
"(",
"ltype_counts",
"[",
"'SET'",
"]",
",",
"len",
"(",
"tuple",
"(",
"tree",
".",
"powernodes",
"(",
")",
")",
")",
")",
"yield",
"from",
"inclusions_validation",
"(",
"tree",
")",
"yield",
"from",
"mergeability_validation",
"(",
"tree",
")"
] | 43.466667 | 0.0015 | [
"def validate(bbllines:iter, *, profiling=False):\n",
" \"\"\"Yield lines of warnings and errors about input bbl lines.\n",
"\n",
" profiling -- yield also info lines about input bbl file.\n",
"\n",
" If bbllines is a valid file name, it will be read.\n",
" Else, it should be an iterable of bubble file lines.\n",
"\n",
" \"\"\"\n",
" if isinstance(bbllines, str):\n",
" if os.path.exists(bbllines): # filename containing bubble\n",
" bbllines = utils.file_lines(bbllines)\n",
" elif '\\n' not in bbllines or '\\t' not in bbllines:\n",
" # probably a bad file name: let's rise the proper error\n",
" bbllines = utils.file_lines(bbllines)\n",
" else: # bubble itself\n",
" bbllines = bbllines.split('\\n')\n",
" bubble = tuple(bbllines)\n",
" data = tuple(utils.line_data(line) for line in bubble)\n",
" types = tuple(utils.line_type(line) for line in bubble)\n",
" # launch profiling\n",
" if profiling:\n",
" ltype_counts = Counter(types)\n",
" for ltype, count in ltype_counts.items():\n",
" yield 'INFO {} lines of type {}'.format(count, ltype)\n",
" yield 'INFO {} lines of payload'.format(\n",
" ltype_counts['EDGE'] + ltype_counts['IN'] +\n",
" ltype_counts['NODE'] + ltype_counts['SET'])\n",
" # launch validation\n",
" for errline in (l for l, t in zip(bubble, types) if t == 'ERROR'):\n",
" yield 'ERROR line is not bubble: \"{}\"'.format(errline)\n",
" tree = BubbleTree.from_bubble_data(data)\n",
" cc, subroots = tree.connected_components()\n",
" # print('cc:', cc)\n",
" # print('subroots:', subroots)\n",
" if profiling:\n",
" yield 'INFO {} top (power)nodes'.format(len(tree.roots))\n",
" yield 'INFO {} connected components'.format(len(cc))\n",
" yield 'INFO {} nodes are defined, {} are used'.format(\n",
" ltype_counts['NODE'], len(tuple(tree.nodes())))\n",
" yield 'INFO {} powernodes are defined, {} are used'.format(\n",
" ltype_counts['SET'], len(tuple(tree.powernodes())))\n",
"\n",
" yield from inclusions_validation(tree)\n",
" yield from mergeability_validation(tree)"
] | [
0.02040816326530612,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.014084507042253521,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.022727272727272728
] | 45 | 0.001272 |
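A self-contained sketch of the input-dispatch heuristic at the top of the `validate` sample above: a string without newlines or tabs is treated as a file name, so opening it raises the proper error when the name is bad. The helper name `as_lines` is invented for the sketch:

```python
# Standalone version of the "file name vs. bubble content" dispatch
# used by validate() above.
import os

def as_lines(bbl):
    if isinstance(bbl, str):
        if os.path.exists(bbl) or ('\n' not in bbl and '\t' not in bbl):
            with open(bbl) as fd:            # bad name -> proper OSError
                return tuple(line.rstrip('\n') for line in fd)
        return tuple(bbl.split('\n'))        # the string is the bubble itself
    return tuple(bbl)

print(as_lines("EDGE\ta\tb\nNODE\tc"))  # ('EDGE\ta\tb', 'NODE\tc')
```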
def get_total_ram():
"""The total amount of system RAM in bytes.
This is what is reported by the OS, and may be overcommitted when
there are multiple containers hosted on the same machine.
"""
with open('/proc/meminfo', 'r') as f:
for line in f.readlines():
if line:
key, value, unit = line.split()
if key == 'MemTotal:':
assert unit == 'kB', 'Unknown unit'
return int(value) * 1024 # Classic, not KiB.
raise NotImplementedError() | [
"def",
"get_total_ram",
"(",
")",
":",
"with",
"open",
"(",
"'/proc/meminfo'",
",",
"'r'",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
".",
"readlines",
"(",
")",
":",
"if",
"line",
":",
"key",
",",
"value",
",",
"unit",
"=",
"line",
".",
"split",
"(",
")",
"if",
"key",
"==",
"'MemTotal:'",
":",
"assert",
"unit",
"==",
"'kB'",
",",
"'Unknown unit'",
"return",
"int",
"(",
"value",
")",
"*",
"1024",
"# Classic, not KiB.",
"raise",
"NotImplementedError",
"(",
")"
] | 38.5 | 0.001812 | [
"def get_total_ram():\n",
" \"\"\"The total amount of system RAM in bytes.\n",
"\n",
" This is what is reported by the OS, and may be overcommitted when\n",
" there are multiple containers hosted on the same machine.\n",
" \"\"\"\n",
" with open('/proc/meminfo', 'r') as f:\n",
" for line in f.readlines():\n",
" if line:\n",
" key, value, unit = line.split()\n",
" if key == 'MemTotal:':\n",
" assert unit == 'kB', 'Unknown unit'\n",
" return int(value) * 1024 # Classic, not KiB.\n",
" raise NotImplementedError()"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.02857142857142857
] | 14 | 0.002041 |
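A quick, Linux-only spot check of the same `/proc/meminfo` parse used by the `get_total_ram` sample above; it relies on `MemTotal` being the first line of that file, which is the sample's own assumption:

```python
# Linux-only: print total RAM in GiB using the parse shown above.
with open("/proc/meminfo") as f:
    for line in f:
        key, value, unit = line.split()
        if key == "MemTotal:":
            print("%.2f GiB" % (int(value) * 1024 / 1024**3))
            break
```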
def _to_point(dims):
"""Convert (width, height) or size -> point.Point."""
assert dims
if isinstance(dims, (tuple, list)):
if len(dims) != 2:
raise ValueError(
"A two element tuple or list is expected here, got {}.".format(dims))
else:
width = int(dims[0])
height = int(dims[1])
if width <= 0 or height <= 0:
raise ValueError("Must specify +ve dims, got {}.".format(dims))
else:
return point.Point(width, height)
else:
size = int(dims)
if size <= 0:
raise ValueError(
"Must specify a +ve value for size, got {}.".format(dims))
else:
return point.Point(size, size) | [
"def",
"_to_point",
"(",
"dims",
")",
":",
"assert",
"dims",
"if",
"isinstance",
"(",
"dims",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"if",
"len",
"(",
"dims",
")",
"!=",
"2",
":",
"raise",
"ValueError",
"(",
"\"A two element tuple or list is expected here, got {}.\"",
".",
"format",
"(",
"dims",
")",
")",
"else",
":",
"width",
"=",
"int",
"(",
"dims",
"[",
"0",
"]",
")",
"height",
"=",
"int",
"(",
"dims",
"[",
"1",
"]",
")",
"if",
"width",
"<=",
"0",
"or",
"height",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"\"Must specify +ve dims, got {}.\"",
".",
"format",
"(",
"dims",
")",
")",
"else",
":",
"return",
"point",
".",
"Point",
"(",
"width",
",",
"height",
")",
"else",
":",
"size",
"=",
"int",
"(",
"dims",
")",
"if",
"size",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"\"Must specify a +ve value for size, got {}.\"",
".",
"format",
"(",
"dims",
")",
")",
"else",
":",
"return",
"point",
".",
"Point",
"(",
"size",
",",
"size",
")"
] | 29.5 | 0.01791 | [
"def _to_point(dims):\n",
" \"\"\"Convert (width, height) or size -> point.Point.\"\"\"\n",
" assert dims\n",
"\n",
" if isinstance(dims, (tuple, list)):\n",
" if len(dims) != 2:\n",
" raise ValueError(\n",
" \"A two element tuple or list is expected here, got {}.\".format(dims))\n",
" else:\n",
" width = int(dims[0])\n",
" height = int(dims[1])\n",
" if width <= 0 or height <= 0:\n",
" raise ValueError(\"Must specify +ve dims, got {}.\".format(dims))\n",
" else:\n",
" return point.Point(width, height)\n",
" else:\n",
" size = int(dims)\n",
" if size <= 0:\n",
" raise ValueError(\n",
" \"Must specify a +ve value for size, got {}.\".format(dims))\n",
" else:\n",
" return point.Point(size, size)"
] | [
0,
0.017857142857142856,
0.07142857142857142,
0,
0.02631578947368421,
0,
0.041666666666666664,
0,
0,
0.037037037037037035,
0.03571428571428571,
0.027777777777777776,
0,
0.08333333333333333,
0,
0.125,
0,
0,
0.041666666666666664,
0,
0,
0.05555555555555555
] | 22 | 0.025607 |
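A self-contained sketch of the dims normalisation in the `_to_point` sample above; `point.Point` is stubbed with a namedtuple, which is an assumption made only for this sketch:

```python
# Stubbed, runnable version of the (width, height) / size -> Point logic above.
from collections import namedtuple

Point = namedtuple("Point", ["x", "y"])  # stand-in for point.Point

def to_point(dims):
    assert dims
    if isinstance(dims, (tuple, list)):
        if len(dims) != 2:
            raise ValueError("A two element tuple or list is expected, got {}.".format(dims))
        width, height = int(dims[0]), int(dims[1])
        if width <= 0 or height <= 0:
            raise ValueError("Must specify +ve dims, got {}.".format(dims))
        return Point(width, height)
    size = int(dims)
    if size <= 0:
        raise ValueError("Must specify a +ve value for size, got {}.".format(dims))
    return Point(size, size)

print(to_point((640, 480)))  # Point(x=640, y=480)
print(to_point(84))          # Point(x=84, y=84)
```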
def _Rforce(self,R,z,phi=0.,t=0.):
"""
NAME:
_Rforce
PURPOSE:
evaluate the radial force for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the radial force
HISTORY:
2013-06-26 - Written - Bovy (IAS)
"""
r= nu.sqrt(R*R+z*z)
return -self._mass(r)*R/r**3. | [
"def",
"_Rforce",
"(",
"self",
",",
"R",
",",
"z",
",",
"phi",
"=",
"0.",
",",
"t",
"=",
"0.",
")",
":",
"r",
"=",
"nu",
".",
"sqrt",
"(",
"R",
"*",
"R",
"+",
"z",
"*",
"z",
")",
"return",
"-",
"self",
".",
"_mass",
"(",
"r",
")",
"*",
"R",
"/",
"r",
"**",
"3."
] | 25.5 | 0.014706 | [
"def _Rforce(self,R,z,phi=0.,t=0.):\n",
" \"\"\"\n",
" NAME:\n",
" _Rforce\n",
" PURPOSE:\n",
" evaluate the radial force for this potential\n",
" INPUT:\n",
" R - Galactocentric cylindrical radius\n",
" z - vertical height\n",
" phi - azimuth\n",
" t - time\n",
" OUTPUT:\n",
" the radial force\n",
" HISTORY:\n",
" 2013-06-26 - Written - Bovy (IAS)\n",
" \"\"\"\n",
" r= nu.sqrt(R*R+z*z)\n",
" return -self._mass(r)*R/r**3."
] | [
0.11428571428571428,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.03571428571428571,
0.02702702702702703
] | 18 | 0.014464 |
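For reference, the `_Rforce` sample above implements the cylindrical-R component of the force from the enclosed mass of a spherical potential; taking G = 1 (galpy-style natural units) is an assumption, not stated in the row:

```latex
% Radial force from enclosed mass M(r), assuming G = 1:
F_R = -\frac{M(r)\,R}{r^{3}}, \qquad r = \sqrt{R^{2} + z^{2}}
```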
def FixmatFactory(fixmatfile, categories = None, var_name = 'fixmat', field_name='x'):
"""
Loads a single fixmat (fixmatfile).
Parameters:
fixmatfile : string
The matlab fixmat that should be loaded.
categories : instance of stimuli.Categories, optional
Links data in categories to data in fixmat.
"""
try:
data = loadmat(fixmatfile, struct_as_record = False)
keys = list(data.keys())
data = data[var_name][0][0]
except KeyError:
raise RuntimeError('%s is not a field of the matlab structure. Possible '
'keys are %s' % (var_name, str(keys)))
num_fix = data.__getattribute__(field_name).size
# Get a list with fieldnames and a list with parameters
fields = {}
parameters = {}
for field in data._fieldnames:
if data.__getattribute__(field).size == num_fix:
fields[field] = data.__getattribute__(field)
else:
parameters[field] = data.__getattribute__(field)[0].tolist()
if len(parameters[field]) == 1:
parameters[field] = parameters[field][0]
# Generate FixMat
fixmat = FixMat(categories = categories)
fixmat._fields = list(fields.keys())
for (field, value) in list(fields.items()):
fixmat.__dict__[field] = value.reshape(-1,)
fixmat._parameters = parameters
fixmat._subjects = None
for (field, value) in list(parameters.items()):
fixmat.__dict__[field] = value
fixmat._num_fix = num_fix
return fixmat | [
"def",
"FixmatFactory",
"(",
"fixmatfile",
",",
"categories",
"=",
"None",
",",
"var_name",
"=",
"'fixmat'",
",",
"field_name",
"=",
"'x'",
")",
":",
"try",
":",
"data",
"=",
"loadmat",
"(",
"fixmatfile",
",",
"struct_as_record",
"=",
"False",
")",
"keys",
"=",
"list",
"(",
"data",
".",
"keys",
"(",
")",
")",
"data",
"=",
"data",
"[",
"var_name",
"]",
"[",
"0",
"]",
"[",
"0",
"]",
"except",
"KeyError",
":",
"raise",
"RuntimeError",
"(",
"'%s is not a field of the matlab structure. Possible'",
"+",
"'Keys are %s'",
"%",
"str",
"(",
"keys",
")",
")",
"num_fix",
"=",
"data",
".",
"__getattribute__",
"(",
"field_name",
")",
".",
"size",
"# Get a list with fieldnames and a list with parameters",
"fields",
"=",
"{",
"}",
"parameters",
"=",
"{",
"}",
"for",
"field",
"in",
"data",
".",
"_fieldnames",
":",
"if",
"data",
".",
"__getattribute__",
"(",
"field",
")",
".",
"size",
"==",
"num_fix",
":",
"fields",
"[",
"field",
"]",
"=",
"data",
".",
"__getattribute__",
"(",
"field",
")",
"else",
":",
"parameters",
"[",
"field",
"]",
"=",
"data",
".",
"__getattribute__",
"(",
"field",
")",
"[",
"0",
"]",
".",
"tolist",
"(",
")",
"if",
"len",
"(",
"parameters",
"[",
"field",
"]",
")",
"==",
"1",
":",
"parameters",
"[",
"field",
"]",
"=",
"parameters",
"[",
"field",
"]",
"[",
"0",
"]",
"# Generate FixMat",
"fixmat",
"=",
"FixMat",
"(",
"categories",
"=",
"categories",
")",
"fixmat",
".",
"_fields",
"=",
"list",
"(",
"fields",
".",
"keys",
"(",
")",
")",
"for",
"(",
"field",
",",
"value",
")",
"in",
"list",
"(",
"fields",
".",
"items",
"(",
")",
")",
":",
"fixmat",
".",
"__dict__",
"[",
"field",
"]",
"=",
"value",
".",
"reshape",
"(",
"-",
"1",
",",
")",
"fixmat",
".",
"_parameters",
"=",
"parameters",
"fixmat",
".",
"_subjects",
"=",
"None",
"for",
"(",
"field",
",",
"value",
")",
"in",
"list",
"(",
"parameters",
".",
"items",
"(",
")",
")",
":",
"fixmat",
".",
"__dict__",
"[",
"field",
"]",
"=",
"value",
"fixmat",
".",
"_num_fix",
"=",
"num_fix",
"return",
"fixmat"
] | 35.465116 | 0.012125 | [
"def FixmatFactory(fixmatfile, categories = None, var_name = 'fixmat', field_name='x'):\n",
" \"\"\"\n",
" Loads a single fixmat (fixmatfile).\n",
" \n",
" Parameters:\n",
" fixmatfile : string\n",
" The matlab fixmat that should be loaded.\n",
" categories : instance of stimuli.Categories, optional\n",
" Links data in categories to data in fixmat.\n",
" \"\"\"\n",
" try:\n",
" data = loadmat(fixmatfile, struct_as_record = False)\n",
" keys = list(data.keys())\n",
" data = data[var_name][0][0]\n",
" except KeyError:\n",
" raise RuntimeError('%s is not a field of the matlab structure. Possible'+\n",
" 'Keys are %s'%str(keys))\n",
" \n",
" num_fix = data.__getattribute__(field_name).size\n",
"\n",
" # Get a list with fieldnames and a list with parameters\n",
" fields = {}\n",
" parameters = {}\n",
" for field in data._fieldnames:\n",
" if data.__getattribute__(field).size == num_fix:\n",
" fields[field] = data.__getattribute__(field)\n",
" else: \n",
" parameters[field] = data.__getattribute__(field)[0].tolist()\n",
" if len(parameters[field]) == 1:\n",
" parameters[field] = parameters[field][0]\n",
" \n",
" # Generate FixMat\n",
" fixmat = FixMat(categories = categories)\n",
" fixmat._fields = list(fields.keys())\n",
" for (field, value) in list(fields.items()):\n",
" fixmat.__dict__[field] = value.reshape(-1,) \n",
"\n",
" fixmat._parameters = parameters\n",
" fixmat._subjects = None\n",
" for (field, value) in list(parameters.items()):\n",
" fixmat.__dict__[field] = value\n",
" fixmat._num_fix = num_fix\n",
" return fixmat"
] | [
0.05747126436781609,
0,
0,
0.2,
0,
0,
0,
0,
0,
0,
0,
0.03278688524590164,
0,
0,
0,
0.024390243902439025,
0.04878048780487805,
0.2,
0,
0,
0,
0,
0,
0,
0,
0,
0.038461538461538464,
0,
0,
0,
0.2,
0,
0.044444444444444446,
0,
0,
0.018867924528301886,
0,
0,
0,
0,
0,
0,
0.058823529411764705
] | 43 | 0.021489 |
def ignore_path(path, ignore_list=None, whitelist=None):
"""
Returns a boolean indicating if a path should be ignored given an
ignore_list and a whitelist of glob patterns.
"""
if ignore_list is None:
return True
should_ignore = matches_glob_list(path, ignore_list)
if whitelist is None:
return should_ignore
return should_ignore and not matches_glob_list(path, whitelist) | [
"def",
"ignore_path",
"(",
"path",
",",
"ignore_list",
"=",
"None",
",",
"whitelist",
"=",
"None",
")",
":",
"if",
"ignore_list",
"is",
"None",
":",
"return",
"True",
"should_ignore",
"=",
"matches_glob_list",
"(",
"path",
",",
"ignore_list",
")",
"if",
"whitelist",
"is",
"None",
":",
"return",
"should_ignore",
"return",
"should_ignore",
"and",
"not",
"matches_glob_list",
"(",
"path",
",",
"whitelist",
")"
] | 31.538462 | 0.00237 | [
"def ignore_path(path, ignore_list=None, whitelist=None):\n",
" \"\"\"\n",
" Returns a boolean indicating if a path should be ignored given an\n",
" ignore_list and a whitelist of glob patterns.\n",
" \"\"\"\n",
" if ignore_list is None:\n",
" return True\n",
"\n",
" should_ignore = matches_glob_list(path, ignore_list)\n",
" if whitelist is None:\n",
" return should_ignore\n",
"\n",
" return should_ignore and not matches_glob_list(path, whitelist)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.014925373134328358
] | 13 | 0.001148 |
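
A minimal usage sketch for the `ignore_path` record above. The `matches_glob_list` helper it calls is not part of this sample, so a hypothetical fnmatch-based stand-in is used here, with the record's function copied alongside so the sketch runs standalone:

    from fnmatch import fnmatch

    def matches_glob_list(path, glob_list):
        # Hypothetical stand-in for the helper the record calls:
        # True if the path matches any glob pattern in the list.
        return any(fnmatch(path, pattern) for pattern in glob_list)

    def ignore_path(path, ignore_list=None, whitelist=None):
        # Copied verbatim from the record above.
        if ignore_list is None:
            return True
        should_ignore = matches_glob_list(path, ignore_list)
        if whitelist is None:
            return should_ignore
        return should_ignore and not matches_glob_list(path, whitelist)

    print(ignore_path("src/app.pyc", ignore_list=["*.pyc"]))                       # True
    print(ignore_path("src/app.pyc", ignore_list=["*.pyc"], whitelist=["src/*"]))  # False
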
def trigger(self, source):
"""
Triggers all actions meant to trigger on the board state from `source`.
"""
actions = self.evaluate(source)
if actions:
if not hasattr(actions, "__iter__"):
actions = (actions, )
source.game.trigger_actions(source, actions) | [
"def",
"trigger",
"(",
"self",
",",
"source",
")",
":",
"actions",
"=",
"self",
".",
"evaluate",
"(",
"source",
")",
"if",
"actions",
":",
"if",
"not",
"hasattr",
"(",
"actions",
",",
"\"__iter__\"",
")",
":",
"actions",
"=",
"(",
"actions",
",",
")",
"source",
".",
"game",
".",
"trigger_actions",
"(",
"source",
",",
"actions",
")"
] | 29.555556 | 0.036496 | [
"def trigger(self, source):\n",
"\t\t\"\"\"\n",
"\t\tTriggers all actions meant to trigger on the board state from `source`.\n",
"\t\t\"\"\"\n",
"\t\tactions = self.evaluate(source)\n",
"\t\tif actions:\n",
"\t\t\tif not hasattr(actions, \"__iter__\"):\n",
"\t\t\t\tactions = (actions, )\n",
"\t\t\tsource.game.trigger_actions(source, actions)"
] | [
0,
0.3333333333333333,
0.013513513513513514,
0.16666666666666666,
0.029411764705882353,
0.07142857142857142,
0.025,
0.038461538461538464,
0.0425531914893617
] | 9 | 0.080041 |
def plot_vectors(self, arrows=True):
"""
Plot vectors of positional transition of LISA values
within quadrant in scatterplot in a polar plot.
Parameters
----------
ax : Matplotlib Axes instance, optional
If given, the figure will be created inside this axis.
Default =None.
arrows : boolean, optional
If True show arrowheads of vectors. Default =True
**kwargs : keyword arguments, optional
Keywords used for creating and designing the plot.
Note: 'c' and 'color' cannot be passed when attribute is not None
Returns
-------
fig : Matplotlib Figure instance
Moran scatterplot figure
ax : matplotlib Axes instance
Axes in which the figure is plotted
"""
from splot.giddy import dynamic_lisa_vectors
fig, ax = dynamic_lisa_vectors(self, arrows=arrows)
return fig, ax | [
"def",
"plot_vectors",
"(",
"self",
",",
"arrows",
"=",
"True",
")",
":",
"from",
"splot",
".",
"giddy",
"import",
"dynamic_lisa_vectors",
"fig",
",",
"ax",
"=",
"dynamic_lisa_vectors",
"(",
"self",
",",
"arrows",
"=",
"arrows",
")",
"return",
"fig",
",",
"ax"
] | 32.793103 | 0.002043 | [
"def plot_vectors(self, arrows=True):\n",
" \"\"\"\n",
" Plot vectors of positional transition of LISA values\n",
" within quadrant in scatterplot in a polar plot.\n",
"\n",
" Parameters\n",
" ----------\n",
" ax : Matplotlib Axes instance, optional\n",
" If given, the figure will be created inside this axis.\n",
" Default =None.\n",
" arrows : boolean, optional\n",
" If True show arrowheads of vectors. Default =True\n",
" **kwargs : keyword arguments, optional\n",
" Keywords used for creating and designing the plot.\n",
" Note: 'c' and 'color' cannot be passed when attribute is not None\n",
"\n",
" Returns\n",
" -------\n",
" fig : Matplotlib Figure instance\n",
" Moran scatterplot figure\n",
" ax : matplotlib Axes instance\n",
" Axes in which the figure is plotted\n",
"\n",
" \"\"\"\n",
"\n",
" from splot.giddy import dynamic_lisa_vectors\n",
"\n",
" fig, ax = dynamic_lisa_vectors(self, arrows=arrows)\n",
" return fig, ax"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.045454545454545456
] | 29 | 0.004441 |
def get_settings_from_client(client):
"""Pull out settings from a SoftLayer.BaseClient instance.
:param client: SoftLayer.BaseClient instance
"""
settings = {
'username': '',
'api_key': '',
'timeout': '',
'endpoint_url': '',
}
try:
settings['username'] = client.auth.username
settings['api_key'] = client.auth.api_key
except AttributeError:
pass
transport = _resolve_transport(client.transport)
try:
settings['timeout'] = transport.timeout
settings['endpoint_url'] = transport.endpoint_url
except AttributeError:
pass
return settings | [
"def",
"get_settings_from_client",
"(",
"client",
")",
":",
"settings",
"=",
"{",
"'username'",
":",
"''",
",",
"'api_key'",
":",
"''",
",",
"'timeout'",
":",
"''",
",",
"'endpoint_url'",
":",
"''",
",",
"}",
"try",
":",
"settings",
"[",
"'username'",
"]",
"=",
"client",
".",
"auth",
".",
"username",
"settings",
"[",
"'api_key'",
"]",
"=",
"client",
".",
"auth",
".",
"api_key",
"except",
"AttributeError",
":",
"pass",
"transport",
"=",
"_resolve_transport",
"(",
"client",
".",
"transport",
")",
"try",
":",
"settings",
"[",
"'timeout'",
"]",
"=",
"transport",
".",
"timeout",
"settings",
"[",
"'endpoint_url'",
"]",
"=",
"transport",
".",
"endpoint_url",
"except",
"AttributeError",
":",
"pass",
"return",
"settings"
] | 25.44 | 0.001515 | [
"def get_settings_from_client(client):\n",
" \"\"\"Pull out settings from a SoftLayer.BaseClient instance.\n",
"\n",
" :param client: SoftLayer.BaseClient instance\n",
" \"\"\"\n",
" settings = {\n",
" 'username': '',\n",
" 'api_key': '',\n",
" 'timeout': '',\n",
" 'endpoint_url': '',\n",
" }\n",
" try:\n",
" settings['username'] = client.auth.username\n",
" settings['api_key'] = client.auth.api_key\n",
" except AttributeError:\n",
" pass\n",
"\n",
" transport = _resolve_transport(client.transport)\n",
" try:\n",
" settings['timeout'] = transport.timeout\n",
" settings['endpoint_url'] = transport.endpoint_url\n",
" except AttributeError:\n",
" pass\n",
"\n",
" return settings"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05263157894736842
] | 25 | 0.002105 |
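
A sketch of driving the `get_settings_from_client` record above without a real SoftLayer client, using `types.SimpleNamespace` stand-ins; `_resolve_transport` is not in this sample, so an identity stub is assumed (the real helper may unwrap transport wrappers), and the record's function is assumed to be pasted in scope:

    from types import SimpleNamespace

    def _resolve_transport(transport):
        # Assumed identity stub for the helper the record calls.
        return transport

    client = SimpleNamespace(
        auth=SimpleNamespace(username='alice', api_key='s3cret'),
        transport=SimpleNamespace(timeout=30, endpoint_url='https://api.example.com/'),
    )
    print(get_settings_from_client(client))
    # {'username': 'alice', 'api_key': 's3cret', 'timeout': 30,
    #  'endpoint_url': 'https://api.example.com/'}
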
def _init():
"""Dynamically import engines that initialize successfully."""
import importlib
import os
import re
filenames = os.listdir(os.path.dirname(__file__))
module_names = set()
for filename in filenames:
match = re.match(r'^(?P<name>[A-Z_a-z]\w*)\.py[co]?$', filename)
if match:
module_names.add(match.group('name'))
for module_name in module_names:
try:
module = importlib.import_module('.' + module_name, __name__)
except ImportError:
continue
for name, member in module.__dict__.items():
if not isinstance(member, type):
# skip non-new-style classes
continue
if not issubclass(member, Engine):
# skip non-subclasses of Engine
continue
if member is Engine:
# skip "abstract" class Engine
continue
try:
handle = member.handle
except AttributeError:
continue
engines[handle] = member | [
"def",
"_init",
"(",
")",
":",
"import",
"importlib",
"import",
"os",
"import",
"re",
"filenames",
"=",
"os",
".",
"listdir",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"module_names",
"=",
"set",
"(",
")",
"for",
"filename",
"in",
"filenames",
":",
"match",
"=",
"re",
".",
"match",
"(",
"r'^(?P<name>[A-Z_a-z]\\w*)\\.py[co]?$'",
",",
"filename",
")",
"if",
"match",
":",
"module_names",
".",
"add",
"(",
"match",
".",
"group",
"(",
"'name'",
")",
")",
"for",
"module_name",
"in",
"module_names",
":",
"try",
":",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"'.'",
"+",
"module_name",
",",
"__name__",
")",
"except",
"ImportError",
":",
"continue",
"for",
"name",
",",
"member",
"in",
"module",
".",
"__dict__",
".",
"items",
"(",
")",
":",
"if",
"not",
"isinstance",
"(",
"member",
",",
"type",
")",
":",
"# skip non-new-style classes",
"continue",
"if",
"not",
"issubclass",
"(",
"member",
",",
"Engine",
")",
":",
"# skip non-subclasses of Engine",
"continue",
"if",
"member",
"is",
"Engine",
":",
"# skip \"abstract\" class Engine",
"continue",
"try",
":",
"handle",
"=",
"member",
".",
"handle",
"except",
"AttributeError",
":",
"continue",
"engines",
"[",
"handle",
"]",
"=",
"member"
] | 28.864865 | 0.000906 | [
"def _init():\n",
" \"\"\"Dynamically import engines that initialize successfully.\"\"\"\n",
" import importlib\n",
" import os\n",
" import re\n",
"\n",
" filenames = os.listdir(os.path.dirname(__file__))\n",
"\n",
" module_names = set()\n",
" for filename in filenames:\n",
" match = re.match(r'^(?P<name>[A-Z_a-z]\\w*)\\.py[co]?$', filename)\n",
" if match:\n",
" module_names.add(match.group('name'))\n",
"\n",
" for module_name in module_names:\n",
" try:\n",
" module = importlib.import_module('.' + module_name, __name__)\n",
" except ImportError:\n",
" continue\n",
"\n",
" for name, member in module.__dict__.items():\n",
" if not isinstance(member, type):\n",
" # skip non-new-style classes\n",
" continue\n",
" if not issubclass(member, Engine):\n",
" # skip non-subclasses of Engine\n",
" continue\n",
" if member is Engine:\n",
" # skip \"abstract\" class Engine\n",
" continue\n",
"\n",
" try:\n",
" handle = member.handle\n",
" except AttributeError:\n",
" continue\n",
"\n",
" engines[handle] = member"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.027777777777777776
] | 37 | 0.000751 |
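
The discovery logic in the `_init` record above hinges on one filename regex; it is reproduced verbatim below and exercised on a few sample names (the sample filenames are illustrative only):

    import re

    # Verbatim pattern from the record: a Python identifier followed by .py/.pyc/.pyo
    pattern = re.compile(r'^(?P<name>[A-Z_a-z]\w*)\.py[co]?$')

    for filename in ["mysql.py", "sqlite3.pyc", "_base.pyo", "README.md", "9bad.py"]:
        match = pattern.match(filename)
        print(filename, "->", match.group('name') if match else None)
    # mysql.py -> mysql; sqlite3.pyc -> sqlite3; _base.pyo -> _base
    # README.md -> None (wrong suffix); 9bad.py -> None (identifiers cannot start with a digit)
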
def displayEmptyInputWarningBox(display=True, parent=None):
""" Displays a warning box for the 'input' parameter.
"""
if sys.version_info[0] >= 3:
from tkinter.messagebox import showwarning
else:
from tkMessageBox import showwarning
if display:
msg = 'No valid input files found! '+\
'Please check the value for the "input" parameter.'
showwarning(parent=parent,message=msg, title="No valid inputs!")
return "yes" | [
"def",
"displayEmptyInputWarningBox",
"(",
"display",
"=",
"True",
",",
"parent",
"=",
"None",
")",
":",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
">=",
"3",
":",
"from",
"tkinter",
".",
"messagebox",
"import",
"showwarning",
"else",
":",
"from",
"tkMessageBox",
"import",
"showwarning",
"if",
"display",
":",
"msg",
"=",
"'No valid input files found! '",
"+",
"'Please check the value for the \"input\" parameter.'",
"showwarning",
"(",
"parent",
"=",
"parent",
",",
"message",
"=",
"msg",
",",
"title",
"=",
"\"No valid inputs!\"",
")",
"return",
"\"yes\""
] | 35.846154 | 0.008368 | [
"def displayEmptyInputWarningBox(display=True, parent=None):\n",
" \"\"\" Displays a warning box for the 'input' parameter.\n",
" \"\"\"\n",
" if sys.version_info[0] >= 3:\n",
" from tkinter.messagebox import showwarning\n",
" else:\n",
" from tkMessageBox import showwarning\n",
"\n",
" if display:\n",
" msg = 'No valid input files found! '+\\\n",
" 'Please check the value for the \"input\" parameter.'\n",
" showwarning(parent=parent,message=msg, title=\"No valid inputs!\")\n",
" return \"yes\""
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.02127659574468085,
0.016666666666666666,
0.0136986301369863,
0.0625
] | 13 | 0.00878 |
def _check_directory_arguments(self):
"""
Validates arguments for loading from directories, including static image and time series directories.
"""
if not os.path.isdir(self.datapath):
raise (NotADirectoryError('Directory does not exist: %s' % self.datapath))
if self.time_delay:
if self.time_delay < 1:
raise ValueError('Time step argument must be greater than 0, but gave: %i' % self.time_delay)
if not isinstance(self.time_delay, int):
raise ValueError('Time step argument must be an integer, but gave: %s' % str(self.time_delay)) | [
"def",
"_check_directory_arguments",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"datapath",
")",
":",
"raise",
"(",
"NotADirectoryError",
"(",
"'Directory does not exist: %s'",
"%",
"self",
".",
"datapath",
")",
")",
"if",
"self",
".",
"time_delay",
":",
"if",
"self",
".",
"time_delay",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'Time step argument must be greater than 0, but gave: %i'",
"%",
"self",
".",
"time_delay",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"time_delay",
",",
"int",
")",
":",
"raise",
"ValueError",
"(",
"'Time step argument must be an integer, but gave: %s'",
"%",
"str",
"(",
"self",
".",
"time_delay",
")",
")"
] | 57.363636 | 0.00936 | [
"def _check_directory_arguments(self):\n",
" \"\"\"\n",
" Validates arguments for loading from directories, including static image and time series directories.\n",
" \"\"\"\n",
" if not os.path.isdir(self.datapath):\n",
" raise (NotADirectoryError('Directory does not exist: %s' % self.datapath))\n",
" if self.time_delay:\n",
" if self.time_delay < 1:\n",
" raise ValueError('Time step argument must be greater than 0, but gave: %i' % self.time_delay)\n",
" if not isinstance(self.time_delay, int):\n",
" raise ValueError('Time step argument must be an integer, but gave: %s' % str(self.time_delay))"
] | [
0,
0.08333333333333333,
0.00909090909090909,
0,
0,
0.011494252873563218,
0,
0,
0.00909090909090909,
0,
0.01818181818181818
] | 11 | 0.011926 |
def remove_invalid_fields(self, queryset, fields, view):
"""Remove invalid fields from an ordering.
Overwrites the DRF default remove_invalid_fields method to return
both the valid orderings and any invalid orderings.
"""
valid_orderings = []
invalid_orderings = []
# for each field sent down from the query param,
# determine if its valid or invalid
for term in fields:
stripped_term = term.lstrip('-')
# add back the '-' add the end if necessary
reverse_sort_term = '' if len(stripped_term) is len(term) else '-'
ordering = self.ordering_for(stripped_term, view)
if ordering:
valid_orderings.append(reverse_sort_term + ordering)
else:
invalid_orderings.append(term)
return valid_orderings, invalid_orderings | [
"def",
"remove_invalid_fields",
"(",
"self",
",",
"queryset",
",",
"fields",
",",
"view",
")",
":",
"valid_orderings",
"=",
"[",
"]",
"invalid_orderings",
"=",
"[",
"]",
"# for each field sent down from the query param,",
"# determine if its valid or invalid",
"for",
"term",
"in",
"fields",
":",
"stripped_term",
"=",
"term",
".",
"lstrip",
"(",
"'-'",
")",
"# add back the '-' add the end if necessary",
"reverse_sort_term",
"=",
"''",
"if",
"len",
"(",
"stripped_term",
")",
"is",
"len",
"(",
"term",
")",
"else",
"'-'",
"ordering",
"=",
"self",
".",
"ordering_for",
"(",
"stripped_term",
",",
"view",
")",
"if",
"ordering",
":",
"valid_orderings",
".",
"append",
"(",
"reverse_sort_term",
"+",
"ordering",
")",
"else",
":",
"invalid_orderings",
".",
"append",
"(",
"term",
")",
"return",
"valid_orderings",
",",
"invalid_orderings"
] | 36.458333 | 0.002227 | [
"def remove_invalid_fields(self, queryset, fields, view):\n",
" \"\"\"Remove invalid fields from an ordering.\n",
"\n",
" Overwrites the DRF default remove_invalid_fields method to return\n",
" both the valid orderings and any invalid orderings.\n",
" \"\"\"\n",
"\n",
" valid_orderings = []\n",
" invalid_orderings = []\n",
"\n",
" # for each field sent down from the query param,\n",
" # determine if its valid or invalid\n",
" for term in fields:\n",
" stripped_term = term.lstrip('-')\n",
" # add back the '-' add the end if necessary\n",
" reverse_sort_term = '' if len(stripped_term) is len(term) else '-'\n",
" ordering = self.ordering_for(stripped_term, view)\n",
"\n",
" if ordering:\n",
" valid_orderings.append(reverse_sort_term + ordering)\n",
" else:\n",
" invalid_orderings.append(term)\n",
"\n",
" return valid_orderings, invalid_orderings"
] | [
0,
0.0196078431372549,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.02040816326530612
] | 24 | 0.001667 |
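
The core of the `remove_invalid_fields` record above is the descending-prefix handling, isolated in this sketch. Note the record compares the lengths with `is`, which happens to work under CPython's small-int cache; `==` is the portable spelling used here:

    def split_ordering_term(term):
        # Strip a leading '-' and remember whether the sort was descending
        # so the prefix can be re-applied to the validated field name.
        stripped = term.lstrip('-')
        prefix = '' if len(stripped) == len(term) else '-'
        return prefix, stripped

    print(split_ordering_term('-created'))  # ('-', 'created')
    print(split_ordering_term('name'))      # ('', 'name')
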
def is_null(*symbols):
""" True if no nodes or all the given nodes are either
None, NOP or empty blocks. For blocks this applies recursively
"""
from symbols.symbol_ import Symbol
for sym in symbols:
if sym is None:
continue
if not isinstance(sym, Symbol):
return False
if sym.token == 'NOP':
continue
if sym.token == 'BLOCK':
if not is_null(*sym.children):
return False
continue
return False
return True | [
"def",
"is_null",
"(",
"*",
"symbols",
")",
":",
"from",
"symbols",
".",
"symbol_",
"import",
"Symbol",
"for",
"sym",
"in",
"symbols",
":",
"if",
"sym",
"is",
"None",
":",
"continue",
"if",
"not",
"isinstance",
"(",
"sym",
",",
"Symbol",
")",
":",
"return",
"False",
"if",
"sym",
".",
"token",
"==",
"'NOP'",
":",
"continue",
"if",
"sym",
".",
"token",
"==",
"'BLOCK'",
":",
"if",
"not",
"is_null",
"(",
"*",
"sym",
".",
"children",
")",
":",
"return",
"False",
"continue",
"return",
"False",
"return",
"True"
] | 27.736842 | 0.001835 | [
"def is_null(*symbols):\n",
" \"\"\" True if no nodes or all the given nodes are either\n",
" None, NOP or empty blocks. For blocks this applies recursively\n",
" \"\"\"\n",
" from symbols.symbol_ import Symbol\n",
"\n",
" for sym in symbols:\n",
" if sym is None:\n",
" continue\n",
" if not isinstance(sym, Symbol):\n",
" return False\n",
" if sym.token == 'NOP':\n",
" continue\n",
" if sym.token == 'BLOCK':\n",
" if not is_null(*sym.children):\n",
" return False\n",
" continue\n",
" return False\n",
" return True"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.06666666666666667
] | 19 | 0.003509 |
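
`is_null` imports `Symbol` from `symbols.symbol_`, which is not part of this sample; the sketch below fakes that module so the record's function (assumed pasted alongside) runs standalone. Any object with `.token` and `.children` would do:

    import sys, types

    class Symbol:
        # Minimal stand-in: a token name plus child symbols.
        def __init__(self, token, *children):
            self.token = token
            self.children = list(children)

    # Register fake 'symbols' / 'symbols.symbol_' modules for the record's import.
    pkg = types.ModuleType('symbols')
    mod = types.ModuleType('symbols.symbol_')
    mod.Symbol = Symbol
    pkg.symbol_ = mod
    sys.modules['symbols'], sys.modules['symbols.symbol_'] = pkg, mod

    nop = Symbol('NOP')
    empty_block = Symbol('BLOCK', nop, Symbol('BLOCK'))
    print(is_null(None, nop, empty_block))       # True: only NOPs and empty blocks
    print(is_null(empty_block, Symbol('CALL')))  # False: CALL is real content
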
def get_renderer(app, id):
"""Retrieve a renderer.
:param app: :class:`~flask.Flask` application to look ``id`` up on
:param id: Internal renderer id-string to look up
"""
renderer = app.extensions.get('nav_renderers', {})[id]
if isinstance(renderer, tuple):
mod_name, cls_name = renderer
mod = import_module(mod_name)
cls = mod
for name in cls_name.split('.'):
cls = getattr(cls, name)
return cls
return renderer | [
"def",
"get_renderer",
"(",
"app",
",",
"id",
")",
":",
"renderer",
"=",
"app",
".",
"extensions",
".",
"get",
"(",
"'nav_renderers'",
",",
"{",
"}",
")",
"[",
"id",
"]",
"if",
"isinstance",
"(",
"renderer",
",",
"tuple",
")",
":",
"mod_name",
",",
"cls_name",
"=",
"renderer",
"mod",
"=",
"import_module",
"(",
"mod_name",
")",
"cls",
"=",
"mod",
"for",
"name",
"in",
"cls_name",
".",
"split",
"(",
"'.'",
")",
":",
"cls",
"=",
"getattr",
"(",
"cls",
",",
"name",
")",
"return",
"cls",
"return",
"renderer"
] | 25.263158 | 0.002008 | [
"def get_renderer(app, id):\n",
" \"\"\"Retrieve a renderer.\n",
"\n",
" :param app: :class:`~flask.Flask` application to look ``id`` up on\n",
" :param id: Internal renderer id-string to look up\n",
" \"\"\"\n",
" renderer = app.extensions.get('nav_renderers', {})[id]\n",
"\n",
" if isinstance(renderer, tuple):\n",
" mod_name, cls_name = renderer\n",
" mod = import_module(mod_name)\n",
"\n",
" cls = mod\n",
" for name in cls_name.split('.'):\n",
" cls = getattr(cls, name)\n",
"\n",
" return cls\n",
"\n",
" return renderer"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05263157894736842
] | 19 | 0.00277 |
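
A standalone sketch of the lazy-import branch in the `get_renderer` record above: a renderer registered as a `(module_name, dotted_class_name)` tuple is resolved on first use. The `('collections', 'OrderedDict')` pair is only an illustration, not a real renderer:

    from importlib import import_module

    renderer = ('collections', 'OrderedDict')   # stand-in registration

    mod_name, cls_name = renderer
    cls = import_module(mod_name)
    for name in cls_name.split('.'):
        cls = getattr(cls, name)
    print(cls)   # <class 'collections.OrderedDict'>
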
def append_if_local_or_in_imports(self, definition):
"""Add definition to list.
Handles local definitions and adds to project_definitions.
"""
if isinstance(definition, LocalModuleDefinition):
self.definitions.append(definition)
elif self.import_names == ["*"]:
self.definitions.append(definition)
elif self.import_names and definition.name in self.import_names:
self.definitions.append(definition)
elif (self.import_alias_mapping and definition.name in
self.import_alias_mapping.values()):
self.definitions.append(definition)
if definition.parent_module_name:
self.definitions.append(definition)
if definition.node not in project_definitions:
project_definitions[definition.node] = definition | [
"def",
"append_if_local_or_in_imports",
"(",
"self",
",",
"definition",
")",
":",
"if",
"isinstance",
"(",
"definition",
",",
"LocalModuleDefinition",
")",
":",
"self",
".",
"definitions",
".",
"append",
"(",
"definition",
")",
"elif",
"self",
".",
"import_names",
"==",
"[",
"\"*\"",
"]",
":",
"self",
".",
"definitions",
".",
"append",
"(",
"definition",
")",
"elif",
"self",
".",
"import_names",
"and",
"definition",
".",
"name",
"in",
"self",
".",
"import_names",
":",
"self",
".",
"definitions",
".",
"append",
"(",
"definition",
")",
"elif",
"(",
"self",
".",
"import_alias_mapping",
"and",
"definition",
".",
"name",
"in",
"self",
".",
"import_alias_mapping",
".",
"values",
"(",
")",
")",
":",
"self",
".",
"definitions",
".",
"append",
"(",
"definition",
")",
"if",
"definition",
".",
"parent_module_name",
":",
"self",
".",
"definitions",
".",
"append",
"(",
"definition",
")",
"if",
"definition",
".",
"node",
"not",
"in",
"project_definitions",
":",
"project_definitions",
"[",
"definition",
".",
"node",
"]",
"=",
"definition"
] | 41.75 | 0.002342 | [
"def append_if_local_or_in_imports(self, definition):\n",
" \"\"\"Add definition to list.\n",
"\n",
" Handles local definitions and adds to project_definitions.\n",
" \"\"\"\n",
" if isinstance(definition, LocalModuleDefinition):\n",
" self.definitions.append(definition)\n",
" elif self.import_names == [\"*\"]:\n",
" self.definitions.append(definition)\n",
" elif self.import_names and definition.name in self.import_names:\n",
" self.definitions.append(definition)\n",
" elif (self.import_alias_mapping and definition.name in\n",
" self.import_alias_mapping.values()):\n",
" self.definitions.append(definition)\n",
"\n",
" if definition.parent_module_name:\n",
" self.definitions.append(definition)\n",
"\n",
" if definition.node not in project_definitions:\n",
" project_definitions[definition.node] = definition"
] | [
0,
0.02857142857142857,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.01639344262295082
] | 20 | 0.002248 |
def deepupdate(
mapping: abc.MutableMapping, other: abc.Mapping, listextend=False
):
"""update one dictionary from another recursively. Only individual
values will be overwritten--not entire branches of nested
dictionaries.
"""
def inner(other, previouskeys):
"""previouskeys is a tuple that stores all the names of keys
we've recursed into so far so it can they can be looked up
recursively on the pimary mapping when a value needs updateing.
"""
for key, value in other.items():
if isinstance(value, abc.Mapping):
inner(value, (*previouskeys, key))
else:
node = mapping
for previouskey in previouskeys:
node = node.setdefault(previouskey, {})
target = node.get(key)
if (
listextend
and isinstance(target, abc.MutableSequence)
and isinstance(value, abc.Sequence)
):
target.extend(value)
else:
node[key] = value
inner(other, ()) | [
"def",
"deepupdate",
"(",
"mapping",
":",
"abc",
".",
"MutableMapping",
",",
"other",
":",
"abc",
".",
"Mapping",
",",
"listextend",
"=",
"False",
")",
":",
"def",
"inner",
"(",
"other",
",",
"previouskeys",
")",
":",
"\"\"\"previouskeys is a tuple that stores all the names of keys\n we've recursed into so far so it can they can be looked up\n recursively on the pimary mapping when a value needs updateing.\n \"\"\"",
"for",
"key",
",",
"value",
"in",
"other",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"abc",
".",
"Mapping",
")",
":",
"inner",
"(",
"value",
",",
"(",
"*",
"previouskeys",
",",
"key",
")",
")",
"else",
":",
"node",
"=",
"mapping",
"for",
"previouskey",
"in",
"previouskeys",
":",
"node",
"=",
"node",
".",
"setdefault",
"(",
"previouskey",
",",
"{",
"}",
")",
"target",
"=",
"node",
".",
"get",
"(",
"key",
")",
"if",
"(",
"listextend",
"and",
"isinstance",
"(",
"target",
",",
"abc",
".",
"MutableSequence",
")",
"and",
"isinstance",
"(",
"value",
",",
"abc",
".",
"Sequence",
")",
")",
":",
"target",
".",
"extend",
"(",
"value",
")",
"else",
":",
"node",
"[",
"key",
"]",
"=",
"value",
"inner",
"(",
"other",
",",
"(",
")",
")"
] | 35.125 | 0.000866 | [
"def deepupdate(\n",
" mapping: abc.MutableMapping, other: abc.Mapping, listextend=False\n",
"):\n",
" \"\"\"update one dictionary from another recursively. Only individual\n",
" values will be overwritten--not entire branches of nested\n",
" dictionaries.\n",
" \"\"\"\n",
"\n",
" def inner(other, previouskeys):\n",
" \"\"\"previouskeys is a tuple that stores all the names of keys\n",
" we've recursed into so far so it can they can be looked up\n",
" recursively on the pimary mapping when a value needs updateing.\n",
" \"\"\"\n",
" for key, value in other.items():\n",
" if isinstance(value, abc.Mapping):\n",
" inner(value, (*previouskeys, key))\n",
"\n",
" else:\n",
" node = mapping\n",
" for previouskey in previouskeys:\n",
" node = node.setdefault(previouskey, {})\n",
" target = node.get(key)\n",
" if (\n",
" listextend\n",
" and isinstance(target, abc.MutableSequence)\n",
" and isinstance(value, abc.Sequence)\n",
" ):\n",
" target.extend(value)\n",
" else:\n",
" node[key] = value\n",
"\n",
" inner(other, ())"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05
] | 32 | 0.001563 |
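
A usage sketch for the `deepupdate` record above, assuming its module does `from collections import abc` and the function is in scope: nested keys are merged individually, and with `listextend=True` sequences are extended rather than replaced:

    base = {'db': {'host': 'localhost', 'port': 5432}, 'tags': ['a']}
    patch = {'db': {'port': 6432}, 'tags': ['b']}

    deepupdate(base, patch, listextend=True)
    print(base)
    # {'db': {'host': 'localhost', 'port': 6432}, 'tags': ['a', 'b']}
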
def main():
"""Start the bot."""
# Bale Bot Authorization Token
updater = Updater("TOKEN")
# Get the dispatcher to register handlers
dp = updater.dispatcher
# on different commands - answer in Bale
dp.add_handler(CommandHandler("start", start))
dp.add_handler(CommandHandler("help", help))
# on noncommand i.e message - echo the message on Bale
dp.add_handler(MessageHandler(DefaultFilter(), echo))
# log all errors
dp.add_error_handler(error)
# Start the Bot
updater.run() | [
"def",
"main",
"(",
")",
":",
"# Bale Bot Authorization Token",
"updater",
"=",
"Updater",
"(",
"\"TOKEN\"",
")",
"# Get the dispatcher to register handlers",
"dp",
"=",
"updater",
".",
"dispatcher",
"# on different commands - answer in Bale",
"dp",
".",
"add_handler",
"(",
"CommandHandler",
"(",
"\"start\"",
",",
"start",
")",
")",
"dp",
".",
"add_handler",
"(",
"CommandHandler",
"(",
"\"help\"",
",",
"help",
")",
")",
"# on noncommand i.e message - echo the message on Bale",
"dp",
".",
"add_handler",
"(",
"MessageHandler",
"(",
"DefaultFilter",
"(",
")",
",",
"echo",
")",
")",
"# log all errors",
"dp",
".",
"add_error_handler",
"(",
"error",
")",
"# Start the Bot",
"updater",
".",
"run",
"(",
")"
] | 25.75 | 0.001873 | [
"def main():\n",
" \"\"\"Start the bot.\"\"\"\n",
" # Bale Bot Authorization Token\n",
" updater = Updater(\"TOKEN\")\n",
"\n",
" # Get the dispatcher to register handlers\n",
" dp = updater.dispatcher\n",
"\n",
" # on different commands - answer in Bale\n",
" dp.add_handler(CommandHandler(\"start\", start))\n",
" dp.add_handler(CommandHandler(\"help\", help))\n",
"\n",
" # on noncommand i.e message - echo the message on Bale\n",
" dp.add_handler(MessageHandler(DefaultFilter(), echo))\n",
"\n",
" # log all errors\n",
" dp.add_error_handler(error)\n",
"\n",
" # Start the Bot\n",
" updater.run()"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.058823529411764705
] | 20 | 0.002941 |
def setParts( self, parts ):
"""
Sets the path for this edit widget by providing the parts to the path.
:param parts | [<str>, ..]
"""
self.setText(self.separator().join(map(str, parts))) | [
"def",
"setParts",
"(",
"self",
",",
"parts",
")",
":",
"self",
".",
"setText",
"(",
"self",
".",
"separator",
"(",
")",
".",
"join",
"(",
"map",
"(",
"str",
",",
"parts",
")",
")",
")"
] | 33.571429 | 0.020747 | [
"def setParts( self, parts ):\n",
" \"\"\"\n",
" Sets the path for this edit widget by providing the parts to the path.\n",
" \n",
" :param parts | [<str>, ..]\n",
" \"\"\"\n",
" self.setText(self.separator().join(map(str, parts)))"
] | [
0.06896551724137931,
0.08333333333333333,
0,
0.1111111111111111,
0,
0,
0.016666666666666666
] | 7 | 0.040011 |
def printDeadCells(self):
"""
Print statistics for the dead cells
"""
columnCasualties = numpy.zeros(self.numberOfColumns())
for cell in self.deadCells:
col = self.columnForCell(cell)
columnCasualties[col] += 1
for col in range(self.numberOfColumns()):
print col, columnCasualties[col] | [
"def",
"printDeadCells",
"(",
"self",
")",
":",
"columnCasualties",
"=",
"numpy",
".",
"zeros",
"(",
"self",
".",
"numberOfColumns",
"(",
")",
")",
"for",
"cell",
"in",
"self",
".",
"deadCells",
":",
"col",
"=",
"self",
".",
"columnForCell",
"(",
"cell",
")",
"columnCasualties",
"[",
"col",
"]",
"+=",
"1",
"for",
"col",
"in",
"range",
"(",
"self",
".",
"numberOfColumns",
"(",
")",
")",
":",
"print",
"col",
",",
"columnCasualties",
"[",
"col",
"]"
] | 31.8 | 0.012232 | [
"def printDeadCells(self):\n",
" \"\"\"\n",
" Print statistics for the dead cells\n",
" \"\"\"\n",
" columnCasualties = numpy.zeros(self.numberOfColumns())\n",
" for cell in self.deadCells:\n",
" col = self.columnForCell(cell)\n",
" columnCasualties[col] += 1\n",
" for col in range(self.numberOfColumns()):\n",
" print col, columnCasualties[col]"
] | [
0,
0,
0,
0,
0,
0,
0.02702702702702703,
0.030303030303030304,
0,
0.05263157894736842
] | 10 | 0.010996 |
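
The `printDeadCells` record above is Python 2 (`print` statement). A standalone Python 3 sketch of the same tally, assuming the usual HTM layout where `columnForCell` is integer division by a cells-per-column count (an assumption; that method is not in this sample):

    import numpy as np

    num_columns, cells_per_column = 4, 8
    dead_cells = [0, 1, 9, 25, 26, 27]   # illustrative cell indices

    column_casualties = np.zeros(num_columns)
    for cell in dead_cells:
        column_casualties[cell // cells_per_column] += 1
    for col in range(num_columns):
        print(col, column_casualties[col])
    # prints: 0 2.0 / 1 1.0 / 2 0.0 / 3 3.0
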
def scan_file(fullpath, relpath, assign_id):
""" scan a file and put it into the index """
# pylint: disable=too-many-branches,too-many-statements,too-many-locals
# Since a file has changed, the lrucache is invalid.
load_message.cache_clear()
try:
entry = load_message(fullpath)
except FileNotFoundError:
# The file doesn't exist, so remove it from the index
record = model.Entry.get(file_path=fullpath)
if record:
expire_record(record)
return True
entry_id = get_entry_id(entry, fullpath, assign_id)
if entry_id is None:
return False
fixup_needed = False
basename = os.path.basename(relpath)
title = entry['title'] or guess_title(basename)
values = {
'file_path': fullpath,
'category': entry.get('Category', utils.get_category(relpath)),
'status': model.PublishStatus[entry.get('Status', 'SCHEDULED').upper()].value,
'entry_type': entry.get('Entry-Type', ''),
'slug_text': make_slug(entry.get('Slug-Text', title)),
'redirect_url': entry.get('Redirect-To', ''),
'title': title,
'sort_title': entry.get('Sort-Title', title),
'entry_template': entry.get('Entry-Template', '')
}
entry_date = None
if 'Date' in entry:
try:
entry_date = arrow.get(entry['Date'], tzinfo=config.timezone)
except arrow.parser.ParserError:
entry_date = None
if entry_date is None:
del entry['Date']
entry_date = arrow.get(
os.stat(fullpath).st_ctime).to(config.timezone)
entry['Date'] = entry_date.format()
fixup_needed = True
if 'Last-Modified' in entry:
last_modified_str = entry['Last-Modified']
try:
last_modified = arrow.get(
last_modified_str, tzinfo=config.timezone)
except arrow.parser.ParserError:
last_modified = arrow.get()
del entry['Last-Modified']
entry['Last-Modified'] = last_modified.format()
fixup_needed = True
values['display_date'] = entry_date.isoformat()
values['utc_date'] = entry_date.to('utc').datetime
values['local_date'] = entry_date.naive
logger.debug("getting entry %s with id %d", fullpath, entry_id)
record = model.Entry.get(id=entry_id)
if record:
logger.debug("Reusing existing entry %d", record.id)
record.set(**values)
else:
record = model.Entry(id=entry_id, **values)
# Update the entry ID
if str(record.id) != entry['Entry-ID']:
del entry['Entry-ID']
entry['Entry-ID'] = str(record.id)
fixup_needed = True
if 'UUID' not in entry:
entry['UUID'] = str(uuid.uuid5(
uuid.NAMESPACE_URL, 'file://' + fullpath))
fixup_needed = True
# add other relationships to the index
path_alias.remove_aliases(record)
if record.visible:
for alias in entry.get_all('Path-Alias', []):
path_alias.set_alias(alias, entry=record)
with orm.db_session:
set_tags = {
t.lower()
for t in entry.get_all('Tag', [])
+ entry.get_all('Hidden-Tag', [])
}
for tag in record.tags:
if tag.key in set_tags:
set_tags.remove(tag.key)
else:
tag.delete()
for tag in set_tags:
model.EntryTag(entry=record, key=tag)
orm.commit()
if record.status == model.PublishStatus.DRAFT.value:
logger.info("Not touching draft entry %s", fullpath)
elif fixup_needed:
logger.info("Fixing up entry %s", fullpath)
save_file(fullpath, entry)
return record | [
"def",
"scan_file",
"(",
"fullpath",
",",
"relpath",
",",
"assign_id",
")",
":",
"# pylint: disable=too-many-branches,too-many-statements,too-many-locals",
"# Since a file has changed, the lrucache is invalid.",
"load_message",
".",
"cache_clear",
"(",
")",
"try",
":",
"entry",
"=",
"load_message",
"(",
"fullpath",
")",
"except",
"FileNotFoundError",
":",
"# The file doesn't exist, so remove it from the index",
"record",
"=",
"model",
".",
"Entry",
".",
"get",
"(",
"file_path",
"=",
"fullpath",
")",
"if",
"record",
":",
"expire_record",
"(",
"record",
")",
"return",
"True",
"entry_id",
"=",
"get_entry_id",
"(",
"entry",
",",
"fullpath",
",",
"assign_id",
")",
"if",
"entry_id",
"is",
"None",
":",
"return",
"False",
"fixup_needed",
"=",
"False",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"relpath",
")",
"title",
"=",
"entry",
"[",
"'title'",
"]",
"or",
"guess_title",
"(",
"basename",
")",
"values",
"=",
"{",
"'file_path'",
":",
"fullpath",
",",
"'category'",
":",
"entry",
".",
"get",
"(",
"'Category'",
",",
"utils",
".",
"get_category",
"(",
"relpath",
")",
")",
",",
"'status'",
":",
"model",
".",
"PublishStatus",
"[",
"entry",
".",
"get",
"(",
"'Status'",
",",
"'SCHEDULED'",
")",
".",
"upper",
"(",
")",
"]",
".",
"value",
",",
"'entry_type'",
":",
"entry",
".",
"get",
"(",
"'Entry-Type'",
",",
"''",
")",
",",
"'slug_text'",
":",
"make_slug",
"(",
"entry",
".",
"get",
"(",
"'Slug-Text'",
",",
"title",
")",
")",
",",
"'redirect_url'",
":",
"entry",
".",
"get",
"(",
"'Redirect-To'",
",",
"''",
")",
",",
"'title'",
":",
"title",
",",
"'sort_title'",
":",
"entry",
".",
"get",
"(",
"'Sort-Title'",
",",
"title",
")",
",",
"'entry_template'",
":",
"entry",
".",
"get",
"(",
"'Entry-Template'",
",",
"''",
")",
"}",
"entry_date",
"=",
"None",
"if",
"'Date'",
"in",
"entry",
":",
"try",
":",
"entry_date",
"=",
"arrow",
".",
"get",
"(",
"entry",
"[",
"'Date'",
"]",
",",
"tzinfo",
"=",
"config",
".",
"timezone",
")",
"except",
"arrow",
".",
"parser",
".",
"ParserError",
":",
"entry_date",
"=",
"None",
"if",
"entry_date",
"is",
"None",
":",
"del",
"entry",
"[",
"'Date'",
"]",
"entry_date",
"=",
"arrow",
".",
"get",
"(",
"os",
".",
"stat",
"(",
"fullpath",
")",
".",
"st_ctime",
")",
".",
"to",
"(",
"config",
".",
"timezone",
")",
"entry",
"[",
"'Date'",
"]",
"=",
"entry_date",
".",
"format",
"(",
")",
"fixup_needed",
"=",
"True",
"if",
"'Last-Modified'",
"in",
"entry",
":",
"last_modified_str",
"=",
"entry",
"[",
"'Last-Modified'",
"]",
"try",
":",
"last_modified",
"=",
"arrow",
".",
"get",
"(",
"last_modified_str",
",",
"tzinfo",
"=",
"config",
".",
"timezone",
")",
"except",
"arrow",
".",
"parser",
".",
"ParserError",
":",
"last_modified",
"=",
"arrow",
".",
"get",
"(",
")",
"del",
"entry",
"[",
"'Last-Modified'",
"]",
"entry",
"[",
"'Last-Modified'",
"]",
"=",
"last_modified",
".",
"format",
"(",
")",
"fixup_needed",
"=",
"True",
"values",
"[",
"'display_date'",
"]",
"=",
"entry_date",
".",
"isoformat",
"(",
")",
"values",
"[",
"'utc_date'",
"]",
"=",
"entry_date",
".",
"to",
"(",
"'utc'",
")",
".",
"datetime",
"values",
"[",
"'local_date'",
"]",
"=",
"entry_date",
".",
"naive",
"logger",
".",
"debug",
"(",
"\"getting entry %s with id %d\"",
",",
"fullpath",
",",
"entry_id",
")",
"record",
"=",
"model",
".",
"Entry",
".",
"get",
"(",
"id",
"=",
"entry_id",
")",
"if",
"record",
":",
"logger",
".",
"debug",
"(",
"\"Reusing existing entry %d\"",
",",
"record",
".",
"id",
")",
"record",
".",
"set",
"(",
"*",
"*",
"values",
")",
"else",
":",
"record",
"=",
"model",
".",
"Entry",
"(",
"id",
"=",
"entry_id",
",",
"*",
"*",
"values",
")",
"# Update the entry ID",
"if",
"str",
"(",
"record",
".",
"id",
")",
"!=",
"entry",
"[",
"'Entry-ID'",
"]",
":",
"del",
"entry",
"[",
"'Entry-ID'",
"]",
"entry",
"[",
"'Entry-ID'",
"]",
"=",
"str",
"(",
"record",
".",
"id",
")",
"fixup_needed",
"=",
"True",
"if",
"'UUID'",
"not",
"in",
"entry",
":",
"entry",
"[",
"'UUID'",
"]",
"=",
"str",
"(",
"uuid",
".",
"uuid5",
"(",
"uuid",
".",
"NAMESPACE_URL",
",",
"'file://'",
"+",
"fullpath",
")",
")",
"fixup_needed",
"=",
"True",
"# add other relationships to the index",
"path_alias",
".",
"remove_aliases",
"(",
"record",
")",
"if",
"record",
".",
"visible",
":",
"for",
"alias",
"in",
"entry",
".",
"get_all",
"(",
"'Path-Alias'",
",",
"[",
"]",
")",
":",
"path_alias",
".",
"set_alias",
"(",
"alias",
",",
"entry",
"=",
"record",
")",
"with",
"orm",
".",
"db_session",
":",
"set_tags",
"=",
"{",
"t",
".",
"lower",
"(",
")",
"for",
"t",
"in",
"entry",
".",
"get_all",
"(",
"'Tag'",
",",
"[",
"]",
")",
"+",
"entry",
".",
"get_all",
"(",
"'Hidden-Tag'",
",",
"[",
"]",
")",
"}",
"for",
"tag",
"in",
"record",
".",
"tags",
":",
"if",
"tag",
".",
"key",
"in",
"set_tags",
":",
"set_tags",
".",
"remove",
"(",
"tag",
".",
"key",
")",
"else",
":",
"tag",
".",
"delete",
"(",
")",
"for",
"tag",
"in",
"set_tags",
":",
"model",
".",
"EntryTag",
"(",
"entry",
"=",
"record",
",",
"key",
"=",
"tag",
")",
"orm",
".",
"commit",
"(",
")",
"if",
"record",
".",
"status",
"==",
"model",
".",
"PublishStatus",
".",
"DRAFT",
".",
"value",
":",
"logger",
".",
"info",
"(",
"\"Not touching draft entry %s\"",
",",
"fullpath",
")",
"elif",
"fixup_needed",
":",
"logger",
".",
"info",
"(",
"\"Fixing up entry %s\"",
",",
"fullpath",
")",
"save_file",
"(",
"fullpath",
",",
"entry",
")",
"return",
"record"
] | 32.088496 | 0.000535 | [
"def scan_file(fullpath, relpath, assign_id):\n",
" \"\"\" scan a file and put it into the index \"\"\"\n",
" # pylint: disable=too-many-branches,too-many-statements,too-many-locals\n",
"\n",
" # Since a file has changed, the lrucache is invalid.\n",
" load_message.cache_clear()\n",
"\n",
" try:\n",
" entry = load_message(fullpath)\n",
" except FileNotFoundError:\n",
" # The file doesn't exist, so remove it from the index\n",
" record = model.Entry.get(file_path=fullpath)\n",
" if record:\n",
" expire_record(record)\n",
" return True\n",
"\n",
" entry_id = get_entry_id(entry, fullpath, assign_id)\n",
" if entry_id is None:\n",
" return False\n",
"\n",
" fixup_needed = False\n",
"\n",
" basename = os.path.basename(relpath)\n",
" title = entry['title'] or guess_title(basename)\n",
"\n",
" values = {\n",
" 'file_path': fullpath,\n",
" 'category': entry.get('Category', utils.get_category(relpath)),\n",
" 'status': model.PublishStatus[entry.get('Status', 'SCHEDULED').upper()].value,\n",
" 'entry_type': entry.get('Entry-Type', ''),\n",
" 'slug_text': make_slug(entry.get('Slug-Text', title)),\n",
" 'redirect_url': entry.get('Redirect-To', ''),\n",
" 'title': title,\n",
" 'sort_title': entry.get('Sort-Title', title),\n",
" 'entry_template': entry.get('Entry-Template', '')\n",
" }\n",
"\n",
" entry_date = None\n",
" if 'Date' in entry:\n",
" try:\n",
" entry_date = arrow.get(entry['Date'], tzinfo=config.timezone)\n",
" except arrow.parser.ParserError:\n",
" entry_date = None\n",
" if entry_date is None:\n",
" del entry['Date']\n",
" entry_date = arrow.get(\n",
" os.stat(fullpath).st_ctime).to(config.timezone)\n",
" entry['Date'] = entry_date.format()\n",
" fixup_needed = True\n",
"\n",
" if 'Last-Modified' in entry:\n",
" last_modified_str = entry['Last-Modified']\n",
" try:\n",
" last_modified = arrow.get(\n",
" last_modified_str, tzinfo=config.timezone)\n",
" except arrow.parser.ParserError:\n",
" last_modified = arrow.get()\n",
" del entry['Last-Modified']\n",
" entry['Last-Modified'] = last_modified.format()\n",
" fixup_needed = True\n",
"\n",
" values['display_date'] = entry_date.isoformat()\n",
" values['utc_date'] = entry_date.to('utc').datetime\n",
" values['local_date'] = entry_date.naive\n",
"\n",
" logger.debug(\"getting entry %s with id %d\", fullpath, entry_id)\n",
" record = model.Entry.get(id=entry_id)\n",
" if record:\n",
" logger.debug(\"Reusing existing entry %d\", record.id)\n",
" record.set(**values)\n",
" else:\n",
" record = model.Entry(id=entry_id, **values)\n",
"\n",
" # Update the entry ID\n",
" if str(record.id) != entry['Entry-ID']:\n",
" del entry['Entry-ID']\n",
" entry['Entry-ID'] = str(record.id)\n",
" fixup_needed = True\n",
"\n",
" if 'UUID' not in entry:\n",
" entry['UUID'] = str(uuid.uuid5(\n",
" uuid.NAMESPACE_URL, 'file://' + fullpath))\n",
" fixup_needed = True\n",
"\n",
" # add other relationships to the index\n",
" path_alias.remove_aliases(record)\n",
" if record.visible:\n",
" for alias in entry.get_all('Path-Alias', []):\n",
" path_alias.set_alias(alias, entry=record)\n",
"\n",
" with orm.db_session:\n",
" set_tags = {\n",
" t.lower()\n",
" for t in entry.get_all('Tag', [])\n",
" + entry.get_all('Hidden-Tag', [])\n",
" }\n",
"\n",
" for tag in record.tags:\n",
" if tag.key in set_tags:\n",
" set_tags.remove(tag.key)\n",
" else:\n",
" tag.delete()\n",
" for tag in set_tags:\n",
" model.EntryTag(entry=record, key=tag)\n",
" orm.commit()\n",
"\n",
" if record.status == model.PublishStatus.DRAFT.value:\n",
" logger.info(\"Not touching draft entry %s\", fullpath)\n",
" elif fixup_needed:\n",
" logger.info(\"Fixing up entry %s\", fullpath)\n",
" save_file(fullpath, entry)\n",
"\n",
" return record"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011494252873563218,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.058823529411764705
] | 113 | 0.000622 |
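
Among the steps in the `scan_file` record above, the date fallback is the easiest to isolate: when an entry has no parseable `Date` header, the file's ctime becomes the entry date. A dependency-free sketch with `datetime` in place of `arrow`, assuming the configured timezone is UTC:

    import os
    from datetime import datetime, timezone

    path = __file__
    entry_date = datetime.fromtimestamp(os.stat(path).st_ctime, tz=timezone.utc)
    print(entry_date.isoformat())   # e.g. 2021-03-04T12:00:00+00:00
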
def get_route_to(self, destination="", protocol=""):
"""
Only IPv4 supported, vrf aware, longer_prefixes parameter ready
"""
longer_pref = "" # longer_prefixes support, for future use
vrf = ""
ip_version = None
try:
ip_version = IPNetwork(destination).version
except AddrFormatError:
return "Please specify a valid destination!"
if ip_version == 4: # process IPv4 routing table
routes = {}
if vrf:
send_cmd = "show ip route vrf {vrf} {destination} {longer}".format(
vrf=vrf, destination=destination, longer=longer_pref
).rstrip()
else:
send_cmd = "show ip route vrf all {destination} {longer}".format(
destination=destination, longer=longer_pref
).rstrip()
out_sh_ip_rou = self._send_command(send_cmd)
# IP Route Table for VRF "TEST"
for vrfsec in out_sh_ip_rou.split("IP Route Table for ")[1:]:
if "Route not found" in vrfsec:
continue
vrffound = False
preffound = False
nh_list = []
cur_prefix = ""
for line in vrfsec.split("\n"):
if not vrffound:
vrfstr = RE_RT_VRF_NAME.match(line)
if vrfstr:
curvrf = vrfstr.group(1)
vrffound = True
else:
# 10.10.56.0/24, ubest/mbest: 2/0
prefstr = RE_RT_IPV4_ROUTE_PREF.match(line)
if prefstr:
if preffound: # precess previous prefix
if cur_prefix not in routes:
routes[cur_prefix] = []
for nh in nh_list:
routes[cur_prefix].append(nh)
nh_list = []
else:
preffound = True
cur_prefix = prefstr.group(1)
continue
# *via 10.2.49.60, Vlan3013, [0/0], 1y18w, direct
# via 10.17.205.132, Po77.3602, [110/20], 1y18w, ospf-1000,
# type-2, tag 2112
# *via 10.17.207.42, Eth3/7.212, [110/20], 02:19:36, ospf-1000, type-2,
# tag 2121
# *via 10.17.207.73, [1/0], 1y18w, static
# *via 10.17.209.132%vrf2, Po87.3606, [20/20], 1y25w, bgp-65000,
# external, tag 65000
# *via Vlan596, [1/0], 1y18w, static
viastr = RE_IP_ROUTE_VIA_REGEX.match(line)
if viastr:
nh_used = viastr.group("used") == "*"
nh_ip = viastr.group("ip") or ""
# when next hop is leaked from other vrf, for future use
# nh_vrf = viastr.group('vrf')
nh_int = viastr.group("int")
nh_metric = viastr.group("metric")
nh_age = bgp_time_conversion(viastr.group("age"))
nh_source = viastr.group("source")
# for future use
# rest_of_line = viastr.group('rest')
# use only routes from specified protocol
if protocol and protocol != nh_source:
continue
# routing protocol process number, for future use
# nh_source_proc_nr = viastr.group('procnr)
if nh_int:
nh_int_canon = helpers.canonical_interface_name(nh_int)
else:
nh_int_canon = ""
route_entry = {
"protocol": nh_source,
"outgoing_interface": nh_int_canon,
"age": nh_age,
"current_active": nh_used,
"routing_table": curvrf,
"last_active": nh_used,
"next_hop": nh_ip,
"selected_next_hop": nh_used,
"inactive_reason": "",
"preference": int(nh_metric),
}
if nh_source == "bgp":
route_entry[
"protocol_attributes"
] = self._get_bgp_route_attr(cur_prefix, curvrf, nh_ip)
else:
route_entry["protocol_attributes"] = {}
nh_list.append(route_entry)
# process last next hop entries
if preffound:
if cur_prefix not in routes:
routes[cur_prefix] = []
for nh in nh_list:
routes[cur_prefix].append(nh)
return routes | [
"def",
"get_route_to",
"(",
"self",
",",
"destination",
"=",
"\"\"",
",",
"protocol",
"=",
"\"\"",
")",
":",
"longer_pref",
"=",
"\"\"",
"# longer_prefixes support, for future use",
"vrf",
"=",
"\"\"",
"ip_version",
"=",
"None",
"try",
":",
"ip_version",
"=",
"IPNetwork",
"(",
"destination",
")",
".",
"version",
"except",
"AddrFormatError",
":",
"return",
"\"Please specify a valid destination!\"",
"if",
"ip_version",
"==",
"4",
":",
"# process IPv4 routing table",
"routes",
"=",
"{",
"}",
"if",
"vrf",
":",
"send_cmd",
"=",
"\"show ip route vrf {vrf} {destination} {longer}\"",
".",
"format",
"(",
"vrf",
"=",
"vrf",
",",
"destination",
"=",
"destination",
",",
"longer",
"=",
"longer_pref",
")",
".",
"rstrip",
"(",
")",
"else",
":",
"send_cmd",
"=",
"\"show ip route vrf all {destination} {longer}\"",
".",
"format",
"(",
"destination",
"=",
"destination",
",",
"longer",
"=",
"longer_pref",
")",
".",
"rstrip",
"(",
")",
"out_sh_ip_rou",
"=",
"self",
".",
"_send_command",
"(",
"send_cmd",
")",
"# IP Route Table for VRF \"TEST\"",
"for",
"vrfsec",
"in",
"out_sh_ip_rou",
".",
"split",
"(",
"\"IP Route Table for \"",
")",
"[",
"1",
":",
"]",
":",
"if",
"\"Route not found\"",
"in",
"vrfsec",
":",
"continue",
"vrffound",
"=",
"False",
"preffound",
"=",
"False",
"nh_list",
"=",
"[",
"]",
"cur_prefix",
"=",
"\"\"",
"for",
"line",
"in",
"vrfsec",
".",
"split",
"(",
"\"\\n\"",
")",
":",
"if",
"not",
"vrffound",
":",
"vrfstr",
"=",
"RE_RT_VRF_NAME",
".",
"match",
"(",
"line",
")",
"if",
"vrfstr",
":",
"curvrf",
"=",
"vrfstr",
".",
"group",
"(",
"1",
")",
"vrffound",
"=",
"True",
"else",
":",
"# 10.10.56.0/24, ubest/mbest: 2/0",
"prefstr",
"=",
"RE_RT_IPV4_ROUTE_PREF",
".",
"match",
"(",
"line",
")",
"if",
"prefstr",
":",
"if",
"preffound",
":",
"# precess previous prefix",
"if",
"cur_prefix",
"not",
"in",
"routes",
":",
"routes",
"[",
"cur_prefix",
"]",
"=",
"[",
"]",
"for",
"nh",
"in",
"nh_list",
":",
"routes",
"[",
"cur_prefix",
"]",
".",
"append",
"(",
"nh",
")",
"nh_list",
"=",
"[",
"]",
"else",
":",
"preffound",
"=",
"True",
"cur_prefix",
"=",
"prefstr",
".",
"group",
"(",
"1",
")",
"continue",
"# *via 10.2.49.60, Vlan3013, [0/0], 1y18w, direct",
"# via 10.17.205.132, Po77.3602, [110/20], 1y18w, ospf-1000,",
"# type-2, tag 2112",
"# *via 10.17.207.42, Eth3/7.212, [110/20], 02:19:36, ospf-1000, type-2,",
"# tag 2121",
"# *via 10.17.207.73, [1/0], 1y18w, static",
"# *via 10.17.209.132%vrf2, Po87.3606, [20/20], 1y25w, bgp-65000,",
"# external, tag 65000",
"# *via Vlan596, [1/0], 1y18w, static",
"viastr",
"=",
"RE_IP_ROUTE_VIA_REGEX",
".",
"match",
"(",
"line",
")",
"if",
"viastr",
":",
"nh_used",
"=",
"viastr",
".",
"group",
"(",
"\"used\"",
")",
"==",
"\"*\"",
"nh_ip",
"=",
"viastr",
".",
"group",
"(",
"\"ip\"",
")",
"or",
"\"\"",
"# when next hop is leaked from other vrf, for future use",
"# nh_vrf = viastr.group('vrf')",
"nh_int",
"=",
"viastr",
".",
"group",
"(",
"\"int\"",
")",
"nh_metric",
"=",
"viastr",
".",
"group",
"(",
"\"metric\"",
")",
"nh_age",
"=",
"bgp_time_conversion",
"(",
"viastr",
".",
"group",
"(",
"\"age\"",
")",
")",
"nh_source",
"=",
"viastr",
".",
"group",
"(",
"\"source\"",
")",
"# for future use",
"# rest_of_line = viastr.group('rest')",
"# use only routes from specified protocol",
"if",
"protocol",
"and",
"protocol",
"!=",
"nh_source",
":",
"continue",
"# routing protocol process number, for future use",
"# nh_source_proc_nr = viastr.group('procnr)",
"if",
"nh_int",
":",
"nh_int_canon",
"=",
"helpers",
".",
"canonical_interface_name",
"(",
"nh_int",
")",
"else",
":",
"nh_int_canon",
"=",
"\"\"",
"route_entry",
"=",
"{",
"\"protocol\"",
":",
"nh_source",
",",
"\"outgoing_interface\"",
":",
"nh_int_canon",
",",
"\"age\"",
":",
"nh_age",
",",
"\"current_active\"",
":",
"nh_used",
",",
"\"routing_table\"",
":",
"curvrf",
",",
"\"last_active\"",
":",
"nh_used",
",",
"\"next_hop\"",
":",
"nh_ip",
",",
"\"selected_next_hop\"",
":",
"nh_used",
",",
"\"inactive_reason\"",
":",
"\"\"",
",",
"\"preference\"",
":",
"int",
"(",
"nh_metric",
")",
",",
"}",
"if",
"nh_source",
"==",
"\"bgp\"",
":",
"route_entry",
"[",
"\"protocol_attributes\"",
"]",
"=",
"self",
".",
"_get_bgp_route_attr",
"(",
"cur_prefix",
",",
"curvrf",
",",
"nh_ip",
")",
"else",
":",
"route_entry",
"[",
"\"protocol_attributes\"",
"]",
"=",
"{",
"}",
"nh_list",
".",
"append",
"(",
"route_entry",
")",
"# process last next hop entries",
"if",
"preffound",
":",
"if",
"cur_prefix",
"not",
"in",
"routes",
":",
"routes",
"[",
"cur_prefix",
"]",
"=",
"[",
"]",
"for",
"nh",
"in",
"nh_list",
":",
"routes",
"[",
"cur_prefix",
"]",
".",
"append",
"(",
"nh",
")",
"return",
"routes"
] | 50.841121 | 0.001803 | [
"def get_route_to(self, destination=\"\", protocol=\"\"):\n",
" \"\"\"\n",
" Only IPv4 supported, vrf aware, longer_prefixes parameter ready\n",
" \"\"\"\n",
" longer_pref = \"\" # longer_prefixes support, for future use\n",
" vrf = \"\"\n",
"\n",
" ip_version = None\n",
" try:\n",
" ip_version = IPNetwork(destination).version\n",
" except AddrFormatError:\n",
" return \"Please specify a valid destination!\"\n",
" if ip_version == 4: # process IPv4 routing table\n",
" routes = {}\n",
" if vrf:\n",
" send_cmd = \"show ip route vrf {vrf} {destination} {longer}\".format(\n",
" vrf=vrf, destination=destination, longer=longer_pref\n",
" ).rstrip()\n",
" else:\n",
" send_cmd = \"show ip route vrf all {destination} {longer}\".format(\n",
" destination=destination, longer=longer_pref\n",
" ).rstrip()\n",
" out_sh_ip_rou = self._send_command(send_cmd)\n",
" # IP Route Table for VRF \"TEST\"\n",
" for vrfsec in out_sh_ip_rou.split(\"IP Route Table for \")[1:]:\n",
" if \"Route not found\" in vrfsec:\n",
" continue\n",
" vrffound = False\n",
" preffound = False\n",
" nh_list = []\n",
" cur_prefix = \"\"\n",
" for line in vrfsec.split(\"\\n\"):\n",
" if not vrffound:\n",
" vrfstr = RE_RT_VRF_NAME.match(line)\n",
" if vrfstr:\n",
" curvrf = vrfstr.group(1)\n",
" vrffound = True\n",
" else:\n",
" # 10.10.56.0/24, ubest/mbest: 2/0\n",
" prefstr = RE_RT_IPV4_ROUTE_PREF.match(line)\n",
" if prefstr:\n",
" if preffound: # precess previous prefix\n",
" if cur_prefix not in routes:\n",
" routes[cur_prefix] = []\n",
" for nh in nh_list:\n",
" routes[cur_prefix].append(nh)\n",
" nh_list = []\n",
" else:\n",
" preffound = True\n",
" cur_prefix = prefstr.group(1)\n",
" continue\n",
" # *via 10.2.49.60, Vlan3013, [0/0], 1y18w, direct\n",
" # via 10.17.205.132, Po77.3602, [110/20], 1y18w, ospf-1000,\n",
" # type-2, tag 2112\n",
" # *via 10.17.207.42, Eth3/7.212, [110/20], 02:19:36, ospf-1000, type-2,\n",
" # tag 2121\n",
" # *via 10.17.207.73, [1/0], 1y18w, static\n",
" # *via 10.17.209.132%vrf2, Po87.3606, [20/20], 1y25w, bgp-65000,\n",
" # external, tag 65000\n",
" # *via Vlan596, [1/0], 1y18w, static\n",
" viastr = RE_IP_ROUTE_VIA_REGEX.match(line)\n",
" if viastr:\n",
" nh_used = viastr.group(\"used\") == \"*\"\n",
" nh_ip = viastr.group(\"ip\") or \"\"\n",
" # when next hop is leaked from other vrf, for future use\n",
" # nh_vrf = viastr.group('vrf')\n",
" nh_int = viastr.group(\"int\")\n",
" nh_metric = viastr.group(\"metric\")\n",
" nh_age = bgp_time_conversion(viastr.group(\"age\"))\n",
" nh_source = viastr.group(\"source\")\n",
" # for future use\n",
" # rest_of_line = viastr.group('rest')\n",
" # use only routes from specified protocol\n",
" if protocol and protocol != nh_source:\n",
" continue\n",
" # routing protocol process number, for future use\n",
" # nh_source_proc_nr = viastr.group('procnr)\n",
" if nh_int:\n",
" nh_int_canon = helpers.canonical_interface_name(nh_int)\n",
" else:\n",
" nh_int_canon = \"\"\n",
" route_entry = {\n",
" \"protocol\": nh_source,\n",
" \"outgoing_interface\": nh_int_canon,\n",
" \"age\": nh_age,\n",
" \"current_active\": nh_used,\n",
" \"routing_table\": curvrf,\n",
" \"last_active\": nh_used,\n",
" \"next_hop\": nh_ip,\n",
" \"selected_next_hop\": nh_used,\n",
" \"inactive_reason\": \"\",\n",
" \"preference\": int(nh_metric),\n",
" }\n",
" if nh_source == \"bgp\":\n",
" route_entry[\n",
" \"protocol_attributes\"\n",
" ] = self._get_bgp_route_attr(cur_prefix, curvrf, nh_ip)\n",
" else:\n",
" route_entry[\"protocol_attributes\"] = {}\n",
" nh_list.append(route_entry)\n",
" # process last next hop entries\n",
" if preffound:\n",
" if cur_prefix not in routes:\n",
" routes[cur_prefix] = []\n",
" for nh in nh_list:\n",
" routes[cur_prefix].append(nh)\n",
" return routes"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011904761904761904,
0,
0,
0,
0.012195121951219513,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011235955056179775,
0,
0.01,
0,
0,
0.010752688172043012,
0,
0,
0,
0,
0,
0,
0.011764705882352941,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011363636363636364,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011363636363636364,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.047619047619047616
] | 107 | 0.00207 |
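
The `RE_IP_ROUTE_VIA_REGEX` used by the `get_route_to` record above is not part of this sample; the pattern below is a simplified, hypothetical reconstruction that covers the commented examples where both an IP next hop and an interface are present:

    import re

    VIA = re.compile(
        r'^\s*(?P<used>\*?)via (?P<ip>\d+\.\d+\.\d+\.\d+), (?P<int>\S+), '
        r'\[(?P<metric>\d+)/\d+\], (?P<age>\S+), (?P<source>[\w-]+)'
    )

    line = '    *via 10.2.49.60, Vlan3013, [0/0], 1y18w, direct'
    m = VIA.match(line)
    print(m.group('used') == '*', m.group('ip'), m.group('int'),
          m.group('metric'), m.group('age'), m.group('source'))
    # True 10.2.49.60 Vlan3013 0 1y18w direct
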
def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
"""
Load the schedulers state.
Parameters
----------
state_dict : ``Dict[str, Any]``
Scheduler state. Should be an object returned from a call to ``state_dict``.
"""
self.__dict__.update(state_dict) | [
"def",
"load_state_dict",
"(",
"self",
",",
"state_dict",
":",
"Dict",
"[",
"str",
",",
"Any",
"]",
")",
"->",
"None",
":",
"self",
".",
"__dict__",
".",
"update",
"(",
"state_dict",
")"
] | 32.1 | 0.009091 | [
"def load_state_dict(self, state_dict: Dict[str, Any]) -> None:\n",
" \"\"\"\n",
" Load the schedulers state.\n",
"\n",
" Parameters\n",
" ----------\n",
" state_dict : ``Dict[str, Any]``\n",
" Scheduler state. Should be an object returned from a call to ``state_dict``.\n",
" \"\"\"\n",
" self.__dict__.update(state_dict)"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0.011235955056179775,
0,
0.025
] | 10 | 0.011957 |
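
A minimal round-trip sketch of the `load_state_dict` record's pattern; the surrounding scheduler class is not in this sample, so a stub carries the state:

    from typing import Any, Dict

    class Scheduler:
        def __init__(self) -> None:
            self.last_epoch = 0

        def state_dict(self) -> Dict[str, Any]:
            return dict(self.__dict__)   # shallow copy of the state

        def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
            self.__dict__.update(state_dict)

    a, b = Scheduler(), Scheduler()
    a.last_epoch = 7
    b.load_state_dict(a.state_dict())
    print(b.last_epoch)   # 7
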
def _read_from_paths():
"""
Try to read data from configuration paths ($HOME/_SETTINGS_PATH,
/etc/_SETTINGS_PATH).
"""
home = os.environ.get("HOME", "")
home_path = os.path.join(home, _SETTINGS_PATH)
etc_path = os.path.join("/etc", _SETTINGS_PATH)
env_path = os.environ.get("SETTINGS_PATH", "")
read_path = None
if env_path and os.path.exists(env_path):
read_path = env_path
elif home and os.path.exists(home_path):
read_path = home_path
elif os.path.exists(etc_path):
read_path = etc_path
if not read_path:
return "{}"
with open(read_path) as f:
return f.read() | [
"def",
"_read_from_paths",
"(",
")",
":",
"home",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"\"HOME\"",
",",
"\"\"",
")",
"home_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"_SETTINGS_PATH",
")",
"etc_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"\"/etc\"",
",",
"_SETTINGS_PATH",
")",
"env_path",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"\"SETTINGS_PATH\"",
",",
"\"\"",
")",
"read_path",
"=",
"None",
"if",
"env_path",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"env_path",
")",
":",
"read_path",
"=",
"env_path",
"elif",
"home",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"home_path",
")",
":",
"read_path",
"=",
"home_path",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
"etc_path",
")",
":",
"read_path",
"=",
"etc_path",
"if",
"not",
"read_path",
":",
"return",
"\"{}\"",
"with",
"open",
"(",
"read_path",
")",
"as",
"f",
":",
"return",
"f",
".",
"read",
"(",
")"
] | 27.782609 | 0.001513 | [
"def _read_from_paths():\n",
" \"\"\"\n",
" Try to read data from configuration paths ($HOME/_SETTINGS_PATH,\n",
" /etc/_SETTINGS_PATH).\n",
" \"\"\"\n",
" home = os.environ.get(\"HOME\", \"\")\n",
" home_path = os.path.join(home, _SETTINGS_PATH)\n",
" etc_path = os.path.join(\"/etc\", _SETTINGS_PATH)\n",
" env_path = os.environ.get(\"SETTINGS_PATH\", \"\")\n",
"\n",
" read_path = None\n",
" if env_path and os.path.exists(env_path):\n",
" read_path = env_path\n",
" elif home and os.path.exists(home_path):\n",
" read_path = home_path\n",
" elif os.path.exists(etc_path):\n",
" read_path = etc_path\n",
"\n",
" if not read_path:\n",
" return \"{}\"\n",
"\n",
" with open(read_path) as f:\n",
" return f.read()"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.043478260869565216
] | 23 | 0.00189 |
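The function above implements a first-existing-path fallback: an explicit SETTINGS_PATH override wins, then the per-user file, then the system-wide one, and a default ("{}") is returned when none exists. A runnable sketch of the same lookup order, using a temporary file in place of the real config locations:

    import os
    import tempfile

    def first_existing(paths):
        for path in paths:
            if path and os.path.exists(path):
                return path
        return None

    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        f.write('{"debug": true}')
        fallback = f.name

    os.environ.pop("SETTINGS_PATH", None)  # make sure the override is unset for the demo
    chosen = first_existing([os.environ.get("SETTINGS_PATH", ""), fallback])
    with open(chosen) as f:
        assert f.read() == '{"debug": true}'
    os.unlink(fallback)
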
def A(*a):
"""convert iterable object into numpy array"""
return np.array(a[0]) if len(a)==1 else [np.array(o) for o in a] | [
"def",
"A",
"(",
"*",
"a",
")",
":",
"return",
"np",
".",
"array",
"(",
"a",
"[",
"0",
"]",
")",
"if",
"len",
"(",
"a",
")",
"==",
"1",
"else",
"[",
"np",
".",
"array",
"(",
"o",
")",
"for",
"o",
"in",
"a",
"]"
] | 42.666667 | 0.015385 | [
"def A(*a):\n",
" \"\"\"convert iterable object into numpy array\"\"\"\n",
" return np.array(a[0]) if len(a)==1 else [np.array(o) for o in a]"
] | [
0,
0,
0.029411764705882353
] | 3 | 0.009804 |
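Usage of the record's helper with numpy: one positional argument yields a single ndarray, several yield a list of ndarrays:

    import numpy as np

    def A(*a):
        # convert iterable object(s) into numpy array(s)
        return np.array(a[0]) if len(a) == 1 else [np.array(o) for o in a]

    x = A([1, 2, 3])           # one argument  -> a single ndarray
    y, z = A([1, 2], [3, 4])   # two arguments -> a list of ndarrays
    assert x.shape == (3,) and y.shape == z.shape == (2,)
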
def _gl_look_at(self, pos, target, up):
"""
The standard lookAt method
:param pos: current position
:param target: target position to look at
:param up: direction up
"""
z = vector.normalise(pos - target)
x = vector.normalise(vector3.cross(vector.normalise(up), z))
y = vector3.cross(z, x)
translate = matrix44.create_identity()
translate[3][0] = -pos.x
translate[3][1] = -pos.y
translate[3][2] = -pos.z
rotate = matrix44.create_identity()
rotate[0][0] = x[0] # -- X
rotate[1][0] = x[1]
rotate[2][0] = x[2]
rotate[0][1] = y[0] # -- Y
rotate[1][1] = y[1]
rotate[2][1] = y[2]
rotate[0][2] = z[0] # -- Z
rotate[1][2] = z[1]
rotate[2][2] = z[2]
return matrix44.multiply(translate, rotate) | [
"def",
"_gl_look_at",
"(",
"self",
",",
"pos",
",",
"target",
",",
"up",
")",
":",
"z",
"=",
"vector",
".",
"normalise",
"(",
"pos",
"-",
"target",
")",
"x",
"=",
"vector",
".",
"normalise",
"(",
"vector3",
".",
"cross",
"(",
"vector",
".",
"normalise",
"(",
"up",
")",
",",
"z",
")",
")",
"y",
"=",
"vector3",
".",
"cross",
"(",
"z",
",",
"x",
")",
"translate",
"=",
"matrix44",
".",
"create_identity",
"(",
")",
"translate",
"[",
"3",
"]",
"[",
"0",
"]",
"=",
"-",
"pos",
".",
"x",
"translate",
"[",
"3",
"]",
"[",
"1",
"]",
"=",
"-",
"pos",
".",
"y",
"translate",
"[",
"3",
"]",
"[",
"2",
"]",
"=",
"-",
"pos",
".",
"z",
"rotate",
"=",
"matrix44",
".",
"create_identity",
"(",
")",
"rotate",
"[",
"0",
"]",
"[",
"0",
"]",
"=",
"x",
"[",
"0",
"]",
"# -- X",
"rotate",
"[",
"1",
"]",
"[",
"0",
"]",
"=",
"x",
"[",
"1",
"]",
"rotate",
"[",
"2",
"]",
"[",
"0",
"]",
"=",
"x",
"[",
"2",
"]",
"rotate",
"[",
"0",
"]",
"[",
"1",
"]",
"=",
"y",
"[",
"0",
"]",
"# -- Y",
"rotate",
"[",
"1",
"]",
"[",
"1",
"]",
"=",
"y",
"[",
"1",
"]",
"rotate",
"[",
"2",
"]",
"[",
"1",
"]",
"=",
"y",
"[",
"2",
"]",
"rotate",
"[",
"0",
"]",
"[",
"2",
"]",
"=",
"z",
"[",
"0",
"]",
"# -- Z",
"rotate",
"[",
"1",
"]",
"[",
"2",
"]",
"=",
"z",
"[",
"1",
"]",
"rotate",
"[",
"2",
"]",
"[",
"2",
"]",
"=",
"z",
"[",
"2",
"]",
"return",
"matrix44",
".",
"multiply",
"(",
"translate",
",",
"rotate",
")"
] | 29.482759 | 0.002265 | [
"def _gl_look_at(self, pos, target, up):\n",
" \"\"\"\n",
" The standard lookAt method\n",
"\n",
" :param pos: current position\n",
" :param target: target position to look at\n",
" :param up: direction up\n",
" \"\"\"\n",
" z = vector.normalise(pos - target)\n",
" x = vector.normalise(vector3.cross(vector.normalise(up), z))\n",
" y = vector3.cross(z, x)\n",
"\n",
" translate = matrix44.create_identity()\n",
" translate[3][0] = -pos.x\n",
" translate[3][1] = -pos.y\n",
" translate[3][2] = -pos.z\n",
"\n",
" rotate = matrix44.create_identity()\n",
" rotate[0][0] = x[0] # -- X\n",
" rotate[1][0] = x[1]\n",
" rotate[2][0] = x[2]\n",
" rotate[0][1] = y[0] # -- Y\n",
" rotate[1][1] = y[1]\n",
" rotate[2][1] = y[2]\n",
" rotate[0][2] = z[0] # -- Z\n",
" rotate[1][2] = z[1]\n",
" rotate[2][2] = z[2]\n",
"\n",
" return matrix44.multiply(translate, rotate)"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.0196078431372549
] | 29 | 0.00355 |
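The same look-at construction with the pyrr-style vector/matrix44 helpers replaced by plain numpy, keeping the record's apparent row-vector convention (translation in row 3, multiplication order translate @ rotate). A sketch under that assumed convention, not the original implementation:

    import numpy as np

    def look_at(pos, target, up):
        z = pos - target
        z = z / np.linalg.norm(z)
        x = np.cross(up / np.linalg.norm(up), z)
        x = x / np.linalg.norm(x)
        y = np.cross(z, x)
        translate = np.eye(4)
        translate[3, :3] = -pos            # row 3 carries the translation
        rotate = np.eye(4)
        rotate[:3, 0], rotate[:3, 1], rotate[:3, 2] = x, y, z
        return translate @ rotate

    m = look_at(np.array([0.0, 0.0, 5.0]), np.zeros(3), np.array([0.0, 1.0, 0.0]))
    # the eye position maps to the view-space origin
    assert np.allclose(np.array([0.0, 0.0, 5.0, 1.0]) @ m, [0.0, 0.0, 0.0, 1.0])
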
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the Digest object to a stream.
Args:
ostream (Stream): A data stream in which to encode object data,
supporting a write method; usually a BytearrayStream object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
"""
tstream = BytearrayStream()
self.hashing_algorithm.write(tstream, kmip_version=kmip_version)
self.digest_value.write(tstream, kmip_version=kmip_version)
self.key_format_type.write(tstream, kmip_version=kmip_version)
self.length = tstream.length()
super(Digest, self).write(ostream, kmip_version=kmip_version)
ostream.write(tstream.buffer) | [
"def",
"write",
"(",
"self",
",",
"ostream",
",",
"kmip_version",
"=",
"enums",
".",
"KMIPVersion",
".",
"KMIP_1_0",
")",
":",
"tstream",
"=",
"BytearrayStream",
"(",
")",
"self",
".",
"hashing_algorithm",
".",
"write",
"(",
"tstream",
",",
"kmip_version",
"=",
"kmip_version",
")",
"self",
".",
"digest_value",
".",
"write",
"(",
"tstream",
",",
"kmip_version",
"=",
"kmip_version",
")",
"self",
".",
"key_format_type",
".",
"write",
"(",
"tstream",
",",
"kmip_version",
"=",
"kmip_version",
")",
"self",
".",
"length",
"=",
"tstream",
".",
"length",
"(",
")",
"super",
"(",
"Digest",
",",
"self",
")",
".",
"write",
"(",
"ostream",
",",
"kmip_version",
"=",
"kmip_version",
")",
"ostream",
".",
"write",
"(",
"tstream",
".",
"buffer",
")"
] | 44.15 | 0.002217 | [
"def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):\n",
" \"\"\"\n",
" Write the data encoding the Digest object to a stream.\n",
"\n",
" Args:\n",
" ostream (Stream): A data stream in which to encode object data,\n",
" supporting a write method; usually a BytearrayStream object.\n",
" kmip_version (KMIPVersion): An enumeration defining the KMIP\n",
" version with which the object will be encoded. Optional,\n",
" defaults to KMIP 1.0.\n",
" \"\"\"\n",
" tstream = BytearrayStream()\n",
"\n",
" self.hashing_algorithm.write(tstream, kmip_version=kmip_version)\n",
" self.digest_value.write(tstream, kmip_version=kmip_version)\n",
" self.key_format_type.write(tstream, kmip_version=kmip_version)\n",
"\n",
" self.length = tstream.length()\n",
" super(Digest, self).write(ostream, kmip_version=kmip_version)\n",
" ostream.write(tstream.buffer)"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.02702702702702703
] | 20 | 0.005518 |
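The write method above follows a stage-then-emit pattern: fields are serialized into a scratch buffer first, so the total length is known before the header goes out. A minimal sketch of that pattern with a hypothetical 2-byte tag / 4-byte length header (the real KMIP TTLV layout differs):

    import io
    import struct

    def write_tlv(ostream, tag, payload_fields):
        tstream = io.BytesIO()                 # scratch buffer, like the record's tstream
        for field in payload_fields:
            tstream.write(field)
        body = tstream.getvalue()
        ostream.write(struct.pack(">HI", tag, len(body)))  # header carries the known length
        ostream.write(body)

    out = io.BytesIO()
    write_tlv(out, 0x0034, [b"\x01\x02", b"\x03"])
    assert out.getvalue() == b"\x00\x34\x00\x00\x00\x03\x01\x02\x03"
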
def get_key(raw=False):
""" Gets a single key from stdin
"""
file_descriptor = stdin.fileno()
state = tcgetattr(file_descriptor)
chars = []
try:
setraw(stdin.fileno())
for i in range(3):
char = stdin.read(1)
ordinal = ord(char)
chars.append(char)
if i == 0 and ordinal != 27:
break
elif i == 1 and ordinal != 91:
break
elif i == 2 and ordinal != 51:
break
finally:
tcsetattr(file_descriptor, TCSADRAIN, state)
result = "".join(chars)
return result if raw else codes.keys_flipped.get(result, result) | [
"def",
"get_key",
"(",
"raw",
"=",
"False",
")",
":",
"file_descriptor",
"=",
"stdin",
".",
"fileno",
"(",
")",
"state",
"=",
"tcgetattr",
"(",
"file_descriptor",
")",
"chars",
"=",
"[",
"]",
"try",
":",
"setraw",
"(",
"stdin",
".",
"fileno",
"(",
")",
")",
"for",
"i",
"in",
"range",
"(",
"3",
")",
":",
"char",
"=",
"stdin",
".",
"read",
"(",
"1",
")",
"ordinal",
"=",
"ord",
"(",
"char",
")",
"chars",
".",
"append",
"(",
"char",
")",
"if",
"i",
"==",
"0",
"and",
"ordinal",
"!=",
"27",
":",
"break",
"elif",
"i",
"==",
"1",
"and",
"ordinal",
"!=",
"91",
":",
"break",
"elif",
"i",
"==",
"2",
"and",
"ordinal",
"!=",
"51",
":",
"break",
"finally",
":",
"tcsetattr",
"(",
"file_descriptor",
",",
"TCSADRAIN",
",",
"state",
")",
"result",
"=",
"\"\"",
".",
"join",
"(",
"chars",
")",
"return",
"result",
"if",
"raw",
"else",
"codes",
".",
"keys_flipped",
".",
"get",
"(",
"result",
",",
"result",
")"
] | 29.863636 | 0.001475 | [
"def get_key(raw=False):\n",
" \"\"\" Gets a single key from stdin\n",
" \"\"\"\n",
" file_descriptor = stdin.fileno()\n",
" state = tcgetattr(file_descriptor)\n",
" chars = []\n",
" try:\n",
" setraw(stdin.fileno())\n",
" for i in range(3):\n",
" char = stdin.read(1)\n",
" ordinal = ord(char)\n",
" chars.append(char)\n",
" if i == 0 and ordinal != 27:\n",
" break\n",
" elif i == 1 and ordinal != 91:\n",
" break\n",
" elif i == 2 and ordinal != 51:\n",
" break\n",
" finally:\n",
" tcsetattr(file_descriptor, TCSADRAIN, state)\n",
" result = \"\".join(chars)\n",
" return result if raw else codes.keys_flipped.get(result, result)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.014705882352941176
] | 22 | 0.000668 |
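The loop above reads at most three bytes: a lone byte is returned as-is, while ESC (27) followed by '[' (91) and possibly '3' (51) forms an ANSI escape sequence. The final lookup maps the raw sequence to a key name; a sketch of such a table (the real codes.keys_flipped may differ):

    # hypothetical mapping in the spirit of codes.keys_flipped
    keys_flipped = {
        "\x1b[A": "up",
        "\x1b[B": "down",
        "\x1b[C": "right",
        "\x1b[D": "left",
    }

    def decode(raw):
        return keys_flipped.get(raw, raw)

    assert decode("\x1b[A") == "up"
    assert decode("q") == "q"   # plain characters pass through unchanged
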
def make_view(robot):
"""
Generate a Bottle view for a BaseRoBot.
Usage ::
from werobot import WeRoBot
robot = WeRoBot(token='token')
@robot.handler
def hello(message):
return 'Hello World!'
from bottle import Bottle
from werobot.contrib.bottle import make_view
app = Bottle()
app.route(
'/robot', # WeRoBot mount path
['GET', 'POST'],
make_view(robot)
)
:param robot: a BaseRoBot instance
:return: a standard Bottle view
"""
def werobot_view(*args, **kwargs):
if not robot.check_signature(
request.query.timestamp, request.query.nonce,
request.query.signature
):
return HTTPResponse(
status=403,
body=robot.make_error_page(html.escape(request.url))
)
if request.method == 'GET':
return request.query.echostr
else:
body = request.body.read()
message = robot.parse_message(
body,
timestamp=request.query.timestamp,
nonce=request.query.nonce,
msg_signature=request.query.msg_signature
)
return robot.get_encrypted_reply(message)
return werobot_view | [
"def",
"make_view",
"(",
"robot",
")",
":",
"def",
"werobot_view",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"robot",
".",
"check_signature",
"(",
"request",
".",
"query",
".",
"timestamp",
",",
"request",
".",
"query",
".",
"nonce",
",",
"request",
".",
"query",
".",
"signature",
")",
":",
"return",
"HTTPResponse",
"(",
"status",
"=",
"403",
",",
"body",
"=",
"robot",
".",
"make_error_page",
"(",
"html",
".",
"escape",
"(",
"request",
".",
"url",
")",
")",
")",
"if",
"request",
".",
"method",
"==",
"'GET'",
":",
"return",
"request",
".",
"query",
".",
"echostr",
"else",
":",
"body",
"=",
"request",
".",
"body",
".",
"read",
"(",
")",
"message",
"=",
"robot",
".",
"parse_message",
"(",
"body",
",",
"timestamp",
"=",
"request",
".",
"query",
".",
"timestamp",
",",
"nonce",
"=",
"request",
".",
"query",
".",
"nonce",
",",
"msg_signature",
"=",
"request",
".",
"query",
".",
"msg_signature",
")",
"return",
"robot",
".",
"get_encrypted_reply",
"(",
"message",
")",
"return",
"werobot_view"
] | 24.403846 | 0.000758 | [
"def make_view(robot):\n",
" \"\"\"\n",
" 为一个 BaseRoBot 生成 Bottle view。\n",
"\n",
" Usage ::\n",
"\n",
" from werobot import WeRoBot\n",
"\n",
" robot = WeRoBot(token='token')\n",
"\n",
"\n",
" @robot.handler\n",
" def hello(message):\n",
" return 'Hello World!'\n",
"\n",
" from bottle import Bottle\n",
" from werobot.contrib.bottle import make_view\n",
"\n",
" app = Bottle()\n",
" app.route(\n",
" '/robot', # WeRoBot 挂载地址\n",
" ['GET', 'POST'],\n",
" make_view(robot)\n",
" )\n",
"\n",
"\n",
" :param robot: 一个 BaseRoBot 实例\n",
" :return: 一个标准的 Bottle view\n",
" \"\"\"\n",
"\n",
" def werobot_view(*args, **kwargs):\n",
" if not robot.check_signature(\n",
" request.query.timestamp, request.query.nonce,\n",
" request.query.signature\n",
" ):\n",
" return HTTPResponse(\n",
" status=403,\n",
" body=robot.make_error_page(html.escape(request.url))\n",
" )\n",
" if request.method == 'GET':\n",
" return request.query.echostr\n",
" else:\n",
" body = request.body.read()\n",
" message = robot.parse_message(\n",
" body,\n",
" timestamp=request.query.timestamp,\n",
" nonce=request.query.nonce,\n",
" msg_signature=request.query.msg_signature\n",
" )\n",
" return robot.get_encrypted_reply(message)\n",
"\n",
" return werobot_view"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.043478260869565216
] | 52 | 0.000836 |
def delete(self, **context):
"""
Removes this record from the database. If the dryRun \
flag is specified then the command will be logged and \
not executed.
:note From version 0.6.0 on, this method now accepts a mutable
keyword dictionary of values. You can supply any member
value for either the <orb.LookupOptions> or
<orb.Context>, as well as the keyword 'lookup' to
an instance of <orb.LookupOptions> and 'options' for
an instance of the <orb.Context>
:return <int>
"""
if not self.isRecord():
return 0
event = orb.events.DeleteEvent(record=self, context=self.context(**context))
if self.processEvent(event):
self.onDelete(event)
if event.preventDefault:
return 0
if self.__delayed:
self.__delayed = False
self.read()
with WriteLocker(self.__dataLock):
self.__loaded.clear()
context = self.context(**context)
conn = context.db.connection()
_, count = conn.delete([self], context)
# clear out the old values
if count == 1:
col = self.schema().column(self.schema().idColumn())
with WriteLocker(self.__dataLock):
self.__values[col.name()] = (None, None)
return count | [
"def",
"delete",
"(",
"self",
",",
"*",
"*",
"context",
")",
":",
"if",
"not",
"self",
".",
"isRecord",
"(",
")",
":",
"return",
"0",
"event",
"=",
"orb",
".",
"events",
".",
"DeleteEvent",
"(",
"record",
"=",
"self",
",",
"context",
"=",
"self",
".",
"context",
"(",
"*",
"*",
"context",
")",
")",
"if",
"self",
".",
"processEvent",
"(",
"event",
")",
":",
"self",
".",
"onDelete",
"(",
"event",
")",
"if",
"event",
".",
"preventDefault",
":",
"return",
"0",
"if",
"self",
".",
"__delayed",
":",
"self",
".",
"__delayed",
"=",
"False",
"self",
".",
"read",
"(",
")",
"with",
"WriteLocker",
"(",
"self",
".",
"__dataLock",
")",
":",
"self",
".",
"__loaded",
".",
"clear",
"(",
")",
"context",
"=",
"self",
".",
"context",
"(",
"*",
"*",
"context",
")",
"conn",
"=",
"context",
".",
"db",
".",
"connection",
"(",
")",
"_",
",",
"count",
"=",
"conn",
".",
"delete",
"(",
"[",
"self",
"]",
",",
"context",
")",
"# clear out the old values",
"if",
"count",
"==",
"1",
":",
"col",
"=",
"self",
".",
"schema",
"(",
")",
".",
"column",
"(",
"self",
".",
"schema",
"(",
")",
".",
"idColumn",
"(",
")",
")",
"with",
"WriteLocker",
"(",
"self",
".",
"__dataLock",
")",
":",
"self",
".",
"__values",
"[",
"col",
".",
"name",
"(",
")",
"]",
"=",
"(",
"None",
",",
"None",
")",
"return",
"count"
] | 32.813953 | 0.002065 | [
"def delete(self, **context):\n",
" \"\"\"\n",
" Removes this record from the database. If the dryRun \\\n",
" flag is specified then the command will be logged and \\\n",
" not executed.\n",
"\n",
" :note From version 0.6.0 on, this method now accepts a mutable\n",
" keyword dictionary of values. You can supply any member\n",
" value for either the <orb.LookupOptions> or\n",
" <orb.Context>, as well as the keyword 'lookup' to\n",
" an instance of <orb.LookupOptions> and 'options' for\n",
" an instance of the <orb.Context>\n",
"\n",
" :return <int>\n",
" \"\"\"\n",
" if not self.isRecord():\n",
" return 0\n",
"\n",
" event = orb.events.DeleteEvent(record=self, context=self.context(**context))\n",
" if self.processEvent(event):\n",
" self.onDelete(event)\n",
"\n",
" if event.preventDefault:\n",
" return 0\n",
"\n",
" if self.__delayed:\n",
" self.__delayed = False\n",
" self.read()\n",
"\n",
" with WriteLocker(self.__dataLock):\n",
" self.__loaded.clear()\n",
"\n",
" context = self.context(**context)\n",
" conn = context.db.connection()\n",
" _, count = conn.delete([self], context)\n",
"\n",
" # clear out the old values\n",
" if count == 1:\n",
" col = self.schema().column(self.schema().idColumn())\n",
" with WriteLocker(self.__dataLock):\n",
" self.__values[col.name()] = (None, None)\n",
"\n",
" return count"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011764705882352941,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05
] | 43 | 0.003374 |
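The delete above shows an event-veto pattern: listeners receive a DeleteEvent and may set preventDefault to cancel the operation before any database work happens. A self-contained sketch of that pattern (class and handler names are stand-ins for the orb originals):

    class DeleteEvent(object):
        def __init__(self):
            self.preventDefault = False

    def delete(record, handlers):
        event = DeleteEvent()
        for handler in handlers:
            handler(event)
        if event.preventDefault:
            return 0            # a listener vetoed the delete
        record.clear()          # stands in for the real database round trip
        return 1

    rec = {"id": 1}
    assert delete(rec, [lambda ev: setattr(ev, "preventDefault", True)]) == 0
    assert rec == {"id": 1}     # record untouched after the veto
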
def find_common_root(elements):
"""
Find the root which is common to all `elements`.
Args:
elements (list): List of double-linked HTMLElement objects.
Returns:
list: Vector of HTMLElement containing path to common root.
"""
if not elements:
raise UserWarning("Can't find common root - no elements suplied.")
root_path = el_to_path_vector(elements.pop())
for el in elements:
el_path = el_to_path_vector(el)
root_path = common_vector_root(root_path, el_path)
if not root_path:
raise UserWarning(
"Vectors without common root:\n%s" % str(el_path)
)
return root_path | [
"def",
"find_common_root",
"(",
"elements",
")",
":",
"if",
"not",
"elements",
":",
"raise",
"UserWarning",
"(",
"\"Can't find common root - no elements suplied.\"",
")",
"root_path",
"=",
"el_to_path_vector",
"(",
"elements",
".",
"pop",
"(",
")",
")",
"for",
"el",
"in",
"elements",
":",
"el_path",
"=",
"el_to_path_vector",
"(",
"el",
")",
"root_path",
"=",
"common_vector_root",
"(",
"root_path",
",",
"el_path",
")",
"if",
"not",
"root_path",
":",
"raise",
"UserWarning",
"(",
"\"Vectors without common root:\\n%s\"",
"%",
"str",
"(",
"el_path",
")",
")",
"return",
"root_path"
] | 25.576923 | 0.001449 | [
"def find_common_root(elements):\n",
" \"\"\"\n",
" Find root which is common for all `elements`.\n",
"\n",
" Args:\n",
" elements (list): List of double-linked HTMLElement objects.\n",
"\n",
" Returns:\n",
" list: Vector of HTMLElement containing path to common root.\n",
" \"\"\"\n",
" if not elements:\n",
" raise UserWarning(\"Can't find common root - no elements suplied.\")\n",
"\n",
" root_path = el_to_path_vector(elements.pop())\n",
"\n",
" for el in elements:\n",
" el_path = el_to_path_vector(el)\n",
"\n",
" root_path = common_vector_root(root_path, el_path)\n",
"\n",
" if not root_path:\n",
" raise UserWarning(\n",
" \"Vectors without common root:\\n%s\" % str(el_path)\n",
" )\n",
"\n",
" return root_path"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05
] | 26 | 0.001923 |
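find_common_root repeatedly shrinks a candidate path with common_vector_root, so the result is the longest root-to-element prefix shared by every input. A sketch of that prefix helper, assuming elements compare by equality (the real helper may compare identity instead):

    def common_vector_root(v1, v2):
        root = []
        for a, b in zip(v1, v2):
            if a != b:
                break
            root.append(a)
        return root

    assert common_vector_root(list("abcx"), list("abcy")) == ["a", "b", "c"]
    assert common_vector_root(["html", "body"], ["html", "head"]) == ["html"]
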
def dependency_to_rpm(dep, runtime):
"""Converts a dependency got by pkg_resources.Requirement.parse()
to RPM format.
Args:
dep - a dependency retrieved by pkg_resources.Requirement.parse()
runtime - whether the returned dependency should be runtime (True)
or build time (False)
Returns:
List of semi-SPECFILE dependencies (package names are not properly
converted yet).
For example: [['Requires', 'jinja2'],
['Conflicts', 'jinja2', '=', '2.0.1']]
"""
logger.debug('Dependencies provided: {0} runtime: {1}.'.format(
dep, runtime))
converted = []
if not len(dep.specs):
converted.append(['Requires', dep.project_name])
else:
for ver_spec in dep.specs:
if ver_spec[0] == '!=':
converted.append(
['Conflicts', dep.project_name, '=', ver_spec[1]])
elif ver_spec[0] == '==':
converted.append(
['Requires', dep.project_name, '=', ver_spec[1]])
else:
converted.append(
['Requires', dep.project_name, ver_spec[0], ver_spec[1]])
if not runtime:
for conv in converted:
conv[0] = "Build" + conv[0]
logger.debug('Converted dependencies: {0}.'.format(converted))
return converted | [
"def",
"dependency_to_rpm",
"(",
"dep",
",",
"runtime",
")",
":",
"logger",
".",
"debug",
"(",
"'Dependencies provided: {0} runtime: {1}.'",
".",
"format",
"(",
"dep",
",",
"runtime",
")",
")",
"converted",
"=",
"[",
"]",
"if",
"not",
"len",
"(",
"dep",
".",
"specs",
")",
":",
"converted",
".",
"append",
"(",
"[",
"'Requires'",
",",
"dep",
".",
"project_name",
"]",
")",
"else",
":",
"for",
"ver_spec",
"in",
"dep",
".",
"specs",
":",
"if",
"ver_spec",
"[",
"0",
"]",
"==",
"'!='",
":",
"converted",
".",
"append",
"(",
"[",
"'Conflicts'",
",",
"dep",
".",
"project_name",
",",
"'='",
",",
"ver_spec",
"[",
"1",
"]",
"]",
")",
"elif",
"ver_spec",
"[",
"0",
"]",
"==",
"'=='",
":",
"converted",
".",
"append",
"(",
"[",
"'Requires'",
",",
"dep",
".",
"project_name",
",",
"'='",
",",
"ver_spec",
"[",
"1",
"]",
"]",
")",
"else",
":",
"converted",
".",
"append",
"(",
"[",
"'Requires'",
",",
"dep",
".",
"project_name",
",",
"ver_spec",
"[",
"0",
"]",
",",
"ver_spec",
"[",
"1",
"]",
"]",
")",
"if",
"not",
"runtime",
":",
"for",
"conv",
"in",
"converted",
":",
"conv",
"[",
"0",
"]",
"=",
"\"Build\"",
"+",
"conv",
"[",
"0",
"]",
"logger",
".",
"debug",
"(",
"'Converted dependencies: {0}.'",
".",
"format",
"(",
"converted",
")",
")",
"return",
"converted"
] | 37.194444 | 0.000728 | [
"def dependency_to_rpm(dep, runtime):\n",
" \"\"\"Converts a dependency got by pkg_resources.Requirement.parse()\n",
" to RPM format.\n",
" Args:\n",
" dep - a dependency retrieved by pkg_resources.Requirement.parse()\n",
" runtime - whether the returned dependency should be runtime (True)\n",
" or build time (False)\n",
" Returns:\n",
" List of semi-SPECFILE dependencies (package names are not properly\n",
" converted yet).\n",
" For example: [['Requires', 'jinja2'],\n",
" ['Conflicts', 'jinja2', '=', '2.0.1']]\n",
" \"\"\"\n",
" logger.debug('Dependencies provided: {0} runtime: {1}.'.format(\n",
" dep, runtime))\n",
" converted = []\n",
" if not len(dep.specs):\n",
" converted.append(['Requires', dep.project_name])\n",
" else:\n",
" for ver_spec in dep.specs:\n",
" if ver_spec[0] == '!=':\n",
" converted.append(\n",
" ['Conflicts', dep.project_name, '=', ver_spec[1]])\n",
" elif ver_spec[0] == '==':\n",
" converted.append(\n",
" ['Requires', dep.project_name, '=', ver_spec[1]])\n",
" else:\n",
" converted.append(\n",
" ['Requires', dep.project_name, ver_spec[0], ver_spec[1]])\n",
"\n",
" if not runtime:\n",
" for conv in converted:\n",
" conv[0] = \"Build\" + conv[0]\n",
" logger.debug('Converted dependencies: {0}.'.format(converted))\n",
"\n",
" return converted"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05
] | 36 | 0.001389 |
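The same operator-to-RPM mapping with the pkg_resources Requirement replaced by a plain (op, version) list, which makes the branch table easy to test in isolation; only != becomes a Conflicts line, everything else is a Requires:

    def specs_to_rpm(project_name, specs, runtime=True):
        prefix = "" if runtime else "Build"
        if not specs:
            return [[prefix + "Requires", project_name]]
        out = []
        for op, ver in specs:
            if op == "!=":
                out.append([prefix + "Conflicts", project_name, "=", ver])
            else:
                rel = "=" if op == "==" else op
                out.append([prefix + "Requires", project_name, rel, ver])
        return out

    assert specs_to_rpm("jinja2", [("!=", "2.0.1"), (">=", "2.0")]) == [
        ["Conflicts", "jinja2", "=", "2.0.1"],
        ["Requires", "jinja2", ">=", "2.0"],
    ]
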
def execute_nonstop_tasks(self, tasks_cls):
"""
Just a wrapper around the execute_batch_tasks method
"""
self.execute_batch_tasks(tasks_cls,
self.conf['sortinghat']['sleep_for'],
self.conf['general']['min_update_delay'], False) | [
"def",
"execute_nonstop_tasks",
"(",
"self",
",",
"tasks_cls",
")",
":",
"self",
".",
"execute_batch_tasks",
"(",
"tasks_cls",
",",
"self",
".",
"conf",
"[",
"'sortinghat'",
"]",
"[",
"'sleep_for'",
"]",
",",
"self",
".",
"conf",
"[",
"'general'",
"]",
"[",
"'min_update_delay'",
"]",
",",
"False",
")"
] | 45.571429 | 0.009231 | [
"def execute_nonstop_tasks(self, tasks_cls):\n",
" \"\"\"\n",
" Just a wrapper to the execute_batch_tasks method\n",
" \"\"\"\n",
" self.execute_batch_tasks(tasks_cls,\n",
" self.conf['sortinghat']['sleep_for'],\n",
" self.conf['general']['min_update_delay'], False)"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0.024691358024691357
] | 7 | 0.015432 |
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
return self.get_json(self.USER_INFO_URL, method="POST", headers=self._get_headers(access_token)) | [
"def",
"user_data",
"(",
"self",
",",
"access_token",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"get_json",
"(",
"self",
".",
"USER_INFO_URL",
",",
"method",
"=",
"\"POST\"",
",",
"headers",
"=",
"self",
".",
"_get_headers",
"(",
"access_token",
")",
")"
] | 49.25 | 0.015 | [
"def user_data(self, access_token, *args, **kwargs):\n",
" \"\"\"Loads user data from service\"\"\"\n",
"\n",
" return self.get_json(self.USER_INFO_URL, method=\"POST\", headers=self._get_headers(access_token))"
] | [
0,
0.023255813953488372,
0,
0.019230769230769232
] | 4 | 0.010622 |
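The record relies on an inherited get_json helper; a hedged sketch of what such a helper typically looks like with requests (assumed available). The endpoint URL and bearer header below are hypothetical, and the network call is left commented out so the snippet runs offline:

    import requests

    def get_json(url, method="GET", headers=None):
        response = requests.request(method, url, headers=headers, timeout=10)
        response.raise_for_status()
        return response.json()

    # profile = get_json("https://example.com/oauth/userinfo", method="POST",
    #                    headers={"Authorization": "Bearer <access_token>"})
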
def run_initialization_experiment(seed,
num_neurons = 50,
dim = 40,
num_bins = 10,
num_samples = 50*600,
neuron_size = 10000,
num_dendrites = 400,
dendrite_length = 25,
power = 10,
):
"""
Runs an experiment testing classification of a binary dataset, based on Poirazi &
Mel's original experiment. Learning uses our modified variant of their
rule, and positive and negative neurons compete to classify a datapoint.
Performance has historically been poor, noticeably worse than what is
achieved with only a single neuron using an HTM-style learning rule on
datasets of similar size. It is suspected that the simplifications made
to the P&M learning rule are having a negative effect.
Furthermore, P&M report that they are willing to train for an exceptional
amount of time, up to 96,000 iterations per neuron. We have never even
begun to approach this long a training time, so it is possible that our
performance would converge with theirs given more time.
This experiment does not correspond to a figure in the paper, but we report
our results across an average of 50 trials, using the settings above.
"""
numpy.random.seed(seed)
nonlinearity = power_nonlinearity(power)
pos_neurons = [Neuron(size = neuron_size, num_dendrites = num_dendrites, dendrite_length = dendrite_length, nonlinearity = nonlinearity, dim = dim*num_bins) for i in range(num_neurons/2)]
neg_neurons = [Neuron(size = neuron_size, num_dendrites = num_dendrites, dendrite_length = dendrite_length, nonlinearity = nonlinearity, dim = dim*num_bins) for i in range(num_neurons/2)]
#pos, neg = generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2), generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2)
pos, neg = generate_data(dim = dim, num_bins = num_bins, num_samples = num_samples, sparse = True)
if (pos.nRows() > num_dendrites*len(pos_neurons)):
print "Too much data to have unique dendrites for positive neurons, clustering"
pos = pos.toDense()
model = KMeans(n_clusters = len(pos_neurons), n_jobs=1)
clusters = model.fit_predict(pos)
neuron_data = [SM32() for i in range(len(pos_neurons))]
for datapoint, cluster in zip(pos, clusters):
neuron_data[cluster].append(SM32([datapoint]))
for i, neuron in enumerate(pos_neurons):
neuron.HTM_style_initialize_on_data(neuron_data[i], [1 for i in range(neuron_data[i].nRows())])
pos = SM32(pos)
else:
print "Directly initializing positive neurons with unique dendrites"
neuron_data = split_sparse_matrix(pos, len(pos_neurons))
for neuron, data in zip(pos_neurons, neuron_data):
neuron.HTM_style_initialize_on_data(data, [1 for i in range(data.nRows())])
if (neg.nRows() > num_dendrites*len(neg_neurons)):
print "Too much data to have unique dendrites for negative neurons, clustering"
neg = neg.toDense()
model = KMeans(n_clusters = len(neg_neurons), n_jobs=1)
clusters = model.fit_predict(neg)
neuron_data = [SM32() for i in range(len(neg_neurons))]
for datapoint, cluster in zip(neg, clusters):
neuron_data[cluster].append(SM32([datapoint]))
for i, neuron in enumerate(neg_neurons):
neuron.HTM_style_initialize_on_data(neuron_data[i], [1 for i in range(neuron_data[i].nRows())])
neg = SM32(neg)
else:
print "Directly initializing negative neurons with unique dendrites"
neuron_data = split_sparse_matrix(neg, len(neg_neurons))
for neuron, data in zip(neg_neurons, neuron_data):
neuron.HTM_style_initialize_on_data(data, [1 for i in range(data.nRows())])
print "Calculating error"
labels = [1 for i in range(pos.nRows())] + [-1 for i in range(neg.nRows())]
data = pos
data.append(neg)
error, fp, fn = get_error(data, labels, pos_neurons, neg_neurons)
print "Error at initialization is {}, with {} false positives and {} false negatives".format(error, fp, fn)
return error | [
"def",
"run_initialization_experiment",
"(",
"seed",
",",
"num_neurons",
"=",
"50",
",",
"dim",
"=",
"40",
",",
"num_bins",
"=",
"10",
",",
"num_samples",
"=",
"50",
"*",
"600",
",",
"neuron_size",
"=",
"10000",
",",
"num_dendrites",
"=",
"400",
",",
"dendrite_length",
"=",
"25",
",",
"power",
"=",
"10",
",",
")",
":",
"numpy",
".",
"random",
".",
"seed",
"(",
"seed",
")",
"nonlinearity",
"=",
"power_nonlinearity",
"(",
"power",
")",
"pos_neurons",
"=",
"[",
"Neuron",
"(",
"size",
"=",
"neuron_size",
",",
"num_dendrites",
"=",
"num_dendrites",
",",
"dendrite_length",
"=",
"dendrite_length",
",",
"nonlinearity",
"=",
"nonlinearity",
",",
"dim",
"=",
"dim",
"*",
"num_bins",
")",
"for",
"i",
"in",
"range",
"(",
"num_neurons",
"/",
"2",
")",
"]",
"neg_neurons",
"=",
"[",
"Neuron",
"(",
"size",
"=",
"neuron_size",
",",
"num_dendrites",
"=",
"num_dendrites",
",",
"dendrite_length",
"=",
"dendrite_length",
",",
"nonlinearity",
"=",
"nonlinearity",
",",
"dim",
"=",
"dim",
"*",
"num_bins",
")",
"for",
"i",
"in",
"range",
"(",
"num_neurons",
"/",
"2",
")",
"]",
"#pos, neg = generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2), generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2)",
"pos",
",",
"neg",
"=",
"generate_data",
"(",
"dim",
"=",
"dim",
",",
"num_bins",
"=",
"num_bins",
",",
"num_samples",
"=",
"num_samples",
",",
"sparse",
"=",
"True",
")",
"if",
"(",
"pos",
".",
"nRows",
"(",
")",
">",
"num_dendrites",
"*",
"len",
"(",
"pos_neurons",
")",
")",
":",
"print",
"\"Too much data to have unique dendrites for positive neurons, clustering\"",
"pos",
"=",
"pos",
".",
"toDense",
"(",
")",
"model",
"=",
"KMeans",
"(",
"n_clusters",
"=",
"len",
"(",
"pos_neurons",
")",
",",
"n_jobs",
"=",
"1",
")",
"clusters",
"=",
"model",
".",
"fit_predict",
"(",
"pos",
")",
"neuron_data",
"=",
"[",
"SM32",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"pos_neurons",
")",
")",
"]",
"for",
"datapoint",
",",
"cluster",
"in",
"zip",
"(",
"pos",
",",
"clusters",
")",
":",
"neuron_data",
"[",
"cluster",
"]",
".",
"append",
"(",
"SM32",
"(",
"[",
"datapoint",
"]",
")",
")",
"for",
"i",
",",
"neuron",
"in",
"enumerate",
"(",
"pos_neurons",
")",
":",
"neuron",
".",
"HTM_style_initialize_on_data",
"(",
"neuron_data",
"[",
"i",
"]",
",",
"[",
"1",
"for",
"i",
"in",
"range",
"(",
"neuron_data",
"[",
"i",
"]",
".",
"nRows",
"(",
")",
")",
"]",
")",
"pos",
"=",
"SM32",
"(",
"pos",
")",
"else",
":",
"print",
"\"Directly initializing positive neurons with unique dendrites\"",
"neuron_data",
"=",
"split_sparse_matrix",
"(",
"pos",
",",
"len",
"(",
"pos_neurons",
")",
")",
"for",
"neuron",
",",
"data",
"in",
"zip",
"(",
"pos_neurons",
",",
"neuron_data",
")",
":",
"neuron",
".",
"HTM_style_initialize_on_data",
"(",
"data",
",",
"[",
"1",
"for",
"i",
"in",
"range",
"(",
"data",
".",
"nRows",
"(",
")",
")",
"]",
")",
"if",
"(",
"neg",
".",
"nRows",
"(",
")",
">",
"num_dendrites",
"*",
"len",
"(",
"neg_neurons",
")",
")",
":",
"print",
"\"Too much data to have unique dendrites for negative neurons, clustering\"",
"neg",
"=",
"neg",
".",
"toDense",
"(",
")",
"model",
"=",
"KMeans",
"(",
"n_clusters",
"=",
"len",
"(",
"neg_neurons",
")",
",",
"n_jobs",
"=",
"1",
")",
"clusters",
"=",
"model",
".",
"fit_predict",
"(",
"neg",
")",
"neuron_data",
"=",
"[",
"SM32",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"neg_neurons",
")",
")",
"]",
"for",
"datapoint",
",",
"cluster",
"in",
"zip",
"(",
"neg",
",",
"clusters",
")",
":",
"neuron_data",
"[",
"cluster",
"]",
".",
"append",
"(",
"SM32",
"(",
"[",
"datapoint",
"]",
")",
")",
"for",
"i",
",",
"neuron",
"in",
"enumerate",
"(",
"neg_neurons",
")",
":",
"neuron",
".",
"HTM_style_initialize_on_data",
"(",
"neuron_data",
"[",
"i",
"]",
",",
"[",
"1",
"for",
"i",
"in",
"range",
"(",
"neuron_data",
"[",
"i",
"]",
".",
"nRows",
"(",
")",
")",
"]",
")",
"neg",
"=",
"SM32",
"(",
"neg",
")",
"else",
":",
"print",
"\"Directly initializing negative neurons with unique dendrites\"",
"neuron_data",
"=",
"split_sparse_matrix",
"(",
"neg",
",",
"len",
"(",
"neg_neurons",
")",
")",
"for",
"neuron",
",",
"data",
"in",
"zip",
"(",
"neg_neurons",
",",
"neuron_data",
")",
":",
"neuron",
".",
"HTM_style_initialize_on_data",
"(",
"data",
",",
"[",
"1",
"for",
"i",
"in",
"range",
"(",
"data",
".",
"nRows",
"(",
")",
")",
"]",
")",
"print",
"\"Calculating error\"",
"labels",
"=",
"[",
"1",
"for",
"i",
"in",
"range",
"(",
"pos",
".",
"nRows",
"(",
")",
")",
"]",
"+",
"[",
"-",
"1",
"for",
"i",
"in",
"range",
"(",
"neg",
".",
"nRows",
"(",
")",
")",
"]",
"data",
"=",
"pos",
"data",
".",
"append",
"(",
"neg",
")",
"error",
",",
"fp",
",",
"fn",
"=",
"get_error",
"(",
"data",
",",
"labels",
",",
"pos_neurons",
",",
"neg_neurons",
")",
"print",
"\"Error at initialization is {}, with {} false positives and {} false negatives\"",
".",
"format",
"(",
"error",
",",
"fp",
",",
"fn",
")",
"return",
"error"
] | 51.195122 | 0.020332 | [
"def run_initialization_experiment(seed,\n",
" num_neurons = 50,\n",
" dim = 40,\n",
" num_bins = 10,\n",
" num_samples = 50*600,\n",
" neuron_size = 10000,\n",
" num_dendrites = 400,\n",
" dendrite_length = 25,\n",
" power = 10,\n",
" ):\n",
" \"\"\"\n",
" Runs an experiment testing classifying a binary dataset, based on Poirazi &\n",
" Mel's original experiment. Learning is using our modified variant of their\n",
" rule, and positive and negative neurons compete to classify a datapoint.\n",
"\n",
" Performance has historically been poor, noticeably worse than what is\n",
" achieved with only a single neuron using an HTM-style learning rule on\n",
" datasets of similar size. It is suspected that the simplifications made\n",
" to the P&M learning rule are having a negative effect.\n",
"\n",
" Furthermore, P&M report that they are willing to train for an exceptional\n",
" amount of time, up to 96,000 iterations per neuron. We have never even\n",
" begun to approach this long a training time, so it is possible that our\n",
" performance would converge with theirs given more time.\n",
"\n",
" This experiment does not correspond to a figure in the paper, but we report\n",
" our results across an average of 50 trials, using the settings above.\n",
" \"\"\"\n",
"\n",
" numpy.random.seed(seed)\n",
"\n",
" nonlinearity = power_nonlinearity(power)\n",
" pos_neurons = [Neuron(size = neuron_size, num_dendrites = num_dendrites, dendrite_length = dendrite_length, nonlinearity = nonlinearity, dim = dim*num_bins) for i in range(num_neurons/2)]\n",
" neg_neurons = [Neuron(size = neuron_size, num_dendrites = num_dendrites, dendrite_length = dendrite_length, nonlinearity = nonlinearity, dim = dim*num_bins) for i in range(num_neurons/2)]\n",
" #pos, neg = generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2), generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2)\n",
" pos, neg = generate_data(dim = dim, num_bins = num_bins, num_samples = num_samples, sparse = True)\n",
"\n",
" if (pos.nRows() > num_dendrites*len(pos_neurons)):\n",
" print \"Too much data to have unique dendrites for positive neurons, clustering\"\n",
" pos = pos.toDense()\n",
" model = KMeans(n_clusters = len(pos_neurons), n_jobs=1)\n",
" clusters = model.fit_predict(pos)\n",
" neuron_data = [SM32() for i in range(len(pos_neurons))]\n",
" for datapoint, cluster in zip(pos, clusters):\n",
" neuron_data[cluster].append(SM32([datapoint]))\n",
" for i, neuron in enumerate(pos_neurons):\n",
" neuron.HTM_style_initialize_on_data(neuron_data[i], [1 for i in range(neuron_data[i].nRows())])\n",
" pos = SM32(pos)\n",
" else:\n",
" print \"Directly initializing positive neurons with unique dendrites\"\n",
" neuron_data = split_sparse_matrix(pos, len(pos_neurons))\n",
" for neuron, data in zip(pos_neurons, neuron_data):\n",
" neuron.HTM_style_initialize_on_data(data, [1 for i in range(data.nRows())])\n",
"\n",
"\n",
" if (neg.nRows() > num_dendrites*len(neg_neurons)):\n",
" print \"Too much data to have unique dendrites for negative neurons, clustering\"\n",
" neg = neg.toDense()\n",
" model = KMeans(n_clusters = len(neg_neurons), n_jobs=1)\n",
" clusters = model.fit_predict(neg)\n",
" neuron_data = [SM32() for i in range(len(neg_neurons))]\n",
" for datapoint, cluster in zip(neg, clusters):\n",
" neuron_data[cluster].append(SM32([datapoint]))\n",
" for i, neuron in enumerate(neg_neurons):\n",
" neuron.HTM_style_initialize_on_data(neuron_data[i], [1 for i in range(neuron_data[i].nRows())])\n",
" neg = SM32(neg)\n",
"\n",
" else:\n",
" print \"Directly initializing negative neurons with unique dendrites\"\n",
" neuron_data = split_sparse_matrix(neg, len(neg_neurons))\n",
" for neuron, data in zip(neg_neurons, neuron_data):\n",
" neuron.HTM_style_initialize_on_data(data, [1 for i in range(data.nRows())])\n",
"\n",
"\n",
" print \"Calculating error\"\n",
" labels = [1 for i in range(pos.nRows())] + [-1 for i in range(neg.nRows())]\n",
" data = pos\n",
" data.append(neg)\n",
"\n",
" error, fp, fn = get_error(data, labels, pos_neurons, neg_neurons)\n",
" print \"Error at initialization is {}, with {} false positives and {} false negatives\".format(error, fp, fn)\n",
" return error"
] | [
0,
0.038461538461538464,
0.045454545454545456,
0.04081632653061224,
0.03571428571428571,
0.03636363636363636,
0.03636363636363636,
0.03571428571428571,
0.043478260869565216,
0,
0.16666666666666666,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.038461538461538464,
0,
0.023255813953488372,
0.06315789473684211,
0.06315789473684211,
0.014354066985645933,
0.09900990099009901,
0,
0.018867924528301886,
0.011904761904761904,
0,
0.03333333333333333,
0,
0,
0,
0.018867924528301886,
0,
0.0196078431372549,
0,
0.125,
0,
0,
0,
0.024390243902439025,
0,
0,
0.03773584905660377,
0.011904761904761904,
0,
0.03333333333333333,
0,
0,
0,
0.018867924528301886,
0,
0.0196078431372549,
0,
0,
0.125,
0,
0,
0,
0.024390243902439025,
0,
0,
0.07142857142857142,
0.01282051282051282,
0.07692307692307693,
0.05263157894736842,
0,
0.014705882352941176,
0.01818181818181818,
0.14285714285714285
] | 82 | 0.020644 |
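When there are more datapoints than dendrites, the experiment clusters the data with k-means and hands each neuron one cluster. A sketch of that partitioning step using dense numpy arrays in place of the nupic SM32 sparse matrices (sizes here are arbitrary):

    import numpy as np
    from sklearn.cluster import KMeans

    rng = np.random.RandomState(0)
    data = rng.rand(120, 40)            # 120 samples split across 2 hypothetical neurons
    model = KMeans(n_clusters=2, n_init=10, random_state=0)
    clusters = model.fit_predict(data)

    per_neuron = [data[clusters == i] for i in range(2)]
    assert sum(len(chunk) for chunk in per_neuron) == 120
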
def poisson(grid, spacing=None, dtype=float, format=None, type='FD'):
"""Return a sparse matrix for the N-dimensional Poisson problem.
The matrix represents a finite difference approximation to the
Poisson problem on a regular n-dimensional grid with unit grid
spacing and Dirichlet boundary conditions.
Parameters
----------
grid : tuple of integers
grid dimensions e.g. (100,100)
Notes
-----
The matrix is symmetric and positive definite (SPD).
Examples
--------
>>> from pyamg.gallery import poisson
>>> # 4 nodes in one dimension
>>> poisson( (4,) ).todense()
matrix([[ 2., -1., 0., 0.],
[-1., 2., -1., 0.],
[ 0., -1., 2., -1.],
[ 0., 0., -1., 2.]])
>>> # rectangular two dimensional grid
>>> poisson( (2,3) ).todense()
matrix([[ 4., -1., 0., -1., 0., 0.],
[-1., 4., -1., 0., -1., 0.],
[ 0., -1., 4., 0., 0., -1.],
[-1., 0., 0., 4., -1., 0.],
[ 0., -1., 0., -1., 4., -1.],
[ 0., 0., -1., 0., -1., 4.]])
"""
grid = tuple(grid)
N = len(grid) # grid dimension
if N < 1 or min(grid) < 1:
raise ValueError('invalid grid shape: %s' % str(grid))
# create N-dimension Laplacian stencil
if type == 'FD':
stencil = np.zeros((3,) * N, dtype=dtype)
for i in range(N):
stencil[(1,)*i + (0,) + (1,)*(N-i-1)] = -1
stencil[(1,)*i + (2,) + (1,)*(N-i-1)] = -1
stencil[(1,)*N] = 2*N
if type == 'FE':
stencil = -np.ones((3,) * N, dtype=dtype)
stencil[(1,)*N] = 3**N - 1
return stencil_grid(stencil, grid, format=format) | [
"def",
"poisson",
"(",
"grid",
",",
"spacing",
"=",
"None",
",",
"dtype",
"=",
"float",
",",
"format",
"=",
"None",
",",
"type",
"=",
"'FD'",
")",
":",
"grid",
"=",
"tuple",
"(",
"grid",
")",
"N",
"=",
"len",
"(",
"grid",
")",
"# grid dimension",
"if",
"N",
"<",
"1",
"or",
"min",
"(",
"grid",
")",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'invalid grid shape: %s'",
"%",
"str",
"(",
"grid",
")",
")",
"# create N-dimension Laplacian stencil",
"if",
"type",
"==",
"'FD'",
":",
"stencil",
"=",
"np",
".",
"zeros",
"(",
"(",
"3",
",",
")",
"*",
"N",
",",
"dtype",
"=",
"dtype",
")",
"for",
"i",
"in",
"range",
"(",
"N",
")",
":",
"stencil",
"[",
"(",
"1",
",",
")",
"*",
"i",
"+",
"(",
"0",
",",
")",
"+",
"(",
"1",
",",
")",
"*",
"(",
"N",
"-",
"i",
"-",
"1",
")",
"]",
"=",
"-",
"1",
"stencil",
"[",
"(",
"1",
",",
")",
"*",
"i",
"+",
"(",
"2",
",",
")",
"+",
"(",
"1",
",",
")",
"*",
"(",
"N",
"-",
"i",
"-",
"1",
")",
"]",
"=",
"-",
"1",
"stencil",
"[",
"(",
"1",
",",
")",
"*",
"N",
"]",
"=",
"2",
"*",
"N",
"if",
"type",
"==",
"'FE'",
":",
"stencil",
"=",
"-",
"np",
".",
"ones",
"(",
"(",
"3",
",",
")",
"*",
"N",
",",
"dtype",
"=",
"dtype",
")",
"stencil",
"[",
"(",
"1",
",",
")",
"*",
"N",
"]",
"=",
"3",
"**",
"N",
"-",
"1",
"return",
"stencil_grid",
"(",
"stencil",
",",
"grid",
",",
"format",
"=",
"format",
")"
] | 29.803571 | 0.00058 | [
"def poisson(grid, spacing=None, dtype=float, format=None, type='FD'):\n",
" \"\"\"Return a sparse matrix for the N-dimensional Poisson problem.\n",
"\n",
" The matrix represents a finite Difference approximation to the\n",
" Poisson problem on a regular n-dimensional grid with unit grid\n",
" spacing and Dirichlet boundary conditions.\n",
"\n",
" Parameters\n",
" ----------\n",
" grid : tuple of integers\n",
" grid dimensions e.g. (100,100)\n",
"\n",
" Notes\n",
" -----\n",
" The matrix is symmetric and positive definite (SPD).\n",
"\n",
" Examples\n",
" --------\n",
" >>> from pyamg.gallery import poisson\n",
" >>> # 4 nodes in one dimension\n",
" >>> poisson( (4,) ).todense()\n",
" matrix([[ 2., -1., 0., 0.],\n",
" [-1., 2., -1., 0.],\n",
" [ 0., -1., 2., -1.],\n",
" [ 0., 0., -1., 2.]])\n",
"\n",
" >>> # rectangular two dimensional grid\n",
" >>> poisson( (2,3) ).todense()\n",
" matrix([[ 4., -1., 0., -1., 0., 0.],\n",
" [-1., 4., -1., 0., -1., 0.],\n",
" [ 0., -1., 4., 0., 0., -1.],\n",
" [-1., 0., 0., 4., -1., 0.],\n",
" [ 0., -1., 0., -1., 4., -1.],\n",
" [ 0., 0., -1., 0., -1., 4.]])\n",
"\n",
" \"\"\"\n",
" grid = tuple(grid)\n",
"\n",
" N = len(grid) # grid dimension\n",
"\n",
" if N < 1 or min(grid) < 1:\n",
" raise ValueError('invalid grid shape: %s' % str(grid))\n",
"\n",
" # create N-dimension Laplacian stencil\n",
" if type == 'FD':\n",
" stencil = np.zeros((3,) * N, dtype=dtype)\n",
" for i in range(N):\n",
" stencil[(1,)*i + (0,) + (1,)*(N-i-1)] = -1\n",
" stencil[(1,)*i + (2,) + (1,)*(N-i-1)] = -1\n",
" stencil[(1,)*N] = 2*N\n",
"\n",
" if type == 'FE':\n",
" stencil = -np.ones((3,) * N, dtype=dtype)\n",
" stencil[(1,)*N] = 3**N - 1\n",
"\n",
" return stencil_grid(stencil, grid, format=format)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.018867924528301886
] | 56 | 0.000337 |
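The 4-node doctest above is the classic tridiagonal [-1, 2, -1] stencil; stencil_grid generalizes it to N dimensions. A one-dimensional cross-check built directly with scipy.sparse:

    import numpy as np
    import scipy.sparse as sp

    n = 4
    # 1-D finite difference stencil assembled as a tridiagonal matrix
    A = sp.diags([-1.0, 2.0, -1.0], offsets=[-1, 0, 1], shape=(n, n)).toarray()
    expected = np.array([[ 2., -1.,  0.,  0.],
                         [-1.,  2., -1.,  0.],
                         [ 0., -1.,  2., -1.],
                         [ 0.,  0., -1.,  2.]])
    assert np.array_equal(A, expected)
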
def summarize_video_metrics(hook_args):
"""Computes video metrics summaries using the decoder output."""
problem_name = hook_args.problem.name
current_problem = hook_args.problem
hparams = hook_args.hparams
output_dirs = hook_args.output_dirs
predictions = hook_args.predictions
frame_shape = [
current_problem.frame_height, current_problem.frame_width,
current_problem.num_channels
]
metrics_graph = tf.Graph()
with metrics_graph.as_default():
if predictions:
metrics_results, _ = video_metrics.compute_video_metrics_from_predictions(
predictions, decode_hparams=hook_args.decode_hparams)
else:
metrics_results, _ = video_metrics.compute_video_metrics_from_png_files(
output_dirs, problem_name, hparams.video_num_target_frames,
frame_shape)
summary_values = []
for name, array in six.iteritems(metrics_results):
for ind, val in enumerate(array):
tag = "metric_{}/{}".format(name, ind)
summary_values.append(tf.Summary.Value(tag=tag, simple_value=val))
return summary_values | [
"def",
"summarize_video_metrics",
"(",
"hook_args",
")",
":",
"problem_name",
"=",
"hook_args",
".",
"problem",
".",
"name",
"current_problem",
"=",
"hook_args",
".",
"problem",
"hparams",
"=",
"hook_args",
".",
"hparams",
"output_dirs",
"=",
"hook_args",
".",
"output_dirs",
"predictions",
"=",
"hook_args",
".",
"predictions",
"frame_shape",
"=",
"[",
"current_problem",
".",
"frame_height",
",",
"current_problem",
".",
"frame_width",
",",
"current_problem",
".",
"num_channels",
"]",
"metrics_graph",
"=",
"tf",
".",
"Graph",
"(",
")",
"with",
"metrics_graph",
".",
"as_default",
"(",
")",
":",
"if",
"predictions",
":",
"metrics_results",
",",
"_",
"=",
"video_metrics",
".",
"compute_video_metrics_from_predictions",
"(",
"predictions",
",",
"decode_hparams",
"=",
"hook_args",
".",
"decode_hparams",
")",
"else",
":",
"metrics_results",
",",
"_",
"=",
"video_metrics",
".",
"compute_video_metrics_from_png_files",
"(",
"output_dirs",
",",
"problem_name",
",",
"hparams",
".",
"video_num_target_frames",
",",
"frame_shape",
")",
"summary_values",
"=",
"[",
"]",
"for",
"name",
",",
"array",
"in",
"six",
".",
"iteritems",
"(",
"metrics_results",
")",
":",
"for",
"ind",
",",
"val",
"in",
"enumerate",
"(",
"array",
")",
":",
"tag",
"=",
"\"metric_{}/{}\"",
".",
"format",
"(",
"name",
",",
"ind",
")",
"summary_values",
".",
"append",
"(",
"tf",
".",
"Summary",
".",
"Value",
"(",
"tag",
"=",
"tag",
",",
"simple_value",
"=",
"val",
")",
")",
"return",
"summary_values"
] | 39 | 0.016682 | [
"def summarize_video_metrics(hook_args):\n",
" \"\"\"Computes video metrics summaries using the decoder output.\"\"\"\n",
" problem_name = hook_args.problem.name\n",
" current_problem = hook_args.problem\n",
" hparams = hook_args.hparams\n",
" output_dirs = hook_args.output_dirs\n",
" predictions = hook_args.predictions\n",
" frame_shape = [\n",
" current_problem.frame_height, current_problem.frame_width,\n",
" current_problem.num_channels\n",
" ]\n",
" metrics_graph = tf.Graph()\n",
" with metrics_graph.as_default():\n",
" if predictions:\n",
" metrics_results, _ = video_metrics.compute_video_metrics_from_predictions(\n",
" predictions, decode_hparams=hook_args.decode_hparams)\n",
" else:\n",
" metrics_results, _ = video_metrics.compute_video_metrics_from_png_files(\n",
" output_dirs, problem_name, hparams.video_num_target_frames,\n",
" frame_shape)\n",
"\n",
" summary_values = []\n",
" for name, array in six.iteritems(metrics_results):\n",
" for ind, val in enumerate(array):\n",
" tag = \"metric_{}/{}\".format(name, ind)\n",
" summary_values.append(tf.Summary.Value(tag=tag, simple_value=val))\n",
" return summary_values"
] | [
0,
0.014925373134328358,
0.025,
0.02631578947368421,
0.03333333333333333,
0.02631578947368421,
0.02631578947368421,
0.05555555555555555,
0,
0,
0,
0.034482758620689655,
0.02857142857142857,
0,
0.024691358024691357,
0,
0,
0.012658227848101266,
0,
0,
0,
0.045454545454545456,
0.018867924528301886,
0,
0.022222222222222223,
0.0136986301369863,
0.08695652173913043
] | 27 | 0.018347 |
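The tag layout produced by the loop above is metric_<name>/<frame index>, one scalar per frame. The same flattening without the tf.Summary dependency:

    def flatten_metrics(metrics_results):
        # mirror the record's tag scheme as plain (tag, value) pairs
        return [("metric_{}/{}".format(name, ind), val)
                for name, array in sorted(metrics_results.items())
                for ind, val in enumerate(array)]

    assert flatten_metrics({"psnr": [30.5, 31.2]}) == [
        ("metric_psnr/0", 30.5),
        ("metric_psnr/1", 31.2),
    ]
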
def parallel_runners(name, runners, **kwargs): # pylint: disable=unused-argument
'''
Executes multiple runner modules on the master in parallel.
.. versionadded:: 2017.x.0 (Nitrogen)
A separate thread is spawned for each runner. This state is intended to be
used with the orchestrate runner in place of the ``saltmod.runner`` state
when different tasks should be run in parallel. In general, Salt states are
not safe when used concurrently, so ensure that they are used in a safe way
(e.g. by only targeting separate minions in parallel tasks).
name:
name identifying this state. The name is provided as part of the
output, but not used for anything else.
runners:
list of runners that should be run in parallel. Each element of the
list has to be a dictionary. This dictionary's name entry stores the
name of the runner function that shall be invoked. The optional kwarg
entry stores a dictionary of named arguments that are passed to the
runner function.
.. code-block:: yaml
parallel-state:
salt.parallel_runners:
- runners:
my_runner_1:
- name: state.orchestrate
- kwarg:
mods: orchestrate_state_1
my_runner_2:
- name: state.orchestrate
- kwarg:
mods: orchestrate_state_2
'''
# For the sake of consistency, we treat a single string in the same way as
# a key without a value. This allows something like
# salt.parallel_runners:
# - runners:
# state.orchestrate
# Obviously, this will only work if the specified runner does not need any
# arguments.
if isinstance(runners, six.string_types):
runners = {runners: [{name: runners}]}
# If the runners argument is not a string, it must be a dict. Everything
# else is considered an error.
if not isinstance(runners, dict):
return {
'name': name,
'result': False,
'changes': {},
'comment': 'The runners parameter must be a string or dict.'
}
# The configuration for each runner is given as a list of key-value pairs.
# This is not very useful for what we want to do, but it is the typical
# style used in Salt. For further processing, we convert each of these
# lists to a dict. This also makes it easier to check whether a name has
# been specified explicitly.
for runner_id, runner_config in six.iteritems(runners):
if runner_config is None:
runner_config = {}
else:
runner_config = salt.utils.data.repack_dictlist(runner_config)
if 'name' not in runner_config:
runner_config['name'] = runner_id
runners[runner_id] = runner_config
try:
jid = __orchestration_jid__
except NameError:
log.debug(
'Unable to fire args event due to missing __orchestration_jid__')
jid = None
def call_runner(runner_config):
return __salt__['saltutil.runner'](runner_config['name'],
__orchestration_jid__=jid,
__env__=__env__,
full_return=True,
**(runner_config.get('kwarg', {})))
try:
outputs = _parallel_map(call_runner, list(six.itervalues(runners)))
except salt.exceptions.SaltException as exc:
return {
'name': name,
'result': False,
'success': False,
'changes': {},
'comment': 'One of the runners raised an exception: {0}'.format(
exc)
}
# We bundle the results of the runners with the IDs of the runners so that
# we can easily identify which output belongs to which runner. At the same
# time we extract the actual return value of the runner (saltutil.runner
# adds some extra information that is not interesting to us).
outputs = {
runner_id: out['return']for runner_id, out in
six.moves.zip(six.iterkeys(runners), outputs)
}
# If each of the runners returned its output in the format compatible with
# the 'highstate' outputter, we can leverage this fact when merging the
# outputs.
highstate_output = all(
[out.get('outputter', '') == 'highstate' and 'data' in out for out in
six.itervalues(outputs)]
)
# The following helper function is used to extract changes from highstate
# output.
def extract_changes(obj):
if not isinstance(obj, dict):
return {}
elif 'changes' in obj:
if (isinstance(obj['changes'], dict)
and obj['changes'].get('out', '') == 'highstate'
and 'ret' in obj['changes']):
return obj['changes']['ret']
else:
return obj['changes']
else:
found_changes = {}
for key, value in six.iteritems(obj):
change = extract_changes(value)
if change:
found_changes[key] = change
return found_changes
if highstate_output:
failed_runners = [runner_id for runner_id, out in
six.iteritems(outputs) if
out['data'].get('retcode', 0) != 0]
all_successful = not failed_runners
if all_successful:
comment = 'All runner functions executed successfully.'
else:
runner_comments = [
'Runner {0} failed with return value:\n{1}'.format(
runner_id,
salt.output.out_format(outputs[runner_id],
'nested',
__opts__,
nested_indent=2)
) for runner_id in failed_runners
]
comment = '\n'.join(runner_comments)
changes = {}
for runner_id, out in six.iteritems(outputs):
runner_changes = extract_changes(out['data'])
if runner_changes:
changes[runner_id] = runner_changes
else:
failed_runners = [runner_id for runner_id, out in
six.iteritems(outputs) if
out.get('exit_code', 0) != 0]
all_successful = not failed_runners
if all_successful:
comment = 'All runner functions executed successfully.'
else:
if len(failed_runners) == 1:
comment = 'Runner {0} failed.'.format(failed_runners[0])
else:
comment =\
'Runners {0} failed.'.format(', '.join(failed_runners))
changes = {'ret': {
runner_id: out for runner_id, out in six.iteritems(outputs)
}}
ret = {
'name': name,
'result': all_successful,
'changes': changes,
'comment': comment
}
# The 'runner' function includes out['jid'] as '__jid__' in the returned
# dict, but we cannot do this here because we have more than one JID if
# we have more than one runner.
return ret | [
"def",
"parallel_runners",
"(",
"name",
",",
"runners",
",",
"*",
"*",
"kwargs",
")",
":",
"# pylint: disable=unused-argument",
"# For the sake of consistency, we treat a single string in the same way as",
"# a key without a value. This allows something like",
"# salt.parallel_runners:",
"# - runners:",
"# state.orchestrate",
"# Obviously, this will only work if the specified runner does not need any",
"# arguments.",
"if",
"isinstance",
"(",
"runners",
",",
"six",
".",
"string_types",
")",
":",
"runners",
"=",
"{",
"runners",
":",
"[",
"{",
"name",
":",
"runners",
"}",
"]",
"}",
"# If the runners argument is not a string, it must be a dict. Everything",
"# else is considered an error.",
"if",
"not",
"isinstance",
"(",
"runners",
",",
"dict",
")",
":",
"return",
"{",
"'name'",
":",
"name",
",",
"'result'",
":",
"False",
",",
"'changes'",
":",
"{",
"}",
",",
"'comment'",
":",
"'The runners parameter must be a string or dict.'",
"}",
"# The configuration for each runner is given as a list of key-value pairs.",
"# This is not very useful for what we want to do, but it is the typical",
"# style used in Salt. For further processing, we convert each of these",
"# lists to a dict. This also makes it easier to check whether a name has",
"# been specified explicitly.",
"for",
"runner_id",
",",
"runner_config",
"in",
"six",
".",
"iteritems",
"(",
"runners",
")",
":",
"if",
"runner_config",
"is",
"None",
":",
"runner_config",
"=",
"{",
"}",
"else",
":",
"runner_config",
"=",
"salt",
".",
"utils",
".",
"data",
".",
"repack_dictlist",
"(",
"runner_config",
")",
"if",
"'name'",
"not",
"in",
"runner_config",
":",
"runner_config",
"[",
"'name'",
"]",
"=",
"runner_id",
"runners",
"[",
"runner_id",
"]",
"=",
"runner_config",
"try",
":",
"jid",
"=",
"__orchestration_jid__",
"except",
"NameError",
":",
"log",
".",
"debug",
"(",
"'Unable to fire args event due to missing __orchestration_jid__'",
")",
"jid",
"=",
"None",
"def",
"call_runner",
"(",
"runner_config",
")",
":",
"return",
"__salt__",
"[",
"'saltutil.runner'",
"]",
"(",
"runner_config",
"[",
"'name'",
"]",
",",
"__orchestration_jid__",
"=",
"jid",
",",
"__env__",
"=",
"__env__",
",",
"full_return",
"=",
"True",
",",
"*",
"*",
"(",
"runner_config",
".",
"get",
"(",
"'kwarg'",
",",
"{",
"}",
")",
")",
")",
"try",
":",
"outputs",
"=",
"_parallel_map",
"(",
"call_runner",
",",
"list",
"(",
"six",
".",
"itervalues",
"(",
"runners",
")",
")",
")",
"except",
"salt",
".",
"exceptions",
".",
"SaltException",
"as",
"exc",
":",
"return",
"{",
"'name'",
":",
"name",
",",
"'result'",
":",
"False",
",",
"'success'",
":",
"False",
",",
"'changes'",
":",
"{",
"}",
",",
"'comment'",
":",
"'One of the runners raised an exception: {0}'",
".",
"format",
"(",
"exc",
")",
"}",
"# We bundle the results of the runners with the IDs of the runners so that",
"# we can easily identify which output belongs to which runner. At the same",
"# time we exctract the actual return value of the runner (saltutil.runner",
"# adds some extra information that is not interesting to us).",
"outputs",
"=",
"{",
"runner_id",
":",
"out",
"[",
"'return'",
"]",
"for",
"runner_id",
",",
"out",
"in",
"six",
".",
"moves",
".",
"zip",
"(",
"six",
".",
"iterkeys",
"(",
"runners",
")",
",",
"outputs",
")",
"}",
"# If each of the runners returned its output in the format compatible with",
"# the 'highstate' outputter, we can leverage this fact when merging the",
"# outputs.",
"highstate_output",
"=",
"all",
"(",
"[",
"out",
".",
"get",
"(",
"'outputter'",
",",
"''",
")",
"==",
"'highstate'",
"and",
"'data'",
"in",
"out",
"for",
"out",
"in",
"six",
".",
"itervalues",
"(",
"outputs",
")",
"]",
")",
"# The following helper function is used to extract changes from highstate",
"# output.",
"def",
"extract_changes",
"(",
"obj",
")",
":",
"if",
"not",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"return",
"{",
"}",
"elif",
"'changes'",
"in",
"obj",
":",
"if",
"(",
"isinstance",
"(",
"obj",
"[",
"'changes'",
"]",
",",
"dict",
")",
"and",
"obj",
"[",
"'changes'",
"]",
".",
"get",
"(",
"'out'",
",",
"''",
")",
"==",
"'highstate'",
"and",
"'ret'",
"in",
"obj",
"[",
"'changes'",
"]",
")",
":",
"return",
"obj",
"[",
"'changes'",
"]",
"[",
"'ret'",
"]",
"else",
":",
"return",
"obj",
"[",
"'changes'",
"]",
"else",
":",
"found_changes",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"six",
".",
"iteritems",
"(",
"obj",
")",
":",
"change",
"=",
"extract_changes",
"(",
"value",
")",
"if",
"change",
":",
"found_changes",
"[",
"key",
"]",
"=",
"change",
"return",
"found_changes",
"if",
"highstate_output",
":",
"failed_runners",
"=",
"[",
"runner_id",
"for",
"runner_id",
",",
"out",
"in",
"six",
".",
"iteritems",
"(",
"outputs",
")",
"if",
"out",
"[",
"'data'",
"]",
".",
"get",
"(",
"'retcode'",
",",
"0",
")",
"!=",
"0",
"]",
"all_successful",
"=",
"not",
"failed_runners",
"if",
"all_successful",
":",
"comment",
"=",
"'All runner functions executed successfully.'",
"else",
":",
"runner_comments",
"=",
"[",
"'Runner {0} failed with return value:\\n{1}'",
".",
"format",
"(",
"runner_id",
",",
"salt",
".",
"output",
".",
"out_format",
"(",
"outputs",
"[",
"runner_id",
"]",
",",
"'nested'",
",",
"__opts__",
",",
"nested_indent",
"=",
"2",
")",
")",
"for",
"runner_id",
"in",
"failed_runners",
"]",
"comment",
"=",
"'\\n'",
".",
"join",
"(",
"runner_comments",
")",
"changes",
"=",
"{",
"}",
"for",
"runner_id",
",",
"out",
"in",
"six",
".",
"iteritems",
"(",
"outputs",
")",
":",
"runner_changes",
"=",
"extract_changes",
"(",
"out",
"[",
"'data'",
"]",
")",
"if",
"runner_changes",
":",
"changes",
"[",
"runner_id",
"]",
"=",
"runner_changes",
"else",
":",
"failed_runners",
"=",
"[",
"runner_id",
"for",
"runner_id",
",",
"out",
"in",
"six",
".",
"iteritems",
"(",
"outputs",
")",
"if",
"out",
".",
"get",
"(",
"'exit_code'",
",",
"0",
")",
"!=",
"0",
"]",
"all_successful",
"=",
"not",
"failed_runners",
"if",
"all_successful",
":",
"comment",
"=",
"'All runner functions executed successfully.'",
"else",
":",
"if",
"len",
"(",
"failed_runners",
")",
"==",
"1",
":",
"comment",
"=",
"'Runner {0} failed.'",
".",
"format",
"(",
"failed_runners",
"[",
"0",
"]",
")",
"else",
":",
"comment",
"=",
"'Runners {0} failed.'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"failed_runners",
")",
")",
"changes",
"=",
"{",
"'ret'",
":",
"{",
"runner_id",
":",
"out",
"for",
"runner_id",
",",
"out",
"in",
"six",
".",
"iteritems",
"(",
"outputs",
")",
"}",
"}",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'result'",
":",
"all_successful",
",",
"'changes'",
":",
"changes",
",",
"'comment'",
":",
"comment",
"}",
"# The 'runner' function includes out['jid'] as '__jid__' in the returned",
"# dict, but we cannot do this here because we have more than one JID if",
"# we have more than one runner.",
"return",
"ret"
] | 39.494505 | 0.000271 | [
"def parallel_runners(name, runners, **kwargs): # pylint: disable=unused-argument\n",
" '''\n",
" Executes multiple runner modules on the master in parallel.\n",
"\n",
" .. versionadded:: 2017.x.0 (Nitrogen)\n",
"\n",
" A separate thread is spawned for each runner. This state is intended to be\n",
" used with the orchestrate runner in place of the ``saltmod.runner`` state\n",
" when different tasks should be run in parallel. In general, Salt states are\n",
" not safe when used concurrently, so ensure that they are used in a safe way\n",
" (e.g. by only targeting separate minions in parallel tasks).\n",
"\n",
" name:\n",
" name identifying this state. The name is provided as part of the\n",
" output, but not used for anything else.\n",
"\n",
" runners:\n",
" list of runners that should be run in parallel. Each element of the\n",
" list has to be a dictionary. This dictionary's name entry stores the\n",
" name of the runner function that shall be invoked. The optional kwarg\n",
" entry stores a dictionary of named arguments that are passed to the\n",
" runner function.\n",
"\n",
" .. code-block:: yaml\n",
"\n",
" parallel-state:\n",
" salt.parallel_runners:\n",
" - runners:\n",
" my_runner_1:\n",
" - name: state.orchestrate\n",
" - kwarg:\n",
" mods: orchestrate_state_1\n",
" my_runner_2:\n",
" - name: state.orchestrate\n",
" - kwarg:\n",
" mods: orchestrate_state_2\n",
" '''\n",
" # For the sake of consistency, we treat a single string in the same way as\n",
" # a key without a value. This allows something like\n",
" # salt.parallel_runners:\n",
" # - runners:\n",
" # state.orchestrate\n",
" # Obviously, this will only work if the specified runner does not need any\n",
" # arguments.\n",
" if isinstance(runners, six.string_types):\n",
" runners = {runners: [{name: runners}]}\n",
" # If the runners argument is not a string, it must be a dict. Everything\n",
" # else is considered an error.\n",
" if not isinstance(runners, dict):\n",
" return {\n",
" 'name': name,\n",
" 'result': False,\n",
" 'changes': {},\n",
" 'comment': 'The runners parameter must be a string or dict.'\n",
" }\n",
" # The configuration for each runner is given as a list of key-value pairs.\n",
" # This is not very useful for what we want to do, but it is the typical\n",
" # style used in Salt. For further processing, we convert each of these\n",
" # lists to a dict. This also makes it easier to check whether a name has\n",
" # been specified explicitly.\n",
" for runner_id, runner_config in six.iteritems(runners):\n",
" if runner_config is None:\n",
" runner_config = {}\n",
" else:\n",
" runner_config = salt.utils.data.repack_dictlist(runner_config)\n",
" if 'name' not in runner_config:\n",
" runner_config['name'] = runner_id\n",
" runners[runner_id] = runner_config\n",
"\n",
" try:\n",
" jid = __orchestration_jid__\n",
" except NameError:\n",
" log.debug(\n",
" 'Unable to fire args event due to missing __orchestration_jid__')\n",
" jid = None\n",
"\n",
" def call_runner(runner_config):\n",
" return __salt__['saltutil.runner'](runner_config['name'],\n",
" __orchestration_jid__=jid,\n",
" __env__=__env__,\n",
" full_return=True,\n",
" **(runner_config.get('kwarg', {})))\n",
"\n",
" try:\n",
" outputs = _parallel_map(call_runner, list(six.itervalues(runners)))\n",
" except salt.exceptions.SaltException as exc:\n",
" return {\n",
" 'name': name,\n",
" 'result': False,\n",
" 'success': False,\n",
" 'changes': {},\n",
" 'comment': 'One of the runners raised an exception: {0}'.format(\n",
" exc)\n",
" }\n",
" # We bundle the results of the runners with the IDs of the runners so that\n",
" # we can easily identify which output belongs to which runner. At the same\n",
" # time we exctract the actual return value of the runner (saltutil.runner\n",
" # adds some extra information that is not interesting to us).\n",
" outputs = {\n",
" runner_id: out['return']for runner_id, out in\n",
" six.moves.zip(six.iterkeys(runners), outputs)\n",
" }\n",
"\n",
" # If each of the runners returned its output in the format compatible with\n",
" # the 'highstate' outputter, we can leverage this fact when merging the\n",
" # outputs.\n",
" highstate_output = all(\n",
" [out.get('outputter', '') == 'highstate' and 'data' in out for out in\n",
" six.itervalues(outputs)]\n",
" )\n",
"\n",
" # The following helper function is used to extract changes from highstate\n",
" # output.\n",
"\n",
" def extract_changes(obj):\n",
" if not isinstance(obj, dict):\n",
" return {}\n",
" elif 'changes' in obj:\n",
" if (isinstance(obj['changes'], dict)\n",
" and obj['changes'].get('out', '') == 'highstate'\n",
" and 'ret' in obj['changes']):\n",
" return obj['changes']['ret']\n",
" else:\n",
" return obj['changes']\n",
" else:\n",
" found_changes = {}\n",
" for key, value in six.iteritems(obj):\n",
" change = extract_changes(value)\n",
" if change:\n",
" found_changes[key] = change\n",
" return found_changes\n",
" if highstate_output:\n",
" failed_runners = [runner_id for runner_id, out in\n",
" six.iteritems(outputs) if\n",
" out['data'].get('retcode', 0) != 0]\n",
" all_successful = not failed_runners\n",
" if all_successful:\n",
" comment = 'All runner functions executed successfully.'\n",
" else:\n",
" runner_comments = [\n",
" 'Runner {0} failed with return value:\\n{1}'.format(\n",
" runner_id,\n",
" salt.output.out_format(outputs[runner_id],\n",
" 'nested',\n",
" __opts__,\n",
" nested_indent=2)\n",
" ) for runner_id in failed_runners\n",
" ]\n",
" comment = '\\n'.join(runner_comments)\n",
" changes = {}\n",
" for runner_id, out in six.iteritems(outputs):\n",
" runner_changes = extract_changes(out['data'])\n",
" if runner_changes:\n",
" changes[runner_id] = runner_changes\n",
" else:\n",
" failed_runners = [runner_id for runner_id, out in\n",
" six.iteritems(outputs) if\n",
" out.get('exit_code', 0) != 0]\n",
" all_successful = not failed_runners\n",
" if all_successful:\n",
" comment = 'All runner functions executed successfully.'\n",
" else:\n",
" if len(failed_runners) == 1:\n",
" comment = 'Runner {0} failed.'.format(failed_runners[0])\n",
" else:\n",
" comment =\\\n",
" 'Runners {0} failed.'.format(', '.join(failed_runners))\n",
" changes = {'ret': {\n",
" runner_id: out for runner_id, out in six.iteritems(outputs)\n",
" }}\n",
" ret = {\n",
" 'name': name,\n",
" 'result': all_successful,\n",
" 'changes': changes,\n",
" 'comment': comment\n",
" }\n",
"\n",
" # The 'runner' function includes out['jid'] as '__jid__' in the returned\n",
" # dict, but we cannot do this here because we have more than one JID if\n",
" # we have more than one runner.\n",
"\n",
" return ret"
] | [
0.012195121951219513,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.07142857142857142
] | 182 | 0.000459 |
def save(sources, targets, masked=False):
"""
Save the numeric results of each source into its corresponding target.
Parameters
----------
sources: list
The list of source arrays for saving from; limited to length 1.
targets: list
The list of target arrays for saving to; limited to length 1.
masked: boolean
Uses a masked array from sources if True.
"""
# TODO: Remove restriction
assert len(sources) == 1 and len(targets) == 1
array = sources[0]
target = targets[0]
# Request bitesize pieces of the source and assign them to the
# target.
# NB. This algorithm does not use the minimal number of chunks.
# e.g. If the second dimension could be sliced as 0:99, 99:100
# then clearly the first dimension would have to be single
# slices for the 0:99 case, but could be bigger slices for the
# 99:100 case.
# It's not yet clear if this really matters.
all_slices = _all_slices(array)
for index in np.ndindex(*[len(slices) for slices in all_slices]):
keys = tuple(slices[i] for slices, i in zip(all_slices, index))
if masked:
target[keys] = array[keys].masked_array()
else:
target[keys] = array[keys].ndarray() | [
"def",
"save",
"(",
"sources",
",",
"targets",
",",
"masked",
"=",
"False",
")",
":",
"# TODO: Remove restriction",
"assert",
"len",
"(",
"sources",
")",
"==",
"1",
"and",
"len",
"(",
"targets",
")",
"==",
"1",
"array",
"=",
"sources",
"[",
"0",
"]",
"target",
"=",
"targets",
"[",
"0",
"]",
"# Request bitesize pieces of the source and assign them to the",
"# target.",
"# NB. This algorithm does not use the minimal number of chunks.",
"# e.g. If the second dimension could be sliced as 0:99, 99:100",
"# then clearly the first dimension would have to be single",
"# slices for the 0:99 case, but could be bigger slices for the",
"# 99:100 case.",
"# It's not yet clear if this really matters.",
"all_slices",
"=",
"_all_slices",
"(",
"array",
")",
"for",
"index",
"in",
"np",
".",
"ndindex",
"(",
"*",
"[",
"len",
"(",
"slices",
")",
"for",
"slices",
"in",
"all_slices",
"]",
")",
":",
"keys",
"=",
"tuple",
"(",
"slices",
"[",
"i",
"]",
"for",
"slices",
",",
"i",
"in",
"zip",
"(",
"all_slices",
",",
"index",
")",
")",
"if",
"masked",
":",
"target",
"[",
"keys",
"]",
"=",
"array",
"[",
"keys",
"]",
".",
"masked_array",
"(",
")",
"else",
":",
"target",
"[",
"keys",
"]",
"=",
"array",
"[",
"keys",
"]",
".",
"ndarray",
"(",
")"
] | 36.970588 | 0.000775 | [
"def save(sources, targets, masked=False):\n",
" \"\"\"\n",
" Save the numeric results of each source into its corresponding target.\n",
"\n",
" Parameters\n",
" ----------\n",
" sources: list\n",
" The list of source arrays for saving from; limited to length 1.\n",
" targets: list\n",
" The list of target arrays for saving to; limited to length 1.\n",
" masked: boolean\n",
" Uses a masked array from sources if True.\n",
"\n",
" \"\"\"\n",
" # TODO: Remove restriction\n",
" assert len(sources) == 1 and len(targets) == 1\n",
" array = sources[0]\n",
" target = targets[0]\n",
"\n",
" # Request bitesize pieces of the source and assign them to the\n",
" # target.\n",
" # NB. This algorithm does not use the minimal number of chunks.\n",
" # e.g. If the second dimension could be sliced as 0:99, 99:100\n",
" # then clearly the first dimension would have to be single\n",
" # slices for the 0:99 case, but could be bigger slices for the\n",
" # 99:100 case.\n",
" # It's not yet clear if this really matters.\n",
" all_slices = _all_slices(array)\n",
" for index in np.ndindex(*[len(slices) for slices in all_slices]):\n",
" keys = tuple(slices[i] for slices, i in zip(all_slices, index))\n",
" if masked:\n",
" target[keys] = array[keys].masked_array()\n",
" else:\n",
" target[keys] = array[keys].ndarray()"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.020833333333333332
] | 34 | 0.000613 |
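Usage sketch for save() above. The wrapper class and _all_slices chunker below are hypothetical stand-ins for the biggus-style lazy-array interface (indexing returns another wrapper; .ndarray()/.masked_array() realise the data) that the record assumes but does not show:

import numpy as np

class EagerArray:
    # Hypothetical stand-in for the lazy-array interface save() expects.
    def __init__(self, data):
        self._data = np.asarray(data)
        self.shape = self._data.shape

    def __getitem__(self, keys):
        return EagerArray(self._data[keys])

    def ndarray(self):
        return self._data

    def masked_array(self):
        return np.ma.masked_array(self._data)

def _all_slices(array):
    # Stand-in chunker: a single whole-extent slice per dimension.
    return [[slice(0, dim)] for dim in array.shape]

src = EagerArray(np.arange(12).reshape(3, 4))
dst = np.empty((3, 4), dtype=int)
save([src], [dst])                    # copies src into dst chunk by chunk
print((dst == src.ndarray()).all())   # True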
def get_season_code_from_name(self, season_name) -> int:
"""
Args:
season_name: season name
Returns: season code
"""
self.validator_season_name.validate(season_name, 'get_season_code_from_name')
return self.seasons_enum[season_name] | [
"def",
"get_season_code_from_name",
"(",
"self",
",",
"season_name",
")",
"->",
"int",
":",
"self",
".",
"validator_season_name",
".",
"validate",
"(",
"season_name",
",",
"'get_season_code_from_name'",
")",
"return",
"self",
".",
"seasons_enum",
"[",
"season_name",
"]"
] | 31.666667 | 0.010239 | [
"def get_season_code_from_name(self, season_name) -> int:\n",
" \"\"\"\n",
" Args:\n",
" season_name: season name\n",
"\n",
" Returns: season code\n",
" \"\"\"\n",
" self.validator_season_name.validate(season_name, 'get_season_code_from_name')\n",
" return self.seasons_enum[season_name]"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0.011627906976744186,
0.022222222222222223
] | 9 | 0.01302 |
def check_arguments(c: typing.Callable,
hints: typing.Mapping[str, typing.Optional[type]],
*args, **kwargs) -> None:
"""Check arguments type, raise :class:`TypeError` if argument type is not
expected type.
:param c: callable object want to check types
:param hints: assumed types of the given ``c``, as returned by
:func:`typing.get_type_hints`
"""
signature = inspect.signature(c)
bound = signature.bind(*args, **kwargs)
for argument_name, value in bound.arguments.items():
try:
type_hint = hints[argument_name]
except KeyError:
continue
actual_type, correct = check_type(value, type_hint)
if not correct:
raise TypeError(
'Incorrect type `{}`, expected `{}` for `{}`'.format(
actual_type, type_hint, argument_name
)
) | [
"def",
"check_arguments",
"(",
"c",
":",
"typing",
".",
"Callable",
",",
"hints",
":",
"typing",
".",
"Mapping",
"[",
"str",
",",
"typing",
".",
"Optional",
"[",
"type",
"]",
"]",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"->",
"None",
":",
"signature",
"=",
"inspect",
".",
"signature",
"(",
"c",
")",
"bound",
"=",
"signature",
".",
"bind",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"for",
"argument_name",
",",
"value",
"in",
"bound",
".",
"arguments",
".",
"items",
"(",
")",
":",
"try",
":",
"type_hint",
"=",
"hints",
"[",
"argument_name",
"]",
"except",
"KeyError",
":",
"continue",
"actual_type",
",",
"correct",
"=",
"check_type",
"(",
"value",
",",
"type_hint",
")",
"if",
"not",
"correct",
":",
"raise",
"TypeError",
"(",
"'Incorrect type `{}`, expected `{}` for `{}`'",
".",
"format",
"(",
"actual_type",
",",
"type_hint",
",",
"argument_name",
")",
")"
] | 36.32 | 0.001073 | [
"def check_arguments(c: typing.Callable,\n",
" hints: typing.Mapping[str, typing.Optional[type]],\n",
" *args, **kwargs) -> None:\n",
" \"\"\"Check arguments type, raise :class:`TypeError` if argument type is not\n",
" expected type.\n",
"\n",
" :param c: callable object want to check types\n",
" :param hints: assumed type of given ``c`` result of\n",
" :func:`typing.get_type_hints`\n",
"\n",
" \"\"\"\n",
" signature = inspect.signature(c)\n",
" bound = signature.bind(*args, **kwargs)\n",
" for argument_name, value in bound.arguments.items():\n",
" try:\n",
" type_hint = hints[argument_name]\n",
" except KeyError:\n",
" continue\n",
" actual_type, correct = check_type(value, type_hint)\n",
" if not correct:\n",
" raise TypeError(\n",
" 'Incorrect type `{}`, expected `{}` for `{}`'.format(\n",
" actual_type, type_hint, argument_name\n",
" )\n",
" )"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.07692307692307693
] | 25 | 0.003077 |
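Usage sketch for check_arguments() above; check_type is not part of this record, so a simplified stand-in is used (a real implementation would handle generic hints far more carefully):

import typing

def check_type(value, type_hint):
    # Simplified stand-in: actual type plus an isinstance test against the
    # hint (or its typing origin, e.g. list for typing.List[int]).
    origin = typing.get_origin(type_hint) or type_hint
    return type(value), isinstance(value, origin)

def greet(name: str, times: int) -> str:
    return name * times

hints = typing.get_type_hints(greet)
check_arguments(greet, hints, 'hi', times=3)        # passes silently
try:
    check_arguments(greet, hints, 'hi', times='3')
except TypeError as exc:
    print(exc)  # Incorrect type `<class 'str'>`, expected `<class 'int'>` for `times`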
def _reconnect(self, errorState):
"""
Attempt to reconnect.
If the current back-off delay is 0, L{connect} is called. Otherwise,
it will cause a transition to the C{'waiting'} state, ultimately
causing a call to L{connect} when the delay expires.
"""
def connect():
if self.noisy:
log.msg("Reconnecting now.")
self.connect()
backOff = self.backOffs[errorState]
if self._errorState != errorState or self._delay is None:
self._errorState = errorState
self._delay = backOff['initial']
else:
self._delay = min(backOff['max'], self._delay * backOff['factor'])
if self._delay == 0:
connect()
else:
self._reconnectDelayedCall = self.reactor.callLater(self._delay,
connect)
self._toState('waiting') | [
"def",
"_reconnect",
"(",
"self",
",",
"errorState",
")",
":",
"def",
"connect",
"(",
")",
":",
"if",
"self",
".",
"noisy",
":",
"log",
".",
"msg",
"(",
"\"Reconnecting now.\"",
")",
"self",
".",
"connect",
"(",
")",
"backOff",
"=",
"self",
".",
"backOffs",
"[",
"errorState",
"]",
"if",
"self",
".",
"_errorState",
"!=",
"errorState",
"or",
"self",
".",
"_delay",
"is",
"None",
":",
"self",
".",
"_errorState",
"=",
"errorState",
"self",
".",
"_delay",
"=",
"backOff",
"[",
"'initial'",
"]",
"else",
":",
"self",
".",
"_delay",
"=",
"min",
"(",
"backOff",
"[",
"'max'",
"]",
",",
"self",
".",
"_delay",
"*",
"backOff",
"[",
"'factor'",
"]",
")",
"if",
"self",
".",
"_delay",
"==",
"0",
":",
"connect",
"(",
")",
"else",
":",
"self",
".",
"_reconnectDelayedCall",
"=",
"self",
".",
"reactor",
".",
"callLater",
"(",
"self",
".",
"_delay",
",",
"connect",
")",
"self",
".",
"_toState",
"(",
"'waiting'",
")"
] | 34.814815 | 0.00207 | [
"def _reconnect(self, errorState):\n",
" \"\"\"\n",
" Attempt to reconnect.\n",
"\n",
" If the current back-off delay is 0, L{connect} is called. Otherwise,\n",
" it will cause a transition to the C{'waiting'} state, ultimately\n",
" causing a call to L{connect} when the delay expires.\n",
" \"\"\"\n",
" def connect():\n",
" if self.noisy:\n",
" log.msg(\"Reconnecting now.\")\n",
" self.connect()\n",
"\n",
" backOff = self.backOffs[errorState]\n",
"\n",
" if self._errorState != errorState or self._delay is None:\n",
" self._errorState = errorState\n",
" self._delay = backOff['initial']\n",
" else:\n",
" self._delay = min(backOff['max'], self._delay * backOff['factor'])\n",
"\n",
" if self._delay == 0:\n",
" connect()\n",
" else:\n",
" self._reconnectDelayedCall = self.reactor.callLater(self._delay,\n",
" connect)\n",
" self._toState('waiting')"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.027777777777777776
] | 27 | 0.004115 |
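Judging by the attribute accesses above, each backOffs entry carries 'initial', 'factor' and 'max' keys. A quick sketch of the delay sequence one hypothetical entry yields for a repeated errorState:

back_off = {'initial': 1.0, 'factor': 2.0, 'max': 30.0}   # hypothetical entry

delay = None
for attempt in range(6):
    # Same arithmetic as _reconnect when the errorState is unchanged:
    delay = back_off['initial'] if delay is None else min(
        back_off['max'], delay * back_off['factor'])
    print(attempt, delay)   # 1.0, 2.0, 4.0, 8.0, 16.0, 30.0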
def read_dictionary_file(dictionary_path):
"""Return all words in dictionary file as set."""
try:
return _user_dictionary_cache[dictionary_path]
except KeyError:
if dictionary_path and os.path.exists(dictionary_path):
with open(dictionary_path, "rt") as dict_f:
words = set(re.findall(r"(\w[\w']*\w|\w)",
" ".join(dict_f.read().splitlines())))
return words
return set() | [
"def",
"read_dictionary_file",
"(",
"dictionary_path",
")",
":",
"try",
":",
"return",
"_user_dictionary_cache",
"[",
"dictionary_path",
"]",
"except",
"KeyError",
":",
"if",
"dictionary_path",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"dictionary_path",
")",
":",
"with",
"open",
"(",
"dictionary_path",
",",
"\"rt\"",
")",
"as",
"dict_f",
":",
"words",
"=",
"set",
"(",
"re",
".",
"findall",
"(",
"r\"(\\w[\\w']*\\w|\\w)\"",
",",
"\" \"",
".",
"join",
"(",
"dict_f",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
")",
")",
")",
"return",
"words",
"return",
"set",
"(",
")"
] | 39.833333 | 0.002045 | [
"def read_dictionary_file(dictionary_path):\n",
" \"\"\"Return all words in dictionary file as set.\"\"\"\n",
" try:\n",
" return _user_dictionary_cache[dictionary_path]\n",
" except KeyError:\n",
" if dictionary_path and os.path.exists(dictionary_path):\n",
" with open(dictionary_path, \"rt\") as dict_f:\n",
" words = set(re.findall(r\"(\\w[\\w']*\\w|\\w)\",\n",
" \" \".join(dict_f.read().splitlines())))\n",
" return words\n",
"\n",
" return set()"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05
] | 12 | 0.004167 |
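Note that read_dictionary_file() consults the module-level _user_dictionary_cache but never fills it; populating the cache is evidently left to other code in the module. A small usage sketch with a temporary word file:

import os
import re
import tempfile

_user_dictionary_cache = {}   # module-level cache the record assumes

with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as fobj:
    fobj.write("colour scheme isn't\nfoo_bar\n")
    path = fobj.name

print(read_dictionary_file(path))
# {'colour', 'scheme', "isn't", 'foo_bar'}   (a set, order varies)
os.unlink(path)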
def resize(image, x, y, stretch=False, top=None, left=None, mode='RGB',
resample=None):
"""Return an image resized."""
if x <= 0:
raise ValueError('x must be greater than zero')
if y <= 0:
raise ValueError('y must be greater than zero')
from PIL import Image
resample = Image.ANTIALIAS if resample is None else resample
if not isinstance(resample, numbers.Number):
try:
resample = getattr(Image, resample.upper())
except:
raise ValueError("(1) Didn't understand resample=%s" % resample)
if not isinstance(resample, numbers.Number):
raise ValueError("(2) Didn't understand resample=%s" % resample)
size = x, y
if stretch:
return image.resize(size, resample=resample)
result = Image.new(mode, size)
ratios = [d1 / d2 for d1, d2 in zip(size, image.size)]
if ratios[0] < ratios[1]:
new_size = (size[0], int(image.size[1] * ratios[0]))
else:
new_size = (int(image.size[0] * ratios[1]), size[1])
image = image.resize(new_size, resample=resample)
if left is None:
box_x = int((x - new_size[0]) / 2)
elif left:
box_x = 0
else:
box_x = x - new_size[0]
if top is None:
box_y = int((y - new_size[1]) / 2)
elif top:
box_y = 0
else:
box_y = y - new_size[1]
result.paste(image, box=(box_x, box_y))
return result | [
"def",
"resize",
"(",
"image",
",",
"x",
",",
"y",
",",
"stretch",
"=",
"False",
",",
"top",
"=",
"None",
",",
"left",
"=",
"None",
",",
"mode",
"=",
"'RGB'",
",",
"resample",
"=",
"None",
")",
":",
"if",
"x",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"'x must be greater than zero'",
")",
"if",
"y",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"'y must be greater than zero'",
")",
"from",
"PIL",
"import",
"Image",
"resample",
"=",
"Image",
".",
"ANTIALIAS",
"if",
"resample",
"is",
"None",
"else",
"resample",
"if",
"not",
"isinstance",
"(",
"resample",
",",
"numbers",
".",
"Number",
")",
":",
"try",
":",
"resample",
"=",
"getattr",
"(",
"Image",
",",
"resample",
".",
"upper",
"(",
")",
")",
"except",
":",
"raise",
"ValueError",
"(",
"\"(1) Didn't understand resample=%s\"",
"%",
"resample",
")",
"if",
"not",
"isinstance",
"(",
"resample",
",",
"numbers",
".",
"Number",
")",
":",
"raise",
"ValueError",
"(",
"\"(2) Didn't understand resample=%s\"",
"%",
"resample",
")",
"size",
"=",
"x",
",",
"y",
"if",
"stretch",
":",
"return",
"image",
".",
"resize",
"(",
"size",
",",
"resample",
"=",
"resample",
")",
"result",
"=",
"Image",
".",
"new",
"(",
"mode",
",",
"size",
")",
"ratios",
"=",
"[",
"d1",
"/",
"d2",
"for",
"d1",
",",
"d2",
"in",
"zip",
"(",
"size",
",",
"image",
".",
"size",
")",
"]",
"if",
"ratios",
"[",
"0",
"]",
"<",
"ratios",
"[",
"1",
"]",
":",
"new_size",
"=",
"(",
"size",
"[",
"0",
"]",
",",
"int",
"(",
"image",
".",
"size",
"[",
"1",
"]",
"*",
"ratios",
"[",
"0",
"]",
")",
")",
"else",
":",
"new_size",
"=",
"(",
"int",
"(",
"image",
".",
"size",
"[",
"0",
"]",
"*",
"ratios",
"[",
"1",
"]",
")",
",",
"size",
"[",
"1",
"]",
")",
"image",
"=",
"image",
".",
"resize",
"(",
"new_size",
",",
"resample",
"=",
"resample",
")",
"if",
"left",
"is",
"None",
":",
"box_x",
"=",
"int",
"(",
"(",
"x",
"-",
"new_size",
"[",
"0",
"]",
")",
"/",
"2",
")",
"elif",
"left",
":",
"box_x",
"=",
"0",
"else",
":",
"box_x",
"=",
"x",
"-",
"new_size",
"[",
"0",
"]",
"if",
"top",
"is",
"None",
":",
"box_y",
"=",
"int",
"(",
"(",
"y",
"-",
"new_size",
"[",
"1",
"]",
")",
"/",
"2",
")",
"elif",
"top",
":",
"box_y",
"=",
"0",
"else",
":",
"box_y",
"=",
"y",
"-",
"new_size",
"[",
"1",
"]",
"result",
".",
"paste",
"(",
"image",
",",
"box",
"=",
"(",
"box_x",
",",
"box_y",
")",
")",
"return",
"result"
] | 29.808511 | 0.001382 | [
"def resize(image, x, y, stretch=False, top=None, left=None, mode='RGB',\n",
" resample=None):\n",
" \"\"\"Return an image resized.\"\"\"\n",
" if x <= 0:\n",
" raise ValueError('x must be greater than zero')\n",
" if y <= 0:\n",
" raise ValueError('y must be greater than zero')\n",
"\n",
" from PIL import Image\n",
"\n",
" resample = Image.ANTIALIAS if resample is None else resample\n",
" if not isinstance(resample, numbers.Number):\n",
" try:\n",
" resample = getattr(Image, resample.upper())\n",
" except:\n",
" raise ValueError(\"(1) Didn't understand resample=%s\" % resample)\n",
" if not isinstance(resample, numbers.Number):\n",
" raise ValueError(\"(2) Didn't understand resample=%s\" % resample)\n",
"\n",
" size = x, y\n",
" if stretch:\n",
" return image.resize(size, resample=resample)\n",
" result = Image.new(mode, size)\n",
"\n",
" ratios = [d1 / d2 for d1, d2 in zip(size, image.size)]\n",
" if ratios[0] < ratios[1]:\n",
" new_size = (size[0], int(image.size[1] * ratios[0]))\n",
" else:\n",
" new_size = (int(image.size[0] * ratios[1]), size[1])\n",
"\n",
" image = image.resize(new_size, resample=resample)\n",
" if left is None:\n",
" box_x = int((x - new_size[0]) / 2)\n",
" elif left:\n",
" box_x = 0\n",
" else:\n",
" box_x = x - new_size[0]\n",
"\n",
" if top is None:\n",
" box_y = int((y - new_size[1]) / 2)\n",
" elif top:\n",
" box_y = 0\n",
" else:\n",
" box_y = y - new_size[1]\n",
"\n",
" result.paste(image, box=(box_x, box_y))\n",
" return result"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.0625,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.058823529411764705
] | 47 | 0.002581 |
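Usage sketch for resize() above (assumes a Pillow version that still provides Image.ANTIALIAS, plus the record's module-level imports such as numbers): by default the image is letterboxed to preserve aspect ratio and centred unless top/left say otherwise, while stretch=True ignores the ratio:

from PIL import Image

src = Image.new('RGB', (400, 200), 'red')        # 2:1 source image

boxed = resize(src, 300, 300)                    # letterboxed, centred
print(boxed.size)                                # (300, 300)

pinned = resize(src, 300, 300, top=True, left=True)   # pinned to top-left
stretched = resize(src, 300, 300, stretch=True)       # ratio ignored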
def p_expr_shl_expr(p):
""" expr : expr SHL expr
"""
if p[1] is None or p[3] is None:
p[0] = None
return
if p[1].type_ in (TYPE.float_, TYPE.fixed):
p[1] = make_typecast(TYPE.ulong, p[1], p.lineno(2))
p[0] = make_binary(p.lineno(2), 'SHL', p[1],
make_typecast(TYPE.ubyte, p[3], p.lineno(2)),
lambda x, y: x << y) | [
"def",
"p_expr_shl_expr",
"(",
"p",
")",
":",
"if",
"p",
"[",
"1",
"]",
"is",
"None",
"or",
"p",
"[",
"3",
"]",
"is",
"None",
":",
"p",
"[",
"0",
"]",
"=",
"None",
"return",
"if",
"p",
"[",
"1",
"]",
".",
"type_",
"in",
"(",
"TYPE",
".",
"float_",
",",
"TYPE",
".",
"fixed",
")",
":",
"p",
"[",
"1",
"]",
"=",
"make_typecast",
"(",
"TYPE",
".",
"ulong",
",",
"p",
"[",
"1",
"]",
",",
"p",
".",
"lineno",
"(",
"2",
")",
")",
"p",
"[",
"0",
"]",
"=",
"make_binary",
"(",
"p",
".",
"lineno",
"(",
"2",
")",
",",
"'SHL'",
",",
"p",
"[",
"1",
"]",
",",
"make_typecast",
"(",
"TYPE",
".",
"ubyte",
",",
"p",
"[",
"3",
"]",
",",
"p",
".",
"lineno",
"(",
"2",
")",
")",
",",
"lambda",
"x",
",",
"y",
":",
"x",
"<<",
"y",
")"
] | 30.153846 | 0.002475 | [
"def p_expr_shl_expr(p):\n",
" \"\"\" expr : expr SHL expr\n",
" \"\"\"\n",
" if p[1] is None or p[3] is None:\n",
" p[0] = None\n",
" return\n",
"\n",
" if p[1].type_ in (TYPE.float_, TYPE.fixed):\n",
" p[1] = make_typecast(TYPE.ulong, p[1], p.lineno(2))\n",
"\n",
" p[0] = make_binary(p.lineno(2), 'SHL', p[1],\n",
" make_typecast(TYPE.ubyte, p[3], p.lineno(2)),\n",
" lambda x, y: x << y)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.023255813953488372
] | 13 | 0.001789 |
def render(self):
"""Render this page and return the rendition.
Converts the markdown content to html, and then renders the
(mako) template specified in the config, using that html.
The task of writing the rendition to a real file is the
responsibility of the generate method.
"""
(pthemedir, ptemplatefname) = self._theme_and_template_fp()
mylookup = TemplateLookup(directories=[self.site.dirs['s2'], pthemedir], input_encoding='utf-8', output_encoding='utf-8')
makotemplate = Template(filename=ptemplatefname, lookup=mylookup,
module_directory=self.site._makodir)
# I don't really need to use the meta extension here, because I render self._content (has no metadata)
#page_html = markdown.markdown(self._content)
md = markdown.Markdown(extensions=['meta', 'fenced_code', 'codehilite'], output_format="html5")
page_html = md.convert(self._content) # need to trigger the conversion to obtain md.Meta
# We assume that the page is always in a dir one level below www
themepath = "../themes/" + os.path.split(pthemedir)[1] + '/'
commonpath = "../common/"
# HERE I'll pass the config variable to the mako template, so I can use the title etc.
#buf = StringIO()
#ctx = Context(buf, dict(pageContent=page_html, isFrontPage=False, themePath=themepath, pageTitle='pedo',
# commonPath=commonpath))
#makotemplate.render_context(ctx)
#rendition = buf.getvalue()
# IS THERE `PIWIK CODE?
# IS THERE DISQUS CODE?
# READ from s2 if there's disqus_code.html.tpl and piwik_code.html.tpl
# if there's piwik, just define the variable piwik_code with its contents
# if there's disqus... nested render?
# HERE I NEED TO DIRECTLY INCLUDE A TEMPLATE IN ANOTHER TEMPLATE!!! MAKO!
#d_sn = self.site.site_config['disqus_shortname']
#if d_sn: # the site uses disqus
piwik_code = None
disqus_code, disqus_shortname, disqus_identifier, disqus_title, disqus_url = None, None, None, None, None
piwik_code_tpl = os.path.join(self.site.dirs['s2'], 'piwik_code.html.tpl')
if os.path.isfile(piwik_code_tpl):
piwik_code = '/piwik_code.html.tpl'
disqus_code_tpl = os.path.join(self.site.dirs['s2'], 'disqus_code.html.tpl')
if os.path.isfile(disqus_code_tpl):
disqus_code = '/disqus_code.html.tpl'
disqus_shortname = self.site.site_config['disqus_shortname']
disqus_identifier = self._config['page_id'][0]
disqus_title = self.title
disqus_url = os.path.join(self.site.site_config['site_url'], self._slug)
rendition = makotemplate.render(pageContent=page_html, isFrontPage=False,
themePath=themepath,
commonPath=commonpath,
pageTitle=self.title,
piwik_code=piwik_code,
disqus_code=disqus_code,
disqus_shortname=disqus_shortname,
disqus_identifier=disqus_identifier,
disqus_url=disqus_url,
disqus_title=disqus_title)
return rendition | [
"def",
"render",
"(",
"self",
")",
":",
"(",
"pthemedir",
",",
"ptemplatefname",
")",
"=",
"self",
".",
"_theme_and_template_fp",
"(",
")",
"mylookup",
"=",
"TemplateLookup",
"(",
"directories",
"=",
"[",
"self",
".",
"site",
".",
"dirs",
"[",
"'s2'",
"]",
",",
"pthemedir",
"]",
",",
"input_encoding",
"=",
"'utf-8'",
",",
"output_encoding",
"=",
"'utf-8'",
")",
"makotemplate",
"=",
"Template",
"(",
"filename",
"=",
"ptemplatefname",
",",
"lookup",
"=",
"mylookup",
",",
"module_directory",
"=",
"self",
".",
"site",
".",
"_makodir",
")",
"# I don't really need to use the meta extension here, because I render self._content (has no metadata)",
"#page_html = markdown.markdown(self._content)",
"md",
"=",
"markdown",
".",
"Markdown",
"(",
"extensions",
"=",
"[",
"'meta'",
",",
"'fenced_code'",
",",
"'codehilite'",
"]",
",",
"output_format",
"=",
"\"html5\"",
")",
"page_html",
"=",
"md",
".",
"convert",
"(",
"self",
".",
"_content",
")",
"# need to trigger the conversion to obtain md.Meta",
"# We assume that the page is always in a dir one level below www",
"themepath",
"=",
"\"../themes/\"",
"+",
"os",
".",
"path",
".",
"split",
"(",
"pthemedir",
")",
"[",
"1",
"]",
"+",
"'/'",
"commonpath",
"=",
"\"../common/\"",
"# HERE I'll pass the config variable to the mako template, so I can use the title etc.",
"#buf = StringIO()",
"#ctx = Context(buf, dict(pageContent=page_html, isFrontPage=False, themePath=themepath, pageTitle='pedo',",
"# commonPath=commonpath))",
"#makotemplate.render_context(ctx)",
"#rendition = buf.getvalue()",
"# IS THERE `PIWIK CODE?",
"# IS THERE DISQUS CODE?",
"# READ from s2 if there's disqus_code.html.tpl and piwik_code.html.tpl",
"# if there's piwik, just define the variable piwik_code with its contents",
"# if there's disqus... nested render?",
"# HERE I NEED TO DIRECTLY INCLUDE A TEMPLATE IN ANOTHER TEMPLATE!!! MAKO!",
"#d_sn = self.site.site_config['disqus_shortname']",
"#if d_sn: # the site uses disqus",
"piwik_code",
"=",
"None",
"disqus_code",
",",
"disqus_shortname",
",",
"disqus_identifier",
",",
"disqus_title",
",",
"disqus_url",
"=",
"None",
",",
"None",
",",
"None",
",",
"None",
",",
"None",
"piwik_code_tpl",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"site",
".",
"dirs",
"[",
"'s2'",
"]",
",",
"'piwik_code.html.tpl'",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"piwik_code_tpl",
")",
":",
"piwik_code",
"=",
"'/piwik_code.html.tpl'",
"disqus_code_tpl",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"site",
".",
"dirs",
"[",
"'s2'",
"]",
",",
"'disqus_code.html.tpl'",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"disqus_code_tpl",
")",
":",
"disqus_code",
"=",
"'/disqus_code.html.tpl'",
"disqus_shortname",
"=",
"self",
".",
"site",
".",
"site_config",
"[",
"'disqus_shortname'",
"]",
"disqus_identifier",
"=",
"self",
".",
"_config",
"[",
"'page_id'",
"]",
"[",
"0",
"]",
"disqus_title",
"=",
"self",
".",
"title",
"disqus_url",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"site",
".",
"site_config",
"[",
"'site_url'",
"]",
",",
"self",
".",
"_slug",
")",
"rendition",
"=",
"makotemplate",
".",
"render",
"(",
"pageContent",
"=",
"page_html",
",",
"isFrontPage",
"=",
"False",
",",
"themePath",
"=",
"themepath",
",",
"commonPath",
"=",
"commonpath",
",",
"pageTitle",
"=",
"self",
".",
"title",
",",
"piwik_code",
"=",
"piwik_code",
",",
"disqus_code",
"=",
"disqus_code",
",",
"disqus_shortname",
"=",
"disqus_shortname",
",",
"disqus_identifier",
"=",
"disqus_identifier",
",",
"disqus_url",
"=",
"disqus_url",
",",
"disqus_title",
"=",
"disqus_title",
")",
"return",
"rendition"
] | 49.753623 | 0.010568 | [
"def render(self):\n",
" \"\"\"Render this page and return the rendition.\n",
"\n",
" Converts the markdown content to html, and then renders the\n",
" (mako) template specified in the config, using that html.\n",
"\n",
" The task of writing of the rendition to a real file is\n",
" responsibility of the generate method.\n",
"\n",
" \"\"\"\n",
" (pthemedir, ptemplatefname) = self._theme_and_template_fp()\n",
"\n",
" mylookup = TemplateLookup(directories=[self.site.dirs['s2'], pthemedir], input_encoding='utf-8', output_encoding='utf-8')\n",
"\n",
"\n",
" makotemplate = Template(filename=ptemplatefname, lookup=mylookup,\n",
" module_directory=self.site._makodir)\n",
"\n",
" # I don't really need to use the meta extension here, because I render self._content (has no metadata)\n",
" #page_html = markdown.markdown(self._content)\n",
"\n",
" md = markdown.Markdown(extensions=['meta','fenced_code', 'codehilite'],output_format=\"html5\")\n",
" page_html = md.convert(self._content) # need to trigger the conversion to obtain md.Meta\n",
"\n",
" # We assume that the page is always in a dir one level below www\n",
" themepath = \"../themes/\" + os.path.split(pthemedir)[1] + '/'\n",
" commonpath = \"../common/\"\n",
"\n",
" # HERE I'll pass the config variable to the mako template, so I can use the title etc.\n",
" #buf = StringIO()\n",
" #ctx = Context(buf, dict(pageContent=page_html, isFrontPage=False, themePath=themepath, pageTitle='pedo',\n",
" # commonPath=commonpath))\n",
" #makotemplate.render_context(ctx)\n",
" #rendition = buf.getvalue()\n",
"\n",
" # IS THERE `PIWIK CODE?\n",
" # IS THERE DISQUS CODE?\n",
" # READ from s2 if there's disqus_code.html.tpl and piwik_code.html.tpl\n",
" # if there's piwik, just define the variable piwik_code with its contents\n",
" # if there's disqus... nested render?\n",
" # HERE I NEED TO DIRECTLY INCLUDE A TEMPLATE IN ANOTHER TEMPLATE!!! MAKO!\n",
" #d_sn = self.site.site_config['disqus_shortname']\n",
" #if d_sn: # the site uses disqus\n",
" piwik_code = None\n",
" disqus_code, disqus_shortname, disqus_identifier, disqus_title, disqus_url= None, None, None, None, None\n",
"\n",
" piwik_code_tpl = os.path.join(self.site.dirs['s2'],'piwik_code.html.tpl')\n",
" if os.path.isfile(piwik_code_tpl):\n",
" piwik_code = '/piwik_code.html.tpl'\n",
"\n",
" disqus_code_tpl = os.path.join(self.site.dirs['s2'],'disqus_code.html.tpl')\n",
" if os.path.isfile(disqus_code_tpl):\n",
" disqus_code = '/disqus_code.html.tpl'\n",
" disqus_shortname = self.site.site_config['disqus_shortname']\n",
" disqus_identifier = self._config['page_id'][0]\n",
" disqus_title = self.title\n",
" disqus_url = os.path.join(self.site.site_config['site_url'],self._slug)\n",
"\n",
" rendition = makotemplate.render(pageContent=page_html,isFrontPage=False,\n",
" themePath=themepath,\n",
" commonPath=commonpath,\n",
" pageTitle=self.title,\n",
" piwik_code=piwik_code,\n",
" disqus_code=disqus_code,\n",
" disqus_shortname = disqus_shortname,\n",
" disqus_identifier = disqus_identifier,\n",
" disqus_url = disqus_url,\n",
" disqus_title= disqus_title)\n",
" return rendition"
] | [
0,
0.018518518518518517,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.007692307692307693,
0,
0,
0.013513513513513514,
0,
0,
0.009009009009009009,
0.018518518518518517,
0,
0.029411764705882353,
0.010101010101010102,
0,
0,
0,
0,
0,
0.010526315789473684,
0.038461538461538464,
0.017543859649122806,
0,
0.023809523809523808,
0.027777777777777776,
0,
0,
0,
0,
0.012195121951219513,
0,
0.012195121951219513,
0.017241379310344827,
0.023255813953488372,
0,
0.017699115044247787,
0,
0.024390243902439025,
0,
0,
0,
0.023809523809523808,
0,
0,
0,
0,
0,
0.023809523809523808,
0,
0.024691358024691357,
0,
0,
0,
0,
0,
0.025974025974025976,
0.02531645569620253,
0.03076923076923077,
0.014705882352941176,
0.041666666666666664
] | 69 | 0.007864 |
def voronoi_neighbors_from_pixels_and_ridge_points(pixels, ridge_points):
"""Compute the neighbors of every pixel as a list of the pixel index's each pixel shares a vertex with.
The ridge points of the Voronoi grid are used to derive this.
Parameters
----------
ridge_points : scipy.spatial.Voronoi.ridge_points
Each Voronoi-ridge (two indexes representing a pixel mapping_matrix).
"""
pixel_neighbors_size = np.zeros(shape=(pixels))
for ridge_index in range(ridge_points.shape[0]):
pair0 = ridge_points[ridge_index, 0]
pair1 = ridge_points[ridge_index, 1]
pixel_neighbors_size[pair0] += 1
pixel_neighbors_size[pair1] += 1
pixel_neighbors_index = np.zeros(shape=(pixels))
pixel_neighbors = -1 * np.ones(shape=(pixels, int(np.max(pixel_neighbors_size))))
for ridge_index in range(ridge_points.shape[0]):
pair0 = ridge_points[ridge_index, 0]
pair1 = ridge_points[ridge_index, 1]
pixel_neighbors[pair0, int(pixel_neighbors_index[pair0])] = pair1
pixel_neighbors[pair1, int(pixel_neighbors_index[pair1])] = pair0
pixel_neighbors_index[pair0] += 1
pixel_neighbors_index[pair1] += 1
return pixel_neighbors, pixel_neighbors_size | [
"def",
"voronoi_neighbors_from_pixels_and_ridge_points",
"(",
"pixels",
",",
"ridge_points",
")",
":",
"pixel_neighbors_size",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"(",
"pixels",
")",
")",
"for",
"ridge_index",
"in",
"range",
"(",
"ridge_points",
".",
"shape",
"[",
"0",
"]",
")",
":",
"pair0",
"=",
"ridge_points",
"[",
"ridge_index",
",",
"0",
"]",
"pair1",
"=",
"ridge_points",
"[",
"ridge_index",
",",
"1",
"]",
"pixel_neighbors_size",
"[",
"pair0",
"]",
"+=",
"1",
"pixel_neighbors_size",
"[",
"pair1",
"]",
"+=",
"1",
"pixel_neighbors_index",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"(",
"pixels",
")",
")",
"pixel_neighbors",
"=",
"-",
"1",
"*",
"np",
".",
"ones",
"(",
"shape",
"=",
"(",
"pixels",
",",
"int",
"(",
"np",
".",
"max",
"(",
"pixel_neighbors_size",
")",
")",
")",
")",
"for",
"ridge_index",
"in",
"range",
"(",
"ridge_points",
".",
"shape",
"[",
"0",
"]",
")",
":",
"pair0",
"=",
"ridge_points",
"[",
"ridge_index",
",",
"0",
"]",
"pair1",
"=",
"ridge_points",
"[",
"ridge_index",
",",
"1",
"]",
"pixel_neighbors",
"[",
"pair0",
",",
"int",
"(",
"pixel_neighbors_index",
"[",
"pair0",
"]",
")",
"]",
"=",
"pair1",
"pixel_neighbors",
"[",
"pair1",
",",
"int",
"(",
"pixel_neighbors_index",
"[",
"pair1",
"]",
")",
"]",
"=",
"pair0",
"pixel_neighbors_index",
"[",
"pair0",
"]",
"+=",
"1",
"pixel_neighbors_index",
"[",
"pair1",
"]",
"+=",
"1",
"return",
"pixel_neighbors",
",",
"pixel_neighbors_size"
] | 39.806452 | 0.002373 | [
"def voronoi_neighbors_from_pixels_and_ridge_points(pixels, ridge_points):\n",
" \"\"\"Compute the neighbors of every pixel as a list of the pixel index's each pixel shares a vertex with.\n",
"\n",
" The ridge points of the Voronoi grid are used to derive this.\n",
"\n",
" Parameters\n",
" ----------\n",
" ridge_points : scipy.spatial.Voronoi.ridge_points\n",
" Each Voronoi-ridge (two indexes representing a pixel mapping_matrix).\n",
" \"\"\"\n",
"\n",
" pixel_neighbors_size = np.zeros(shape=(pixels))\n",
"\n",
" for ridge_index in range(ridge_points.shape[0]):\n",
" pair0 = ridge_points[ridge_index, 0]\n",
" pair1 = ridge_points[ridge_index, 1]\n",
" pixel_neighbors_size[pair0] += 1\n",
" pixel_neighbors_size[pair1] += 1\n",
"\n",
" pixel_neighbors_index = np.zeros(shape=(pixels))\n",
" pixel_neighbors = -1 * np.ones(shape=(pixels, int(np.max(pixel_neighbors_size))))\n",
"\n",
" for ridge_index in range(ridge_points.shape[0]):\n",
" pair0 = ridge_points[ridge_index, 0]\n",
" pair1 = ridge_points[ridge_index, 1]\n",
" pixel_neighbors[pair0, int(pixel_neighbors_index[pair0])] = pair1\n",
" pixel_neighbors[pair1, int(pixel_neighbors_index[pair1])] = pair0\n",
" pixel_neighbors_index[pair0] += 1\n",
" pixel_neighbors_index[pair1] += 1\n",
"\n",
" return pixel_neighbors, pixel_neighbors_size"
] | [
0,
0.009259259259259259,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011627906976744186,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.020833333333333332
] | 31 | 0.001346 |
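Usage sketch feeding scipy's ridge_points through the function; each row of the result lists a point's ridge-sharing neighbours, padded with -1:

import numpy as np
from scipy.spatial import Voronoi

points = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.], [.5, .5]])
vor = Voronoi(points)

neighbors, sizes = voronoi_neighbors_from_pixels_and_ridge_points(
    pixels=len(points), ridge_points=vor.ridge_points)
print(sizes)       # number of neighbours per input point
print(neighbors)   # their indices, one row per point, padded with -1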
def format_hyperlink(val, hlx, hxl, xhl):
"""
Formats an html hyperlink into other forms.
@hlx, hxl, xhl: values returned by set_output_format
"""
if '<a href="' in str(val) and hlx != '<a href="':
val = val.replace('<a href="', hlx).replace('">', hxl, 1).replace('</a>', xhl)
return val | [
"def",
"format_hyperlink",
"(",
"val",
",",
"hlx",
",",
"hxl",
",",
"xhl",
")",
":",
"if",
"'<a href=\"'",
"in",
"str",
"(",
"val",
")",
"and",
"hlx",
"!=",
"'<a href=\"'",
":",
"val",
"=",
"val",
".",
"replace",
"(",
"'<a href=\"'",
",",
"hlx",
")",
".",
"replace",
"(",
"'\">'",
",",
"hxl",
",",
"1",
")",
".",
"replace",
"(",
"'</a>'",
",",
"xhl",
")",
"return",
"val"
] | 31.5 | 0.015432 | [
"def format_hyperlink( val, hlx, hxl, xhl ):\n",
" \"\"\"\n",
" Formats an html hyperlink into other forms.\n",
"\n",
" @hlx, hxl, xhl: values returned by set_output_format\n",
" \"\"\"\n",
" if '<a href=\"' in str(val) and hlx != '<a href=\"':\n",
" val = val.replace('<a href=\"', hlx).replace('\">', hxl, 1).replace('</a>', xhl) \n",
"\n",
" return val"
] | [
0.045454545454545456,
0,
0,
0,
0,
0,
0,
0.022727272727272728,
0,
0.07142857142857142
] | 10 | 0.013961 |
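hlx, hxl and xhl replace the opening '<a href="', the first '">' and the closing '</a>' respectively, so the output comes out as hlx + url + hxl + link text + xhl. For example, a plain-text bracket style:

html = 'Read the <a href="https://example.com">docs</a> first.'
print(format_hyperlink(html, '[', '] ', ''))
# Read the [https://example.com] docs first.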
def fit_apply(fit_result, vec_array):
'''fit_apply(fit_result, vec_array) -> vec_array
Applies a fit result to an array of vectors
'''
return map(lambda x, t1=fit_result[0], mt2=negate(fit_result[1]),
m=fit_result[2]: add(t1, transform(m, add(mt2, x))), vec_array)
"def",
"fit_apply",
"(",
"fit_result",
",",
"vec_array",
")",
":",
"return",
"map",
"(",
"lambda",
"x",
",",
"t1",
"=",
"fit_result",
"[",
"0",
"]",
",",
"mt2",
"=",
"negate",
"(",
"fit_result",
"[",
"1",
"]",
")",
",",
"m",
"=",
"fit_result",
"[",
"2",
"]",
":",
"add",
"(",
"t1",
",",
"transform",
"(",
"m",
",",
"add",
"(",
"mt2",
",",
"x",
")",
")",
")",
",",
"vec_array",
")"
] | 35 | 0.038328 | [
"def fit_apply(fit_result,vec_array):\n",
" '''fit_apply(fir_result,vec_array) -> vec_array\n",
" \n",
" Applies a fit result to an array of vectors\n",
" '''\n",
"\n",
" return map( lambda x,t1=fit_result[0],mt2=negate(fit_result[1]),\n",
" m=fit_result[2]: add(t1,transform(m,add(mt2,x))),vec_array)"
] | [
0.02702702702702703,
0,
0.2,
0,
0,
0,
0.043478260869565216,
0.08955223880597014
] | 8 | 0.045007 |
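fit_apply() relies on the old default-argument trick to freeze values inside a lambda and on Python 2's eager map(); under Python 3 the map() would be returned lazily. A modern equivalent, assuming the same add/negate/transform vector helpers:

def fit_apply_modern(fit_result, vec_array):
    # Unpack once instead of smuggling values in as lambda defaults.
    t1, t2, m = fit_result
    mt2 = negate(t2)
    return [add(t1, transform(m, add(mt2, x))) for x in vec_array]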
def visualRect(self, index):
"""The rectangle for the bounds of the item at *index*. :qtdoc:`Re-implemented<QAbstractItemView.visualRect>`
:param index: index for the rect you want
:type index: :qtdoc:`QModelIndex`
:returns: :qtdoc:`QRect` -- rectangle of the borders of the item
"""
if len(self._rects[index.row()]) - 1 < index.column() or index.row() == -1:
# Er, so I don't know why this was getting called with index -1
return QtCore.QRect()
return self.visualRectRC(index.row(), index.column())
"def",
"visualRect",
"(",
"self",
",",
"index",
")",
":",
"if",
"len",
"(",
"self",
".",
"_rects",
"[",
"index",
".",
"row",
"(",
")",
"]",
")",
"-",
"1",
"<",
"index",
".",
"column",
"(",
")",
"or",
"index",
".",
"row",
"(",
")",
"==",
"-",
"1",
":",
"#Er, so I don't know why this was getting called with index -1",
"return",
"QtCore",
".",
"QRect",
"(",
")",
"return",
"self",
".",
"visualRectRC",
"(",
"index",
".",
"row",
"(",
")",
",",
"index",
".",
"column",
"(",
")",
")"
] | 47.583333 | 0.013746 | [
"def visualRect(self, index):\n",
" \"\"\"The rectangle for the bounds of the item at *index*. :qtdoc:`Re-implemented<QAbstractItemView.visualRect>`\n",
"\n",
" :param index: index for the rect you want\n",
" :type index: :qtdoc:`QModelIndex`\n",
" :returns: :qtdoc:`QRect` -- rectangle of the borders of the item\n",
" \"\"\"\n",
" if len(self._rects[index.row()]) -1 < index.column() or index.row() == -1:\n",
" #Er, so I don't know why this was getting called with index -1\n",
" return QtCore.QRect()\n",
" \n",
" return self.visualRectRC(index.row(),index.column())"
] | [
0,
0.01694915254237288,
0,
0,
0,
0,
0,
0.024096385542168676,
0.013333333333333334,
0,
0.2,
0.03333333333333333
] | 12 | 0.023976 |
def graph_impl(self, run, tag, is_conceptual, limit_attr_size=None, large_attrs_key=None):
"""Result of the form `(body, mime_type)`, or `None` if no graph exists."""
if is_conceptual:
tensor_events = self._multiplexer.Tensors(run, tag)
# Take the first event if there are multiple events written from different
# steps.
keras_model_config = json.loads(tensor_events[0].tensor_proto.string_val[0])
graph = keras_util.keras_model_to_graph_def(keras_model_config)
elif tag:
tensor_events = self._multiplexer.Tensors(run, tag)
# Take the first event if there are multiple events written from different
# steps.
run_metadata = config_pb2.RunMetadata.FromString(
tensor_events[0].tensor_proto.string_val[0])
graph = graph_pb2.GraphDef()
for func_graph in run_metadata.function_graphs:
graph_util.combine_graph_defs(graph, func_graph.pre_optimization_graph)
else:
graph = self._multiplexer.Graph(run)
# This next line might raise a ValueError if the limit parameters
# are invalid (size is negative, size present but key absent, etc.).
process_graph.prepare_graph_for_ui(graph, limit_attr_size, large_attrs_key)
return (str(graph), 'text/x-protobuf') | [
"def",
"graph_impl",
"(",
"self",
",",
"run",
",",
"tag",
",",
"is_conceptual",
",",
"limit_attr_size",
"=",
"None",
",",
"large_attrs_key",
"=",
"None",
")",
":",
"if",
"is_conceptual",
":",
"tensor_events",
"=",
"self",
".",
"_multiplexer",
".",
"Tensors",
"(",
"run",
",",
"tag",
")",
"# Take the first event if there are multiple events written from different",
"# steps.",
"keras_model_config",
"=",
"json",
".",
"loads",
"(",
"tensor_events",
"[",
"0",
"]",
".",
"tensor_proto",
".",
"string_val",
"[",
"0",
"]",
")",
"graph",
"=",
"keras_util",
".",
"keras_model_to_graph_def",
"(",
"keras_model_config",
")",
"elif",
"tag",
":",
"tensor_events",
"=",
"self",
".",
"_multiplexer",
".",
"Tensors",
"(",
"run",
",",
"tag",
")",
"# Take the first event if there are multiple events written from different",
"# steps.",
"run_metadata",
"=",
"config_pb2",
".",
"RunMetadata",
".",
"FromString",
"(",
"tensor_events",
"[",
"0",
"]",
".",
"tensor_proto",
".",
"string_val",
"[",
"0",
"]",
")",
"graph",
"=",
"graph_pb2",
".",
"GraphDef",
"(",
")",
"for",
"func_graph",
"in",
"run_metadata",
".",
"function_graphs",
":",
"graph_util",
".",
"combine_graph_defs",
"(",
"graph",
",",
"func_graph",
".",
"pre_optimization_graph",
")",
"else",
":",
"graph",
"=",
"self",
".",
"_multiplexer",
".",
"Graph",
"(",
"run",
")",
"# This next line might raise a ValueError if the limit parameters",
"# are invalid (size is negative, size present but key absent, etc.).",
"process_graph",
".",
"prepare_graph_for_ui",
"(",
"graph",
",",
"limit_attr_size",
",",
"large_attrs_key",
")",
"return",
"(",
"str",
"(",
"graph",
")",
",",
"'text/x-protobuf'",
")"
] | 49.76 | 0.013407 | [
"def graph_impl(self, run, tag, is_conceptual, limit_attr_size=None, large_attrs_key=None):\n",
" \"\"\"Result of the form `(body, mime_type)`, or `None` if no graph exists.\"\"\"\n",
" if is_conceptual:\n",
" tensor_events = self._multiplexer.Tensors(run, tag)\n",
" # Take the first event if there are multiple events written from different\n",
" # steps.\n",
" keras_model_config = json.loads(tensor_events[0].tensor_proto.string_val[0])\n",
" graph = keras_util.keras_model_to_graph_def(keras_model_config)\n",
" elif tag:\n",
" tensor_events = self._multiplexer.Tensors(run, tag)\n",
" # Take the first event if there are multiple events written from different\n",
" # steps.\n",
" run_metadata = config_pb2.RunMetadata.FromString(\n",
" tensor_events[0].tensor_proto.string_val[0])\n",
" graph = graph_pb2.GraphDef()\n",
"\n",
" for func_graph in run_metadata.function_graphs:\n",
" graph_util.combine_graph_defs(graph, func_graph.pre_optimization_graph)\n",
" else:\n",
" graph = self._multiplexer.Graph(run)\n",
"\n",
" # This next line might raise a ValueError if the limit parameters\n",
" # are invalid (size is negative, size present but key absent, etc.).\n",
" process_graph.prepare_graph_for_ui(graph, limit_attr_size, large_attrs_key)\n",
" return (str(graph), 'text/x-protobuf')"
] | [
0.01098901098901099,
0,
0,
0.017241379310344827,
0.024691358024691357,
0.06666666666666667,
0.024096385542168676,
0.014285714285714285,
0,
0.017241379310344827,
0.024691358024691357,
0.06666666666666667,
0.017857142857142856,
0,
0.02857142857142857,
0,
0.018518518518518517,
0,
0,
0.023255813953488372,
0,
0,
0,
0,
0.023809523809523808
] | 25 | 0.015143 |
def readin_rho(filename, rhofile=True, aniso=False):
"""Read in the values of the resistivity in Ohmm.
The format is variable: rho-file or mag-file.
"""
if aniso:
a = [[0, 1, 2], [2, 3, 4]]
else:
a = [0, 2]
if rhofile:
if filename is None:
filename = 'rho/rho.dat'
with open(filename, 'r') as fid:
mag = np.loadtxt(fid, skiprows=1, usecols=(a[0]))
else:
if filename is None:
filename = read_iter()
with open(filename, 'r') as fid:
mag = np.power(10, np.loadtxt(fid, skiprows=1, usecols=(a[1])))
return mag | [
"def",
"readin_rho",
"(",
"filename",
",",
"rhofile",
"=",
"True",
",",
"aniso",
"=",
"False",
")",
":",
"if",
"aniso",
":",
"a",
"=",
"[",
"[",
"0",
",",
"1",
",",
"2",
"]",
",",
"[",
"2",
",",
"3",
",",
"4",
"]",
"]",
"else",
":",
"a",
"=",
"[",
"0",
",",
"2",
"]",
"if",
"rhofile",
":",
"if",
"filename",
"is",
"None",
":",
"filename",
"=",
"'rho/rho.dat'",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"fid",
":",
"mag",
"=",
"np",
".",
"loadtxt",
"(",
"fid",
",",
"skiprows",
"=",
"1",
",",
"usecols",
"=",
"(",
"a",
"[",
"0",
"]",
")",
")",
"else",
":",
"if",
"filename",
"is",
"None",
":",
"filename",
"=",
"read_iter",
"(",
")",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"fid",
":",
"mag",
"=",
"np",
".",
"power",
"(",
"10",
",",
"np",
".",
"loadtxt",
"(",
"fid",
",",
"skiprows",
"=",
"1",
",",
"usecols",
"=",
"(",
"a",
"[",
"1",
"]",
")",
")",
")",
"return",
"mag"
] | 29.285714 | 0.001575 | [
"def readin_rho(filename, rhofile=True, aniso=False):\n",
" \"\"\"Read in the values of the resistivity in Ohmm.\n",
" The format is variable: rho-file or mag-file.\n",
" \"\"\"\n",
" if aniso:\n",
" a = [[0, 1, 2], [2, 3, 4]]\n",
" else:\n",
" a = [0, 2]\n",
" if rhofile:\n",
" if filename is None:\n",
" filename = 'rho/rho.dat'\n",
" with open(filename, 'r') as fid:\n",
" mag = np.loadtxt(fid, skiprows=1, usecols=(a[0]))\n",
"\n",
" else:\n",
" if filename is None:\n",
" filename = read_iter()\n",
" with open(filename, 'r') as fid:\n",
" mag = np.power(10, np.loadtxt(fid, skiprows=1, usecols=(a[1])))\n",
"\n",
" return mag"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.07142857142857142
] | 21 | 0.003401 |
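With rhofile=True, readin_rho() reads column 0 of a whitespace-separated table, skipping one header line (columns 0-2 when aniso=True); the mag-file branch reads log10 values instead. A synthetic rho.dat, just to show the expected shape:

import numpy as np

with open('rho.dat', 'w') as fid:
    fid.write('3\n')              # header line (skipped)
    fid.write('10.0   0.0\n')
    fid.write('100.0  0.0\n')
    fid.write('55.5   0.0\n')

print(readin_rho('rho.dat'))      # [ 10.  100.   55.5]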
def plot_world(world, **kwargs):
"""
Adds a heat-map representing the data in world (an EnvironmentFile object)
to the current plot.
kwargs:
palette - a seaborn palette (list of RGB values) indicating
how to color values. Will be converted to a continuous
colormap if necessary
denom - the maximum value of numbers in the grid (only used
if the grid actually contains numbers). This is used
to normalize values and use the full dynamic range of
the color palette.
"""
denom, palette = get_kwargs(world, kwargs, False)
world = color_grid(world, palette, denom, True)
plt.tick_params(labelbottom="off", labeltop="off", labelleft="off",
labelright="off", bottom="off", top="off",
left="off", right="off")
# plt.tight_layout()
plt.imshow(world, interpolation="none", zorder=1)
axes = plt.gca()
axes.autoscale(False) | [
"def",
"plot_world",
"(",
"world",
",",
"*",
"*",
"kwargs",
")",
":",
"denom",
",",
"palette",
"=",
"get_kwargs",
"(",
"world",
",",
"kwargs",
",",
"False",
")",
"world",
"=",
"color_grid",
"(",
"world",
",",
"palette",
",",
"denom",
",",
"True",
")",
"plt",
".",
"tick_params",
"(",
"labelbottom",
"=",
"\"off\"",
",",
"labeltop",
"=",
"\"off\"",
",",
"labelleft",
"=",
"\"off\"",
",",
"labelright",
"=",
"\"off\"",
",",
"bottom",
"=",
"\"off\"",
",",
"top",
"=",
"\"off\"",
",",
"left",
"=",
"\"off\"",
",",
"right",
"=",
"\"off\"",
")",
"# plt.tight_layout()",
"plt",
".",
"imshow",
"(",
"world",
",",
"interpolation",
"=",
"\"none\"",
",",
"zorder",
"=",
"1",
")",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"axes",
".",
"autoscale",
"(",
"False",
")"
] | 45.043478 | 0.000945 | [
"def plot_world(world, **kwargs):\n",
" \"\"\"\n",
" Addes a heat-map representing the data in world (an EnvironmentFile object)\n",
" to the current plot.\n",
"\n",
" kwargs:\n",
" palette - a seaborn palette (list of RGB values) indicating\n",
" how to color values. Will be converted to a continuous\n",
" colormap if necessary\n",
" denom - the maximum value of numbers in the grid (only used\n",
" if the grid actually contains numbers). This is used\n",
" to normalize values and use the full dynamic range of\n",
" the color pallete.\n",
" \"\"\"\n",
" denom, palette = get_kwargs(world, kwargs, False)\n",
" world = color_grid(world, palette, denom, True)\n",
" plt.tick_params(labelbottom=\"off\", labeltop=\"off\", labelleft=\"off\",\n",
" labelright=\"off\", bottom=\"off\", top=\"off\",\n",
" left=\"off\", right=\"off\")\n",
" # plt.tight_layout()\n",
" plt.imshow(world, interpolation=\"none\", zorder=1)\n",
" axes = plt.gca()\n",
" axes.autoscale(False)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.04
] | 23 | 0.001739 |
def list_chunks(l, n):
"""
Return a list of chunks
:param l: List
:param n: int The number of items per chunk
:return: List
"""
if n < 1:
n = 1
return [l[i:i + n] for i in range(0, len(l), n)] | [
"def",
"list_chunks",
"(",
"l",
",",
"n",
")",
":",
"if",
"n",
"<",
"1",
":",
"n",
"=",
"1",
"return",
"[",
"l",
"[",
"i",
":",
"i",
"+",
"n",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"l",
")",
",",
"n",
")",
"]"
] | 22.3 | 0.008621 | [
"def list_chunks(l, n):\n",
" \"\"\"\n",
" Return a list of chunks\n",
" :param l: List\n",
" :param n: int The number of items per chunk\n",
" :return: List\n",
" \"\"\"\n",
" if n < 1:\n",
" n = 1\n",
" return [l[i:i + n] for i in range(0, len(l), n)]"
] | [
0.043478260869565216,
0,
0,
0,
0,
0,
0,
0,
0,
0.019230769230769232
] | 10 | 0.006271 |
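Usage: the input is split into runs of n items with a shorter final chunk when the length is not a multiple of n, and n < 1 is clamped to 1:

print(list_chunks([1, 2, 3, 4, 5, 6, 7], 3))   # [[1, 2, 3], [4, 5, 6], [7]]
print(list_chunks(['a', 'b', 'c'], 0))         # clamped to 1 -> [['a'], ['b'], ['c']]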
def update(self, a, b, c, d):
"""
Update contingency table with new values without creating a new object.
"""
self.table.ravel()[:] = [a, b, c, d]
self.N = self.table.sum() | [
"def",
"update",
"(",
"self",
",",
"a",
",",
"b",
",",
"c",
",",
"d",
")",
":",
"self",
".",
"table",
".",
"ravel",
"(",
")",
"[",
":",
"]",
"=",
"[",
"a",
",",
"b",
",",
"c",
",",
"d",
"]",
"self",
".",
"N",
"=",
"self",
".",
"table",
".",
"sum",
"(",
")"
] | 34.5 | 0.009434 | [
"def update(self, a, b, c, d):\n",
" \"\"\"\n",
" Update contingency table with new values without creating a new object.\n",
" \"\"\"\n",
" self.table.ravel()[:] = [a, b, c, d]\n",
" self.N = self.table.sum()"
] | [
0,
0.08333333333333333,
0,
0,
0,
0.030303030303030304
] | 6 | 0.018939 |
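update() belongs to a 2x2 contingency-table class that is not shown; a minimal hypothetical host, assuming table is a 2x2 numpy array filled row by row, so ravel()[:] = [a, b, c, d] overwrites it in place:

import numpy as np

class ContingencyTable:
    # Hypothetical minimal host class for the update() method above.
    def __init__(self, a, b, c, d):
        self.table = np.array([[a, b], [c, d]], dtype=float)
        self.N = self.table.sum()

ContingencyTable.update = update   # attach the method from this record

ct = ContingencyTable(10, 2, 3, 85)
ct.update(12, 1, 2, 85)            # no new array is allocated
print(ct.table.ravel(), ct.N)      # [12.  1.  2. 85.] 100.0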
def check_auth(self, all_credentials):
"""Update this socket's authentication.
Log in or out to bring this socket's credentials up to date with
those provided. Can raise ConnectionFailure or OperationFailure.
:Parameters:
- `all_credentials`: dict, maps auth source to MongoCredential.
"""
if all_credentials or self.authset:
cached = set(itervalues(all_credentials))
authset = self.authset.copy()
# Logout any credentials that no longer exist in the cache.
for credentials in authset - cached:
auth.logout(credentials.source, self)
self.authset.discard(credentials)
for credentials in cached - authset:
auth.authenticate(credentials, self)
self.authset.add(credentials) | [
"def",
"check_auth",
"(",
"self",
",",
"all_credentials",
")",
":",
"if",
"all_credentials",
"or",
"self",
".",
"authset",
":",
"cached",
"=",
"set",
"(",
"itervalues",
"(",
"all_credentials",
")",
")",
"authset",
"=",
"self",
".",
"authset",
".",
"copy",
"(",
")",
"# Logout any credentials that no longer exist in the cache.",
"for",
"credentials",
"in",
"authset",
"-",
"cached",
":",
"auth",
".",
"logout",
"(",
"credentials",
".",
"source",
",",
"self",
")",
"self",
".",
"authset",
".",
"discard",
"(",
"credentials",
")",
"for",
"credentials",
"in",
"cached",
"-",
"authset",
":",
"auth",
".",
"authenticate",
"(",
"credentials",
",",
"self",
")",
"self",
".",
"authset",
".",
"add",
"(",
"credentials",
")"
] | 39.809524 | 0.002336 | [
"def check_auth(self, all_credentials):\n",
" \"\"\"Update this socket's authentication.\n",
"\n",
" Log in or out to bring this socket's credentials up to date with\n",
" those provided. Can raise ConnectionFailure or OperationFailure.\n",
"\n",
" :Parameters:\n",
" - `all_credentials`: dict, maps auth source to MongoCredential.\n",
" \"\"\"\n",
" if all_credentials or self.authset:\n",
" cached = set(itervalues(all_credentials))\n",
" authset = self.authset.copy()\n",
"\n",
" # Logout any credentials that no longer exist in the cache.\n",
" for credentials in authset - cached:\n",
" auth.logout(credentials.source, self)\n",
" self.authset.discard(credentials)\n",
"\n",
" for credentials in cached - authset:\n",
" auth.authenticate(credentials, self)\n",
" self.authset.add(credentials)"
] | [
0,
0.020833333333333332,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.022222222222222223
] | 21 | 0.00205 |
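
The two set differences in check_auth drive exactly two passes: log out credentials that are no longer cached, log in credentials that are newly cached. A stubbed sketch of that bookkeeping (hypothetical credential strings; the real code calls auth.logout and auth.authenticate):

authset = {"alice@db1", "bob@db2"}   # credentials active on this socket
cached = {"bob@db2", "carol@db3"}    # credentials the client now holds

for cred in authset - cached:        # stale: would call auth.logout(...)
    print("logout", cred)
    authset.discard(cred)
for cred in cached - authset:        # missing: would call auth.authenticate(...)
    print("login", cred)
    authset.add(cred)

print(sorted(authset))               # ['bob@db2', 'carol@db3']
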
def from_semiaxes(cls,axes):
"""
    Get axis-aligned elliptical conic from axis lengths
This can be converted into a hyperbola by getting the dual conic
"""
ax = list(1/N.array(axes)**2)
#ax[-1] *= -1 # Not sure what is going on here...
arr = N.diag(ax + [-1])
return arr.view(cls) | [
"def",
"from_semiaxes",
"(",
"cls",
",",
"axes",
")",
":",
"ax",
"=",
"list",
"(",
"1",
"/",
"N",
".",
"array",
"(",
"axes",
")",
"**",
"2",
")",
"#ax[-1] *= -1 # Not sure what is going on here...",
"arr",
"=",
"N",
".",
"diag",
"(",
"ax",
"+",
"[",
"-",
"1",
"]",
")",
"return",
"arr",
".",
"view",
"(",
"cls",
")"
] | 37.111111 | 0.011696 | [
"def from_semiaxes(cls,axes):\n",
" \"\"\"\n",
" Get axis-aligned elliptical conic from axis lenths\n",
" This can be converted into a hyperbola by getting the dual conic\n",
" \"\"\"\n",
" ax = list(1/N.array(axes)**2)\n",
" #ax[-1] *= -1 # Not sure what is going on here...\n",
" arr = N.diag(ax + [-1])\n",
" return arr.view(cls)"
] | [
0.034482758620689655,
0.08333333333333333,
0,
0,
0,
0,
0.01694915254237288,
0,
0.03571428571428571
] | 9 | 0.018942 |
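
For semiaxes a=2, b=3 the sample above builds diag(1/a**2, 1/b**2, -1), i.e. the homogeneous form x**2/4 + y**2/9 - 1 = 0; a standalone sketch (illustrative, not a dataset row):

import numpy as N

axes = (2.0, 3.0)
ax = list(1 / N.array(axes) ** 2)
arr = N.diag(ax + [-1])
print(arr)
# [[ 0.25    0.      0.  ]
#  [ 0.      0.111.  0.  ]
#  [ 0.      0.     -1.  ]]
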
def validate(ref_time, ref_freqs, est_time, est_freqs):
"""Checks that the time and frequency inputs are well-formed.
Parameters
----------
ref_time : np.ndarray
reference time stamps in seconds
ref_freqs : list of np.ndarray
reference frequencies in Hz
est_time : np.ndarray
estimate time stamps in seconds
est_freqs : list of np.ndarray
estimated frequencies in Hz
"""
util.validate_events(ref_time, max_time=MAX_TIME)
util.validate_events(est_time, max_time=MAX_TIME)
if ref_time.size == 0:
warnings.warn("Reference times are empty.")
if ref_time.ndim != 1:
raise ValueError("Reference times have invalid dimension")
if len(ref_freqs) == 0:
warnings.warn("Reference frequencies are empty.")
if est_time.size == 0:
warnings.warn("Estimated times are empty.")
if est_time.ndim != 1:
raise ValueError("Estimated times have invalid dimension")
if len(est_freqs) == 0:
warnings.warn("Estimated frequencies are empty.")
if ref_time.size != len(ref_freqs):
raise ValueError('Reference times and frequencies have unequal '
'lengths.')
if est_time.size != len(est_freqs):
raise ValueError('Estimate times and frequencies have unequal '
'lengths.')
for freq in ref_freqs:
util.validate_frequencies(freq, max_freq=MAX_FREQ, min_freq=MIN_FREQ,
allow_negatives=False)
for freq in est_freqs:
util.validate_frequencies(freq, max_freq=MAX_FREQ, min_freq=MIN_FREQ,
allow_negatives=False) | [
"def",
"validate",
"(",
"ref_time",
",",
"ref_freqs",
",",
"est_time",
",",
"est_freqs",
")",
":",
"util",
".",
"validate_events",
"(",
"ref_time",
",",
"max_time",
"=",
"MAX_TIME",
")",
"util",
".",
"validate_events",
"(",
"est_time",
",",
"max_time",
"=",
"MAX_TIME",
")",
"if",
"ref_time",
".",
"size",
"==",
"0",
":",
"warnings",
".",
"warn",
"(",
"\"Reference times are empty.\"",
")",
"if",
"ref_time",
".",
"ndim",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"Reference times have invalid dimension\"",
")",
"if",
"len",
"(",
"ref_freqs",
")",
"==",
"0",
":",
"warnings",
".",
"warn",
"(",
"\"Reference frequencies are empty.\"",
")",
"if",
"est_time",
".",
"size",
"==",
"0",
":",
"warnings",
".",
"warn",
"(",
"\"Estimated times are empty.\"",
")",
"if",
"est_time",
".",
"ndim",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"Estimated times have invalid dimension\"",
")",
"if",
"len",
"(",
"est_freqs",
")",
"==",
"0",
":",
"warnings",
".",
"warn",
"(",
"\"Estimated frequencies are empty.\"",
")",
"if",
"ref_time",
".",
"size",
"!=",
"len",
"(",
"ref_freqs",
")",
":",
"raise",
"ValueError",
"(",
"'Reference times and frequencies have unequal '",
"'lengths.'",
")",
"if",
"est_time",
".",
"size",
"!=",
"len",
"(",
"est_freqs",
")",
":",
"raise",
"ValueError",
"(",
"'Estimate times and frequencies have unequal '",
"'lengths.'",
")",
"for",
"freq",
"in",
"ref_freqs",
":",
"util",
".",
"validate_frequencies",
"(",
"freq",
",",
"max_freq",
"=",
"MAX_FREQ",
",",
"min_freq",
"=",
"MIN_FREQ",
",",
"allow_negatives",
"=",
"False",
")",
"for",
"freq",
"in",
"est_freqs",
":",
"util",
".",
"validate_frequencies",
"(",
"freq",
",",
"max_freq",
"=",
"MAX_FREQ",
",",
"min_freq",
"=",
"MIN_FREQ",
",",
"allow_negatives",
"=",
"False",
")"
] | 36.555556 | 0.000592 | [
"def validate(ref_time, ref_freqs, est_time, est_freqs):\n",
" \"\"\"Checks that the time and frequency inputs are well-formed.\n",
"\n",
" Parameters\n",
" ----------\n",
" ref_time : np.ndarray\n",
" reference time stamps in seconds\n",
" ref_freqs : list of np.ndarray\n",
" reference frequencies in Hz\n",
" est_time : np.ndarray\n",
" estimate time stamps in seconds\n",
" est_freqs : list of np.ndarray\n",
" estimated frequencies in Hz\n",
"\n",
" \"\"\"\n",
"\n",
" util.validate_events(ref_time, max_time=MAX_TIME)\n",
" util.validate_events(est_time, max_time=MAX_TIME)\n",
"\n",
" if ref_time.size == 0:\n",
" warnings.warn(\"Reference times are empty.\")\n",
" if ref_time.ndim != 1:\n",
" raise ValueError(\"Reference times have invalid dimension\")\n",
" if len(ref_freqs) == 0:\n",
" warnings.warn(\"Reference frequencies are empty.\")\n",
" if est_time.size == 0:\n",
" warnings.warn(\"Estimated times are empty.\")\n",
" if est_time.ndim != 1:\n",
" raise ValueError(\"Estimated times have invalid dimension\")\n",
" if len(est_freqs) == 0:\n",
" warnings.warn(\"Estimated frequencies are empty.\")\n",
" if ref_time.size != len(ref_freqs):\n",
" raise ValueError('Reference times and frequencies have unequal '\n",
" 'lengths.')\n",
" if est_time.size != len(est_freqs):\n",
" raise ValueError('Estimate times and frequencies have unequal '\n",
" 'lengths.')\n",
"\n",
" for freq in ref_freqs:\n",
" util.validate_frequencies(freq, max_freq=MAX_FREQ, min_freq=MIN_FREQ,\n",
" allow_negatives=False)\n",
"\n",
" for freq in est_freqs:\n",
" util.validate_frequencies(freq, max_freq=MAX_FREQ, min_freq=MIN_FREQ,\n",
" allow_negatives=False)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.017857142857142856
] | 45 | 0.000397 |
def generate_value_processor(type_, collectionFormat=None, items=None, **kwargs):
"""
Create a callable that will take the string value of a header and cast it
to the appropriate type. This can involve:
- splitting a header of type 'array' by its delimeters.
- type casting the internal elements of the array.
"""
processors = []
if is_non_string_iterable(type_):
assert False, "This should not be possible"
else:
if type_ == ARRAY and collectionFormat:
if collectionFormat in DELIMETERS:
delimeter = DELIMETERS[collectionFormat]
# split the string based on the delimeter specified by the
# `collectionFormat`
processors.append(operator.methodcaller('split', delimeter))
else:
if collectionFormat != MULTI:
raise TypeError("collectionFormat not implemented")
processors.append(add_string_into_list)
# remove any Falsy values like empty strings.
processors.append(functools.partial(filter, bool))
# strip off any whitespace
processors.append(functools.partial(map, operator.methodcaller('strip')))
if items is not None:
if isinstance(items, collections.Mapping):
items_processors = itertools.repeat(
generate_value_processor(**items)
)
elif isinstance(items, collections.Sequence):
items_processors = itertools.chain(
(generate_value_processor(**item) for item in items),
itertools.repeat(lambda v: v),
)
elif isinstance(items, six.string_types):
raise NotImplementedError("Not implemented")
else:
assert False, "Should not be possible"
# 1. zip the processor and the array items together
# 2. apply the processor to each array item.
# 3. cast the starmap generator to a list.
processors.append(
chain_reduce_partial(
functools.partial(zip, items_processors),
functools.partial(itertools.starmap, lambda fn, v: fn(v)),
list,
)
)
else:
processors.append(
functools.partial(cast_value_to_type, type_=type_)
)
def processor(value, **kwargs):
try:
return chain_reduce_partial(*processors)(value)
except (ValueError, TypeError):
return value
return processor | [
"def",
"generate_value_processor",
"(",
"type_",
",",
"collectionFormat",
"=",
"None",
",",
"items",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"processors",
"=",
"[",
"]",
"if",
"is_non_string_iterable",
"(",
"type_",
")",
":",
"assert",
"False",
",",
"\"This should not be possible\"",
"else",
":",
"if",
"type_",
"==",
"ARRAY",
"and",
"collectionFormat",
":",
"if",
"collectionFormat",
"in",
"DELIMETERS",
":",
"delimeter",
"=",
"DELIMETERS",
"[",
"collectionFormat",
"]",
"# split the string based on the delimeter specified by the",
"# `collectionFormat`",
"processors",
".",
"append",
"(",
"operator",
".",
"methodcaller",
"(",
"'split'",
",",
"delimeter",
")",
")",
"else",
":",
"if",
"collectionFormat",
"!=",
"MULTI",
":",
"raise",
"TypeError",
"(",
"\"collectionFormat not implemented\"",
")",
"processors",
".",
"append",
"(",
"add_string_into_list",
")",
"# remove any Falsy values like empty strings.",
"processors",
".",
"append",
"(",
"functools",
".",
"partial",
"(",
"filter",
",",
"bool",
")",
")",
"# strip off any whitespace",
"processors",
".",
"append",
"(",
"functools",
".",
"partial",
"(",
"map",
",",
"operator",
".",
"methodcaller",
"(",
"'strip'",
")",
")",
")",
"if",
"items",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"items",
",",
"collections",
".",
"Mapping",
")",
":",
"items_processors",
"=",
"itertools",
".",
"repeat",
"(",
"generate_value_processor",
"(",
"*",
"*",
"items",
")",
")",
"elif",
"isinstance",
"(",
"items",
",",
"collections",
".",
"Sequence",
")",
":",
"items_processors",
"=",
"itertools",
".",
"chain",
"(",
"(",
"generate_value_processor",
"(",
"*",
"*",
"item",
")",
"for",
"item",
"in",
"items",
")",
",",
"itertools",
".",
"repeat",
"(",
"lambda",
"v",
":",
"v",
")",
",",
")",
"elif",
"isinstance",
"(",
"items",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"Not implemented\"",
")",
"else",
":",
"assert",
"False",
",",
"\"Should not be possible\"",
"# 1. zip the processor and the array items together",
"# 2. apply the processor to each array item.",
"# 3. cast the starmap generator to a list.",
"processors",
".",
"append",
"(",
"chain_reduce_partial",
"(",
"functools",
".",
"partial",
"(",
"zip",
",",
"items_processors",
")",
",",
"functools",
".",
"partial",
"(",
"itertools",
".",
"starmap",
",",
"lambda",
"fn",
",",
"v",
":",
"fn",
"(",
"v",
")",
")",
",",
"list",
",",
")",
")",
"else",
":",
"processors",
".",
"append",
"(",
"functools",
".",
"partial",
"(",
"cast_value_to_type",
",",
"type_",
"=",
"type_",
")",
")",
"def",
"processor",
"(",
"value",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"chain_reduce_partial",
"(",
"*",
"processors",
")",
"(",
"value",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"value",
"return",
"processor"
] | 43.403226 | 0.001453 | [
"def generate_value_processor(type_, collectionFormat=None, items=None, **kwargs):\n",
" \"\"\"\n",
" Create a callable that will take the string value of a header and cast it\n",
" to the appropriate type. This can involve:\n",
"\n",
" - splitting a header of type 'array' by its delimeters.\n",
" - type casting the internal elements of the array.\n",
" \"\"\"\n",
" processors = []\n",
" if is_non_string_iterable(type_):\n",
" assert False, \"This should not be possible\"\n",
" else:\n",
" if type_ == ARRAY and collectionFormat:\n",
" if collectionFormat in DELIMETERS:\n",
" delimeter = DELIMETERS[collectionFormat]\n",
" # split the string based on the delimeter specified by the\n",
" # `collectionFormat`\n",
" processors.append(operator.methodcaller('split', delimeter))\n",
" else:\n",
" if collectionFormat != MULTI:\n",
" raise TypeError(\"collectionFormat not implemented\")\n",
" processors.append(add_string_into_list)\n",
" # remove any Falsy values like empty strings.\n",
" processors.append(functools.partial(filter, bool))\n",
" # strip off any whitespace\n",
" processors.append(functools.partial(map, operator.methodcaller('strip')))\n",
" if items is not None:\n",
" if isinstance(items, collections.Mapping):\n",
" items_processors = itertools.repeat(\n",
" generate_value_processor(**items)\n",
" )\n",
" elif isinstance(items, collections.Sequence):\n",
" items_processors = itertools.chain(\n",
" (generate_value_processor(**item) for item in items),\n",
" itertools.repeat(lambda v: v),\n",
" )\n",
" elif isinstance(items, six.string_types):\n",
" raise NotImplementedError(\"Not implemented\")\n",
" else:\n",
" assert False, \"Should not be possible\"\n",
" # 1. zip the processor and the array items together\n",
" # 2. apply the processor to each array item.\n",
" # 3. cast the starmap generator to a list.\n",
" processors.append(\n",
" chain_reduce_partial(\n",
" functools.partial(zip, items_processors),\n",
" functools.partial(itertools.starmap, lambda fn, v: fn(v)),\n",
" list,\n",
" )\n",
" )\n",
" else:\n",
" processors.append(\n",
" functools.partial(cast_value_to_type, type_=type_)\n",
" )\n",
"\n",
" def processor(value, **kwargs):\n",
" try:\n",
" return chain_reduce_partial(*processors)(value)\n",
" except (ValueError, TypeError):\n",
" return value\n",
"\n",
" return processor"
] | [
0.012195121951219513,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.011627906976744186,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.012048192771084338,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05
] | 62 | 0.001385 |
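
The sample above builds a pipeline of small callables and folds a header value through them. A minimal sketch of that composition, with a hypothetical chain helper standing in for chain_reduce_partial (order adjusted so blanks are dropped after stripping):

import functools

def chain(*fns):
    # fold the value through fns left to right
    return lambda value: functools.reduce(lambda acc, fn: fn(acc), fns, value)

processor = chain(
    lambda v: v.split(","),                 # collectionFormat: csv
    functools.partial(map, str.strip),      # strip whitespace
    functools.partial(filter, bool),        # drop now-empty items
    lambda items: [int(i) for i in items],  # cast each array item
)
print(processor("1, 2, ,3"))  # [1, 2, 3]
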
def gettext(*args, **kwargs):
"""
Return the localized translation of message, based on the language, and
locale directory of the domain specified in the translation key (or the
current global domain). This function is usually aliased as ``_``.
"""
key = args[0]
key_match = TRANSLATION_KEY_RE.match(key)
translation = _gettext(*args, **kwargs)
if not key_match or translation != key:
return translation
return _get_domain(key_match).gettext(*args, **kwargs) | [
"def",
"gettext",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"key",
"=",
"args",
"[",
"0",
"]",
"key_match",
"=",
"TRANSLATION_KEY_RE",
".",
"match",
"(",
"key",
")",
"translation",
"=",
"_gettext",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"not",
"key_match",
"or",
"translation",
"!=",
"key",
":",
"return",
"translation",
"return",
"_get_domain",
"(",
"key_match",
")",
".",
"gettext",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 38.076923 | 0.001972 | [
"def gettext(*args, **kwargs):\n",
" \"\"\"\n",
" Return the localized translation of message, based on the language, and\n",
" locale directory of the domain specified in the translation key (or the\n",
" current global domain). This function is usually aliased as ``_``.\n",
" \"\"\"\n",
" key = args[0]\n",
" key_match = TRANSLATION_KEY_RE.match(key)\n",
" translation = _gettext(*args, **kwargs)\n",
" if not key_match or translation != key:\n",
" return translation\n",
"\n",
" return _get_domain(key_match).gettext(*args, **kwargs)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.017241379310344827
] | 13 | 0.001326 |
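
A self-contained sketch of the fallback logic in the gettext sample above, assuming (hypothetically) that translation keys look like "domain:message" and that the global catalog returns the key unchanged on a miss:

import re

TRANSLATION_KEY_RE = re.compile(r"^(?P<domain>\w+):")
DOMAIN_CATALOGS = {"app": {"app:hello": "bonjour"}}

def _gettext(key):
    return key  # global catalog miss: returns the key unchanged

def gettext(key):
    key_match = TRANSLATION_KEY_RE.match(key)
    translation = _gettext(key)
    if not key_match or translation != key:
        return translation
    # global lookup failed and the key names a domain: try that catalog
    return DOMAIN_CATALOGS[key_match.group("domain")].get(key, key)

print(gettext("app:hello"))  # bonjour
print(gettext("plain"))      # plain
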
def rl_marks(x):
"""
    Replace +-, (c), (tm), (r), (p), etc. by their typographic equivalents
"""
    # simple replacements, no regexes needed
replacements = (
(u'(r)', u'\u00ae'), # ®
(u'(R)', u'\u00ae'), # ®
(u'(p)', u'\u00a7'), # §
(u'(P)', u'\u00a7'), # §
(u'(tm)', u'\u2122'), # ™
(u'(TM)', u'\u2122'), # ™
)
patterns = (
        # the copyright sign goes before the year: © 2008 Юрий Юревич
(re.compile(u'\\([cCсС]\\)\\s*(\\d+)', re.UNICODE), u'\u00a9\u202f\\1'),
(r'([^+])(\+\-|\-\+)', u'\\1\u00b1'), # ±
        # degrees with a minus sign
        (u'\\-(\\d+)[\\s]*([FCС][^\\w])', u'\u2212\\1\u202f\u00b0\\2'), # −12 °C, −53 °F
        # degrees without a minus sign
(u'(\\d+)[\\s]*([FCС][^\\w])', u'\\1\u202f\u00b0\\2'), # 12 °C, 53 °F
        # ® and ™ attach to the preceding word, with no space
(re.compile(u'([A-Za-zА-Яа-я\\!\\?])\\s*(\xae|\u2122)', re.UNICODE), u'\\1\\2'),
# No5 -> № 5
(re.compile(u'(\\s)(No|no|NO|\u2116)[\\s\u2009]*(\\d+)', re.UNICODE), u'\\1\u2116\u2009\\3'),
)
for what, to in replacements:
x = x.replace(what, to)
return _sub_patterns(patterns, x) | [
"def",
"rl_marks",
"(",
"x",
")",
":",
"# простые замены, можно без регулярок",
"replacements",
"=",
"(",
"(",
"u'(r)'",
",",
"u'\\u00ae'",
")",
",",
"# ®",
"(",
"u'(R)'",
",",
"u'\\u00ae'",
")",
",",
"# ®",
"(",
"u'(p)'",
",",
"u'\\u00a7'",
")",
",",
"# §",
"(",
"u'(P)'",
",",
"u'\\u00a7'",
")",
",",
"# §",
"(",
"u'(tm)'",
",",
"u'\\u2122'",
")",
",",
"# ™",
"(",
"u'(TM)'",
",",
"u'\\u2122'",
")",
",",
"# ™",
")",
"patterns",
"=",
"(",
"# копирайт ставится до года: © 2008 Юрий Юревич",
"(",
"re",
".",
"compile",
"(",
"u'\\\\([cCсС]\\\\)\\\\s*(\\\\d+)', ",
"r",
".U",
"N",
"ICODE),",
" ",
"u",
"\\u00a9\\u202f\\\\1'),",
"",
"",
"(",
"r'([^+])(\\+\\-|\\-\\+)'",
",",
"u'\\\\1\\u00b1'",
")",
",",
"# ±",
"# градусы с минусом",
"(",
"u'\\\\-(\\\\d+)[\\\\s]*([FCС][^\\\\w])',",
" ",
"'\\u2212\\\\1\\202f\\u00b0\\\\2')",
",",
" ",
" −12 °C, −53 °F",
"# градусы без минуса",
"(",
"u'(\\\\d+)[\\\\s]*([FCС][^\\\\w])',",
" ",
"'\\\\1\\u202f\\u00b0\\\\2')",
",",
" ",
" 12 °C, 53 °F",
"# ® и ™ приклеиваются к предыдущему слову, без пробела",
"(",
"re",
".",
"compile",
"(",
"u'([A-Za-zА-Яа-я\\\\!\\\\?])\\\\s*(\\xae|\\u2122)', re",
".",
"NI",
"C",
"ODE), u",
"'",
"\\",
"1\\\\2'),",
"",
"",
"# No5 -> № 5",
"(",
"re",
".",
"compile",
"(",
"u'(\\\\s)(No|no|NO|\\u2116)[\\\\s\\u2009]*(\\\\d+)'",
",",
"re",
".",
"UNICODE",
")",
",",
"u'\\\\1\\u2116\\u2009\\\\3'",
")",
",",
")",
"for",
"what",
",",
"to",
"in",
"replacements",
":",
"x",
"=",
"x",
".",
"replace",
"(",
"what",
",",
"to",
")",
"return",
"_sub_patterns",
"(",
"patterns",
",",
"x",
")"
] | 38.533333 | 0.011814 | [
"def rl_marks(x):\n",
" \"\"\"\n",
" Replace +-, (c), (tm), (r), (p), etc by its typographic eqivalents\n",
" \"\"\"\n",
" # простые замены, можно без регулярок\n",
" replacements = (\n",
" (u'(r)', u'\\u00ae'), # ®\n",
" (u'(R)', u'\\u00ae'), # ®\n",
" (u'(p)', u'\\u00a7'), # §\n",
" (u'(P)', u'\\u00a7'), # §\n",
" (u'(tm)', u'\\u2122'), # ™\n",
" (u'(TM)', u'\\u2122'), # ™\n",
" )\n",
" patterns = (\n",
" # копирайт ставится до года: © 2008 Юрий Юревич\n",
" (re.compile(u'\\\\([cCсС]\\\\)\\\\s*(\\\\d+)', re.UNICODE), u'\\u00a9\\u202f\\\\1'),\n",
" (r'([^+])(\\+\\-|\\-\\+)', u'\\\\1\\u00b1'), # ±\n",
" # градусы с минусом\n",
" (u'\\\\-(\\\\d+)[\\\\s]*([FCС][^\\\\w])', u'\\u2212\\\\1\\202f\\u00b0\\\\2'), # −12 °C, −53 °F\n",
" # градусы без минуса\n",
" (u'(\\\\d+)[\\\\s]*([FCС][^\\\\w])', u'\\\\1\\u202f\\u00b0\\\\2'), # 12 °C, 53 °F\n",
" # ® и ™ приклеиваются к предыдущему слову, без пробела\n",
" (re.compile(u'([A-Za-zА-Яа-я\\\\!\\\\?])\\\\s*(\\xae|\\u2122)', re.UNICODE), u'\\\\1\\\\2'),\n",
" # No5 -> № 5\n",
" (re.compile(u'(\\\\s)(No|no|NO|\\u2116)[\\\\s\\u2009]*(\\\\d+)', re.UNICODE), u'\\\\1\\u2116\\u2009\\\\3'),\n",
" )\n",
"\n",
" for what, to in replacements:\n",
" x = x.replace(what, to)\n",
" return _sub_patterns(patterns, x)"
] | [
0,
0,
0,
0,
0,
0,
0.030303030303030304,
0.030303030303030304,
0.030303030303030304,
0.030303030303030304,
0.029411764705882353,
0.029411764705882353,
0,
0,
0,
0.012345679012345678,
0.02,
0,
0.022727272727272728,
0,
0.01282051282051282,
0,
0.011235955056179775,
0,
0.00980392156862745,
0,
0,
0,
0,
0.02702702702702703
] | 30 | 0.009867 |
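
What the rl_marks sample above does to a string, with _sub_patterns stubbed as a plain re.sub loop (an illustrative subset of the replacements, not a dataset row):

import re

def _sub_patterns(patterns, text):
    # assumed behaviour of the helper: apply each (pattern, repl) with re.sub
    for pattern, repl in patterns:
        text = re.sub(pattern, repl, text)
    return text

replacements = (("(tm)", "\u2122"), ("(r)", "\u00ae"))
patterns = ((r"([^+])(\+\-|\-\+)", "\\1\u00b1"),)  # +- or -+  ->  ±

s = "Acme(tm) rating: 5 +-1"
for what, to in replacements:
    s = s.replace(what, to)
print(_sub_patterns(patterns, s))  # Acme™ rating: 5 ±1
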
def _activate_stream(self, idx):
'''Randomly select and create a stream.
StochasticMux adds mode handling to _activate_stream, making it so that
if we're not sampling "with_replacement", the distribution for this
chosen streamer is set to 0, causing the streamer not to be available
until it is exhausted.
Parameters
----------
idx : int, [0:n_streams - 1]
The stream index to replace
'''
# Get the number of samples for this streamer.
n_samples_to_stream = None
if self.rate is not None:
n_samples_to_stream = 1 + self.rng.poisson(lam=self.rate)
# instantiate a new streamer
streamer = self.streamers[idx].iterate(max_iter=n_samples_to_stream)
weight = self.weights[idx]
# If we're sampling without replacement, zero this one out
# This effectively disables this stream as soon as it is chosen,
# preventing it from being chosen again (unless it is revived)
# if not self.with_replacement:
if self.mode != "with_replacement":
self.distribution_[idx] = 0.0
# Correct the distribution
if (self.distribution_ > 0).any():
self.distribution_[:] /= np.sum(self.distribution_)
return streamer, weight | [
"def",
"_activate_stream",
"(",
"self",
",",
"idx",
")",
":",
"# Get the number of samples for this streamer.",
"n_samples_to_stream",
"=",
"None",
"if",
"self",
".",
"rate",
"is",
"not",
"None",
":",
"n_samples_to_stream",
"=",
"1",
"+",
"self",
".",
"rng",
".",
"poisson",
"(",
"lam",
"=",
"self",
".",
"rate",
")",
"# instantiate a new streamer",
"streamer",
"=",
"self",
".",
"streamers",
"[",
"idx",
"]",
".",
"iterate",
"(",
"max_iter",
"=",
"n_samples_to_stream",
")",
"weight",
"=",
"self",
".",
"weights",
"[",
"idx",
"]",
"# If we're sampling without replacement, zero this one out",
"# This effectively disables this stream as soon as it is chosen,",
"# preventing it from being chosen again (unless it is revived)",
"# if not self.with_replacement:",
"if",
"self",
".",
"mode",
"!=",
"\"with_replacement\"",
":",
"self",
".",
"distribution_",
"[",
"idx",
"]",
"=",
"0.0",
"# Correct the distribution",
"if",
"(",
"self",
".",
"distribution_",
">",
"0",
")",
".",
"any",
"(",
")",
":",
"self",
".",
"distribution_",
"[",
":",
"]",
"/=",
"np",
".",
"sum",
"(",
"self",
".",
"distribution_",
")",
"return",
"streamer",
",",
"weight"
] | 38.558824 | 0.001488 | [
"def _activate_stream(self, idx):\n",
" '''Randomly select and create a stream.\n",
"\n",
" StochasticMux adds mode handling to _activate_stream, making it so that\n",
" if we're not sampling \"with_replacement\", the distribution for this\n",
" chosen streamer is set to 0, causing the streamer not to be available\n",
" until it is exhausted.\n",
"\n",
" Parameters\n",
" ----------\n",
" idx : int, [0:n_streams - 1]\n",
" The stream index to replace\n",
" '''\n",
" # Get the number of samples for this streamer.\n",
" n_samples_to_stream = None\n",
" if self.rate is not None:\n",
" n_samples_to_stream = 1 + self.rng.poisson(lam=self.rate)\n",
"\n",
" # instantiate a new streamer\n",
" streamer = self.streamers[idx].iterate(max_iter=n_samples_to_stream)\n",
" weight = self.weights[idx]\n",
"\n",
" # If we're sampling without replacement, zero this one out\n",
" # This effectively disables this stream as soon as it is chosen,\n",
" # preventing it from being chosen again (unless it is revived)\n",
" # if not self.with_replacement:\n",
" if self.mode != \"with_replacement\":\n",
" self.distribution_[idx] = 0.0\n",
"\n",
" # Correct the distribution\n",
" if (self.distribution_ > 0).any():\n",
" self.distribution_[:] /= np.sum(self.distribution_)\n",
"\n",
" return streamer, weight"
] | [
0,
0.020833333333333332,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.03225806451612903
] | 34 | 0.001562 |
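
The "zero out, then renormalize" step that _activate_stream applies when not sampling with replacement, shown in isolation (illustrative, not a dataset row):

import numpy as np

distribution = np.array([0.5, 0.3, 0.2])
idx = 0                                  # streamer that was just activated
distribution[idx] = 0.0                  # disable it until it is revived
if (distribution > 0).any():             # avoid dividing by zero mass
    distribution[:] /= np.sum(distribution)
print(distribution)                      # [0.  0.6 0.4]
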
def expand_entries(entries, ignore_xs=None):
"""Turn all Xs which are not ignored in all entries into ``0`` s and
``1`` s.
For example::
>>> from rig.routing_table import RoutingTableEntry
>>> entries = [
... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100), # 01XX
... RoutingTableEntry(set(), 0b0010, 0xfffffff0 | 0b0010), # XX1X
... ]
>>> list(expand_entries(entries)) == [
... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1110), # 010X
... RoutingTableEntry(set(), 0b0110, 0xfffffff0 | 0b1110), # 011X
... RoutingTableEntry(set(), 0b0010, 0xfffffff0 | 0b1110), # 001X
... RoutingTableEntry(set(), 0b1010, 0xfffffff0 | 0b1110), # 101X
... RoutingTableEntry(set(), 0b1110, 0xfffffff0 | 0b1110), # 111X
... ]
True
Note that the ``X`` in the LSB was retained because it is common to all
entries.
Any duplicated entries will be removed (in this case the first and second
entries will both match ``0000``, so when the second entry is expanded only
one entry is retained)::
>>> from rig.routing_table import Routes
>>> entries = [
... RoutingTableEntry({Routes.north}, 0b0000, 0b1111), # 0000 -> N
... RoutingTableEntry({Routes.south}, 0b0000, 0b1011), # 0X00 -> S
... ]
>>> list(expand_entries(entries)) == [
... RoutingTableEntry({Routes.north}, 0b0000, 0b1111), # 0000 -> N
... RoutingTableEntry({Routes.south}, 0b0100, 0b1111), # 0100 -> S
... ]
True
.. warning::
It is assumed that the input routing table is orthogonal (i.e., there
are no two entries which would match the same key). If this is not the
case, any entries which are covered (i.e. unreachable) in the input
table will be omitted and a warning produced. As a result, all output
routing tables are guaranteed to be orthogonal.
Parameters
----------
entries : [:py:class:`~rig.routing_table.RoutingTableEntry`...] or similar
The entries to expand.
Other Parameters
----------------
ignore_xs : int
Mask of bits in which Xs should not be expanded. If None (the default)
then Xs which are common to all entries will not be expanded.
Yields
------
:py:class:`~rig.routing_table.RoutingTableEntry`
Routing table entries which represent the original entries but with all
Xs not masked off by `ignore_xs` replaced with 1s and 0s.
"""
# Find the common Xs for the entries
if ignore_xs is None:
ignore_xs = get_common_xs(entries)
# Keep a track of keys that we've seen
seen_keys = set({})
# Expand each entry in turn
for entry in entries:
for new_entry in expand_entry(entry, ignore_xs):
if new_entry.key in seen_keys:
# We've already used this key, warn that the table is
# over-complete.
warnings.warn("Table is not orthogonal: Key {:#010x} matches "
"multiple entries.".format(new_entry.key))
else:
# Mark the key as seen and yield the new entry
seen_keys.add(new_entry.key)
yield new_entry | [
"def",
"expand_entries",
"(",
"entries",
",",
"ignore_xs",
"=",
"None",
")",
":",
"# Find the common Xs for the entries",
"if",
"ignore_xs",
"is",
"None",
":",
"ignore_xs",
"=",
"get_common_xs",
"(",
"entries",
")",
"# Keep a track of keys that we've seen",
"seen_keys",
"=",
"set",
"(",
"{",
"}",
")",
"# Expand each entry in turn",
"for",
"entry",
"in",
"entries",
":",
"for",
"new_entry",
"in",
"expand_entry",
"(",
"entry",
",",
"ignore_xs",
")",
":",
"if",
"new_entry",
".",
"key",
"in",
"seen_keys",
":",
"# We've already used this key, warn that the table is",
"# over-complete.",
"warnings",
".",
"warn",
"(",
"\"Table is not orthogonal: Key {:#010x} matches \"",
"\"multiple entries.\"",
".",
"format",
"(",
"new_entry",
".",
"key",
")",
")",
"else",
":",
"# Mark the key as seen and yield the new entry",
"seen_keys",
".",
"add",
"(",
"new_entry",
".",
"key",
")",
"yield",
"new_entry"
] | 39.914634 | 0.000298 | [
"def expand_entries(entries, ignore_xs=None):\n",
" \"\"\"Turn all Xs which are not ignored in all entries into ``0`` s and\n",
" ``1`` s.\n",
"\n",
" For example::\n",
"\n",
" >>> from rig.routing_table import RoutingTableEntry\n",
" >>> entries = [\n",
" ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100), # 01XX\n",
" ... RoutingTableEntry(set(), 0b0010, 0xfffffff0 | 0b0010), # XX1X\n",
" ... ]\n",
" >>> list(expand_entries(entries)) == [\n",
" ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1110), # 010X\n",
" ... RoutingTableEntry(set(), 0b0110, 0xfffffff0 | 0b1110), # 011X\n",
" ... RoutingTableEntry(set(), 0b0010, 0xfffffff0 | 0b1110), # 001X\n",
" ... RoutingTableEntry(set(), 0b1010, 0xfffffff0 | 0b1110), # 101X\n",
" ... RoutingTableEntry(set(), 0b1110, 0xfffffff0 | 0b1110), # 111X\n",
" ... ]\n",
" True\n",
"\n",
" Note that the ``X`` in the LSB was retained because it is common to all\n",
" entries.\n",
"\n",
" Any duplicated entries will be removed (in this case the first and second\n",
" entries will both match ``0000``, so when the second entry is expanded only\n",
" one entry is retained)::\n",
"\n",
" >>> from rig.routing_table import Routes\n",
" >>> entries = [\n",
" ... RoutingTableEntry({Routes.north}, 0b0000, 0b1111), # 0000 -> N\n",
" ... RoutingTableEntry({Routes.south}, 0b0000, 0b1011), # 0X00 -> S\n",
" ... ]\n",
" >>> list(expand_entries(entries)) == [\n",
" ... RoutingTableEntry({Routes.north}, 0b0000, 0b1111), # 0000 -> N\n",
" ... RoutingTableEntry({Routes.south}, 0b0100, 0b1111), # 0100 -> S\n",
" ... ]\n",
" True\n",
"\n",
" .. warning::\n",
"\n",
" It is assumed that the input routing table is orthogonal (i.e., there\n",
" are no two entries which would match the same key). If this is not the\n",
" case, any entries which are covered (i.e. unreachable) in the input\n",
" table will be omitted and a warning produced. As a result, all output\n",
" routing tables are guaranteed to be orthogonal.\n",
"\n",
" Parameters\n",
" ----------\n",
" entries : [:py:class:`~rig.routing_table.RoutingTableEntry`...] or similar\n",
" The entries to expand.\n",
"\n",
" Other Parameters\n",
" ----------------\n",
" ignore_xs : int\n",
" Mask of bits in which Xs should not be expanded. If None (the default)\n",
" then Xs which are common to all entries will not be expanded.\n",
"\n",
" Yields\n",
" ------\n",
" :py:class:`~rig.routing_table.RoutingTableEntry`\n",
" Routing table entries which represent the original entries but with all\n",
" Xs not masked off by `ignore_xs` replaced with 1s and 0s.\n",
" \"\"\"\n",
" # Find the common Xs for the entries\n",
" if ignore_xs is None:\n",
" ignore_xs = get_common_xs(entries)\n",
"\n",
" # Keep a track of keys that we've seen\n",
" seen_keys = set({})\n",
"\n",
" # Expand each entry in turn\n",
" for entry in entries:\n",
" for new_entry in expand_entry(entry, ignore_xs):\n",
" if new_entry.key in seen_keys:\n",
" # We've already used this key, warn that the table is\n",
" # over-complete.\n",
" warnings.warn(\"Table is not orthogonal: Key {:#010x} matches \"\n",
" \"multiple entries.\".format(new_entry.key))\n",
" else:\n",
" # Mark the key as seen and yield the new entry\n",
" seen_keys.add(new_entry.key)\n",
" yield new_entry"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.03225806451612903
] | 82 | 0.000393 |
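
A hypothetical stand-in for expand_entry showing the bit expansion the docstring above describes: every X (unmasked bit) outside ignore_xs becomes both a 0 and a 1 (illustrative, not the rig implementation):

def expand(key, mask, ignore_xs, width=4):
    entries = [(key, mask)]
    for bit in (1 << i for i in range(width)):
        if not mask & bit and not ignore_xs & bit:   # an X that must expand
            entries = [(k | b, m | bit)
                       for k, m in entries for b in (0, bit)]
    return entries

# 01XX with the lowest X ignored expands to 010X and 011X:
for k, m in expand(0b0100, 0b1100, ignore_xs=0b0001):
    print("{:04b} / {:04b}".format(k, m))
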
def gts7(Input, flags, output):
'''
/* Thermospheric portion of NRLMSISE-00
* See GTD7 for more extensive comments
* alt > 72.5 km!
*/
'''
zn1 = [120.0, 110.0, 100.0, 90.0, 72.5]
mn1 = 5
dgtr=1.74533E-2;
dr=1.72142E-2;
alpha = [-0.38, 0.0, 0.0, 0.0, 0.17, 0.0, -0.38, 0.0, 0.0]
altl = [200.0, 300.0, 160.0, 250.0, 240.0, 450.0, 320.0, 450.0]
za = pdl[1][15];
zn1[0] = za;
for j in range(9):
output.d[j]=0;
#/* TINF VARIATIONS NOT IMPORTANT BELOW ZA OR ZN1(1) */
if (Input.alt>zn1[0]):
tinf = ptm[0]*pt[0] * \
(1.0+flags.sw[16]*globe7(pt,Input,flags));
else:
tinf = ptm[0]*pt[0];
output.t[0]=tinf;
#/* GRADIENT VARIATIONS NOT IMPORTANT BELOW ZN1(5) */
if (Input.alt>zn1[4]):
g0 = ptm[3]*ps[0] * \
(1.0+flags.sw[19]*globe7(ps,Input,flags));
else:
g0 = ptm[3]*ps[0];
tlb = ptm[1] * (1.0 + flags.sw[17]*globe7(pd[3],Input,flags))*pd[3][0];
s = g0 / (tinf - tlb);
#/* Lower thermosphere temp variations not significant for
# * density above 300 km */
if (Input.alt<300.0):
meso_tn1[1]=ptm[6]*ptl[0][0]/(1.0-flags.sw[18]*glob7s(ptl[0], Input, flags));
meso_tn1[2]=ptm[2]*ptl[1][0]/(1.0-flags.sw[18]*glob7s(ptl[1], Input, flags));
meso_tn1[3]=ptm[7]*ptl[2][0]/(1.0-flags.sw[18]*glob7s(ptl[2], Input, flags));
meso_tn1[4]=ptm[4]*ptl[3][0]/(1.0-flags.sw[18]*flags.sw[20]*glob7s(ptl[3], Input, flags));
meso_tgn1[1]=ptm[8]*pma[8][0]*(1.0+flags.sw[18]*flags.sw[20]*glob7s(pma[8], Input, flags))*meso_tn1[4]*meso_tn1[4]/(pow((ptm[4]*ptl[3][0]),2.0));
else:
meso_tn1[1]=ptm[6]*ptl[0][0];
meso_tn1[2]=ptm[2]*ptl[1][0];
meso_tn1[3]=ptm[7]*ptl[2][0];
meso_tn1[4]=ptm[4]*ptl[3][0];
meso_tgn1[1]=ptm[8]*pma[8][0]*meso_tn1[4]*meso_tn1[4]/(pow((ptm[4]*ptl[3][0]),2.0));
z0 = zn1[3];
t0 = meso_tn1[3];
tr12 = 1.0;
#/* N2 variation factor at Zlb */
g28=flags.sw[21]*globe7(pd[2], Input, flags);
#/* VARIATION OF TURBOPAUSE HEIGHT */
zhf=pdl[1][24]*(1.0+flags.sw[5]*pdl[0][24]*sin(dgtr*Input.g_lat)*cos(dr*(Input.doy-pt[13])));
output.t[0]=tinf;
xmm = pdm[2][4];
z = Input.alt;
#/**** N2 DENSITY ****/
#/* Diffusive density at Zlb */
db28 = pdm[2][0]*exp(g28)*pd[2][0];
#/* Diffusive density at Alt */
RandomVariable = [output.t[1]]
output.d[2]=densu(z,db28,tinf,tlb,28.0,alpha[2],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
dd=output.d[2];
#/* Turbopause */
zh28=pdm[2][2]*zhf;
zhm28=pdm[2][3]*pdl[1][5];
xmd=28.0-xmm;
#/* Mixed density at Zlb */
tz = [0]
b28=densu(zh28,db28,tinf,tlb,xmd,(alpha[2]-1.0),tz,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
if ((flags.sw[15]) and (z<=altl[2])):
#/* Mixed density at Alt */
global dm28
dm28=densu(z,b28,tinf,tlb,xmm,alpha[2],tz,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
#/* Net density at Alt */
output.d[2]=dnet(output.d[2],dm28,zhm28,xmm,28.0);
#/**** HE DENSITY ****/
#/* Density variation factor at Zlb */
g4 = flags.sw[21]*globe7(pd[0], Input, flags);
#/* Diffusive density at Zlb */
db04 = pdm[0][0]*exp(g4)*pd[0][0];
#/* Diffusive density at Alt */
RandomVariable = [output.t[1]]
output.d[0]=densu(z,db04,tinf,tlb, 4.,alpha[0],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
dd=output.d[0];
if ((flags.sw[15]) and (z<altl[0])):
#/* Turbopause */
zh04=pdm[0][2];
#/* Mixed density at Zlb */
RandomVariable = [output.t[1]]
b04=densu(zh04,db04,tinf,tlb,4.-xmm,alpha[0]-1.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
#/* Mixed density at Alt */
RandomVariable = [output.t[1]]
global dm04
dm04=densu(z,b04,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
zhm04=zhm28;
#/* Net density at Alt */
output.d[0]=dnet(output.d[0],dm04,zhm04,xmm,4.);
#/* Correction to specified mixing ratio at ground */
rl=log(b28*pdm[0][1]/b04);
zc04=pdm[0][4]*pdl[1][0];
hc04=pdm[0][5]*pdl[1][1];
#/* Net density corrected at Alt */
output.d[0]=output.d[0]*ccor(z,rl,hc04,zc04);
#/**** O DENSITY ****/
#/* Density variation factor at Zlb */
g16= flags.sw[21]*globe7(pd[1],Input,flags);
#/* Diffusive density at Zlb */
db16 = pdm[1][0]*exp(g16)*pd[1][0];
#/* Diffusive density at Alt */
RandomVariable = [output.t[1]]
output.d[1]=densu(z,db16,tinf,tlb, 16.,alpha[1],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
dd=output.d[1];
if ((flags.sw[15]) and (z<=altl[1])):
#/* Turbopause */
zh16=pdm[1][2];
#/* Mixed density at Zlb */
RandomVariable = [output.t[1]]
b16=densu(zh16,db16,tinf,tlb,16.0-xmm,(alpha[1]-1.0), RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
#/* Mixed density at Alt */
RandomVariable = [output.t[1]]
global dm16
dm16=densu(z,b16,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
zhm16=zhm28;
#/* Net density at Alt */
output.d[1]=dnet(output.d[1],dm16,zhm16,xmm,16.);
rl=pdm[1][1]*pdl[1][16]*(1.0+flags.sw[1]*pdl[0][23]*(Input.f107A-150.0));
hc16=pdm[1][5]*pdl[1][3];
zc16=pdm[1][4]*pdl[1][2];
hc216=pdm[1][5]*pdl[1][4];
output.d[1]=output.d[1]*ccor2(z,rl,hc16,zc16,hc216);
#/* Chemistry correction */
hcc16=pdm[1][7]*pdl[1][13];
zcc16=pdm[1][6]*pdl[1][12];
rc16=pdm[1][3]*pdl[1][14];
#/* Net density corrected at Alt */
output.d[1]=output.d[1]*ccor(z,rc16,hcc16,zcc16);
#/**** O2 DENSITY ****/
#/* Density variation factor at Zlb */
g32= flags.sw[21]*globe7(pd[4], Input, flags);
#/* Diffusive density at Zlb */
db32 = pdm[3][0]*exp(g32)*pd[4][0];
#/* Diffusive density at Alt */
RandomVariable = [output.t[1]]
output.d[3]=densu(z,db32,tinf,tlb, 32.,alpha[3],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
dd=output.d[3];
if (flags.sw[15]):
if (z<=altl[3]):
#/* Turbopause */
zh32=pdm[3][2];
#/* Mixed density at Zlb */
RandomVariable = [output.t[1]]
b32=densu(zh32,db32,tinf,tlb,32.-xmm,alpha[3]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
#/* Mixed density at Alt */
RandomVariable = [output.t[1]]
global dm32
dm32=densu(z,b32,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
zhm32=zhm28;
#/* Net density at Alt */
output.d[3]=dnet(output.d[3],dm32,zhm32,xmm,32.);
#/* Correction to specified mixing ratio at ground */
rl=log(b28*pdm[3][1]/b32);
hc32=pdm[3][5]*pdl[1][7];
zc32=pdm[3][4]*pdl[1][6];
output.d[3]=output.d[3]*ccor(z,rl,hc32,zc32);
#/* Correction for general departure from diffusive equilibrium above Zlb */
hcc32=pdm[3][7]*pdl[1][22];
hcc232=pdm[3][7]*pdl[0][22];
zcc32=pdm[3][6]*pdl[1][21];
rc32=pdm[3][3]*pdl[1][23]*(1.+flags.sw[1]*pdl[0][23]*(Input.f107A-150.));
#/* Net density corrected at Alt */
output.d[3]=output.d[3]*ccor2(z,rc32,hcc32,zcc32,hcc232);
#/**** AR DENSITY ****/
#/* Density variation factor at Zlb */
g40= flags.sw[21]*globe7(pd[5],Input,flags);
#/* Diffusive density at Zlb */
db40 = pdm[4][0]*exp(g40)*pd[5][0];
#/* Diffusive density at Alt */
RandomVariable = [output.t[1]]
output.d[4]=densu(z,db40,tinf,tlb, 40.,alpha[4],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
dd=output.d[4];
if ((flags.sw[15]) and (z<=altl[4])):
#/* Turbopause */
zh40=pdm[4][2];
#/* Mixed density at Zlb */
RandomVariable = [output.t[1]]
b40=densu(zh40,db40,tinf,tlb,40.-xmm,alpha[4]-1.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
#/* Mixed density at Alt */
RandomVariable = [output.t[1]]
global dm40
dm40=densu(z,b40,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
zhm40=zhm28;
#/* Net density at Alt */
output.d[4]=dnet(output.d[4],dm40,zhm40,xmm,40.);
#/* Correction to specified mixing ratio at ground */
rl=log(b28*pdm[4][1]/b40);
hc40=pdm[4][5]*pdl[1][9];
zc40=pdm[4][4]*pdl[1][8];
#/* Net density corrected at Alt */
output.d[4]=output.d[4]*ccor(z,rl,hc40,zc40);
#/**** HYDROGEN DENSITY ****/
#/* Density variation factor at Zlb */
g1 = flags.sw[21]*globe7(pd[6], Input, flags);
#/* Diffusive density at Zlb */
db01 = pdm[5][0]*exp(g1)*pd[6][0];
#/* Diffusive density at Alt */
RandomVariable = [output.t[1]]
output.d[6]=densu(z,db01,tinf,tlb,1.,alpha[6],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
dd=output.d[6];
if ((flags.sw[15]) and (z<=altl[6])):
#/* Turbopause */
zh01=pdm[5][2];
#/* Mixed density at Zlb */
RandomVariable = [output.t[1]]
b01=densu(zh01,db01,tinf,tlb,1.-xmm,alpha[6]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
#/* Mixed density at Alt */
RandomVariable = [output.t[1]]
global dm01
dm01=densu(z,b01,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
zhm01=zhm28;
#/* Net density at Alt */
output.d[6]=dnet(output.d[6],dm01,zhm01,xmm,1.);
#/* Correction to specified mixing ratio at ground */
rl=log(b28*pdm[5][1]*sqrt(pdl[1][17]*pdl[1][17])/b01);
hc01=pdm[5][5]*pdl[1][11];
zc01=pdm[5][4]*pdl[1][10];
output.d[6]=output.d[6]*ccor(z,rl,hc01,zc01);
#/* Chemistry correction */
hcc01=pdm[5][7]*pdl[1][19];
zcc01=pdm[5][6]*pdl[1][18];
rc01=pdm[5][3]*pdl[1][20];
#/* Net density corrected at Alt */
output.d[6]=output.d[6]*ccor(z,rc01,hcc01,zcc01);
#/**** ATOMIC NITROGEN DENSITY ****/
#/* Density variation factor at Zlb */
g14 = flags.sw[21]*globe7(pd[7],Input,flags);
#/* Diffusive density at Zlb */
db14 = pdm[6][0]*exp(g14)*pd[7][0];
#/* Diffusive density at Alt */
RandomVariable = [output.t[1]]
output.d[7]=densu(z,db14,tinf,tlb,14.,alpha[7],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
dd=output.d[7];
if ((flags.sw[15]) and (z<=altl[7])):
#/* Turbopause */
zh14=pdm[6][2];
#/* Mixed density at Zlb */
RandomVariable = [output.t[1]]
b14=densu(zh14,db14,tinf,tlb,14.-xmm,alpha[7]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
#/* Mixed density at Alt */
RandomVariable = [output.t[1]]
global dm14
dm14=densu(z,b14,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
zhm14=zhm28;
#/* Net density at Alt */
output.d[7]=dnet(output.d[7],dm14,zhm14,xmm,14.);
#/* Correction to specified mixing ratio at ground */
rl=log(b28*pdm[6][1]*sqrt(pdl[0][2]*pdl[0][2])/b14);
hc14=pdm[6][5]*pdl[0][1];
zc14=pdm[6][4]*pdl[0][0];
output.d[7]=output.d[7]*ccor(z,rl,hc14,zc14);
#/* Chemistry correction */
hcc14=pdm[6][7]*pdl[0][4];
zcc14=pdm[6][6]*pdl[0][3];
rc14=pdm[6][3]*pdl[0][5];
#/* Net density corrected at Alt */
output.d[7]=output.d[7]*ccor(z,rc14,hcc14,zcc14);
#/**** Anomalous OXYGEN DENSITY ****/
g16h = flags.sw[21]*globe7(pd[8],Input,flags);
db16h = pdm[7][0]*exp(g16h)*pd[8][0];
tho = pdm[7][9]*pdl[0][6];
RandomVariable = [output.t[1]]
dd=densu(z,db16h,tho,tho,16.,alpha[8],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
output.t[1] = RandomVariable[0]
zsht=pdm[7][5];
zmho=pdm[7][4];
zsho=scalh(zmho,16.0,tho);
output.d[8]=dd*exp(-zsht/zsho*(exp(-(z-zmho)/zsht)-1.));
#/* total mass density */
output.d[5] = 1.66E-24*(4.0*output.d[0]+16.0*output.d[1]+28.0*output.d[2]+32.0*output.d[3]+40.0*output.d[4]+ output.d[6]+14.0*output.d[7]);
db48=1.66E-24*(4.0*db04+16.0*db16+28.0*db28+32.0*db32+40.0*db40+db01+14.0*db14);
#/* temperature */
z = sqrt(Input.alt*Input.alt);
RandomVariable = [output.t[1]]
ddum = densu(z,1.0, tinf, tlb, 0.0, 0.0, RandomVariable, ptm[5], s, mn1, zn1, meso_tn1, meso_tgn1);
output.t[1] = RandomVariable[0]
if (flags.sw[0]): # pragma: no cover
for i in range(9):
output.d[i]=output.d[i]*1.0E6;
output.d[5]=output.d[5]/1000;
return | [
"def",
"gts7",
"(",
"Input",
",",
"flags",
",",
"output",
")",
":",
"zn1",
"=",
"[",
"120.0",
",",
"110.0",
",",
"100.0",
",",
"90.0",
",",
"72.5",
"]",
"mn1",
"=",
"5",
"dgtr",
"=",
"1.74533E-2",
"dr",
"=",
"1.72142E-2",
"alpha",
"=",
"[",
"-",
"0.38",
",",
"0.0",
",",
"0.0",
",",
"0.0",
",",
"0.17",
",",
"0.0",
",",
"-",
"0.38",
",",
"0.0",
",",
"0.0",
"]",
"altl",
"=",
"[",
"200.0",
",",
"300.0",
",",
"160.0",
",",
"250.0",
",",
"240.0",
",",
"450.0",
",",
"320.0",
",",
"450.0",
"]",
"za",
"=",
"pdl",
"[",
"1",
"]",
"[",
"15",
"]",
"zn1",
"[",
"0",
"]",
"=",
"za",
"for",
"j",
"in",
"range",
"(",
"9",
")",
":",
"output",
".",
"d",
"[",
"j",
"]",
"=",
"0",
"#/* TINF VARIATIONS NOT IMPORTANT BELOW ZA OR ZN1(1) */",
"if",
"(",
"Input",
".",
"alt",
">",
"zn1",
"[",
"0",
"]",
")",
":",
"tinf",
"=",
"ptm",
"[",
"0",
"]",
"*",
"pt",
"[",
"0",
"]",
"*",
"(",
"1.0",
"+",
"flags",
".",
"sw",
"[",
"16",
"]",
"*",
"globe7",
"(",
"pt",
",",
"Input",
",",
"flags",
")",
")",
"else",
":",
"tinf",
"=",
"ptm",
"[",
"0",
"]",
"*",
"pt",
"[",
"0",
"]",
"output",
".",
"t",
"[",
"0",
"]",
"=",
"tinf",
"#/* GRADIENT VARIATIONS NOT IMPORTANT BELOW ZN1(5) */",
"if",
"(",
"Input",
".",
"alt",
">",
"zn1",
"[",
"4",
"]",
")",
":",
"g0",
"=",
"ptm",
"[",
"3",
"]",
"*",
"ps",
"[",
"0",
"]",
"*",
"(",
"1.0",
"+",
"flags",
".",
"sw",
"[",
"19",
"]",
"*",
"globe7",
"(",
"ps",
",",
"Input",
",",
"flags",
")",
")",
"else",
":",
"g0",
"=",
"ptm",
"[",
"3",
"]",
"*",
"ps",
"[",
"0",
"]",
"tlb",
"=",
"ptm",
"[",
"1",
"]",
"*",
"(",
"1.0",
"+",
"flags",
".",
"sw",
"[",
"17",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"3",
"]",
",",
"Input",
",",
"flags",
")",
")",
"*",
"pd",
"[",
"3",
"]",
"[",
"0",
"]",
"s",
"=",
"g0",
"/",
"(",
"tinf",
"-",
"tlb",
")",
"#/* Lower thermosphere temp variations not significant for",
"# * density above 300 km */",
"if",
"(",
"Input",
".",
"alt",
"<",
"300.0",
")",
":",
"meso_tn1",
"[",
"1",
"]",
"=",
"ptm",
"[",
"6",
"]",
"*",
"ptl",
"[",
"0",
"]",
"[",
"0",
"]",
"/",
"(",
"1.0",
"-",
"flags",
".",
"sw",
"[",
"18",
"]",
"*",
"glob7s",
"(",
"ptl",
"[",
"0",
"]",
",",
"Input",
",",
"flags",
")",
")",
"meso_tn1",
"[",
"2",
"]",
"=",
"ptm",
"[",
"2",
"]",
"*",
"ptl",
"[",
"1",
"]",
"[",
"0",
"]",
"/",
"(",
"1.0",
"-",
"flags",
".",
"sw",
"[",
"18",
"]",
"*",
"glob7s",
"(",
"ptl",
"[",
"1",
"]",
",",
"Input",
",",
"flags",
")",
")",
"meso_tn1",
"[",
"3",
"]",
"=",
"ptm",
"[",
"7",
"]",
"*",
"ptl",
"[",
"2",
"]",
"[",
"0",
"]",
"/",
"(",
"1.0",
"-",
"flags",
".",
"sw",
"[",
"18",
"]",
"*",
"glob7s",
"(",
"ptl",
"[",
"2",
"]",
",",
"Input",
",",
"flags",
")",
")",
"meso_tn1",
"[",
"4",
"]",
"=",
"ptm",
"[",
"4",
"]",
"*",
"ptl",
"[",
"3",
"]",
"[",
"0",
"]",
"/",
"(",
"1.0",
"-",
"flags",
".",
"sw",
"[",
"18",
"]",
"*",
"flags",
".",
"sw",
"[",
"20",
"]",
"*",
"glob7s",
"(",
"ptl",
"[",
"3",
"]",
",",
"Input",
",",
"flags",
")",
")",
"meso_tgn1",
"[",
"1",
"]",
"=",
"ptm",
"[",
"8",
"]",
"*",
"pma",
"[",
"8",
"]",
"[",
"0",
"]",
"*",
"(",
"1.0",
"+",
"flags",
".",
"sw",
"[",
"18",
"]",
"*",
"flags",
".",
"sw",
"[",
"20",
"]",
"*",
"glob7s",
"(",
"pma",
"[",
"8",
"]",
",",
"Input",
",",
"flags",
")",
")",
"*",
"meso_tn1",
"[",
"4",
"]",
"*",
"meso_tn1",
"[",
"4",
"]",
"/",
"(",
"pow",
"(",
"(",
"ptm",
"[",
"4",
"]",
"*",
"ptl",
"[",
"3",
"]",
"[",
"0",
"]",
")",
",",
"2.0",
")",
")",
"else",
":",
"meso_tn1",
"[",
"1",
"]",
"=",
"ptm",
"[",
"6",
"]",
"*",
"ptl",
"[",
"0",
"]",
"[",
"0",
"]",
"meso_tn1",
"[",
"2",
"]",
"=",
"ptm",
"[",
"2",
"]",
"*",
"ptl",
"[",
"1",
"]",
"[",
"0",
"]",
"meso_tn1",
"[",
"3",
"]",
"=",
"ptm",
"[",
"7",
"]",
"*",
"ptl",
"[",
"2",
"]",
"[",
"0",
"]",
"meso_tn1",
"[",
"4",
"]",
"=",
"ptm",
"[",
"4",
"]",
"*",
"ptl",
"[",
"3",
"]",
"[",
"0",
"]",
"meso_tgn1",
"[",
"1",
"]",
"=",
"ptm",
"[",
"8",
"]",
"*",
"pma",
"[",
"8",
"]",
"[",
"0",
"]",
"*",
"meso_tn1",
"[",
"4",
"]",
"*",
"meso_tn1",
"[",
"4",
"]",
"/",
"(",
"pow",
"(",
"(",
"ptm",
"[",
"4",
"]",
"*",
"ptl",
"[",
"3",
"]",
"[",
"0",
"]",
")",
",",
"2.0",
")",
")",
"z0",
"=",
"zn1",
"[",
"3",
"]",
"t0",
"=",
"meso_tn1",
"[",
"3",
"]",
"tr12",
"=",
"1.0",
"#/* N2 variation factor at Zlb */",
"g28",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"2",
"]",
",",
"Input",
",",
"flags",
")",
"#/* VARIATION OF TURBOPAUSE HEIGHT */",
"zhf",
"=",
"pdl",
"[",
"1",
"]",
"[",
"24",
"]",
"*",
"(",
"1.0",
"+",
"flags",
".",
"sw",
"[",
"5",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"24",
"]",
"*",
"sin",
"(",
"dgtr",
"*",
"Input",
".",
"g_lat",
")",
"*",
"cos",
"(",
"dr",
"*",
"(",
"Input",
".",
"doy",
"-",
"pt",
"[",
"13",
"]",
")",
")",
")",
"output",
".",
"t",
"[",
"0",
"]",
"=",
"tinf",
"xmm",
"=",
"pdm",
"[",
"2",
"]",
"[",
"4",
"]",
"z",
"=",
"Input",
".",
"alt",
"#/**** N2 DENSITY ****/",
"#/* Diffusive density at Zlb */",
"db28",
"=",
"pdm",
"[",
"2",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g28",
")",
"*",
"pd",
"[",
"2",
"]",
"[",
"0",
"]",
"#/* Diffusive density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"output",
".",
"d",
"[",
"2",
"]",
"=",
"densu",
"(",
"z",
",",
"db28",
",",
"tinf",
",",
"tlb",
",",
"28.0",
",",
"alpha",
"[",
"2",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"dd",
"=",
"output",
".",
"d",
"[",
"2",
"]",
"#/* Turbopause */",
"zh28",
"=",
"pdm",
"[",
"2",
"]",
"[",
"2",
"]",
"*",
"zhf",
"zhm28",
"=",
"pdm",
"[",
"2",
"]",
"[",
"3",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"5",
"]",
"xmd",
"=",
"28.0",
"-",
"xmm",
"#/* Mixed density at Zlb */",
"tz",
"=",
"[",
"0",
"]",
"b28",
"=",
"densu",
"(",
"zh28",
",",
"db28",
",",
"tinf",
",",
"tlb",
",",
"xmd",
",",
"(",
"alpha",
"[",
"2",
"]",
"-",
"1.0",
")",
",",
"tz",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"if",
"(",
"(",
"flags",
".",
"sw",
"[",
"15",
"]",
")",
"and",
"(",
"z",
"<=",
"altl",
"[",
"2",
"]",
")",
")",
":",
"#/* Mixed density at Alt */",
"global",
"dm28",
"dm28",
"=",
"densu",
"(",
"z",
",",
"b28",
",",
"tinf",
",",
"tlb",
",",
"xmm",
",",
"alpha",
"[",
"2",
"]",
",",
"tz",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"#/* Net density at Alt */",
"output",
".",
"d",
"[",
"2",
"]",
"=",
"dnet",
"(",
"output",
".",
"d",
"[",
"2",
"]",
",",
"dm28",
",",
"zhm28",
",",
"xmm",
",",
"28.0",
")",
"#/**** HE DENSITY ****/",
"#/* Density variation factor at Zlb */",
"g4",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"0",
"]",
",",
"Input",
",",
"flags",
")",
"#/* Diffusive density at Zlb */",
"db04",
"=",
"pdm",
"[",
"0",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g4",
")",
"*",
"pd",
"[",
"0",
"]",
"[",
"0",
"]",
"#/* Diffusive density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"output",
".",
"d",
"[",
"0",
"]",
"=",
"densu",
"(",
"z",
",",
"db04",
",",
"tinf",
",",
"tlb",
",",
"4.",
",",
"alpha",
"[",
"0",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"dd",
"=",
"output",
".",
"d",
"[",
"0",
"]",
"if",
"(",
"(",
"flags",
".",
"sw",
"[",
"15",
"]",
")",
"and",
"(",
"z",
"<",
"altl",
"[",
"0",
"]",
")",
")",
":",
"#/* Turbopause */",
"zh04",
"=",
"pdm",
"[",
"0",
"]",
"[",
"2",
"]",
"#/* Mixed density at Zlb */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"b04",
"=",
"densu",
"(",
"zh04",
",",
"db04",
",",
"tinf",
",",
"tlb",
",",
"4.",
"-",
"xmm",
",",
"alpha",
"[",
"0",
"]",
"-",
"1.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"#/* Mixed density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"global",
"dm04",
"dm04",
"=",
"densu",
"(",
"z",
",",
"b04",
",",
"tinf",
",",
"tlb",
",",
"xmm",
",",
"0.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"zhm04",
"=",
"zhm28",
"#/* Net density at Alt */",
"output",
".",
"d",
"[",
"0",
"]",
"=",
"dnet",
"(",
"output",
".",
"d",
"[",
"0",
"]",
",",
"dm04",
",",
"zhm04",
",",
"xmm",
",",
"4.",
")",
"#/* Correction to specified mixing ratio at ground */",
"rl",
"=",
"log",
"(",
"b28",
"*",
"pdm",
"[",
"0",
"]",
"[",
"1",
"]",
"/",
"b04",
")",
"zc04",
"=",
"pdm",
"[",
"0",
"]",
"[",
"4",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"0",
"]",
"hc04",
"=",
"pdm",
"[",
"0",
"]",
"[",
"5",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"1",
"]",
"#/* Net density corrected at Alt */",
"output",
".",
"d",
"[",
"0",
"]",
"=",
"output",
".",
"d",
"[",
"0",
"]",
"*",
"ccor",
"(",
"z",
",",
"rl",
",",
"hc04",
",",
"zc04",
")",
"#/**** O DENSITY ****/",
"#/* Density variation factor at Zlb */",
"g16",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"1",
"]",
",",
"Input",
",",
"flags",
")",
"#/* Diffusive density at Zlb */",
"db16",
"=",
"pdm",
"[",
"1",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g16",
")",
"*",
"pd",
"[",
"1",
"]",
"[",
"0",
"]",
"#/* Diffusive density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"output",
".",
"d",
"[",
"1",
"]",
"=",
"densu",
"(",
"z",
",",
"db16",
",",
"tinf",
",",
"tlb",
",",
"16.",
",",
"alpha",
"[",
"1",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"dd",
"=",
"output",
".",
"d",
"[",
"1",
"]",
"if",
"(",
"(",
"flags",
".",
"sw",
"[",
"15",
"]",
")",
"and",
"(",
"z",
"<=",
"altl",
"[",
"1",
"]",
")",
")",
":",
"#/* Turbopause */",
"zh16",
"=",
"pdm",
"[",
"1",
"]",
"[",
"2",
"]",
"#/* Mixed density at Zlb */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"b16",
"=",
"densu",
"(",
"zh16",
",",
"db16",
",",
"tinf",
",",
"tlb",
",",
"16.0",
"-",
"xmm",
",",
"(",
"alpha",
"[",
"1",
"]",
"-",
"1.0",
")",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"#/* Mixed density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"global",
"dm16",
"dm16",
"=",
"densu",
"(",
"z",
",",
"b16",
",",
"tinf",
",",
"tlb",
",",
"xmm",
",",
"0.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"zhm16",
"=",
"zhm28",
"#/* Net density at Alt */",
"output",
".",
"d",
"[",
"1",
"]",
"=",
"dnet",
"(",
"output",
".",
"d",
"[",
"1",
"]",
",",
"dm16",
",",
"zhm16",
",",
"xmm",
",",
"16.",
")",
"rl",
"=",
"pdm",
"[",
"1",
"]",
"[",
"1",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"16",
"]",
"*",
"(",
"1.0",
"+",
"flags",
".",
"sw",
"[",
"1",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"23",
"]",
"*",
"(",
"Input",
".",
"f107A",
"-",
"150.0",
")",
")",
"hc16",
"=",
"pdm",
"[",
"1",
"]",
"[",
"5",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"3",
"]",
"zc16",
"=",
"pdm",
"[",
"1",
"]",
"[",
"4",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"2",
"]",
"hc216",
"=",
"pdm",
"[",
"1",
"]",
"[",
"5",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"4",
"]",
"output",
".",
"d",
"[",
"1",
"]",
"=",
"output",
".",
"d",
"[",
"1",
"]",
"*",
"ccor2",
"(",
"z",
",",
"rl",
",",
"hc16",
",",
"zc16",
",",
"hc216",
")",
"#/* Chemistry correction */",
"hcc16",
"=",
"pdm",
"[",
"1",
"]",
"[",
"7",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"13",
"]",
"zcc16",
"=",
"pdm",
"[",
"1",
"]",
"[",
"6",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"12",
"]",
"rc16",
"=",
"pdm",
"[",
"1",
"]",
"[",
"3",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"14",
"]",
"#/* Net density corrected at Alt */",
"output",
".",
"d",
"[",
"1",
"]",
"=",
"output",
".",
"d",
"[",
"1",
"]",
"*",
"ccor",
"(",
"z",
",",
"rc16",
",",
"hcc16",
",",
"zcc16",
")",
"#/**** O2 DENSITY ****/",
"#/* Density variation factor at Zlb */",
"g32",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"4",
"]",
",",
"Input",
",",
"flags",
")",
"#/* Diffusive density at Zlb */",
"db32",
"=",
"pdm",
"[",
"3",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g32",
")",
"*",
"pd",
"[",
"4",
"]",
"[",
"0",
"]",
"#/* Diffusive density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"output",
".",
"d",
"[",
"3",
"]",
"=",
"densu",
"(",
"z",
",",
"db32",
",",
"tinf",
",",
"tlb",
",",
"32.",
",",
"alpha",
"[",
"3",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"dd",
"=",
"output",
".",
"d",
"[",
"3",
"]",
"if",
"(",
"flags",
".",
"sw",
"[",
"15",
"]",
")",
":",
"if",
"(",
"z",
"<=",
"altl",
"[",
"3",
"]",
")",
":",
"#/* Turbopause */",
"zh32",
"=",
"pdm",
"[",
"3",
"]",
"[",
"2",
"]",
"#/* Mixed density at Zlb */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"b32",
"=",
"densu",
"(",
"zh32",
",",
"db32",
",",
"tinf",
",",
"tlb",
",",
"32.",
"-",
"xmm",
",",
"alpha",
"[",
"3",
"]",
"-",
"1.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"#/* Mixed density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"global",
"dm32",
"dm32",
"=",
"densu",
"(",
"z",
",",
"b32",
",",
"tinf",
",",
"tlb",
",",
"xmm",
",",
"0.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"zhm32",
"=",
"zhm28",
"#/* Net density at Alt */",
"output",
".",
"d",
"[",
"3",
"]",
"=",
"dnet",
"(",
"output",
".",
"d",
"[",
"3",
"]",
",",
"dm32",
",",
"zhm32",
",",
"xmm",
",",
"32.",
")",
"#/* Correction to specified mixing ratio at ground */",
"rl",
"=",
"log",
"(",
"b28",
"*",
"pdm",
"[",
"3",
"]",
"[",
"1",
"]",
"/",
"b32",
")",
"hc32",
"=",
"pdm",
"[",
"3",
"]",
"[",
"5",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"7",
"]",
"zc32",
"=",
"pdm",
"[",
"3",
"]",
"[",
"4",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"6",
"]",
"output",
".",
"d",
"[",
"3",
"]",
"=",
"output",
".",
"d",
"[",
"3",
"]",
"*",
"ccor",
"(",
"z",
",",
"rl",
",",
"hc32",
",",
"zc32",
")",
"#/* Correction for general departure from diffusive equilibrium above Zlb */",
"hcc32",
"=",
"pdm",
"[",
"3",
"]",
"[",
"7",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"22",
"]",
"hcc232",
"=",
"pdm",
"[",
"3",
"]",
"[",
"7",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"22",
"]",
"zcc32",
"=",
"pdm",
"[",
"3",
"]",
"[",
"6",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"21",
"]",
"rc32",
"=",
"pdm",
"[",
"3",
"]",
"[",
"3",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"23",
"]",
"*",
"(",
"1.",
"+",
"flags",
".",
"sw",
"[",
"1",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"23",
"]",
"*",
"(",
"Input",
".",
"f107A",
"-",
"150.",
")",
")",
"#/* Net density corrected at Alt */",
"output",
".",
"d",
"[",
"3",
"]",
"=",
"output",
".",
"d",
"[",
"3",
"]",
"*",
"ccor2",
"(",
"z",
",",
"rc32",
",",
"hcc32",
",",
"zcc32",
",",
"hcc232",
")",
"#/**** AR DENSITY ****/",
"#/* Density variation factor at Zlb */",
"g40",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"5",
"]",
",",
"Input",
",",
"flags",
")",
"#/* Diffusive density at Zlb */",
"db40",
"=",
"pdm",
"[",
"4",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g40",
")",
"*",
"pd",
"[",
"5",
"]",
"[",
"0",
"]",
"#/* Diffusive density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"output",
".",
"d",
"[",
"4",
"]",
"=",
"densu",
"(",
"z",
",",
"db40",
",",
"tinf",
",",
"tlb",
",",
"40.",
",",
"alpha",
"[",
"4",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"dd",
"=",
"output",
".",
"d",
"[",
"4",
"]",
"if",
"(",
"(",
"flags",
".",
"sw",
"[",
"15",
"]",
")",
"and",
"(",
"z",
"<=",
"altl",
"[",
"4",
"]",
")",
")",
":",
"#/* Turbopause */",
"zh40",
"=",
"pdm",
"[",
"4",
"]",
"[",
"2",
"]",
"#/* Mixed density at Zlb */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"b40",
"=",
"densu",
"(",
"zh40",
",",
"db40",
",",
"tinf",
",",
"tlb",
",",
"40.",
"-",
"xmm",
",",
"alpha",
"[",
"4",
"]",
"-",
"1.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"#/* Mixed density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"global",
"dm40",
"dm40",
"=",
"densu",
"(",
"z",
",",
"b40",
",",
"tinf",
",",
"tlb",
",",
"xmm",
",",
"0.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"zhm40",
"=",
"zhm28",
"#/* Net density at Alt */",
"output",
".",
"d",
"[",
"4",
"]",
"=",
"dnet",
"(",
"output",
".",
"d",
"[",
"4",
"]",
",",
"dm40",
",",
"zhm40",
",",
"xmm",
",",
"40.",
")",
"#/* Correction to specified mixing ratio at ground */",
"rl",
"=",
"log",
"(",
"b28",
"*",
"pdm",
"[",
"4",
"]",
"[",
"1",
"]",
"/",
"b40",
")",
"hc40",
"=",
"pdm",
"[",
"4",
"]",
"[",
"5",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"9",
"]",
"zc40",
"=",
"pdm",
"[",
"4",
"]",
"[",
"4",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"8",
"]",
"#/* Net density corrected at Alt */",
"output",
".",
"d",
"[",
"4",
"]",
"=",
"output",
".",
"d",
"[",
"4",
"]",
"*",
"ccor",
"(",
"z",
",",
"rl",
",",
"hc40",
",",
"zc40",
")",
"#/**** HYDROGEN DENSITY ****/",
"#/* Density variation factor at Zlb */",
"g1",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"6",
"]",
",",
"Input",
",",
"flags",
")",
"#/* Diffusive density at Zlb */",
"db01",
"=",
"pdm",
"[",
"5",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g1",
")",
"*",
"pd",
"[",
"6",
"]",
"[",
"0",
"]",
"#/* Diffusive density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"output",
".",
"d",
"[",
"6",
"]",
"=",
"densu",
"(",
"z",
",",
"db01",
",",
"tinf",
",",
"tlb",
",",
"1.",
",",
"alpha",
"[",
"6",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"dd",
"=",
"output",
".",
"d",
"[",
"6",
"]",
"if",
"(",
"(",
"flags",
".",
"sw",
"[",
"15",
"]",
")",
"and",
"(",
"z",
"<=",
"altl",
"[",
"6",
"]",
")",
")",
":",
"#/* Turbopause */",
"zh01",
"=",
"pdm",
"[",
"5",
"]",
"[",
"2",
"]",
"#/* Mixed density at Zlb */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"b01",
"=",
"densu",
"(",
"zh01",
",",
"db01",
",",
"tinf",
",",
"tlb",
",",
"1.",
"-",
"xmm",
",",
"alpha",
"[",
"6",
"]",
"-",
"1.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"#/* Mixed density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"global",
"dm01",
"dm01",
"=",
"densu",
"(",
"z",
",",
"b01",
",",
"tinf",
",",
"tlb",
",",
"xmm",
",",
"0.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"zhm01",
"=",
"zhm28",
"#/* Net density at Alt */",
"output",
".",
"d",
"[",
"6",
"]",
"=",
"dnet",
"(",
"output",
".",
"d",
"[",
"6",
"]",
",",
"dm01",
",",
"zhm01",
",",
"xmm",
",",
"1.",
")",
"#/* Correction to specified mixing ratio at ground */",
"rl",
"=",
"log",
"(",
"b28",
"*",
"pdm",
"[",
"5",
"]",
"[",
"1",
"]",
"*",
"sqrt",
"(",
"pdl",
"[",
"1",
"]",
"[",
"17",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"17",
"]",
")",
"/",
"b01",
")",
"hc01",
"=",
"pdm",
"[",
"5",
"]",
"[",
"5",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"11",
"]",
"zc01",
"=",
"pdm",
"[",
"5",
"]",
"[",
"4",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"10",
"]",
"output",
".",
"d",
"[",
"6",
"]",
"=",
"output",
".",
"d",
"[",
"6",
"]",
"*",
"ccor",
"(",
"z",
",",
"rl",
",",
"hc01",
",",
"zc01",
")",
"#/* Chemistry correction */",
"hcc01",
"=",
"pdm",
"[",
"5",
"]",
"[",
"7",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"19",
"]",
"zcc01",
"=",
"pdm",
"[",
"5",
"]",
"[",
"6",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"18",
"]",
"rc01",
"=",
"pdm",
"[",
"5",
"]",
"[",
"3",
"]",
"*",
"pdl",
"[",
"1",
"]",
"[",
"20",
"]",
"#/* Net density corrected at Alt */",
"output",
".",
"d",
"[",
"6",
"]",
"=",
"output",
".",
"d",
"[",
"6",
"]",
"*",
"ccor",
"(",
"z",
",",
"rc01",
",",
"hcc01",
",",
"zcc01",
")",
"#/**** ATOMIC NITROGEN DENSITY ****/",
"#/* Density variation factor at Zlb */",
"g14",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"7",
"]",
",",
"Input",
",",
"flags",
")",
"#/* Diffusive density at Zlb */",
"db14",
"=",
"pdm",
"[",
"6",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g14",
")",
"*",
"pd",
"[",
"7",
"]",
"[",
"0",
"]",
"#/* Diffusive density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"output",
".",
"d",
"[",
"7",
"]",
"=",
"densu",
"(",
"z",
",",
"db14",
",",
"tinf",
",",
"tlb",
",",
"14.",
",",
"alpha",
"[",
"7",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"dd",
"=",
"output",
".",
"d",
"[",
"7",
"]",
"if",
"(",
"(",
"flags",
".",
"sw",
"[",
"15",
"]",
")",
"and",
"(",
"z",
"<=",
"altl",
"[",
"7",
"]",
")",
")",
":",
"#/* Turbopause */",
"zh14",
"=",
"pdm",
"[",
"6",
"]",
"[",
"2",
"]",
"#/* Mixed density at Zlb */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"b14",
"=",
"densu",
"(",
"zh14",
",",
"db14",
",",
"tinf",
",",
"tlb",
",",
"14.",
"-",
"xmm",
",",
"alpha",
"[",
"7",
"]",
"-",
"1.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"#/* Mixed density at Alt */",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"global",
"dm14",
"dm14",
"=",
"densu",
"(",
"z",
",",
"b14",
",",
"tinf",
",",
"tlb",
",",
"xmm",
",",
"0.",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"zhm14",
"=",
"zhm28",
"#/* Net density at Alt */",
"output",
".",
"d",
"[",
"7",
"]",
"=",
"dnet",
"(",
"output",
".",
"d",
"[",
"7",
"]",
",",
"dm14",
",",
"zhm14",
",",
"xmm",
",",
"14.",
")",
"#/* Correction to specified mixing ratio at ground */",
"rl",
"=",
"log",
"(",
"b28",
"*",
"pdm",
"[",
"6",
"]",
"[",
"1",
"]",
"*",
"sqrt",
"(",
"pdl",
"[",
"0",
"]",
"[",
"2",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"2",
"]",
")",
"/",
"b14",
")",
"hc14",
"=",
"pdm",
"[",
"6",
"]",
"[",
"5",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"1",
"]",
"zc14",
"=",
"pdm",
"[",
"6",
"]",
"[",
"4",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"0",
"]",
"output",
".",
"d",
"[",
"7",
"]",
"=",
"output",
".",
"d",
"[",
"7",
"]",
"*",
"ccor",
"(",
"z",
",",
"rl",
",",
"hc14",
",",
"zc14",
")",
"#/* Chemistry correction */",
"hcc14",
"=",
"pdm",
"[",
"6",
"]",
"[",
"7",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"4",
"]",
"zcc14",
"=",
"pdm",
"[",
"6",
"]",
"[",
"6",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"3",
"]",
"rc14",
"=",
"pdm",
"[",
"6",
"]",
"[",
"3",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"5",
"]",
"#/* Net density corrected at Alt */",
"output",
".",
"d",
"[",
"7",
"]",
"=",
"output",
".",
"d",
"[",
"7",
"]",
"*",
"ccor",
"(",
"z",
",",
"rc14",
",",
"hcc14",
",",
"zcc14",
")",
"#/**** Anomalous OXYGEN DENSITY ****/",
"g16h",
"=",
"flags",
".",
"sw",
"[",
"21",
"]",
"*",
"globe7",
"(",
"pd",
"[",
"8",
"]",
",",
"Input",
",",
"flags",
")",
"db16h",
"=",
"pdm",
"[",
"7",
"]",
"[",
"0",
"]",
"*",
"exp",
"(",
"g16h",
")",
"*",
"pd",
"[",
"8",
"]",
"[",
"0",
"]",
"tho",
"=",
"pdm",
"[",
"7",
"]",
"[",
"9",
"]",
"*",
"pdl",
"[",
"0",
"]",
"[",
"6",
"]",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"dd",
"=",
"densu",
"(",
"z",
",",
"db16h",
",",
"tho",
",",
"tho",
",",
"16.",
",",
"alpha",
"[",
"8",
"]",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"zsht",
"=",
"pdm",
"[",
"7",
"]",
"[",
"5",
"]",
"zmho",
"=",
"pdm",
"[",
"7",
"]",
"[",
"4",
"]",
"zsho",
"=",
"scalh",
"(",
"zmho",
",",
"16.0",
",",
"tho",
")",
"output",
".",
"d",
"[",
"8",
"]",
"=",
"dd",
"*",
"exp",
"(",
"-",
"zsht",
"/",
"zsho",
"*",
"(",
"exp",
"(",
"-",
"(",
"z",
"-",
"zmho",
")",
"/",
"zsht",
")",
"-",
"1.",
")",
")",
"#/* total mass density */",
"output",
".",
"d",
"[",
"5",
"]",
"=",
"1.66E-24",
"*",
"(",
"4.0",
"*",
"output",
".",
"d",
"[",
"0",
"]",
"+",
"16.0",
"*",
"output",
".",
"d",
"[",
"1",
"]",
"+",
"28.0",
"*",
"output",
".",
"d",
"[",
"2",
"]",
"+",
"32.0",
"*",
"output",
".",
"d",
"[",
"3",
"]",
"+",
"40.0",
"*",
"output",
".",
"d",
"[",
"4",
"]",
"+",
"output",
".",
"d",
"[",
"6",
"]",
"+",
"14.0",
"*",
"output",
".",
"d",
"[",
"7",
"]",
")",
"db48",
"=",
"1.66E-24",
"*",
"(",
"4.0",
"*",
"db04",
"+",
"16.0",
"*",
"db16",
"+",
"28.0",
"*",
"db28",
"+",
"32.0",
"*",
"db32",
"+",
"40.0",
"*",
"db40",
"+",
"db01",
"+",
"14.0",
"*",
"db14",
")",
"#/* temperature */",
"z",
"=",
"sqrt",
"(",
"Input",
".",
"alt",
"*",
"Input",
".",
"alt",
")",
"RandomVariable",
"=",
"[",
"output",
".",
"t",
"[",
"1",
"]",
"]",
"ddum",
"=",
"densu",
"(",
"z",
",",
"1.0",
",",
"tinf",
",",
"tlb",
",",
"0.0",
",",
"0.0",
",",
"RandomVariable",
",",
"ptm",
"[",
"5",
"]",
",",
"s",
",",
"mn1",
",",
"zn1",
",",
"meso_tn1",
",",
"meso_tgn1",
")",
"output",
".",
"t",
"[",
"1",
"]",
"=",
"RandomVariable",
"[",
"0",
"]",
"if",
"(",
"flags",
".",
"sw",
"[",
"0",
"]",
")",
":",
"# pragma: no cover",
"for",
"i",
"in",
"range",
"(",
"9",
")",
":",
"output",
".",
"d",
"[",
"i",
"]",
"=",
"output",
".",
"d",
"[",
"i",
"]",
"*",
"1.0E6",
"output",
".",
"d",
"[",
"5",
"]",
"=",
"output",
".",
"d",
"[",
"5",
"]",
"/",
"1000",
"return"
] | 37.477401 | 0.055213 | [
"def gts7(Input, flags, output):\n",
" '''\n",
"/* Thermospheric portion of NRLMSISE-00\n",
" * See GTD7 for more extensive comments\n",
" * alt > 72.5 km! \n",
" */\n",
" '''\n",
" zn1 = [120.0, 110.0, 100.0, 90.0, 72.5]\n",
" mn1 = 5\n",
" dgtr=1.74533E-2;\n",
" dr=1.72142E-2;\n",
" alpha = [-0.38, 0.0, 0.0, 0.0, 0.17, 0.0, -0.38, 0.0, 0.0]\n",
" altl = [200.0, 300.0, 160.0, 250.0, 240.0, 450.0, 320.0, 450.0]\n",
" za = pdl[1][15];\n",
" zn1[0] = za;\n",
"\n",
" for j in range(9):\n",
" output.d[j]=0;\n",
"\n",
" #/* TINF VARIATIONS NOT IMPORTANT BELOW ZA OR ZN1(1) */\n",
" if (Input.alt>zn1[0]):\n",
" tinf = ptm[0]*pt[0] * \\\n",
" (1.0+flags.sw[16]*globe7(pt,Input,flags));\n",
" else:\n",
" tinf = ptm[0]*pt[0];\n",
" output.t[0]=tinf;\n",
"\n",
" #/* GRADIENT VARIATIONS NOT IMPORTANT BELOW ZN1(5) */\n",
" if (Input.alt>zn1[4]):\n",
" g0 = ptm[3]*ps[0] * \\\n",
" (1.0+flags.sw[19]*globe7(ps,Input,flags));\n",
" else:\n",
" g0 = ptm[3]*ps[0];\n",
" tlb = ptm[1] * (1.0 + flags.sw[17]*globe7(pd[3],Input,flags))*pd[3][0];\n",
" s = g0 / (tinf - tlb);\n",
"\n",
"#/* Lower thermosphere temp variations not significant for\n",
"# * density above 300 km */\n",
" if (Input.alt<300.0):\n",
" meso_tn1[1]=ptm[6]*ptl[0][0]/(1.0-flags.sw[18]*glob7s(ptl[0], Input, flags));\n",
" meso_tn1[2]=ptm[2]*ptl[1][0]/(1.0-flags.sw[18]*glob7s(ptl[1], Input, flags));\n",
" meso_tn1[3]=ptm[7]*ptl[2][0]/(1.0-flags.sw[18]*glob7s(ptl[2], Input, flags));\n",
" meso_tn1[4]=ptm[4]*ptl[3][0]/(1.0-flags.sw[18]*flags.sw[20]*glob7s(ptl[3], Input, flags));\n",
" meso_tgn1[1]=ptm[8]*pma[8][0]*(1.0+flags.sw[18]*flags.sw[20]*glob7s(pma[8], Input, flags))*meso_tn1[4]*meso_tn1[4]/(pow((ptm[4]*ptl[3][0]),2.0));\n",
" else:\n",
" meso_tn1[1]=ptm[6]*ptl[0][0];\n",
" meso_tn1[2]=ptm[2]*ptl[1][0];\n",
" meso_tn1[3]=ptm[7]*ptl[2][0];\n",
" meso_tn1[4]=ptm[4]*ptl[3][0];\n",
" meso_tgn1[1]=ptm[8]*pma[8][0]*meso_tn1[4]*meso_tn1[4]/(pow((ptm[4]*ptl[3][0]),2.0));\n",
"\t\n",
"\n",
" z0 = zn1[3];\n",
" t0 = meso_tn1[3];\n",
" tr12 = 1.0;\n",
"\n",
" #/* N2 variation factor at Zlb */\n",
" g28=flags.sw[21]*globe7(pd[2], Input, flags);\n",
"\n",
" #/* VARIATION OF TURBOPAUSE HEIGHT */\n",
" zhf=pdl[1][24]*(1.0+flags.sw[5]*pdl[0][24]*sin(dgtr*Input.g_lat)*cos(dr*(Input.doy-pt[13])));\n",
" output.t[0]=tinf;\n",
" xmm = pdm[2][4];\n",
" z = Input.alt;\n",
"\n",
"\n",
" #/**** N2 DENSITY ****/\n",
"\n",
" #/* Diffusive density at Zlb */\n",
" db28 = pdm[2][0]*exp(g28)*pd[2][0];\n",
" #/* Diffusive density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" output.d[2]=densu(z,db28,tinf,tlb,28.0,alpha[2],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" dd=output.d[2];\n",
" #/* Turbopause */\n",
" zh28=pdm[2][2]*zhf;\n",
" zhm28=pdm[2][3]*pdl[1][5]; \n",
" xmd=28.0-xmm;\n",
" #/* Mixed density at Zlb */\n",
" tz = [0]\n",
" b28=densu(zh28,db28,tinf,tlb,xmd,(alpha[2]-1.0),tz,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);\n",
" if ((flags.sw[15]) and (z<=altl[2])):\n",
" #/* Mixed density at Alt */\n",
" global dm28\n",
" dm28=densu(z,b28,tinf,tlb,xmm,alpha[2],tz,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" #/* Net density at Alt */\n",
" output.d[2]=dnet(output.d[2],dm28,zhm28,xmm,28.0);\n",
" \n",
"\n",
"\n",
" #/**** HE DENSITY ****/\n",
"\n",
" #/* Density variation factor at Zlb */\n",
" g4 = flags.sw[21]*globe7(pd[0], Input, flags);\n",
" #/* Diffusive density at Zlb */\n",
" db04 = pdm[0][0]*exp(g4)*pd[0][0];\n",
" #/* Diffusive density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" output.d[0]=densu(z,db04,tinf,tlb, 4.,alpha[0],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" dd=output.d[0];\n",
" if ((flags.sw[15]) and (z<altl[0])):\n",
" #/* Turbopause */\n",
" zh04=pdm[0][2];\n",
" #/* Mixed density at Zlb */\n",
" RandomVariable = [output.t[1]]\n",
" b04=densu(zh04,db04,tinf,tlb,4.-xmm,alpha[0]-1.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" #/* Mixed density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" global dm04\n",
" dm04=densu(z,b04,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" zhm04=zhm28;\n",
" #/* Net density at Alt */\n",
" output.d[0]=dnet(output.d[0],dm04,zhm04,xmm,4.);\n",
" #/* Correction to specified mixing ratio at ground */\n",
" rl=log(b28*pdm[0][1]/b04);\n",
" zc04=pdm[0][4]*pdl[1][0];\n",
" hc04=pdm[0][5]*pdl[1][1];\n",
" #/* Net density corrected at Alt */\n",
" output.d[0]=output.d[0]*ccor(z,rl,hc04,zc04);\n",
" \n",
"\n",
"\n",
" #/**** O DENSITY ****/\n",
"\n",
" #/* Density variation factor at Zlb */\n",
" g16= flags.sw[21]*globe7(pd[1],Input,flags);\n",
" #/* Diffusive density at Zlb */\n",
" db16 = pdm[1][0]*exp(g16)*pd[1][0];\n",
" #/* Diffusive density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" output.d[1]=densu(z,db16,tinf,tlb, 16.,alpha[1],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" dd=output.d[1];\n",
" if ((flags.sw[15]) and (z<=altl[1])):\n",
" #/* Turbopause */\n",
" zh16=pdm[1][2];\n",
" #/* Mixed density at Zlb */\n",
" RandomVariable = [output.t[1]]\n",
" b16=densu(zh16,db16,tinf,tlb,16.0-xmm,(alpha[1]-1.0), RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" #/* Mixed density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" global dm16\n",
" dm16=densu(z,b16,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" zhm16=zhm28;\n",
" #/* Net density at Alt */\n",
" output.d[1]=dnet(output.d[1],dm16,zhm16,xmm,16.);\n",
" rl=pdm[1][1]*pdl[1][16]*(1.0+flags.sw[1]*pdl[0][23]*(Input.f107A-150.0));\n",
" hc16=pdm[1][5]*pdl[1][3];\n",
" zc16=pdm[1][4]*pdl[1][2];\n",
" hc216=pdm[1][5]*pdl[1][4];\n",
" output.d[1]=output.d[1]*ccor2(z,rl,hc16,zc16,hc216);\n",
" #/* Chemistry correction */\n",
" hcc16=pdm[1][7]*pdl[1][13];\n",
" zcc16=pdm[1][6]*pdl[1][12];\n",
" rc16=pdm[1][3]*pdl[1][14];\n",
" #/* Net density corrected at Alt */\n",
" output.d[1]=output.d[1]*ccor(z,rc16,hcc16,zcc16);\n",
" \n",
"\n",
"\n",
" #/**** O2 DENSITY ****/\n",
"\n",
" #/* Density variation factor at Zlb */\n",
" g32= flags.sw[21]*globe7(pd[4], Input, flags);\n",
" #/* Diffusive density at Zlb */\n",
" db32 = pdm[3][0]*exp(g32)*pd[4][0];\n",
" #/* Diffusive density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" output.d[3]=densu(z,db32,tinf,tlb, 32.,alpha[3],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" dd=output.d[3];\n",
" if (flags.sw[15]):\n",
" if (z<=altl[3]):\n",
" #/* Turbopause */\n",
" zh32=pdm[3][2];\n",
" #/* Mixed density at Zlb */\n",
" RandomVariable = [output.t[1]]\n",
" b32=densu(zh32,db32,tinf,tlb,32.-xmm,alpha[3]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" #/* Mixed density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" global dm32\n",
" dm32=densu(z,b32,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" zhm32=zhm28;\n",
" #/* Net density at Alt */\n",
" output.d[3]=dnet(output.d[3],dm32,zhm32,xmm,32.);\n",
" #/* Correction to specified mixing ratio at ground */\n",
" rl=log(b28*pdm[3][1]/b32);\n",
" hc32=pdm[3][5]*pdl[1][7];\n",
" zc32=pdm[3][4]*pdl[1][6];\n",
" output.d[3]=output.d[3]*ccor(z,rl,hc32,zc32);\n",
" \n",
" #/* Correction for general departure from diffusive equilibrium above Zlb */\n",
" hcc32=pdm[3][7]*pdl[1][22];\n",
" hcc232=pdm[3][7]*pdl[0][22];\n",
" zcc32=pdm[3][6]*pdl[1][21];\n",
" rc32=pdm[3][3]*pdl[1][23]*(1.+flags.sw[1]*pdl[0][23]*(Input.f107A-150.));\n",
" #/* Net density corrected at Alt */\n",
" output.d[3]=output.d[3]*ccor2(z,rc32,hcc32,zcc32,hcc232);\n",
" \n",
"\n",
"\n",
" #/**** AR DENSITY ****/\n",
"\n",
" #/* Density variation factor at Zlb */\n",
" g40= flags.sw[21]*globe7(pd[5],Input,flags);\n",
" #/* Diffusive density at Zlb */\n",
" db40 = pdm[4][0]*exp(g40)*pd[5][0];\n",
" #/* Diffusive density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" output.d[4]=densu(z,db40,tinf,tlb, 40.,alpha[4],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" dd=output.d[4];\n",
" if ((flags.sw[15]) and (z<=altl[4])):\n",
" #/* Turbopause */\n",
" zh40=pdm[4][2];\n",
" #/* Mixed density at Zlb */\n",
" RandomVariable = [output.t[1]]\n",
" b40=densu(zh40,db40,tinf,tlb,40.-xmm,alpha[4]-1.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" #/* Mixed density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" global dm40\n",
" dm40=densu(z,b40,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" zhm40=zhm28;\n",
" #/* Net density at Alt */\n",
" output.d[4]=dnet(output.d[4],dm40,zhm40,xmm,40.);\n",
" #/* Correction to specified mixing ratio at ground */\n",
" rl=log(b28*pdm[4][1]/b40);\n",
" hc40=pdm[4][5]*pdl[1][9];\n",
" zc40=pdm[4][4]*pdl[1][8];\n",
" #/* Net density corrected at Alt */\n",
" output.d[4]=output.d[4]*ccor(z,rl,hc40,zc40);\n",
" \n",
"\n",
"\n",
" #/**** HYDROGEN DENSITY ****/\n",
"\n",
" #/* Density variation factor at Zlb */\n",
" g1 = flags.sw[21]*globe7(pd[6], Input, flags);\n",
" #/* Diffusive density at Zlb */\n",
" db01 = pdm[5][0]*exp(g1)*pd[6][0];\n",
" #/* Diffusive density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" output.d[6]=densu(z,db01,tinf,tlb,1.,alpha[6],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" dd=output.d[6];\n",
" if ((flags.sw[15]) and (z<=altl[6])):\n",
" #/* Turbopause */\n",
" zh01=pdm[5][2];\n",
" #/* Mixed density at Zlb */\n",
" RandomVariable = [output.t[1]]\n",
" b01=densu(zh01,db01,tinf,tlb,1.-xmm,alpha[6]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" #/* Mixed density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" global dm01\n",
" dm01=densu(z,b01,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" zhm01=zhm28;\n",
" #/* Net density at Alt */\n",
" output.d[6]=dnet(output.d[6],dm01,zhm01,xmm,1.);\n",
" #/* Correction to specified mixing ratio at ground */\n",
" rl=log(b28*pdm[5][1]*sqrt(pdl[1][17]*pdl[1][17])/b01);\n",
" hc01=pdm[5][5]*pdl[1][11];\n",
" zc01=pdm[5][4]*pdl[1][10];\n",
" output.d[6]=output.d[6]*ccor(z,rl,hc01,zc01);\n",
" #/* Chemistry correction */\n",
" hcc01=pdm[5][7]*pdl[1][19];\n",
" zcc01=pdm[5][6]*pdl[1][18];\n",
" rc01=pdm[5][3]*pdl[1][20];\n",
" #/* Net density corrected at Alt */\n",
" output.d[6]=output.d[6]*ccor(z,rc01,hcc01,zcc01);\n",
"\n",
"\n",
"\n",
" #/**** ATOMIC NITROGEN DENSITY ****/\n",
"\n",
" #/* Density variation factor at Zlb */\n",
" g14 = flags.sw[21]*globe7(pd[7],Input,flags);\n",
" #/* Diffusive density at Zlb */\n",
" db14 = pdm[6][0]*exp(g14)*pd[7][0];\n",
" #/* Diffusive density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" output.d[7]=densu(z,db14,tinf,tlb,14.,alpha[7],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" dd=output.d[7];\n",
" if ((flags.sw[15]) and (z<=altl[7])): \n",
" #/* Turbopause */\n",
" zh14=pdm[6][2];\n",
" #/* Mixed density at Zlb */\n",
" RandomVariable = [output.t[1]]\n",
" b14=densu(zh14,db14,tinf,tlb,14.-xmm,alpha[7]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" #/* Mixed density at Alt */\n",
" RandomVariable = [output.t[1]]\n",
" global dm14\n",
" dm14=densu(z,b14,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" zhm14=zhm28;\n",
" #/* Net density at Alt */\n",
" output.d[7]=dnet(output.d[7],dm14,zhm14,xmm,14.);\n",
" #/* Correction to specified mixing ratio at ground */\n",
" rl=log(b28*pdm[6][1]*sqrt(pdl[0][2]*pdl[0][2])/b14);\n",
" hc14=pdm[6][5]*pdl[0][1];\n",
" zc14=pdm[6][4]*pdl[0][0];\n",
" output.d[7]=output.d[7]*ccor(z,rl,hc14,zc14);\n",
" #/* Chemistry correction */\n",
" hcc14=pdm[6][7]*pdl[0][4];\n",
" zcc14=pdm[6][6]*pdl[0][3];\n",
" rc14=pdm[6][3]*pdl[0][5];\n",
" #/* Net density corrected at Alt */\n",
" output.d[7]=output.d[7]*ccor(z,rc14,hcc14,zcc14);\n",
" \n",
"\n",
"\n",
" #/**** Anomalous OXYGEN DENSITY ****/\n",
"\n",
" g16h = flags.sw[21]*globe7(pd[8],Input,flags);\n",
" db16h = pdm[7][0]*exp(g16h)*pd[8][0];\n",
" tho = pdm[7][9]*pdl[0][6];\n",
" RandomVariable = [output.t[1]]\n",
" dd=densu(z,db16h,tho,tho,16.,alpha[8],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" zsht=pdm[7][5];\n",
" zmho=pdm[7][4];\n",
" zsho=scalh(zmho,16.0,tho);\n",
" output.d[8]=dd*exp(-zsht/zsho*(exp(-(z-zmho)/zsht)-1.));\n",
"\n",
"\n",
" #/* total mass density */\n",
" output.d[5] = 1.66E-24*(4.0*output.d[0]+16.0*output.d[1]+28.0*output.d[2]+32.0*output.d[3]+40.0*output.d[4]+ output.d[6]+14.0*output.d[7]);\n",
" db48=1.66E-24*(4.0*db04+16.0*db16+28.0*db28+32.0*db32+40.0*db40+db01+14.0*db14);\n",
"\n",
"\n",
"\n",
" #/* temperature */\n",
" z = sqrt(Input.alt*Input.alt);\n",
" RandomVariable = [output.t[1]]\n",
" ddum = densu(z,1.0, tinf, tlb, 0.0, 0.0, RandomVariable, ptm[5], s, mn1, zn1, meso_tn1, meso_tgn1);\n",
" output.t[1] = RandomVariable[0]\n",
" if (flags.sw[0]): # pragma: no cover\n",
" for i in range(9):\n",
" output.d[i]=output.d[i]*1.0E6;\n",
" output.d[5]=output.d[5]/1000;\n",
" return"
] | [
0,
0,
0,
0,
0.043478260869565216,
0,
0,
0,
0,
0.09523809523809523,
0.10526315789473684,
0,
0,
0.047619047619047616,
0.058823529411764705,
0,
0,
0.08695652173913043,
0,
0.016666666666666666,
0.037037037037037035,
0,
0.047619047619047616,
0,
0.034482758620689655,
0.09090909090909091,
0,
0.01694915254237288,
0.037037037037037035,
0,
0.05454545454545454,
0,
0.037037037037037035,
0.039473684210526314,
0.037037037037037035,
0,
0.015625,
0,
0.038461538461538464,
0.03488372093023256,
0.03488372093023256,
0.03488372093023256,
0.030303030303030304,
0.025974025974025976,
0,
0.05263157894736842,
0.05263157894736842,
0.05263157894736842,
0.05263157894736842,
0.043010752688172046,
1.5,
0,
0.17647058823529413,
0.045454545454545456,
0.0625,
0,
0.02631578947368421,
0.04,
0,
0.023809523809523808,
0.030612244897959183,
0.09090909090909091,
0.047619047619047616,
0.05263157894736842,
0,
0,
0.07142857142857142,
0,
0.027777777777777776,
0.025,
0.027777777777777776,
0,
0.14285714285714285,
0,
0.1,
0.045454545454545456,
0.08333333333333333,
0.09375,
0.1111111111111111,
0.03125,
0,
0.14893617021276595,
0.023809523809523808,
0.02702702702702703,
0,
0.17045454545454544,
0.02857142857142857,
0.1016949152542373,
0.2,
0,
0,
0.07142857142857142,
0,
0.022222222222222223,
0.0196078431372549,
0.02702702702702703,
0.02564102564102564,
0.02702702702702703,
0,
0.1346153846153846,
0,
0.1,
0.024390243902439025,
0.037037037037037035,
0.08333333333333333,
0.02702702702702703,
0,
0.13761467889908258,
0,
0.02702702702702703,
0,
0,
0.1595744680851064,
0,
0.09523809523809523,
0.02857142857142857,
0.10526315789473684,
0.015873015873015872,
0.05714285714285714,
0.058823529411764705,
0.058823529411764705,
0.022222222222222223,
0.09259259259259259,
0.2,
0,
0,
0.07407407407407407,
0,
0.022727272727272728,
0.08163265306122448,
0.02702702702702703,
0.04878048780487805,
0.02631578947368421,
0,
0.12264150943396226,
0,
0.1,
0.023809523809523808,
0.03571428571428571,
0.08333333333333333,
0.02702702702702703,
0,
0.12173913043478261,
0,
0.02702702702702703,
0,
0,
0.1595744680851064,
0,
0.09523809523809523,
0.02857142857142857,
0.10344827586206896,
0.036585365853658534,
0.058823529411764705,
0.058823529411764705,
0.05714285714285714,
0.09836065573770492,
0.02631578947368421,
0.05555555555555555,
0.05555555555555555,
0.05714285714285714,
0.022222222222222223,
0.08620689655172414,
0.2,
0,
0,
0.07142857142857142,
0,
0.022222222222222223,
0.0392156862745098,
0.02702702702702703,
0.025,
0.02631578947368421,
0,
0.12264150943396226,
0,
0.1,
0,
0.04,
0.03125,
0.07142857142857142,
0.024390243902439025,
0,
0.12173913043478261,
0,
0.024390243902439025,
0,
0,
0.15306122448979592,
0,
0.08,
0.02564102564102564,
0.0967741935483871,
0.014705882352941176,
0.05128205128205128,
0.05263157894736842,
0.05263157894736842,
0.08620689655172414,
0.1111111111111111,
0.023255813953488372,
0.05555555555555555,
0.05405405405405406,
0.05555555555555555,
0.036585365853658534,
0.022222222222222223,
0.09090909090909091,
0.2,
0,
0,
0.07142857142857142,
0,
0.022222222222222223,
0.08163265306122448,
0.02702702702702703,
0.025,
0.02631578947368421,
0,
0.13333333333333333,
0,
0.1,
0.023809523809523808,
0.03571428571428571,
0.08333333333333333,
0.02702702702702703,
0,
0.13636363636363635,
0,
0.02702702702702703,
0,
0,
0.1595744680851064,
0,
0.09523809523809523,
0.02857142857142857,
0.10344827586206896,
0.015625,
0.05714285714285714,
0.058823529411764705,
0.058823529411764705,
0.022222222222222223,
0.09259259259259259,
0.14285714285714285,
0,
0,
0.058823529411764705,
0,
0.022222222222222223,
0.0196078431372549,
0.02702702702702703,
0.02564102564102564,
0.02631578947368421,
0,
0.14563106796116504,
0,
0.1,
0.023809523809523808,
0.03571428571428571,
0.08333333333333333,
0.02702702702702703,
0,
0.12727272727272726,
0,
0.02702702702702703,
0,
0,
0.1595744680851064,
0,
0.09523809523809523,
0.02857142857142857,
0.10526315789473684,
0.015625,
0.031746031746031744,
0.05714285714285714,
0.05714285714285714,
0.09259259259259259,
0.02631578947368421,
0.05555555555555555,
0.05555555555555555,
0.05714285714285714,
0.022222222222222223,
0.08620689655172414,
0,
0,
0,
0.04878048780487805,
0,
0.022222222222222223,
0.06,
0.02702702702702703,
0.025,
0.02631578947368421,
0,
0.14423076923076922,
0,
0.1,
0.046511627906976744,
0.03571428571428571,
0.08333333333333333,
0.02702702702702703,
0,
0.12612612612612611,
0,
0.02702702702702703,
0,
0,
0.1595744680851064,
0,
0.09523809523809523,
0.02857142857142857,
0.10344827586206896,
0.015625,
0.03278688524590164,
0.058823529411764705,
0.058823529411764705,
0.09259259259259259,
0.02631578947368421,
0.05714285714285714,
0.05714285714285714,
0.058823529411764705,
0.022222222222222223,
0.08620689655172414,
0.2,
0,
0,
0.047619047619047616,
0,
0.058823529411764705,
0.023809523809523808,
0.03225806451612903,
0,
0.14583333333333334,
0,
0.1,
0.1,
0.12903225806451613,
0.03278688524590164,
0,
0,
0.06666666666666667,
0.020833333333333332,
0.03529411764705882,
0,
0,
0,
0.08695652173913043,
0.02857142857142857,
0,
0.028846153846153848,
0,
0.024390243902439025,
0,
0.046511627906976744,
0.05263157894736842,
0.1
] | 354 | 0.048204 |
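
The total mass density step near the end of gts7 above is the only place the seven species densities are combined, so it is worth isolating. A minimal standalone sketch of that formula, assuming number densities in 1/cm^3 and the same 1.66E-24 g/amu factor hard-coded in the source (the species order He, O, N2, O2, Ar, H, N follows output.d in the code):

    AMU_G = 1.66e-24  # grams per atomic mass unit, as hard-coded in gts7

    def total_mass_density(d):
        # d: number densities [1/cm^3] in output.d order:
        # 0=He, 1=O, 2=N2, 3=O2, 4=Ar, 5=(total, unused here), 6=H, 7=N
        return AMU_G * (4.0 * d[0] + 16.0 * d[1] + 28.0 * d[2]
                        + 32.0 * d[3] + 40.0 * d[4] + d[6] + 14.0 * d[7])

    # Illustrative, made-up 400 km densities, just to exercise the formula:
    print(total_mass_density([2.5e6, 4.0e7, 2.0e6, 3.0e4, 1.0e3, 0.0, 1.5e5, 1.0e5]))
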
def creation_ordered(class_to_decorate):
"""
Class decorator that ensures that instances will be ordered by creation order when sorted.
:type class_to_decorate: class
:rtype: class
"""
next_index = functools.partial(next, itertools.count())
__init__orig = class_to_decorate.__init__
@functools.wraps(__init__orig, assigned=['__doc__'])
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_index', next_index())
__init__orig(self, *args, **kwargs)
setattr(class_to_decorate, '__init__', __init__)
# noinspection PyProtectedMember
def __lt__(self, other):
return self._index < other._index # pragma: no mutate
setattr(class_to_decorate, '__lt__', __lt__)
class_to_decorate = functools.total_ordering(class_to_decorate)
return class_to_decorate | [
"def",
"creation_ordered",
"(",
"class_to_decorate",
")",
":",
"next_index",
"=",
"functools",
".",
"partial",
"(",
"next",
",",
"itertools",
".",
"count",
"(",
")",
")",
"__init__orig",
"=",
"class_to_decorate",
".",
"__init__",
"@",
"functools",
".",
"wraps",
"(",
"__init__orig",
",",
"assigned",
"=",
"[",
"'__doc__'",
"]",
")",
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"object",
".",
"__setattr__",
"(",
"self",
",",
"'_index'",
",",
"next_index",
"(",
")",
")",
"__init__orig",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"setattr",
"(",
"class_to_decorate",
",",
"'__init__'",
",",
"__init__",
")",
"# noinspection PyProtectedMember",
"def",
"__lt__",
"(",
"self",
",",
"other",
")",
":",
"return",
"self",
".",
"_index",
"<",
"other",
".",
"_index",
"# pragma: no mutate",
"setattr",
"(",
"class_to_decorate",
",",
"'__lt__'",
",",
"__lt__",
")",
"class_to_decorate",
"=",
"functools",
".",
"total_ordering",
"(",
"class_to_decorate",
")",
"return",
"class_to_decorate"
] | 29.785714 | 0.002323 | [
"def creation_ordered(class_to_decorate):\n",
" \"\"\"\n",
" Class decorator that ensures that instances will be ordered after creation order when sorted.\n",
"\n",
" :type class_to_decorate: class\n",
" :rtype: class\n",
" \"\"\"\n",
"\n",
" next_index = functools.partial(next, itertools.count())\n",
"\n",
" __init__orig = class_to_decorate.__init__\n",
"\n",
" @functools.wraps(__init__orig, assigned=['__doc__'])\n",
" def __init__(self, *args, **kwargs):\n",
" object.__setattr__(self, '_index', next_index())\n",
" __init__orig(self, *args, **kwargs)\n",
"\n",
" setattr(class_to_decorate, '__init__', __init__)\n",
"\n",
" # noinspection PyProtectedMember\n",
" def __lt__(self, other):\n",
" return self._index < other._index # pragma: no mutate\n",
"\n",
" setattr(class_to_decorate, '__lt__', __lt__)\n",
"\n",
" class_to_decorate = functools.total_ordering(class_to_decorate)\n",
"\n",
" return class_to_decorate"
] | [
0,
0,
0.00980392156862745,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.03571428571428571
] | 28 | 0.001626 |
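
A short usage sketch of the decorator above. Job is a hypothetical class, and the decorator itself is assumed to be in scope; the assertions restate the documented behavior that sort order equals creation order:

    @creation_ordered
    class Job:
        def __init__(self, name):
            self.name = name

    a = Job('first')
    b = Job('second')
    assert a < b                      # compares the hidden per-instance _index
    assert sorted([b, a]) == [a, b]   # sorting recovers creation order
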
def radar_xsect(scatterer, h_pol=True):
"""Radar cross section for the current setup.
Args:
scatterer: a Scatterer instance.
h_pol: If True (default), use horizontal polarization.
If False, use vertical polarization.
Returns:
The radar cross section.
"""
Z = scatterer.get_Z()
if h_pol:
return 2 * np.pi * \
(Z[0,0] - Z[0,1] - Z[1,0] + Z[1,1])
else:
return 2 * np.pi * \
(Z[0,0] + Z[0,1] + Z[1,0] + Z[1,1]) | [
"def",
"radar_xsect",
"(",
"scatterer",
",",
"h_pol",
"=",
"True",
")",
":",
"Z",
"=",
"scatterer",
".",
"get_Z",
"(",
")",
"if",
"h_pol",
":",
"return",
"2",
"*",
"np",
".",
"pi",
"*",
"(",
"Z",
"[",
"0",
",",
"0",
"]",
"-",
"Z",
"[",
"0",
",",
"1",
"]",
"-",
"Z",
"[",
"1",
",",
"0",
"]",
"+",
"Z",
"[",
"1",
",",
"1",
"]",
")",
"else",
":",
"return",
"2",
"*",
"np",
".",
"pi",
"*",
"(",
"Z",
"[",
"0",
",",
"0",
"]",
"+",
"Z",
"[",
"0",
",",
"1",
"]",
"+",
"Z",
"[",
"1",
",",
"0",
"]",
"+",
"Z",
"[",
"1",
",",
"1",
"]",
")"
] | 27.5 | 0.019531 | [
"def radar_xsect(scatterer, h_pol=True):\n",
" \"\"\"Radar cross section for the current setup. \n",
"\n",
" Args:\n",
" scatterer: a Scatterer instance.\n",
" h_pol: If True (default), use horizontal polarization.\n",
" If False, use vertical polarization.\n",
"\n",
" Returns:\n",
" The radar cross section.\n",
" \"\"\"\n",
" Z = scatterer.get_Z()\n",
" if h_pol:\n",
" return 2 * np.pi * \\\n",
" (Z[0,0] - Z[0,1] - Z[1,0] + Z[1,1])\n",
" else:\n",
" return 2 * np.pi * \\\n",
" (Z[0,0] + Z[0,1] + Z[1,0] + Z[1,1])"
] | [
0,
0.018518518518518517,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.08333333333333333,
0,
0,
0.10638297872340426
] | 18 | 0.011569 |
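
The two sign patterns above are easy to verify with a stub. FakeScatterer is hypothetical and returns a fixed 4x4 phase matrix Z, so the expected values can be read off by hand (assumes radar_xsect above and numpy are importable):

    import numpy as np

    class FakeScatterer:
        def get_Z(self):
            # Fixed phase matrix: Z[0,0]=2.0, Z[0,1]=Z[1,0]=0.5, Z[1,1]=1.0
            return np.array([[2.0, 0.5, 0.0, 0.0],
                             [0.5, 1.0, 0.0, 0.0],
                             [0.0, 0.0, 1.0, 0.0],
                             [0.0, 0.0, 0.0, 1.0]])

    s = FakeScatterer()
    print(radar_xsect(s, h_pol=True))   # 2*pi*(2.0 - 0.5 - 0.5 + 1.0) ~= 12.566
    print(radar_xsect(s, h_pol=False))  # 2*pi*(2.0 + 0.5 + 0.5 + 1.0) ~= 25.133
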
def list(self, per_page=None, page=None, status=None, service='facebook'):
""" Get a list of Pylon tasks
:param per_page: How many tasks to display per page
:type per_page: int
:param page: Which page of tasks to display
:type page: int
:param status: The status of the tasks to list
:type status: str
:param service: The PYLON service (facebook)
:type service: str
:return: dict of REST API output with headers attached
:rtype: :class:`~datasift.request.DictResponse`
:raises: :class:`~datasift.exceptions.DataSiftApiException`,
:class:`requests.exceptions.HTTPError`
"""
params = {}
if per_page is not None:
params['per_page'] = per_page
if page is not None:
params['page'] = page
if status:
params['status'] = status
return self.request.get(service + '/task', params) | [
"def",
"list",
"(",
"self",
",",
"per_page",
"=",
"None",
",",
"page",
"=",
"None",
",",
"status",
"=",
"None",
",",
"service",
"=",
"'facebook'",
")",
":",
"params",
"=",
"{",
"}",
"if",
"per_page",
"is",
"not",
"None",
":",
"params",
"[",
"'per_page'",
"]",
"=",
"per_page",
"if",
"page",
"is",
"not",
"None",
":",
"params",
"[",
"'page'",
"]",
"=",
"page",
"if",
"status",
":",
"params",
"[",
"'status'",
"]",
"=",
"status",
"return",
"self",
".",
"request",
".",
"get",
"(",
"service",
"+",
"'/task'",
",",
"params",
")"
] | 36.62963 | 0.00197 | [
"def list(self, per_page=None, page=None, status=None, service='facebook'):\n",
" \"\"\" Get a list of Pylon tasks\n",
"\n",
" :param per_page: How many tasks to display per page\n",
" :type per_page: int\n",
" :param page: Which page of tasks to display\n",
" :type page: int\n",
" :param status: The status of the tasks to list\n",
" :type page: string\n",
" :param service: The PYLON service (facebook)\n",
" :type service: str\n",
" :return: dict of REST API output with headers attached\n",
" :rtype: :class:`~datasift.request.DictResponse`\n",
" :raises: :class:`~datasift.exceptions.DataSiftApiException`,\n",
" :class:`requests.exceptions.HTTPError`\n",
" \"\"\"\n",
"\n",
" params = {}\n",
"\n",
" if per_page is not None:\n",
" params['per_page'] = per_page\n",
" if page is not None:\n",
" params['page'] = page\n",
" if status:\n",
" params['status'] = status\n",
"\n",
" return self.request.get(service + '/task', params)"
] | [
0,
0.02631578947368421,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.017241379310344827
] | 27 | 0.001613 |
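
A sketch of calling the method above; pylon is a hypothetical instance of the client class it belongs to, and the response layout used in the loop is an assumption. Note that only the arguments actually supplied end up in the query string:

    # pylon.list()                     -> GET facebook/task           (no params)
    # pylon.list(per_page=25, page=2)  -> GET facebook/task?per_page=25&page=2
    response = pylon.list(per_page=25, status='completed')
    for task in response['tasks']:
        print(task)
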
def incrby(self, fmt, offset, increment, overflow=None):
"""
Increment a bitfield by a given amount.
:param fmt: format-string for the bitfield being updated, e.g. u8 for
an unsigned 8-bit integer.
:param int offset: offset (in number of bits).
:param int increment: value to increment the bitfield by.
:param str overflow: overflow algorithm. Defaults to WRAP, but other
acceptable values are SAT and FAIL. See the Redis docs for
descriptions of these algorithms.
:returns: a :py:class:`BitFieldOperation` instance.
"""
if overflow is not None and overflow != self._last_overflow:
self._last_overflow = overflow
self.operations.append(('OVERFLOW', overflow))
self.operations.append(('INCRBY', fmt, offset, increment))
return self | [
"def",
"incrby",
"(",
"self",
",",
"fmt",
",",
"offset",
",",
"increment",
",",
"overflow",
"=",
"None",
")",
":",
"if",
"overflow",
"is",
"not",
"None",
"and",
"overflow",
"!=",
"self",
".",
"_last_overflow",
":",
"self",
".",
"_last_overflow",
"=",
"overflow",
"self",
".",
"operations",
".",
"append",
"(",
"(",
"'OVERFLOW'",
",",
"overflow",
")",
")",
"self",
".",
"operations",
".",
"append",
"(",
"(",
"'INCRBY'",
",",
"fmt",
",",
"offset",
",",
"increment",
")",
")",
"return",
"self"
] | 45.368421 | 0.002273 | [
"def incrby(self, fmt, offset, increment, overflow=None):\n",
" \"\"\"\n",
" Increment a bitfield by a given amount.\n",
"\n",
" :param fmt: format-string for the bitfield being updated, e.g. u8 for\n",
" an unsigned 8-bit integer.\n",
" :param int offset: offset (in number of bits).\n",
" :param int increment: value to increment the bitfield by.\n",
" :param str overflow: overflow algorithm. Defaults to WRAP, but other\n",
" acceptable values are SAT and FAIL. See the Redis docs for\n",
" descriptions of these algorithms.\n",
" :returns: a :py:class:`BitFieldOperation` instance.\n",
" \"\"\"\n",
" if overflow is not None and overflow != self._last_overflow:\n",
" self._last_overflow = overflow\n",
" self.operations.append(('OVERFLOW', overflow))\n",
"\n",
" self.operations.append(('INCRBY', fmt, offset, increment))\n",
" return self"
] | [
0,
0.08333333333333333,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05263157894736842
] | 19 | 0.007156 |
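
Because incrby returns self, calls chain, and because of the _last_overflow check an OVERFLOW directive is only queued when the mode actually changes. A sketch, assuming bf is an instance of the class the method above belongs to:

    bf.incrby('u8', 0, 1)                    # default WRAP overflow
    bf.incrby('u8', 8, 10, overflow='SAT')   # queues ('OVERFLOW', 'SAT') first
    bf.incrby('u8', 16, 10, overflow='SAT')  # same mode -> no duplicate OVERFLOW
    print(bf.operations)
    # [('INCRBY', 'u8', 0, 1),
    #  ('OVERFLOW', 'SAT'),
    #  ('INCRBY', 'u8', 8, 10),
    #  ('INCRBY', 'u8', 16, 10)]
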
def run(
project: 'projects.Project',
step: 'projects.ProjectStep'
) -> dict:
"""
Runs the markdown file and renders the contents to the notebook display
:param project:
:param step:
:return:
A run response dictionary containing a success flag and, on failure, a rendered HTML error message
"""
with open(step.source_path, 'r') as f:
code = f.read()
try:
cauldron.display.markdown(code, **project.shared.fetch(None))
return {'success': True}
except Exception as err:
return dict(
success=False,
html_message=templating.render_template(
'markdown-error.html',
error=err
)
) | [
"def",
"run",
"(",
"project",
":",
"'projects.Project'",
",",
"step",
":",
"'projects.ProjectStep'",
")",
"->",
"dict",
":",
"with",
"open",
"(",
"step",
".",
"source_path",
",",
"'r'",
")",
"as",
"f",
":",
"code",
"=",
"f",
".",
"read",
"(",
")",
"try",
":",
"cauldron",
".",
"display",
".",
"markdown",
"(",
"code",
",",
"*",
"*",
"project",
".",
"shared",
".",
"fetch",
"(",
"None",
")",
")",
"return",
"{",
"'success'",
":",
"True",
"}",
"except",
"Exception",
"as",
"err",
":",
"return",
"dict",
"(",
"success",
"=",
"False",
",",
"html_message",
"=",
"templating",
".",
"render_template",
"(",
"'markdown-error.html'",
",",
"error",
"=",
"err",
")",
")"
] | 24.259259 | 0.001468 | [
"def run(\n",
" project: 'projects.Project',\n",
" step: 'projects.ProjectStep'\n",
") -> dict:\n",
" \"\"\"\n",
" Runs the markdown file and renders the contents to the notebook display\n",
"\n",
" :param project:\n",
" :param step:\n",
" :return:\n",
" A run response dictionary containing\n",
" \"\"\"\n",
"\n",
" with open(step.source_path, 'r') as f:\n",
" code = f.read()\n",
"\n",
" try:\n",
" cauldron.display.markdown(code, **project.shared.fetch(None))\n",
" return {'success': True}\n",
" except Exception as err:\n",
" return dict(\n",
" success=False,\n",
" html_message=templating.render_template(\n",
" 'markdown-error.html',\n",
" error=err\n",
" )\n",
" )"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.1111111111111111
] | 27 | 0.004115 |
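
A sketch of how a caller might branch on the dict returned by the step runner above (the project and step objects are assumed to come from Cauldron itself):

    result = run(project, step)
    if result['success']:
        print('markdown step rendered')
    else:
        print('render failed:')
        print(result['html_message'])  # rendered markdown-error.html template
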
def append_dynamics(self, t, dynamics, canvas=0, separate=False, color='blue'):
"""!
@brief Append several dynamics to canvas or canvases (defined by 'canvas' and 'separate' arguments).
@param[in] t (list): Time points that correspond to the dynamic values and are considered on the X axis.
@param[in] dynamics (list): Dynamics, each of which is considered on the Y axis.
@param[in] canvas (uint): Index of canvas where dynamic should be displayed, in case of 'separate'
representation this argument is considered as a first canvas from that displaying should be done.
@param[in] separate (bool|list): If 'True' then each dynamic is displayed on separate canvas, if it is defined
by list, for example, [ [1, 2], [3, 4] ], then the first and the second dynamics are displayed on
the canvas with index 'canvas' and the third and fourth are displayed on the next 'canvas + 1'
canvas.
@param[in] color (string): Color that is used to display output dynamic(s).
"""
description = dynamic_descr(canvas, t, dynamics, separate, color);
self.__dynamic_storage.append(description);
self.__update_canvas_xlim(description.time, description.separate); | [
"def",
"append_dynamics",
"(",
"self",
",",
"t",
",",
"dynamics",
",",
"canvas",
"=",
"0",
",",
"separate",
"=",
"False",
",",
"color",
"=",
"'blue'",
")",
":",
"description",
"=",
"dynamic_descr",
"(",
"canvas",
",",
"t",
",",
"dynamics",
",",
"separate",
",",
"color",
")",
"self",
".",
"__dynamic_storage",
".",
"append",
"(",
"description",
")",
"self",
".",
"__update_canvas_xlim",
"(",
"description",
".",
"time",
",",
"description",
".",
"separate",
")"
] | 72.111111 | 0.010646 | [
"def append_dynamics(self, t, dynamics, canvas=0, separate=False, color='blue'):\r\n",
" \"\"\"!\r\n",
" @brief Append several dynamics to canvas or canvases (defined by 'canvas' and 'separate' arguments).\r\n",
"\r\n",
" @param[in] t (list): Time points that corresponds to dynamic values and considered on a X axis.\r\n",
" @param[in] dynamics (list): Dynamics where each of them is considered on Y axis.\r\n",
" @param[in] canvas (uint): Index of canvas where dynamic should be displayed, in case of 'separate'\r\n",
" representation this argument is considered as a first canvas from that displaying should be done.\r\n",
" @param[in] separate (bool|list): If 'True' then each dynamic is displayed on separate canvas, if it is defined\r\n",
" by list, for example, [ [1, 2], [3, 4] ], then the first and the second dynamics are displayed on\r\n",
" the canvas with index 'canvas' and the third and forth are displayed on the next 'canvas + 1'\r\n",
" canvas.\r\n",
" @param[in] color (string): Color that is used to display output dynamic(s).\r\n",
"\r\n",
" \"\"\"\r\n",
" description = dynamic_descr(canvas, t, dynamics, separate, color);\r\n",
" self.__dynamic_storage.append(description);\r\n",
" self.__update_canvas_xlim(description.time, description.separate);"
] | [
0,
0.07142857142857142,
0.00909090909090909,
0,
0.009523809523809525,
0.011111111111111112,
0.009259259259259259,
0.008403361344537815,
0.008333333333333333,
0.008403361344537815,
0.008695652173913044,
0,
0.011764705882352941,
0,
0,
0.013157894736842105,
0.018867924528301886,
0.02702702702702703
] | 18 | 0.011948 |
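
A usage sketch for the method above, assuming visualizer is an instance of the pyclustering dynamic visualizer class it belongs to:

    t = [0.0, 0.1, 0.2, 0.3]
    dyn_a = [0.0, 0.5, 0.9, 1.0]
    dyn_b = [1.0, 0.5, 0.1, 0.0]

    visualizer.append_dynamics(t, [dyn_a, dyn_b])            # both on canvas 0
    visualizer.append_dynamics(t, [dyn_a, dyn_b], canvas=1,
                               separate=True, color='red')   # one canvas each
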
def resize(att_mat, max_length=None):
"""Normalize attention matrices and reshape as necessary."""
for i, att in enumerate(att_mat):
# Add extra batch dim for viz code to work.
if att.ndim == 3:
att = np.expand_dims(att, axis=0)
if max_length is not None:
# Sum across different attention values for each token.
att = att[:, :, :max_length, :max_length]
row_sums = np.sum(att, axis=2)
# Normalize
att /= row_sums[:, :, np.newaxis]
att_mat[i] = att
return att_mat | [
"def",
"resize",
"(",
"att_mat",
",",
"max_length",
"=",
"None",
")",
":",
"for",
"i",
",",
"att",
"in",
"enumerate",
"(",
"att_mat",
")",
":",
"# Add extra batch dim for viz code to work.",
"if",
"att",
".",
"ndim",
"==",
"3",
":",
"att",
"=",
"np",
".",
"expand_dims",
"(",
"att",
",",
"axis",
"=",
"0",
")",
"if",
"max_length",
"is",
"not",
"None",
":",
"# Sum across different attention values for each token.",
"att",
"=",
"att",
"[",
":",
",",
":",
",",
":",
"max_length",
",",
":",
"max_length",
"]",
"row_sums",
"=",
"np",
".",
"sum",
"(",
"att",
",",
"axis",
"=",
"2",
")",
"# Normalize",
"att",
"/=",
"row_sums",
"[",
":",
",",
":",
",",
"np",
".",
"newaxis",
"]",
"att_mat",
"[",
"i",
"]",
"=",
"att",
"return",
"att_mat"
] | 36.214286 | 0.019231 | [
"def resize(att_mat, max_length=None):\n",
" \"\"\"Normalize attention matrices and reshape as necessary.\"\"\"\n",
" for i, att in enumerate(att_mat):\n",
" # Add extra batch dim for viz code to work.\n",
" if att.ndim == 3:\n",
" att = np.expand_dims(att, axis=0)\n",
" if max_length is not None:\n",
" # Sum across different attention values for each token.\n",
" att = att[:, :, :max_length, :max_length]\n",
" row_sums = np.sum(att, axis=2)\n",
" # Normalize\n",
" att /= row_sums[:, :, np.newaxis]\n",
" att_mat[i] = att\n",
" return att_mat"
] | [
0,
0.015873015873015872,
0.027777777777777776,
0,
0,
0.025,
0,
0.016129032258064516,
0.020833333333333332,
0.02702702702702703,
0.05555555555555555,
0.025,
0,
0.125
] | 14 | 0.024157 |
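
One detail of the function above that is easy to miss: the division uses sums taken along axis 2, so it is the axis-2 slices of the result that sum to one. A small check, assuming resize above and numpy are importable:

    import numpy as np

    att = np.array([[[[1.0, 3.0],
                      [2.0, 2.0]]]])    # (batch, heads, length, length)
    out = resize([att.copy()], max_length=2)[0]
    print(out[0, 0].sum(axis=0))        # -> [1. 1.]
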
def dvcircdR(self,R,phi=None):
"""
NAME:
dvcircdR
PURPOSE:
calculate the derivative of the circular velocity at R wrt R
in this potential
INPUT:
R - Galactocentric radius (can be Quantity)
phi= (None) azimuth to use for non-axisymmetric potentials
OUTPUT:
derivative of the circular rotation velocity wrt R
HISTORY:
2013-01-08 - Written - Bovy (IAS)
2016-06-28 - Added phi= keyword for non-axisymmetric potential - Bovy (UofT)
"""
return 0.5*(-self.Rforce(R,0.,phi=phi,use_physical=False)\
+R*self.R2deriv(R,0.,phi=phi,use_physical=False))\
/self.vcirc(R,phi=phi,use_physical=False) | [
"def",
"dvcircdR",
"(",
"self",
",",
"R",
",",
"phi",
"=",
"None",
")",
":",
"return",
"0.5",
"*",
"(",
"-",
"self",
".",
"Rforce",
"(",
"R",
",",
"0.",
",",
"phi",
"=",
"phi",
",",
"use_physical",
"=",
"False",
")",
"+",
"R",
"*",
"self",
".",
"R2deriv",
"(",
"R",
",",
"0.",
",",
"phi",
"=",
"phi",
",",
"use_physical",
"=",
"False",
")",
")",
"/",
"self",
".",
"vcirc",
"(",
"R",
",",
"phi",
"=",
"phi",
",",
"use_physical",
"=",
"False",
")"
] | 26.6875 | 0.031638 | [
"def dvcircdR(self,R,phi=None):\n",
" \"\"\"\n",
" \n",
" NAME:\n",
" \n",
" dvcircdR\n",
" \n",
" PURPOSE:\n",
" \n",
" calculate the derivative of the circular velocity at R wrt R\n",
" in this potential\n",
"\n",
" INPUT:\n",
" \n",
" R - Galactocentric radius (can be Quantity)\n",
" \n",
" phi= (None) azimuth to use for non-axisymmetric potentials\n",
"\n",
" OUTPUT:\n",
" \n",
" derivative of the circular rotation velocity wrt R\n",
" \n",
" HISTORY:\n",
" \n",
" 2013-01-08 - Written - Bovy (IAS)\n",
" \n",
" 2016-06-28 - Added phi= keyword for non-axisymmetric potential - Bovy (UofT)\n",
"\n",
" \"\"\"\n",
" return 0.5*(-self.Rforce(R,0.,phi=phi,use_physical=False)\\\n",
" +R*self.R2deriv(R,0.,phi=phi,use_physical=False))\\\n",
" /self.vcirc(R,phi=phi,use_physical=False)"
] | [
0.06451612903225806,
0.08333333333333333,
0.1111111111111111,
0,
0.1111111111111111,
0,
0.1111111111111111,
0,
0.1111111111111111,
0,
0,
0,
0,
0.1111111111111111,
0,
0.1111111111111111,
0,
0,
0,
0.1111111111111111,
0,
0.1111111111111111,
0,
0.1111111111111111,
0,
0.1111111111111111,
0.011235955056179775,
0,
0,
0.05970149253731343,
0.06578947368421052,
0.07575757575757576
] | 32 | 0.045983 |
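
The return line above implements dv_c/dR = (-F_R + R * d2Phi/dR2) / (2 * v_c), which follows from differentiating v_c^2 = -R * F_R with respect to R. A usage sketch (MiyamotoNagaiPotential is a real galpy class, but treat the exact call here as illustrative):

    from galpy.potential import MiyamotoNagaiPotential

    pot = MiyamotoNagaiPotential(a=0.5, b=0.05, normalize=1.0)
    print(pot.dvcircdR(1.0))  # slope of the rotation curve at R=1, natural units
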
def make_router():
"""Return a WSGI application that searches requests to controllers """
global router
routings = [
('GET', '^/$', index),
('GET', '^/api/?$', index),
('POST', '^/api/1/calculate/?$', calculate.api1_calculate),
('GET', '^/api/2/entities/?$', entities.api2_entities),
('GET', '^/api/1/field/?$', field.api1_field),
('GET', '^/api/1/formula/(?P<name>[^/]+)/?$', formula.api1_formula),
('GET', '^/api/2/formula/(?:(?P<period>[A-Za-z0-9:-]*)/)?(?P<names>[A-Za-z0-9_+-]+)/?$', formula.api2_formula),
('GET', '^/api/1/parameters/?$', parameters.api1_parameters),
('GET', '^/api/1/reforms/?$', reforms.api1_reforms),
('POST', '^/api/1/simulate/?$', simulate.api1_simulate),
('GET', '^/api/1/swagger$', swagger.api1_swagger),
('GET', '^/api/1/variables/?$', variables.api1_variables),
]
router = urls.make_router(*routings)
return router | [
"def",
"make_router",
"(",
")",
":",
"global",
"router",
"routings",
"=",
"[",
"(",
"'GET'",
",",
"'^/$'",
",",
"index",
")",
",",
"(",
"'GET'",
",",
"'^/api/?$'",
",",
"index",
")",
",",
"(",
"'POST'",
",",
"'^/api/1/calculate/?$'",
",",
"calculate",
".",
"api1_calculate",
")",
",",
"(",
"'GET'",
",",
"'^/api/2/entities/?$'",
",",
"entities",
".",
"api2_entities",
")",
",",
"(",
"'GET'",
",",
"'^/api/1/field/?$'",
",",
"field",
".",
"api1_field",
")",
",",
"(",
"'GET'",
",",
"'^/api/1/formula/(?P<name>[^/]+)/?$'",
",",
"formula",
".",
"api1_formula",
")",
",",
"(",
"'GET'",
",",
"'^/api/2/formula/(?:(?P<period>[A-Za-z0-9:-]*)/)?(?P<names>[A-Za-z0-9_+-]+)/?$'",
",",
"formula",
".",
"api2_formula",
")",
",",
"(",
"'GET'",
",",
"'^/api/1/parameters/?$'",
",",
"parameters",
".",
"api1_parameters",
")",
",",
"(",
"'GET'",
",",
"'^/api/1/reforms/?$'",
",",
"reforms",
".",
"api1_reforms",
")",
",",
"(",
"'POST'",
",",
"'^/api/1/simulate/?$'",
",",
"simulate",
".",
"api1_simulate",
")",
",",
"(",
"'GET'",
",",
"'^/api/1/swagger$'",
",",
"swagger",
".",
"api1_swagger",
")",
",",
"(",
"'GET'",
",",
"'^/api/1/variables/?$'",
",",
"variables",
".",
"api1_variables",
")",
",",
"]",
"router",
"=",
"urls",
".",
"make_router",
"(",
"*",
"routings",
")",
"return",
"router"
] | 50.105263 | 0.002062 | [
"def make_router():\n",
" \"\"\"Return a WSGI application that searches requests to controllers \"\"\"\n",
" global router\n",
" routings = [\n",
" ('GET', '^/$', index),\n",
" ('GET', '^/api/?$', index),\n",
" ('POST', '^/api/1/calculate/?$', calculate.api1_calculate),\n",
" ('GET', '^/api/2/entities/?$', entities.api2_entities),\n",
" ('GET', '^/api/1/field/?$', field.api1_field),\n",
" ('GET', '^/api/1/formula/(?P<name>[^/]+)/?$', formula.api1_formula),\n",
" ('GET', '^/api/2/formula/(?:(?P<period>[A-Za-z0-9:-]*)/)?(?P<names>[A-Za-z0-9_+-]+)/?$', formula.api2_formula),\n",
" ('GET', '^/api/1/parameters/?$', parameters.api1_parameters),\n",
" ('GET', '^/api/1/reforms/?$', reforms.api1_reforms),\n",
" ('POST', '^/api/1/simulate/?$', simulate.api1_simulate),\n",
" ('GET', '^/api/1/swagger$', swagger.api1_swagger),\n",
" ('GET', '^/api/1/variables/?$', variables.api1_variables),\n",
" ]\n",
" router = urls.make_router(*routings)\n",
" return router"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.008333333333333333,
0,
0,
0,
0,
0,
0,
0,
0.058823529411764705
] | 19 | 0.003535 |
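
A sketch of using the router once built; urls.make_router is the project's own helper, so the dispatch shown in the comments is inferred from the (method, regex, controller) triples rather than confirmed:

    app = make_router()
    # Conceptual dispatch, inferred from the route table:
    #   GET  /api/1/field          -> field.api1_field
    #   GET  /api/1/formula/birth  -> formula.api1_formula (name='birth')
    #   POST /api/1/calculate      -> calculate.api1_calculate

    # app is a WSGI callable, so it can be served directly, e.g.:
    from wsgiref.simple_server import make_server
    make_server('127.0.0.1', 8000, app).serve_forever()
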
def pretty_print(d, ind='', verbosity=0):
"""Pretty print a data dictionary from the bridge client
"""
assert isinstance(d, dict)
for k, v in sorted(d.items()):
str_base = '{} - [{}] {}'.format(ind, type(v).__name__, k)
if isinstance(v, dict):
print(str_base.replace('-', '+', 1))
pretty_print(v, ind=ind+' ', verbosity=verbosity)
continue
elif isinstance(v, np.ndarray):
node = '{}, {}, {}'.format(str_base, v.dtype, v.shape)
if verbosity >= 2:
node += '\n{}'.format(v)
elif isinstance(v, Sequence):
if v and isinstance(v, (list, tuple)):
itemtype = ' of ' + type(v[0]).__name__
pos = str_base.find(']')
str_base = str_base[:pos] + itemtype + str_base[pos:]
node = '{}, {}'.format(str_base, v)
if verbosity < 1 and len(node) > 80:
node = node[:77] + '...'
else:
node = '{}, {}'.format(str_base, v)
print(node) | [
"def",
"pretty_print",
"(",
"d",
",",
"ind",
"=",
"''",
",",
"verbosity",
"=",
"0",
")",
":",
"assert",
"isinstance",
"(",
"d",
",",
"dict",
")",
"for",
"k",
",",
"v",
"in",
"sorted",
"(",
"d",
".",
"items",
"(",
")",
")",
":",
"str_base",
"=",
"'{} - [{}] {}'",
".",
"format",
"(",
"ind",
",",
"type",
"(",
"v",
")",
".",
"__name__",
",",
"k",
")",
"if",
"isinstance",
"(",
"v",
",",
"dict",
")",
":",
"print",
"(",
"str_base",
".",
"replace",
"(",
"'-'",
",",
"'+'",
",",
"1",
")",
")",
"pretty_print",
"(",
"v",
",",
"ind",
"=",
"ind",
"+",
"' '",
",",
"verbosity",
"=",
"verbosity",
")",
"continue",
"elif",
"isinstance",
"(",
"v",
",",
"np",
".",
"ndarray",
")",
":",
"node",
"=",
"'{}, {}, {}'",
".",
"format",
"(",
"str_base",
",",
"v",
".",
"dtype",
",",
"v",
".",
"shape",
")",
"if",
"verbosity",
">=",
"2",
":",
"node",
"+=",
"'\\n{}'",
".",
"format",
"(",
"v",
")",
"elif",
"isinstance",
"(",
"v",
",",
"Sequence",
")",
":",
"if",
"v",
"and",
"isinstance",
"(",
"v",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"itemtype",
"=",
"' of '",
"+",
"type",
"(",
"v",
"[",
"0",
"]",
")",
".",
"__name__",
"pos",
"=",
"str_base",
".",
"find",
"(",
"']'",
")",
"str_base",
"=",
"str_base",
"[",
":",
"pos",
"]",
"+",
"itemtype",
"+",
"str_base",
"[",
"pos",
":",
"]",
"node",
"=",
"'{}, {}'",
".",
"format",
"(",
"str_base",
",",
"v",
")",
"if",
"verbosity",
"<",
"1",
"and",
"len",
"(",
"node",
")",
">",
"80",
":",
"node",
"=",
"node",
"[",
":",
"77",
"]",
"+",
"'...'",
"else",
":",
"node",
"=",
"'{}, {}'",
".",
"format",
"(",
"str_base",
",",
"v",
")",
"print",
"(",
"node",
")"
] | 39.961538 | 0.00094 | [
"def pretty_print(d, ind='', verbosity=0):\n",
" \"\"\"Pretty print a data dictionary from the bridge client\n",
" \"\"\"\n",
" assert isinstance(d, dict)\n",
" for k, v in sorted(d.items()):\n",
" str_base = '{} - [{}] {}'.format(ind, type(v).__name__, k)\n",
"\n",
" if isinstance(v, dict):\n",
" print(str_base.replace('-', '+', 1))\n",
" pretty_print(v, ind=ind+' ', verbosity=verbosity)\n",
" continue\n",
" elif isinstance(v, np.ndarray):\n",
" node = '{}, {}, {}'.format(str_base, v.dtype, v.shape)\n",
" if verbosity >= 2:\n",
" node += '\\n{}'.format(v)\n",
" elif isinstance(v, Sequence):\n",
" if v and isinstance(v, (list, tuple)):\n",
" itemtype = ' of ' + type(v[0]).__name__\n",
" pos = str_base.find(']')\n",
" str_base = str_base[:pos] + itemtype + str_base[pos:]\n",
" node = '{}, {}'.format(str_base, v)\n",
" if verbosity < 1 and len(node) > 80:\n",
" node = node[:77] + '...'\n",
" else:\n",
" node = '{}, {}'.format(str_base, v)\n",
" print(node)"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05263157894736842
] | 26 | 0.002024 |
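A short usage sketch for the `pretty_print` function above; it assumes the surrounding module imports `numpy` as `np` and `Sequence` from `collections.abc`, since the record does not show its imports:

from collections.abc import Sequence  # assumed module-level import
import numpy as np                    # assumed module-level import

data = {
    'position': np.zeros(3),
    'meta': {'labels': ['x', 'y', 'z'], 'count': 3},
}
pretty_print(data, verbosity=1)
# Output, roughly:
#   + [dict] meta
#     - [int] count, 3
#     - [list of str] labels, ['x', 'y', 'z']
#   - [ndarray] position, float64, (3,)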
def cmd_hasher(f, algorithm):
"""Compute various hashes for the input data, that can be a file or a stream.
Example:
\b
$ habu.hasher README.rst
md5 992a833cd162047daaa6a236b8ac15ae README.rst
ripemd160 0566f9141e65e57cae93e0e3b70d1d8c2ccb0623 README.rst
sha1 d7dbfd2c5e2828eb22f776550c826e4166526253 README.rst
sha256 6bb22d927e1b6307ced616821a1877b6cc35e... README.rst
sha512 8743f3eb12a11cf3edcc16e400fb14d599b4a... README.rst
whirlpool 96bcc083242e796992c0f3462f330811f9e8c... README.rst
You can also specify which algorithm to use. In such case, the output is
only the value of the calculated hash:
\b
$ habu.hasher -a md5 README.rst
992a833cd162047daaa6a236b8ac15ae README.rst
"""
data = f.read()
if not data:
print("Empty file or string!")
return 1
if algorithm:
print(hasher(data, algorithm)[algorithm], f.name)
else:
for algo, result in hasher(data).items():
print("{:<12} {} {}".format(algo, result, f.name)) | [
"def",
"cmd_hasher",
"(",
"f",
",",
"algorithm",
")",
":",
"data",
"=",
"f",
".",
"read",
"(",
")",
"if",
"not",
"data",
":",
"print",
"(",
"\"Empty file or string!\"",
")",
"return",
"1",
"if",
"algorithm",
":",
"print",
"(",
"hasher",
"(",
"data",
",",
"algorithm",
")",
"[",
"algorithm",
"]",
",",
"f",
".",
"name",
")",
"else",
":",
"for",
"algo",
",",
"result",
"in",
"hasher",
"(",
"data",
")",
".",
"items",
"(",
")",
":",
"print",
"(",
"\"{:<12} {} {}\"",
".",
"format",
"(",
"algo",
",",
"result",
",",
"f",
".",
"name",
")",
")"
] | 31.878788 | 0.001845 | [
"def cmd_hasher(f, algorithm):\n",
" \"\"\"Compute various hashes for the input data, that can be a file or a stream.\n",
"\n",
" Example:\n",
"\n",
" \\b\n",
" $ habu.hasher README.rst\n",
" md5 992a833cd162047daaa6a236b8ac15ae README.rst\n",
" ripemd160 0566f9141e65e57cae93e0e3b70d1d8c2ccb0623 README.rst\n",
" sha1 d7dbfd2c5e2828eb22f776550c826e4166526253 README.rst\n",
" sha256 6bb22d927e1b6307ced616821a1877b6cc35e... README.rst\n",
" sha512 8743f3eb12a11cf3edcc16e400fb14d599b4a... README.rst\n",
" whirlpool 96bcc083242e796992c0f3462f330811f9e8c... README.rst\n",
"\n",
" You can also specify which algorithm to use. In such case, the output is\n",
" only the value of the calculated hash:\n",
"\n",
" \\b\n",
" $ habu.hasher -a md5 README.rst\n",
" 992a833cd162047daaa6a236b8ac15ae README.rst\n",
" \"\"\"\n",
"\n",
" data = f.read()\n",
"\n",
" if not data:\n",
" print(\"Empty file or string!\")\n",
" return 1\n",
"\n",
" if algorithm:\n",
" print(hasher(data, algorithm)[algorithm], f.name)\n",
" else:\n",
" for algo, result in hasher(data).items():\n",
" print(\"{:<12} {} {}\".format(algo, result, f.name))"
] | [
0,
0.012195121951219513,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.016129032258064516
] | 33 | 0.000858 |
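`cmd_hasher` delegates to a `hasher()` helper that the record does not include. A plausible minimal version built on `hashlib` is sketched below; the exact algorithm set of the real helper (its docstring also lists ripemd160 and whirlpool, which depend on OpenSSL support) is an assumption:

import hashlib

def hasher(data, algorithm=None):
    # Hypothetical helper: map each algorithm name to the hex digest of
    # `data` (bytes). With no algorithm given, compute a default set.
    names = [algorithm] if algorithm else ['md5', 'sha1', 'sha256', 'sha512']
    return {name: hashlib.new(name, data).hexdigest() for name in names}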
def get_identities(self, identity=None, attrs=None):
""" Get identities matching name and attrs
of the user, as a list
:param: zobjects.Identity or identity name (string)
:param: attrs dict of attributes to return only identities matching
:returns: list of zobjects.Identity
"""
resp = self.request('GetIdentities')
if 'identity' in resp:
identities = resp['identity']
if type(identities) != list:
identities = [identities]
if identity or attrs:
wanted_identities = []
for u_identity in [
zobjects.Identity.from_dict(i) for i in identities]:
if identity:
if isinstance(identity, zobjects.Identity):
if u_identity.name == identity.name:
return [u_identity]
else:
if u_identity.name == identity:
return [u_identity]
elif attrs:
for attr, value in attrs.items():
if (attr in u_identity._a_tags and
u_identity._a_tags[attr] == value):
wanted_identities.append(u_identity)
return wanted_identities
else:
return [zobjects.Identity.from_dict(i) for i in identities]
else:
return [] | [
"def",
"get_identities",
"(",
"self",
",",
"identity",
"=",
"None",
",",
"attrs",
"=",
"None",
")",
":",
"resp",
"=",
"self",
".",
"request",
"(",
"'GetIdentities'",
")",
"if",
"'identity'",
"in",
"resp",
":",
"identities",
"=",
"resp",
"[",
"'identity'",
"]",
"if",
"type",
"(",
"identities",
")",
"!=",
"list",
":",
"identities",
"=",
"[",
"identities",
"]",
"if",
"identity",
"or",
"attrs",
":",
"wanted_identities",
"=",
"[",
"]",
"for",
"u_identity",
"in",
"[",
"zobjects",
".",
"Identity",
".",
"from_dict",
"(",
"i",
")",
"for",
"i",
"in",
"identities",
"]",
":",
"if",
"identity",
":",
"if",
"isinstance",
"(",
"identity",
",",
"zobjects",
".",
"Identity",
")",
":",
"if",
"u_identity",
".",
"name",
"==",
"identity",
".",
"name",
":",
"return",
"[",
"u_identity",
"]",
"else",
":",
"if",
"u_identity",
".",
"name",
"==",
"identity",
":",
"return",
"[",
"u_identity",
"]",
"elif",
"attrs",
":",
"for",
"attr",
",",
"value",
"in",
"attrs",
".",
"items",
"(",
")",
":",
"if",
"(",
"attr",
"in",
"u_identity",
".",
"_a_tags",
"and",
"u_identity",
".",
"_a_tags",
"[",
"attr",
"]",
"==",
"value",
")",
":",
"wanted_identities",
".",
"append",
"(",
"u_identity",
")",
"return",
"wanted_identities",
"else",
":",
"return",
"[",
"zobjects",
".",
"Identity",
".",
"from_dict",
"(",
"i",
")",
"for",
"i",
"in",
"identities",
"]",
"else",
":",
"return",
"[",
"]"
] | 39.631579 | 0.001296 | [
"def get_identities(self, identity=None, attrs=None):\n",
" \"\"\" Get identities matching name and attrs\n",
" of the user, as a list\n",
"\n",
" :param: zobjects.Identity or identity name (string)\n",
" :param: attrs dict of attributes to return only identities matching\n",
" :returns: list of zobjects.Identity\n",
" \"\"\"\n",
" resp = self.request('GetIdentities')\n",
"\n",
" if 'identity' in resp:\n",
" identities = resp['identity']\n",
" if type(identities) != list:\n",
" identities = [identities]\n",
"\n",
" if identity or attrs:\n",
" wanted_identities = []\n",
"\n",
" for u_identity in [\n",
" zobjects.Identity.from_dict(i) for i in identities]:\n",
" if identity:\n",
" if isinstance(identity, zobjects.Identity):\n",
" if u_identity.name == identity.name:\n",
" return [u_identity]\n",
" else:\n",
" if u_identity.name == identity:\n",
" return [u_identity]\n",
"\n",
" elif attrs:\n",
" for attr, value in attrs.items():\n",
" if (attr in u_identity._a_tags and\n",
" u_identity._a_tags[attr] == value):\n",
" wanted_identities.append(u_identity)\n",
" return wanted_identities\n",
" else:\n",
" return [zobjects.Identity.from_dict(i) for i in identities]\n",
" else:\n",
" return []"
] | [
0,
0.0196078431372549,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.047619047619047616
] | 38 | 0.001769 |
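A usage sketch for `get_identities`, assuming `zc` is an already-authenticated account client exposing the method above; the attribute name in the last call is illustrative only:

all_identities = zc.get_identities()
default_only = zc.get_identities(identity='DEFAULT')
by_attr = zc.get_identities(
    attrs={'zimbraPrefFromAddress': 'user@example.com'})  # illustrative attr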
def add_operator(self, operator):
"""Add an ``Operator`` to the ``Expression``.
The ``Operator`` may result in a new ``Expression`` if an ``Operator``
already exists and is of a different precedence.
There are three possibilities when adding an ``Operator`` to an
``Expression`` depending on whether or not an ``Operator`` already
exists:
- No ``Operator`` on the working ``Expression``; Simply set the
``Operator`` and return ``self``.
- ``Operator`` already exists and is higher in precedence; The
``Operator`` and last ``Constraint`` belong in a sub-expression of
the working ``Expression``.
- ``Operator`` already exists and is lower in precedence; The
``Operator`` belongs to the parent of the working ``Expression``
whether one currently exists or not. To remain in the context of
the top ``Expression``, this method will return the parent here
rather than ``self``.
Args:
operator (Operator): What we are adding.
Returns:
Expression: ``self`` or related ``Expression``.
Raises:
        FiqlObjectException: Operator is not a valid ``Operator``.
"""
if not isinstance(operator, Operator):
raise FiqlObjectException("%s is not a valid element type" % (
operator.__class__))
if not self._working_fragment.operator:
self._working_fragment.operator = operator
elif operator > self._working_fragment.operator:
last_constraint = self._working_fragment.elements.pop()
self._working_fragment = self._working_fragment \
.create_nested_expression()
self._working_fragment.add_element(last_constraint)
self._working_fragment.add_operator(operator)
elif operator < self._working_fragment.operator:
if self._working_fragment.parent:
return self._working_fragment.parent.add_operator(operator)
else:
return Expression().add_element(self._working_fragment) \
.add_operator(operator)
return self | [
"def",
"add_operator",
"(",
"self",
",",
"operator",
")",
":",
"if",
"not",
"isinstance",
"(",
"operator",
",",
"Operator",
")",
":",
"raise",
"FiqlObjectException",
"(",
"\"%s is not a valid element type\"",
"%",
"(",
"operator",
".",
"__class__",
")",
")",
"if",
"not",
"self",
".",
"_working_fragment",
".",
"operator",
":",
"self",
".",
"_working_fragment",
".",
"operator",
"=",
"operator",
"elif",
"operator",
">",
"self",
".",
"_working_fragment",
".",
"operator",
":",
"last_constraint",
"=",
"self",
".",
"_working_fragment",
".",
"elements",
".",
"pop",
"(",
")",
"self",
".",
"_working_fragment",
"=",
"self",
".",
"_working_fragment",
".",
"create_nested_expression",
"(",
")",
"self",
".",
"_working_fragment",
".",
"add_element",
"(",
"last_constraint",
")",
"self",
".",
"_working_fragment",
".",
"add_operator",
"(",
"operator",
")",
"elif",
"operator",
"<",
"self",
".",
"_working_fragment",
".",
"operator",
":",
"if",
"self",
".",
"_working_fragment",
".",
"parent",
":",
"return",
"self",
".",
"_working_fragment",
".",
"parent",
".",
"add_operator",
"(",
"operator",
")",
"else",
":",
"return",
"Expression",
"(",
")",
".",
"add_element",
"(",
"self",
".",
"_working_fragment",
")",
".",
"add_operator",
"(",
"operator",
")",
"return",
"self"
] | 44.795918 | 0.001337 | [
"def add_operator(self, operator):\n",
" \"\"\"Add an ``Operator`` to the ``Expression``.\n",
"\n",
" The ``Operator`` may result in a new ``Expression`` if an ``Operator``\n",
" already exists and is of a different precedence.\n",
"\n",
" There are three possibilities when adding an ``Operator`` to an\n",
" ``Expression`` depending on whether or not an ``Operator`` already\n",
" exists:\n",
"\n",
" - No ``Operator`` on the working ``Expression``; Simply set the\n",
" ``Operator`` and return ``self``.\n",
" - ``Operator`` already exists and is higher in precedence; The\n",
" ``Operator`` and last ``Constraint`` belong in a sub-expression of\n",
" the working ``Expression``.\n",
" - ``Operator`` already exists and is lower in precedence; The\n",
" ``Operator`` belongs to the parent of the working ``Expression``\n",
" whether one currently exists or not. To remain in the context of\n",
" the top ``Expression``, this method will return the parent here\n",
" rather than ``self``.\n",
"\n",
" Args:\n",
" operator (Operator): What we are adding.\n",
"\n",
" Returns:\n",
" Expression: ``self`` or related ``Expression``.\n",
"\n",
" Raises:\n",
" FiqlObjectExpression: Operator is not a valid ``Operator``.\n",
" \"\"\"\n",
" if not isinstance(operator, Operator):\n",
" raise FiqlObjectException(\"%s is not a valid element type\" % (\n",
" operator.__class__))\n",
"\n",
" if not self._working_fragment.operator:\n",
" self._working_fragment.operator = operator\n",
" elif operator > self._working_fragment.operator:\n",
" last_constraint = self._working_fragment.elements.pop()\n",
" self._working_fragment = self._working_fragment \\\n",
" .create_nested_expression()\n",
" self._working_fragment.add_element(last_constraint)\n",
" self._working_fragment.add_operator(operator)\n",
" elif operator < self._working_fragment.operator:\n",
" if self._working_fragment.parent:\n",
" return self._working_fragment.parent.add_operator(operator)\n",
" else:\n",
" return Expression().add_element(self._working_fragment) \\\n",
" .add_operator(operator)\n",
" return self"
] | [
0,
0.018518518518518517,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.020833333333333332,
0,
0,
0,
0,
0,
0,
0,
0,
0.05263157894736842
] | 49 | 0.001877 |
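A sketch of how the precedence rules play out when building `a==1;b==2,c==3` by hand, assuming the companion `Constraint` and `Operator` classes from the same FIQL object model (constructor signatures here are assumptions). Re-assigning the return value of `add_operator` keeps the variable pointing at the top-level expression:

expr = Expression()
expr.add_element(Constraint('a', '==', '1'))
expr = expr.add_operator(Operator(';'))   # AND: set on the working expression
expr.add_element(Constraint('b', '==', '2'))
expr = expr.add_operator(Operator(','))   # OR is lower precedence: a parent
expr.add_element(Constraint('c', '==', '3'))  # expression is returned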
def get_git_changeset():
"""Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
"""
repo_dir = os.path.dirname(os.path.abspath(__file__))
git_log = subprocess.Popen('git log --pretty=format:%ct --quiet -1 HEAD',
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, cwd=repo_dir, universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
return timestamp.strftime('%Y%m%d%H%M%S')
except ValueError:
return None | [
"def",
"get_git_changeset",
"(",
")",
":",
"repo_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"__file__",
")",
")",
"git_log",
"=",
"subprocess",
".",
"Popen",
"(",
"'git log --pretty=format:%ct --quiet -1 HEAD'",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
",",
"shell",
"=",
"True",
",",
"cwd",
"=",
"repo_dir",
",",
"universal_newlines",
"=",
"True",
")",
"timestamp",
"=",
"git_log",
".",
"communicate",
"(",
")",
"[",
"0",
"]",
"try",
":",
"timestamp",
"=",
"datetime",
".",
"datetime",
".",
"utcfromtimestamp",
"(",
"int",
"(",
"timestamp",
")",
")",
"return",
"timestamp",
".",
"strftime",
"(",
"'%Y%m%d%H%M%S'",
")",
"except",
"ValueError",
":",
"return",
"None"
] | 48.411765 | 0.002384 | [
"def get_git_changeset():\n",
" \"\"\"Returns a numeric identifier of the latest git changeset.\n",
"\n",
" The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.\n",
" This value isn't guaranteed to be unique, but collisions are very unlikely,\n",
" so it's sufficient for generating the development version numbers.\n",
" \"\"\"\n",
" repo_dir = os.path.dirname(os.path.abspath(__file__))\n",
" git_log = subprocess.Popen('git log --pretty=format:%ct --quiet -1 HEAD',\n",
" stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n",
" shell=True, cwd=repo_dir, universal_newlines=True)\n",
" timestamp = git_log.communicate()[0]\n",
" try:\n",
" timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))\n",
" return timestamp.strftime('%Y%m%d%H%M%S')\n",
" except ValueError:\n",
" return None"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.012195121951219513,
0,
0,
0,
0,
0,
0.05263157894736842
] | 17 | 0.003813 |
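A small worked example of the conversion at the core of `get_git_changeset`; `git log --pretty=format:%ct` prints a Unix commit timestamp, and the value below is arbitrary:

import datetime

timestamp = datetime.datetime.utcfromtimestamp(int('1700000000'))
print(timestamp.strftime('%Y%m%d%H%M%S'))  # -> 20231114221320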
def post_event(api_key=None,
app_key=None,
title=None,
text=None,
date_happened=None,
priority=None,
host=None,
tags=None,
alert_type=None,
aggregation_key=None,
source_type_name=None):
'''
Post an event to the Datadog stream.
CLI Example
.. code-block:: bash
salt-call datadog.post_event api_key='0123456789' \\
app_key='9876543210' \\
title='Salt Highstate' \\
text="Salt highstate was run on $(salt-call grains.get id)" \\
tags='["service:salt", "event:highstate"]'
Required arguments
:param title: The event title. Limited to 100 characters.
:param text: The body of the event. Limited to 4000 characters. The text
supports markdown.
Optional arguments
:param date_happened: POSIX timestamp of the event.
:param priority: The priority of the event ('normal' or 'low').
:param host: Host name to associate with the event.
:param tags: A list of tags to apply to the event.
:param alert_type: "error", "warning", "info" or "success".
:param aggregation_key: An arbitrary string to use for aggregation,
max length of 100 characters.
:param source_type_name: The type of event being posted.
'''
_initialize_connection(api_key, app_key)
if title is None:
raise SaltInvocationError('title must be specified')
if text is None:
raise SaltInvocationError('text must be specified')
if alert_type not in [None, 'error', 'warning', 'info', 'success']:
# Datadog only supports these alert types but the API doesn't return an
# error for an incorrect alert_type, so we can do it here for now.
# https://github.com/DataDog/datadogpy/issues/215
message = ('alert_type must be one of "error", "warning", "info", or '
'"success"')
raise SaltInvocationError(message)
ret = {'result': False,
'response': None,
'comment': ''}
try:
response = datadog.api.Event.create(title=title,
text=text,
date_happened=date_happened,
priority=priority,
host=host,
tags=tags,
alert_type=alert_type,
aggregation_key=aggregation_key,
source_type_name=source_type_name
)
except ValueError:
comment = ('Unexpected exception in Datadog Post Event API '
'call. Are your keys correct?')
ret['comment'] = comment
return ret
ret['response'] = response
if 'status' in response.keys():
ret['result'] = True
ret['comment'] = 'Successfully sent event'
else:
ret['comment'] = 'Error in posting event.'
return ret | [
"def",
"post_event",
"(",
"api_key",
"=",
"None",
",",
"app_key",
"=",
"None",
",",
"title",
"=",
"None",
",",
"text",
"=",
"None",
",",
"date_happened",
"=",
"None",
",",
"priority",
"=",
"None",
",",
"host",
"=",
"None",
",",
"tags",
"=",
"None",
",",
"alert_type",
"=",
"None",
",",
"aggregation_key",
"=",
"None",
",",
"source_type_name",
"=",
"None",
")",
":",
"_initialize_connection",
"(",
"api_key",
",",
"app_key",
")",
"if",
"title",
"is",
"None",
":",
"raise",
"SaltInvocationError",
"(",
"'title must be specified'",
")",
"if",
"text",
"is",
"None",
":",
"raise",
"SaltInvocationError",
"(",
"'text must be specified'",
")",
"if",
"alert_type",
"not",
"in",
"[",
"None",
",",
"'error'",
",",
"'warning'",
",",
"'info'",
",",
"'success'",
"]",
":",
"# Datadog only supports these alert types but the API doesn't return an",
"# error for an incorrect alert_type, so we can do it here for now.",
"# https://github.com/DataDog/datadogpy/issues/215",
"message",
"=",
"(",
"'alert_type must be one of \"error\", \"warning\", \"info\", or '",
"'\"success\"'",
")",
"raise",
"SaltInvocationError",
"(",
"message",
")",
"ret",
"=",
"{",
"'result'",
":",
"False",
",",
"'response'",
":",
"None",
",",
"'comment'",
":",
"''",
"}",
"try",
":",
"response",
"=",
"datadog",
".",
"api",
".",
"Event",
".",
"create",
"(",
"title",
"=",
"title",
",",
"text",
"=",
"text",
",",
"date_happened",
"=",
"date_happened",
",",
"priority",
"=",
"priority",
",",
"host",
"=",
"host",
",",
"tags",
"=",
"tags",
",",
"alert_type",
"=",
"alert_type",
",",
"aggregation_key",
"=",
"aggregation_key",
",",
"source_type_name",
"=",
"source_type_name",
")",
"except",
"ValueError",
":",
"comment",
"=",
"(",
"'Unexpected exception in Datadog Post Event API '",
"'call. Are your keys correct?'",
")",
"ret",
"[",
"'comment'",
"]",
"=",
"comment",
"return",
"ret",
"ret",
"[",
"'response'",
"]",
"=",
"response",
"if",
"'status'",
"in",
"response",
".",
"keys",
"(",
")",
":",
"ret",
"[",
"'result'",
"]",
"=",
"True",
"ret",
"[",
"'comment'",
"]",
"=",
"'Successfully sent event'",
"else",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'Error in posting event.'",
"return",
"ret"
] | 40.195122 | 0.000888 | [
"def post_event(api_key=None,\n",
" app_key=None,\n",
" title=None,\n",
" text=None,\n",
" date_happened=None,\n",
" priority=None,\n",
" host=None,\n",
" tags=None,\n",
" alert_type=None,\n",
" aggregation_key=None,\n",
" source_type_name=None):\n",
" '''\n",
" Post an event to the Datadog stream.\n",
"\n",
" CLI Example\n",
"\n",
" .. code-block:: bash\n",
"\n",
" salt-call datadog.post_event api_key='0123456789' \\\\\n",
" app_key='9876543210' \\\\\n",
" title='Salt Highstate' \\\\\n",
" text=\"Salt highstate was run on $(salt-call grains.get id)\" \\\\\n",
" tags='[\"service:salt\", \"event:highstate\"]'\n",
"\n",
" Required arguments\n",
"\n",
" :param title: The event title. Limited to 100 characters.\n",
" :param text: The body of the event. Limited to 4000 characters. The text\n",
" supports markdown.\n",
"\n",
" Optional arguments\n",
"\n",
" :param date_happened: POSIX timestamp of the event.\n",
" :param priority: The priority of the event ('normal' or 'low').\n",
" :param host: Host name to associate with the event.\n",
" :param tags: A list of tags to apply to the event.\n",
" :param alert_type: \"error\", \"warning\", \"info\" or \"success\".\n",
" :param aggregation_key: An arbitrary string to use for aggregation,\n",
" max length of 100 characters.\n",
" :param source_type_name: The type of event being posted.\n",
" '''\n",
" _initialize_connection(api_key, app_key)\n",
" if title is None:\n",
" raise SaltInvocationError('title must be specified')\n",
" if text is None:\n",
" raise SaltInvocationError('text must be specified')\n",
" if alert_type not in [None, 'error', 'warning', 'info', 'success']:\n",
" # Datadog only supports these alert types but the API doesn't return an\n",
" # error for an incorrect alert_type, so we can do it here for now.\n",
" # https://github.com/DataDog/datadogpy/issues/215\n",
" message = ('alert_type must be one of \"error\", \"warning\", \"info\", or '\n",
" '\"success\"')\n",
" raise SaltInvocationError(message)\n",
"\n",
" ret = {'result': False,\n",
" 'response': None,\n",
" 'comment': ''}\n",
"\n",
" try:\n",
" response = datadog.api.Event.create(title=title,\n",
" text=text,\n",
" date_happened=date_happened,\n",
" priority=priority,\n",
" host=host,\n",
" tags=tags,\n",
" alert_type=alert_type,\n",
" aggregation_key=aggregation_key,\n",
" source_type_name=source_type_name\n",
" )\n",
" except ValueError:\n",
" comment = ('Unexpected exception in Datadog Post Event API '\n",
" 'call. Are your keys correct?')\n",
" ret['comment'] = comment\n",
" return ret\n",
"\n",
" ret['response'] = response\n",
" if 'status' in response.keys():\n",
" ret['result'] = True\n",
" ret['comment'] = 'Successfully sent event'\n",
" else:\n",
" ret['comment'] = 'Error in posting event.'\n",
" return ret"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.01,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.022222222222222223,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.07142857142857142
] | 82 | 0.001264 |
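A Python-side usage sketch for `post_event`; the keys are placeholders and the call needs the `datadog` package plus network access, so treat it as illustrative rather than a test:

ret = post_event(api_key='0123456789',   # placeholder
                 app_key='9876543210',   # placeholder
                 title='Salt Highstate',
                 text='Salt highstate was run',
                 tags=['service:salt', 'event:highstate'],
                 alert_type='info')
print(ret['result'], ret['comment'])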
def __init(self):
""" initializes the service """
params = {
"f" : "json",
}
json_dict = self._get(self._url, params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
self._json = json.dumps(json_dict)
self._json_dict = json_dict
self.loadAttributes(json_dict=json_dict) | [
"def",
"__init",
"(",
"self",
")",
":",
"params",
"=",
"{",
"\"f\"",
":",
"\"json\"",
",",
"}",
"json_dict",
"=",
"self",
".",
"_get",
"(",
"self",
".",
"_url",
",",
"params",
",",
"securityHandler",
"=",
"self",
".",
"_securityHandler",
",",
"proxy_port",
"=",
"self",
".",
"_proxy_port",
",",
"proxy_url",
"=",
"self",
".",
"_proxy_url",
")",
"self",
".",
"_json",
"=",
"json",
".",
"dumps",
"(",
"json_dict",
")",
"self",
".",
"_json_dict",
"=",
"json_dict",
"self",
".",
"loadAttributes",
"(",
"json_dict",
"=",
"json_dict",
")"
] | 39.333333 | 0.012422 | [
"def __init(self):\n",
" \"\"\" initializes the service \"\"\"\n",
" params = {\n",
" \"f\" : \"json\",\n",
" }\n",
" json_dict = self._get(self._url, params,\n",
" securityHandler=self._securityHandler,\n",
" proxy_port=self._proxy_port,\n",
" proxy_url=self._proxy_url)\n",
" self._json = json.dumps(json_dict)\n",
" self._json_dict = json_dict\n",
" self.loadAttributes(json_dict=json_dict)"
] | [
0,
0.025,
0,
0.038461538461538464,
0,
0,
0.013888888888888888,
0.016129032258064516,
0.016666666666666666,
0,
0,
0.020833333333333332
] | 12 | 0.010915 |
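`loadAttributes` is not shown in the record. A common pattern for wrappers like this is to mirror each key of the JSON response as a private attribute; the sketch below is an assumption about that helper, not the actual implementation:

def loadAttributes(self, json_dict):
    # Hypothetical sketch: expose each response key as `self._<key>`.
    for key, value in json_dict.items():
        setattr(self, "_" + key, value)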
def info(self, message, *args, **kwargs):
"""More important level : default for print and save
"""
self._log(logging.INFO, message, *args, **kwargs) | [
"def",
"info",
"(",
"self",
",",
"message",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_log",
"(",
"logging",
".",
"INFO",
",",
"message",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 42.25 | 0.011628 | [
"def info(self, message, *args, **kwargs):\n",
" \"\"\"More important level : default for print and save\n",
" \"\"\"\n",
" self._log(logging.INFO, message, *args, **kwargs)"
] | [
0,
0.01639344262295082,
0,
0.017543859649122806
] | 4 | 0.008484 |
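The wrapper relies on a private `_log` dispatcher that the record does not show. A minimal self-contained sketch of the pattern, with an assumed `_log` that forwards to a stdlib logger:

import logging

class MiniLogger:
    # Illustrative only: the thin per-level wrappers delegate to _log.
    def __init__(self, name='app'):
        self._logger = logging.getLogger(name)

    def _log(self, level, message, *args, **kwargs):
        self._logger.log(level, message, *args, **kwargs)

    def info(self, message, *args, **kwargs):
        """More important level: the default for print and save."""
        self._log(logging.INFO, message, *args, **kwargs)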
def haversine_distance(origin, destination):
"""
Calculate the Haversine distance.
Parameters
----------
origin : tuple of float
(lat, long)
destination : tuple of float
(lat, long)
Returns
-------
distance_in_km : float
Examples
--------
>>> munich = (48.1372, 11.5756)
>>> berlin = (52.5186, 13.4083)
>>> round(haversine_distance(munich, berlin), 1)
504.2
>>> new_york_city = (40.712777777778, -74.005833333333) # NYC
>>> round(haversine_distance(berlin, new_york_city), 1)
6385.3
"""
lat1, lon1 = origin
lat2, lon2 = destination
if not (-90.0 <= lat1 <= 90):
raise ValueError('lat1={:2.2f}, but must be in [-90,+90]'.format(lat1))
if not (-90.0 <= lat2 <= 90):
raise ValueError('lat2={:2.2f}, but must be in [-90,+90]'.format(lat2))
if not (-180.0 <= lon1 <= 180):
raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'
                         .format(lon1))
if not (-180.0 <= lon2 <= 180):
        raise ValueError('lon2={:2.2f}, but must be in [-180,+180]'
                         .format(lon2))
radius = 6371 # km
dlat = math_stl.radians(lat2 - lat1)
dlon = math_stl.radians(lon2 - lon1)
a = (math_stl.sin(dlat / 2) * math_stl.sin(dlat / 2) +
math_stl.cos(math_stl.radians(lat1)) *
math_stl.cos(math_stl.radians(lat2)) *
math_stl.sin(dlon / 2) * math_stl.sin(dlon / 2))
c = 2 * math_stl.atan2(math_stl.sqrt(a), math_stl.sqrt(1 - a))
d = radius * c
return d | [
"def",
"haversine_distance",
"(",
"origin",
",",
"destination",
")",
":",
"lat1",
",",
"lon1",
"=",
"origin",
"lat2",
",",
"lon2",
"=",
"destination",
"if",
"not",
"(",
"-",
"90.0",
"<=",
"lat1",
"<=",
"90",
")",
":",
"raise",
"ValueError",
"(",
"'lat1={:2.2f}, but must be in [-90,+90]'",
".",
"format",
"(",
"lat1",
")",
")",
"if",
"not",
"(",
"-",
"90.0",
"<=",
"lat2",
"<=",
"90",
")",
":",
"raise",
"ValueError",
"(",
"'lat2={:2.2f}, but must be in [-90,+90]'",
".",
"format",
"(",
"lat2",
")",
")",
"if",
"not",
"(",
"-",
"180.0",
"<=",
"lon1",
"<=",
"180",
")",
":",
"raise",
"ValueError",
"(",
"'lon1={:2.2f}, but must be in [-180,+180]'",
".",
"format",
"(",
"lat1",
")",
")",
"if",
"not",
"(",
"-",
"180.0",
"<=",
"lon2",
"<=",
"180",
")",
":",
"raise",
"ValueError",
"(",
"'lon1={:2.2f}, but must be in [-180,+180]'",
".",
"format",
"(",
"lat1",
")",
")",
"radius",
"=",
"6371",
"# km",
"dlat",
"=",
"math_stl",
".",
"radians",
"(",
"lat2",
"-",
"lat1",
")",
"dlon",
"=",
"math_stl",
".",
"radians",
"(",
"lon2",
"-",
"lon1",
")",
"a",
"=",
"(",
"math_stl",
".",
"sin",
"(",
"dlat",
"/",
"2",
")",
"*",
"math_stl",
".",
"sin",
"(",
"dlat",
"/",
"2",
")",
"+",
"math_stl",
".",
"cos",
"(",
"math_stl",
".",
"radians",
"(",
"lat1",
")",
")",
"*",
"math_stl",
".",
"cos",
"(",
"math_stl",
".",
"radians",
"(",
"lat2",
")",
")",
"*",
"math_stl",
".",
"sin",
"(",
"dlon",
"/",
"2",
")",
"*",
"math_stl",
".",
"sin",
"(",
"dlon",
"/",
"2",
")",
")",
"c",
"=",
"2",
"*",
"math_stl",
".",
"atan2",
"(",
"math_stl",
".",
"sqrt",
"(",
"a",
")",
",",
"math_stl",
".",
"sqrt",
"(",
"1",
"-",
"a",
")",
")",
"d",
"=",
"radius",
"*",
"c",
"return",
"d"
] | 30.46 | 0.000636 | [
"def haversine_distance(origin, destination):\n",
" \"\"\"\n",
" Calculate the Haversine distance.\n",
"\n",
" Parameters\n",
" ----------\n",
" origin : tuple of float\n",
" (lat, long)\n",
" destination : tuple of float\n",
" (lat, long)\n",
"\n",
" Returns\n",
" -------\n",
" distance_in_km : float\n",
"\n",
" Examples\n",
" --------\n",
" >>> munich = (48.1372, 11.5756)\n",
" >>> berlin = (52.5186, 13.4083)\n",
" >>> round(haversine_distance(munich, berlin), 1)\n",
" 504.2\n",
"\n",
" >>> new_york_city = (40.712777777778, -74.005833333333) # NYC\n",
" >>> round(haversine_distance(berlin, new_york_city), 1)\n",
" 6385.3\n",
" \"\"\"\n",
" lat1, lon1 = origin\n",
" lat2, lon2 = destination\n",
" if not (-90.0 <= lat1 <= 90):\n",
" raise ValueError('lat1={:2.2f}, but must be in [-90,+90]'.format(lat1))\n",
" if not (-90.0 <= lat2 <= 90):\n",
" raise ValueError('lat2={:2.2f}, but must be in [-90,+90]'.format(lat2))\n",
" if not (-180.0 <= lon1 <= 180):\n",
" raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'\n",
" .format(lat1))\n",
" if not (-180.0 <= lon2 <= 180):\n",
" raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'\n",
" .format(lat1))\n",
" radius = 6371 # km\n",
"\n",
" dlat = math_stl.radians(lat2 - lat1)\n",
" dlon = math_stl.radians(lon2 - lon1)\n",
" a = (math_stl.sin(dlat / 2) * math_stl.sin(dlat / 2) +\n",
" math_stl.cos(math_stl.radians(lat1)) *\n",
" math_stl.cos(math_stl.radians(lat2)) *\n",
" math_stl.sin(dlon / 2) * math_stl.sin(dlon / 2))\n",
" c = 2 * math_stl.atan2(math_stl.sqrt(a), math_stl.sqrt(1 - a))\n",
" d = radius * c\n",
"\n",
" return d"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.08333333333333333
] | 50 | 0.001667 |
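The function above implements the standard haversine formula; written out, with latitudes φ, longitudes λ, and Earth radius R ≈ 6371 km:

$$
a = \sin^2\!\left(\tfrac{\Delta\varphi}{2}\right)
    + \cos\varphi_1 \cos\varphi_2 \,\sin^2\!\left(\tfrac{\Delta\lambda}{2}\right),
\qquad c = 2\,\operatorname{atan2}\!\left(\sqrt{a},\,\sqrt{1-a}\right),
\qquad d = R\,c
$$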