column        type      range
text          string    lengths 75 to 104k
code_tokens   sequence  -
avg_line_len  float64   7.91 to 980
score         float64   0 to 0.18
texts         sequence  -
scores        sequence  -
num_lines     int64     3 to 2.77k
avg_score     float64   0 to 0.37
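The derived columns can be checked against the raw list columns: in every row listed below, num_lines equals the length of texts and scores, and avg_score is the arithmetic mean of scores. A minimal sketch of such a check, assuming a row is exposed as a plain dict keyed by the column names above (how the dataset is actually loaded is not specified in this listing, and the row-level score column is not derivable from the fields shown, so it is left alone):

def check_row(row):
    """Sanity-check the derived columns of one row against its raw fields."""
    texts = row["texts"]      # source lines, each ending in "\n"
    scores = row["scores"]    # one per-line score for each entry of `texts`
    assert row["num_lines"] == len(texts) == len(scores)
    # avg_score matches the mean of the per-line scores in the rows shown here.
    assert abs(row["avg_score"] - sum(scores) / len(scores)) < 1e-5
    # avg_line_len appears to be the mean character length of the lines
    # (an inference from the values shown, not stated by the schema).
    return sum(len(line.rstrip("\n")) for line in texts) / len(texts)

For the first row below this yields a value close to its listed avg_line_len of 35.5.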
def distance_matrix(lons, lats, diameter=2*EARTH_RADIUS): """ :param lons: array of m longitudes :param lats: array of m latitudes :returns: matrix of (m, m) distances """ m = len(lons) assert m == len(lats), (m, len(lats)) lons = numpy.radians(lons) lats = numpy.radians(lats) cos_lats = numpy.cos(lats) result = numpy.zeros((m, m)) for i in range(len(lons)): a = numpy.sin((lats[i] - lats) / 2.0) b = numpy.sin((lons[i] - lons) / 2.0) result[i, :] = numpy.arcsin( numpy.sqrt(a * a + cos_lats[i] * cos_lats * b * b)) * diameter return numpy.matrix(result, copy=False)
[ "def", "distance_matrix", "(", "lons", ",", "lats", ",", "diameter", "=", "2", "*", "EARTH_RADIUS", ")", ":", "m", "=", "len", "(", "lons", ")", "assert", "m", "==", "len", "(", "lats", ")", ",", "(", "m", ",", "len", "(", "lats", ")", ")", "lons", "=", "numpy", ".", "radians", "(", "lons", ")", "lats", "=", "numpy", ".", "radians", "(", "lats", ")", "cos_lats", "=", "numpy", ".", "cos", "(", "lats", ")", "result", "=", "numpy", ".", "zeros", "(", "(", "m", ",", "m", ")", ")", "for", "i", "in", "range", "(", "len", "(", "lons", ")", ")", ":", "a", "=", "numpy", ".", "sin", "(", "(", "lats", "[", "i", "]", "-", "lats", ")", "/", "2.0", ")", "b", "=", "numpy", ".", "sin", "(", "(", "lons", "[", "i", "]", "-", "lons", ")", "/", "2.0", ")", "result", "[", "i", ",", ":", "]", "=", "numpy", ".", "arcsin", "(", "numpy", ".", "sqrt", "(", "a", "*", "a", "+", "cos_lats", "[", "i", "]", "*", "cos_lats", "*", "b", "*", "b", ")", ")", "*", "diameter", "return", "numpy", ".", "matrix", "(", "result", ",", "copy", "=", "False", ")" ]
avg_line_len: 35.5
score: 0.001524
[ "def distance_matrix(lons, lats, diameter=2*EARTH_RADIUS):\n", " \"\"\"\n", " :param lons: array of m longitudes\n", " :param lats: array of m latitudes\n", " :returns: matrix of (m, m) distances\n", " \"\"\"\n", " m = len(lons)\n", " assert m == len(lats), (m, len(lats))\n", " lons = numpy.radians(lons)\n", " lats = numpy.radians(lats)\n", " cos_lats = numpy.cos(lats)\n", " result = numpy.zeros((m, m))\n", " for i in range(len(lons)):\n", " a = numpy.sin((lats[i] - lats) / 2.0)\n", " b = numpy.sin((lons[i] - lons) / 2.0)\n", " result[i, :] = numpy.arcsin(\n", " numpy.sqrt(a * a + cos_lats[i] * cos_lats * b * b)) * diameter\n", " return numpy.matrix(result, copy=False)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.023255813953488372 ]
num_lines: 18
avg_score: 0.001292
def has_only_keys(self, keys): """ Ensures :attr:`subject` is a :class:`collections.Mapping` and contains *keys*, and no other keys. """ self.is_a(Mapping) self.contains_only(keys) return ChainInspector(self._subject)
[ "def", "has_only_keys", "(", "self", ",", "keys", ")", ":", "self", ".", "is_a", "(", "Mapping", ")", "self", ".", "contains_only", "(", "keys", ")", "return", "ChainInspector", "(", "self", ".", "_subject", ")" ]
avg_line_len: 37
score: 0.011321
[ "def has_only_keys(self, keys):\n", " \"\"\"\n", " Ensures :attr:`subject` is a :class:`collections.Mapping` and contains *keys*, and no other keys.\n", " \"\"\"\n", " self.is_a(Mapping)\n", " self.contains_only(keys)\n", " return ChainInspector(self._subject)" ]
[ 0, 0.08333333333333333, 0.009433962264150943, 0, 0, 0, 0.022727272727272728 ]
num_lines: 7
avg_score: 0.016499
def _op_generic_Ctz(self, args): """Count the trailing zeroes""" wtf_expr = claripy.BVV(self._from_size, self._from_size) for a in reversed(range(self._from_size)): bit = claripy.Extract(a, a, args[0]) wtf_expr = claripy.If(bit == 1, claripy.BVV(a, self._from_size), wtf_expr) return wtf_expr
[ "def", "_op_generic_Ctz", "(", "self", ",", "args", ")", ":", "wtf_expr", "=", "claripy", ".", "BVV", "(", "self", ".", "_from_size", ",", "self", ".", "_from_size", ")", "for", "a", "in", "reversed", "(", "range", "(", "self", ".", "_from_size", ")", ")", ":", "bit", "=", "claripy", ".", "Extract", "(", "a", ",", "a", ",", "args", "[", "0", "]", ")", "wtf_expr", "=", "claripy", ".", "If", "(", "bit", "==", "1", ",", "claripy", ".", "BVV", "(", "a", ",", "self", ".", "_from_size", ")", ",", "wtf_expr", ")", "return", "wtf_expr" ]
avg_line_len: 48.857143
score: 0.008621
[ "def _op_generic_Ctz(self, args):\n", " \"\"\"Count the trailing zeroes\"\"\"\n", " wtf_expr = claripy.BVV(self._from_size, self._from_size)\n", " for a in reversed(range(self._from_size)):\n", " bit = claripy.Extract(a, a, args[0])\n", " wtf_expr = claripy.If(bit == 1, claripy.BVV(a, self._from_size), wtf_expr)\n", " return wtf_expr" ]
[ 0, 0.025, 0, 0, 0, 0.011494252873563218, 0.043478260869565216 ]
num_lines: 7
avg_score: 0.011425
def isAboveHorizon(ra, decl, mcRA, lat): """ Returns if an object's 'ra' and 'decl' is above the horizon at a specific latitude, given the MC's right ascension. """ # This function checks if the equatorial distance from # the object to the MC is within its diurnal semi-arc. dArc, _ = dnarcs(decl, lat) dist = abs(angle.closestdistance(mcRA, ra)) return dist <= dArc/2.0 + 0.0003
[ "def", "isAboveHorizon", "(", "ra", ",", "decl", ",", "mcRA", ",", "lat", ")", ":", "# This function checks if the equatorial distance from ", "# the object to the MC is within its diurnal semi-arc.", "dArc", ",", "_", "=", "dnarcs", "(", "decl", ",", "lat", ")", "dist", "=", "abs", "(", "angle", ".", "closestdistance", "(", "mcRA", ",", "ra", ")", ")", "return", "dist", "<=", "dArc", "/", "2.0", "+", "0.0003" ]
avg_line_len: 34.75
score: 0.014019
[ "def isAboveHorizon(ra, decl, mcRA, lat):\n", " \"\"\" Returns if an object's 'ra' and 'decl' \n", " is above the horizon at a specific latitude, \n", " given the MC's right ascension.\n", " \n", " \"\"\"\n", " # This function checks if the equatorial distance from \n", " # the object to the MC is within its diurnal semi-arc.\n", " \n", " dArc, _ = dnarcs(decl, lat)\n", " dist = abs(angle.closestdistance(mcRA, ra))\n", " return dist <= dArc/2.0 + 0.0003" ]
[ 0, 0.020833333333333332, 0.02, 0, 0.2, 0, 0.016666666666666666, 0, 0.2, 0, 0, 0.027777777777777776 ]
num_lines: 12
avg_score: 0.04044
def equal(actual, expected): ''' Compare actual and expected using == >>> expect = Expector([]) >>> expect(1).to_not(equal, 2) (True, 'equal: expect 1 == 2') >>> expect(1).to(equal, 1) (True, 'equal: expect 1 == 1') ''' is_passing = (actual == expected) types_to_diff = (str, dict, list, tuple) if not is_passing and isinstance(expected, types_to_diff) and isinstance(actual, types_to_diff): readable_diff = difflib.unified_diff(pformat(expected).split('\n'), pformat(actual).split('\n'), n=99) description = '\n'.join(['equal:'] + list(readable_diff)) else: description = "equal: expect {} == {}".format(actual, expected) outcome = (is_passing, description) return outcome
[ "def", "equal", "(", "actual", ",", "expected", ")", ":", "is_passing", "=", "(", "actual", "==", "expected", ")", "types_to_diff", "=", "(", "str", ",", "dict", ",", "list", ",", "tuple", ")", "if", "not", "is_passing", "and", "isinstance", "(", "expected", ",", "types_to_diff", ")", "and", "isinstance", "(", "actual", ",", "types_to_diff", ")", ":", "readable_diff", "=", "difflib", ".", "unified_diff", "(", "pformat", "(", "expected", ")", ".", "split", "(", "'\\n'", ")", ",", "pformat", "(", "actual", ")", ".", "split", "(", "'\\n'", ")", ",", "n", "=", "99", ")", "description", "=", "'\\n'", ".", "join", "(", "[", "'equal:'", "]", "+", "list", "(", "readable_diff", ")", ")", "else", ":", "description", "=", "\"equal: expect {} == {}\"", ".", "format", "(", "actual", ",", "expected", ")", "outcome", "=", "(", "is_passing", ",", "description", ")", "return", "outcome" ]
avg_line_len: 35.454545
score: 0.002497
[ "def equal(actual, expected):\n", " '''\n", " Compare actual and expected using ==\n", "\n", " >>> expect = Expector([])\n", " >>> expect(1).to_not(equal, 2)\n", " (True, 'equal: expect 1 == 2')\n", "\n", " >>> expect(1).to(equal, 1)\n", " (True, 'equal: expect 1 == 1')\n", " '''\n", " is_passing = (actual == expected)\n", "\n", " types_to_diff = (str, dict, list, tuple)\n", " if not is_passing and isinstance(expected, types_to_diff) and isinstance(actual, types_to_diff):\n", " readable_diff = difflib.unified_diff(pformat(expected).split('\\n'),\n", " pformat(actual).split('\\n'), n=99)\n", " description = '\\n'.join(['equal:'] + list(readable_diff))\n", " else:\n", " description = \"equal: expect {} == {}\".format(actual, expected)\n", " outcome = (is_passing, description)\n", " return outcome" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009900990099009901, 0, 0, 0, 0, 0, 0, 0.05555555555555555 ]
num_lines: 22
avg_score: 0.002975
def unpack_rsp(cls, rsp_pb): """Convert from PLS response to user response""" if rsp_pb.retType != RET_OK: return RET_ERROR, rsp_pb.retMsg, None raw_deal_list = rsp_pb.s2c.orderFillList deal_list = [DealListQuery.parse_deal(rsp_pb, deal) for deal in raw_deal_list] return RET_OK, "", deal_list
[ "def", "unpack_rsp", "(", "cls", ",", "rsp_pb", ")", ":", "if", "rsp_pb", ".", "retType", "!=", "RET_OK", ":", "return", "RET_ERROR", ",", "rsp_pb", ".", "retMsg", ",", "None", "raw_deal_list", "=", "rsp_pb", ".", "s2c", ".", "orderFillList", "deal_list", "=", "[", "DealListQuery", ".", "parse_deal", "(", "rsp_pb", ",", "deal", ")", "for", "deal", "in", "raw_deal_list", "]", "return", "RET_OK", ",", "\"\"", ",", "deal_list" ]
avg_line_len: 37.666667
score: 0.008646
[ "def unpack_rsp(cls, rsp_pb):\n", " \"\"\"Convert from PLS response to user response\"\"\"\n", " if rsp_pb.retType != RET_OK:\n", " return RET_ERROR, rsp_pb.retMsg, None\n", "\n", " raw_deal_list = rsp_pb.s2c.orderFillList\n", " deal_list = [DealListQuery.parse_deal(rsp_pb, deal) for deal in raw_deal_list]\n", "\n", " return RET_OK, \"\", deal_list" ]
[ 0, 0.017543859649122806, 0, 0, 0, 0, 0.011494252873563218, 0, 0.027777777777777776 ]
num_lines: 9
avg_score: 0.006313
def solve_let(expr, vars): """Solves a let-form by calling RHS with nested scope.""" lhs_value = solve(expr.lhs, vars).value if not isinstance(lhs_value, structured.IStructured): raise errors.EfilterTypeError( root=expr.lhs, query=expr.original, message="The LHS of 'let' must evaluate to an IStructured. Got %r." % (lhs_value,)) return solve(expr.rhs, __nest_scope(expr.lhs, vars, lhs_value))
[ "def", "solve_let", "(", "expr", ",", "vars", ")", ":", "lhs_value", "=", "solve", "(", "expr", ".", "lhs", ",", "vars", ")", ".", "value", "if", "not", "isinstance", "(", "lhs_value", ",", "structured", ".", "IStructured", ")", ":", "raise", "errors", ".", "EfilterTypeError", "(", "root", "=", "expr", ".", "lhs", ",", "query", "=", "expr", ".", "original", ",", "message", "=", "\"The LHS of 'let' must evaluate to an IStructured. Got %r.\"", "%", "(", "lhs_value", ",", ")", ")", "return", "solve", "(", "expr", ".", "rhs", ",", "__nest_scope", "(", "expr", ".", "lhs", ",", "vars", ",", "lhs_value", ")", ")" ]
avg_line_len: 44.5
score: 0.002203
[ "def solve_let(expr, vars):\n", " \"\"\"Solves a let-form by calling RHS with nested scope.\"\"\"\n", " lhs_value = solve(expr.lhs, vars).value\n", " if not isinstance(lhs_value, structured.IStructured):\n", " raise errors.EfilterTypeError(\n", " root=expr.lhs, query=expr.original,\n", " message=\"The LHS of 'let' must evaluate to an IStructured. Got %r.\"\n", " % (lhs_value,))\n", "\n", " return solve(expr.rhs, __nest_scope(expr.lhs, vars, lhs_value))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014925373134328358 ]
num_lines: 10
avg_score: 0.001493
async def _connect_sentinel(self, address, timeout, pools): """Try to connect to specified Sentinel returning either connections pool or exception. """ try: with async_timeout(timeout, loop=self._loop): pool = await create_pool( address, minsize=1, maxsize=2, parser=self._parser_class, loop=self._loop) pools.append(pool) return pool except asyncio.TimeoutError as err: sentinel_logger.debug( "Failed to connect to Sentinel(%r) within %ss timeout", address, timeout) return err except Exception as err: sentinel_logger.debug( "Error connecting to Sentinel(%r): %r", address, err) return err
[ "async", "def", "_connect_sentinel", "(", "self", ",", "address", ",", "timeout", ",", "pools", ")", ":", "try", ":", "with", "async_timeout", "(", "timeout", ",", "loop", "=", "self", ".", "_loop", ")", ":", "pool", "=", "await", "create_pool", "(", "address", ",", "minsize", "=", "1", ",", "maxsize", "=", "2", ",", "parser", "=", "self", ".", "_parser_class", ",", "loop", "=", "self", ".", "_loop", ")", "pools", ".", "append", "(", "pool", ")", "return", "pool", "except", "asyncio", ".", "TimeoutError", "as", "err", ":", "sentinel_logger", ".", "debug", "(", "\"Failed to connect to Sentinel(%r) within %ss timeout\"", ",", "address", ",", "timeout", ")", "return", "err", "except", "Exception", "as", "err", ":", "sentinel_logger", ".", "debug", "(", "\"Error connecting to Sentinel(%r): %r\"", ",", "address", ",", "err", ")", "return", "err" ]
avg_line_len: 39.380952
score: 0.002361
[ "async def _connect_sentinel(self, address, timeout, pools):\n", " \"\"\"Try to connect to specified Sentinel returning either\n", " connections pool or exception.\n", " \"\"\"\n", " try:\n", " with async_timeout(timeout, loop=self._loop):\n", " pool = await create_pool(\n", " address, minsize=1, maxsize=2,\n", " parser=self._parser_class,\n", " loop=self._loop)\n", " pools.append(pool)\n", " return pool\n", " except asyncio.TimeoutError as err:\n", " sentinel_logger.debug(\n", " \"Failed to connect to Sentinel(%r) within %ss timeout\",\n", " address, timeout)\n", " return err\n", " except Exception as err:\n", " sentinel_logger.debug(\n", " \"Error connecting to Sentinel(%r): %r\", address, err)\n", " return err" ]
[ 0, 0.015384615384615385, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456 ]
num_lines: 21
avg_score: 0.002897
def read(cls, proto): """ capnp deserialization method for the anomaly likelihood object :param proto: (Object) capnp proto object specified in nupic.regions.anomaly_likelihood.capnp :returns: (Object) the deserialized AnomalyLikelihood object """ # pylint: disable=W0212 anomalyLikelihood = object.__new__(cls) anomalyLikelihood._iteration = proto.iteration anomalyLikelihood._historicalScores = collections.deque( maxlen=proto.historicWindowSize) for i, score in enumerate(proto.historicalScores): anomalyLikelihood._historicalScores.append((i, score.value, score.anomalyScore)) if proto.distribution.name: # is "" when there is no distribution. anomalyLikelihood._distribution = dict() anomalyLikelihood._distribution['distribution'] = dict() anomalyLikelihood._distribution['distribution']["name"] = proto.distribution.name anomalyLikelihood._distribution['distribution']["mean"] = proto.distribution.mean anomalyLikelihood._distribution['distribution']["variance"] = proto.distribution.variance anomalyLikelihood._distribution['distribution']["stdev"] = proto.distribution.stdev anomalyLikelihood._distribution["movingAverage"] = {} anomalyLikelihood._distribution["movingAverage"]["windowSize"] = proto.distribution.movingAverage.windowSize anomalyLikelihood._distribution["movingAverage"]["historicalValues"] = [] for value in proto.distribution.movingAverage.historicalValues: anomalyLikelihood._distribution["movingAverage"]["historicalValues"].append(value) anomalyLikelihood._distribution["movingAverage"]["total"] = proto.distribution.movingAverage.total anomalyLikelihood._distribution["historicalLikelihoods"] = [] for likelihood in proto.distribution.historicalLikelihoods: anomalyLikelihood._distribution["historicalLikelihoods"].append(likelihood) else: anomalyLikelihood._distribution = None anomalyLikelihood._probationaryPeriod = proto.probationaryPeriod anomalyLikelihood._learningPeriod = proto.learningPeriod anomalyLikelihood._reestimationPeriod = proto.reestimationPeriod # pylint: enable=W0212 return anomalyLikelihood
[ "def", "read", "(", "cls", ",", "proto", ")", ":", "# pylint: disable=W0212", "anomalyLikelihood", "=", "object", ".", "__new__", "(", "cls", ")", "anomalyLikelihood", ".", "_iteration", "=", "proto", ".", "iteration", "anomalyLikelihood", ".", "_historicalScores", "=", "collections", ".", "deque", "(", "maxlen", "=", "proto", ".", "historicWindowSize", ")", "for", "i", ",", "score", "in", "enumerate", "(", "proto", ".", "historicalScores", ")", ":", "anomalyLikelihood", ".", "_historicalScores", ".", "append", "(", "(", "i", ",", "score", ".", "value", ",", "score", ".", "anomalyScore", ")", ")", "if", "proto", ".", "distribution", ".", "name", ":", "# is \"\" when there is no distribution.", "anomalyLikelihood", ".", "_distribution", "=", "dict", "(", ")", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "=", "dict", "(", ")", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"name\"", "]", "=", "proto", ".", "distribution", ".", "name", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"mean\"", "]", "=", "proto", ".", "distribution", ".", "mean", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"variance\"", "]", "=", "proto", ".", "distribution", ".", "variance", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"stdev\"", "]", "=", "proto", ".", "distribution", ".", "stdev", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "=", "{", "}", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"windowSize\"", "]", "=", "proto", ".", "distribution", ".", "movingAverage", ".", "windowSize", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"historicalValues\"", "]", "=", "[", "]", "for", "value", "in", "proto", ".", "distribution", ".", "movingAverage", ".", "historicalValues", ":", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"historicalValues\"", "]", ".", "append", "(", "value", ")", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"total\"", "]", "=", "proto", ".", "distribution", ".", "movingAverage", ".", "total", "anomalyLikelihood", ".", "_distribution", "[", "\"historicalLikelihoods\"", "]", "=", "[", "]", "for", "likelihood", "in", "proto", ".", "distribution", ".", "historicalLikelihoods", ":", "anomalyLikelihood", ".", "_distribution", "[", "\"historicalLikelihoods\"", "]", ".", "append", "(", "likelihood", ")", "else", ":", "anomalyLikelihood", ".", "_distribution", "=", "None", "anomalyLikelihood", ".", "_probationaryPeriod", "=", "proto", ".", "probationaryPeriod", "anomalyLikelihood", ".", "_learningPeriod", "=", "proto", ".", "learningPeriod", "anomalyLikelihood", ".", "_reestimationPeriod", "=", "proto", ".", "reestimationPeriod", "# pylint: enable=W0212", "return", "anomalyLikelihood" ]
avg_line_len: 51.363636
score: 0.010855
[ "def read(cls, proto):\n", " \"\"\" capnp deserialization method for the anomaly likelihood object\n", "\n", " :param proto: (Object) capnp proto object specified in\n", " nupic.regions.anomaly_likelihood.capnp\n", "\n", " :returns: (Object) the deserialized AnomalyLikelihood object\n", " \"\"\"\n", " # pylint: disable=W0212\n", " anomalyLikelihood = object.__new__(cls)\n", " anomalyLikelihood._iteration = proto.iteration\n", "\n", " anomalyLikelihood._historicalScores = collections.deque(\n", " maxlen=proto.historicWindowSize)\n", " for i, score in enumerate(proto.historicalScores):\n", " anomalyLikelihood._historicalScores.append((i, score.value,\n", " score.anomalyScore))\n", " if proto.distribution.name: # is \"\" when there is no distribution.\n", " anomalyLikelihood._distribution = dict()\n", " anomalyLikelihood._distribution['distribution'] = dict()\n", " anomalyLikelihood._distribution['distribution'][\"name\"] = proto.distribution.name\n", " anomalyLikelihood._distribution['distribution'][\"mean\"] = proto.distribution.mean\n", " anomalyLikelihood._distribution['distribution'][\"variance\"] = proto.distribution.variance\n", " anomalyLikelihood._distribution['distribution'][\"stdev\"] = proto.distribution.stdev\n", "\n", " anomalyLikelihood._distribution[\"movingAverage\"] = {}\n", " anomalyLikelihood._distribution[\"movingAverage\"][\"windowSize\"] = proto.distribution.movingAverage.windowSize\n", " anomalyLikelihood._distribution[\"movingAverage\"][\"historicalValues\"] = []\n", " for value in proto.distribution.movingAverage.historicalValues:\n", " anomalyLikelihood._distribution[\"movingAverage\"][\"historicalValues\"].append(value)\n", " anomalyLikelihood._distribution[\"movingAverage\"][\"total\"] = proto.distribution.movingAverage.total\n", "\n", " anomalyLikelihood._distribution[\"historicalLikelihoods\"] = []\n", " for likelihood in proto.distribution.historicalLikelihoods:\n", " anomalyLikelihood._distribution[\"historicalLikelihoods\"].append(likelihood)\n", " else:\n", " anomalyLikelihood._distribution = None\n", "\n", " anomalyLikelihood._probationaryPeriod = proto.probationaryPeriod\n", " anomalyLikelihood._learningPeriod = proto.learningPeriod\n", " anomalyLikelihood._reestimationPeriod = proto.reestimationPeriod\n", " # pylint: enable=W0212\n", "\n", " return anomalyLikelihood" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015151515151515152, 0, 0.014084507042253521, 0.02127659574468085, 0.015873015873015872, 0.022727272727272728, 0.022727272727272728, 0.020833333333333332, 0.022222222222222223, 0, 0.016666666666666666, 0.017391304347826087, 0.0125, 0.014285714285714285, 0.01098901098901099, 0.01904761904761905, 0, 0.014705882352941176, 0.015151515151515152, 0.011904761904761904, 0, 0.022222222222222223, 0, 0, 0, 0, 0, 0, 0.03571428571428571 ]
num_lines: 44
avg_score: 0.007852
def context_exclude(zap_helper, name, pattern): """Exclude a pattern from a given context.""" console.info('Excluding regex {0} from context with name: {1}'.format(pattern, name)) with zap_error_handler(): result = zap_helper.zap.context.exclude_from_context(contextname=name, regex=pattern) if result != 'OK': raise ZAPError('Excluding regex from context failed: {}'.format(result))
[ "def", "context_exclude", "(", "zap_helper", ",", "name", ",", "pattern", ")", ":", "console", ".", "info", "(", "'Excluding regex {0} from context with name: {1}'", ".", "format", "(", "pattern", ",", "name", ")", ")", "with", "zap_error_handler", "(", ")", ":", "result", "=", "zap_helper", ".", "zap", ".", "context", ".", "exclude_from_context", "(", "contextname", "=", "name", ",", "regex", "=", "pattern", ")", "if", "result", "!=", "'OK'", ":", "raise", "ZAPError", "(", "'Excluding regex from context failed: {}'", ".", "format", "(", "result", ")", ")" ]
avg_line_len: 52.125
score: 0.009434
[ "def context_exclude(zap_helper, name, pattern):\n", " \"\"\"Exclude a pattern from a given context.\"\"\"\n", " console.info('Excluding regex {0} from context with name: {1}'.format(pattern, name))\n", " with zap_error_handler():\n", " result = zap_helper.zap.context.exclude_from_context(contextname=name, regex=pattern)\n", "\n", " if result != 'OK':\n", " raise ZAPError('Excluding regex from context failed: {}'.format(result))" ]
[ 0, 0, 0.011111111111111112, 0, 0.010638297872340425, 0, 0, 0.023809523809523808 ]
num_lines: 8
avg_score: 0.005695
def extract(what, calc_id, webapi=True): """ Extract an output from the datastore and save it into an .hdf5 file. By default uses the WebAPI, otherwise the extraction is done locally. """ with performance.Monitor('extract', measuremem=True) as mon: if webapi: obj = WebExtractor(calc_id).get(what) else: obj = Extractor(calc_id).get(what) fname = '%s_%d.hdf5' % (what.replace('/', '-').replace('?', '-'), calc_id) obj.save(fname) print('Saved', fname) if mon.duration > 1: print(mon)
[ "def", "extract", "(", "what", ",", "calc_id", ",", "webapi", "=", "True", ")", ":", "with", "performance", ".", "Monitor", "(", "'extract'", ",", "measuremem", "=", "True", ")", "as", "mon", ":", "if", "webapi", ":", "obj", "=", "WebExtractor", "(", "calc_id", ")", ".", "get", "(", "what", ")", "else", ":", "obj", "=", "Extractor", "(", "calc_id", ")", ".", "get", "(", "what", ")", "fname", "=", "'%s_%d.hdf5'", "%", "(", "what", ".", "replace", "(", "'/'", ",", "'-'", ")", ".", "replace", "(", "'?'", ",", "'-'", ")", ",", "calc_id", ")", "obj", ".", "save", "(", "fname", ")", "print", "(", "'Saved'", ",", "fname", ")", "if", "mon", ".", "duration", ">", "1", ":", "print", "(", "mon", ")" ]
avg_line_len: 37.25
score: 0.001637
[ "def extract(what, calc_id, webapi=True):\n", " \"\"\"\n", " Extract an output from the datastore and save it into an .hdf5 file.\n", " By default uses the WebAPI, otherwise the extraction is done locally.\n", " \"\"\"\n", " with performance.Monitor('extract', measuremem=True) as mon:\n", " if webapi:\n", " obj = WebExtractor(calc_id).get(what)\n", " else:\n", " obj = Extractor(calc_id).get(what)\n", " fname = '%s_%d.hdf5' % (what.replace('/', '-').replace('?', '-'),\n", " calc_id)\n", " obj.save(fname)\n", " print('Saved', fname)\n", " if mon.duration > 1:\n", " print(mon)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05555555555555555 ]
num_lines: 16
avg_score: 0.003472
def jdToDate(jd): '''def jdToDate(jd): Convert a Julian day number to day/month/year. jd is an integer.''' if (jd > 2299160): # After 5/10/1582, Gregorian calendar a = jd + 32044 b = int((4 * a + 3) / 146097.) c = a - int((b * 146097) / 4.) else: b = 0 c = jd + 32082 d = int((4 * c + 3) / 1461.) e = c - int((1461 * d) / 4.) m = int((5 * e + 2) / 153.) day = e - int((153 * m + 2) / 5.) + 1 month = m + 3 - 12 * int(m / 10.) year = b * 100 + d - 4800 + int(m / 10.) return [day, month, year]
[ "def", "jdToDate", "(", "jd", ")", ":", "if", "(", "jd", ">", "2299160", ")", ":", "# After 5/10/1582, Gregorian calendar", "a", "=", "jd", "+", "32044", "b", "=", "int", "(", "(", "4", "*", "a", "+", "3", ")", "/", "146097.", ")", "c", "=", "a", "-", "int", "(", "(", "b", "*", "146097", ")", "/", "4.", ")", "else", ":", "b", "=", "0", "c", "=", "jd", "+", "32082", "d", "=", "int", "(", "(", "4", "*", "c", "+", "3", ")", "/", "1461.", ")", "e", "=", "c", "-", "int", "(", "(", "1461", "*", "d", ")", "/", "4.", ")", "m", "=", "int", "(", "(", "5", "*", "e", "+", "2", ")", "/", "153.", ")", "day", "=", "e", "-", "int", "(", "(", "153", "*", "m", "+", "2", ")", "/", "5.", ")", "+", "1", "month", "=", "m", "+", "3", "-", "12", "*", "int", "(", "m", "/", "10.", ")", "year", "=", "b", "*", "100", "+", "d", "-", "4800", "+", "int", "(", "m", "/", "10.", ")", "return", "[", "day", ",", "month", ",", "year", "]" ]
avg_line_len: 32.555556
score: 0.001658
[ "def jdToDate(jd):\n", " '''def jdToDate(jd): Convert a Julian day number to day/month/year.\n", " jd is an integer.'''\n", " if (jd > 2299160):\n", " # After 5/10/1582, Gregorian calendar\n", " a = jd + 32044\n", " b = int((4 * a + 3) / 146097.)\n", " c = a - int((b * 146097) / 4.)\n", " else:\n", " b = 0\n", " c = jd + 32082\n", " d = int((4 * c + 3) / 1461.)\n", " e = c - int((1461 * d) / 4.)\n", " m = int((5 * e + 2) / 153.)\n", " day = e - int((153 * m + 2) / 5.) + 1\n", " month = m + 3 - 12 * int(m / 10.)\n", " year = b * 100 + d - 4800 + int(m / 10.)\n", " return [day, month, year]" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.034482758620689655 ]
num_lines: 18
avg_score: 0.001916
def authenticate_search_bind(self, username, password): """ Performs a search bind to authenticate a user. This is required when a the login attribute is not the same as the RDN, since we cannot string together their DN on the fly, instead we have to find it in the LDAP, then attempt to bind with their credentials. Args: username (str): Username of the user to bind (the field specified as LDAP_BIND_LOGIN_ATTR) password (str): User's password to bind with when we find their dn. Returns: AuthenticationResponse """ connection = self._make_connection( bind_user=self.config.get('LDAP_BIND_USER_DN'), bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'), ) try: connection.bind() log.debug("Successfully bound to LDAP as '{0}' for search_bind method".format( self.config.get('LDAP_BIND_USER_DN') or 'Anonymous' )) except Exception as e: self.destroy_connection(connection) log.error(e) return AuthenticationResponse() # Find the user in the search path. user_filter = '({search_attr}={username})'.format( search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'), username=username ) search_filter = '(&{0}{1})'.format( self.config.get('LDAP_USER_OBJECT_FILTER'), user_filter, ) log.debug( "Performing an LDAP Search using filter '{0}', base '{1}', " "and scope '{2}'".format( search_filter, self.full_user_search_dn, self.config.get('LDAP_USER_SEARCH_SCOPE') )) connection.search( search_base=self.full_user_search_dn, search_filter=search_filter, search_scope=getattr( ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')), attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES') ) response = AuthenticationResponse() if len(connection.response) == 0 or \ (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and len(connection.response) > 1): # Don't allow them to log in. log.debug( "Authentication was not successful for user '{0}'".format(username)) else: for user in connection.response: # Attempt to bind with each user we find until we can find # one that works. if 'type' not in user or user.get('type') != 'searchResEntry': # Issue #13 - Don't return non-entry results. continue user_connection = self._make_connection( bind_user=user['dn'], bind_password=password ) log.debug( "Directly binding a connection to a server with " "user:'{0}'".format(user['dn'])) try: user_connection.bind() log.debug( "Authentication was successful for user '{0}'".format(username)) response.status = AuthenticationResponseStatus.success # Populate User Data user['attributes']['dn'] = user['dn'] response.user_info = user['attributes'] response.user_id = username response.user_dn = user['dn'] if self.config.get('LDAP_SEARCH_FOR_GROUPS'): response.user_groups = self.get_user_groups( dn=user['dn'], _connection=connection) self.destroy_connection(user_connection) break except ldap3.core.exceptions.LDAPInvalidCredentialsResult: log.debug( "Authentication was not successful for " "user '{0}'".format(username)) response.status = AuthenticationResponseStatus.fail except Exception as e: # pragma: no cover # This should never happen, however in case ldap3 does ever # throw an error here, we catch it and log it log.error(e) response.status = AuthenticationResponseStatus.fail self.destroy_connection(user_connection) self.destroy_connection(connection) return response
[ "def", "authenticate_search_bind", "(", "self", ",", "username", ",", "password", ")", ":", "connection", "=", "self", ".", "_make_connection", "(", "bind_user", "=", "self", ".", "config", ".", "get", "(", "'LDAP_BIND_USER_DN'", ")", ",", "bind_password", "=", "self", ".", "config", ".", "get", "(", "'LDAP_BIND_USER_PASSWORD'", ")", ",", ")", "try", ":", "connection", ".", "bind", "(", ")", "log", ".", "debug", "(", "\"Successfully bound to LDAP as '{0}' for search_bind method\"", ".", "format", "(", "self", ".", "config", ".", "get", "(", "'LDAP_BIND_USER_DN'", ")", "or", "'Anonymous'", ")", ")", "except", "Exception", "as", "e", ":", "self", ".", "destroy_connection", "(", "connection", ")", "log", ".", "error", "(", "e", ")", "return", "AuthenticationResponse", "(", ")", "# Find the user in the search path.", "user_filter", "=", "'({search_attr}={username})'", ".", "format", "(", "search_attr", "=", "self", ".", "config", ".", "get", "(", "'LDAP_USER_LOGIN_ATTR'", ")", ",", "username", "=", "username", ")", "search_filter", "=", "'(&{0}{1})'", ".", "format", "(", "self", ".", "config", ".", "get", "(", "'LDAP_USER_OBJECT_FILTER'", ")", ",", "user_filter", ",", ")", "log", ".", "debug", "(", "\"Performing an LDAP Search using filter '{0}', base '{1}', \"", "\"and scope '{2}'\"", ".", "format", "(", "search_filter", ",", "self", ".", "full_user_search_dn", ",", "self", ".", "config", ".", "get", "(", "'LDAP_USER_SEARCH_SCOPE'", ")", ")", ")", "connection", ".", "search", "(", "search_base", "=", "self", ".", "full_user_search_dn", ",", "search_filter", "=", "search_filter", ",", "search_scope", "=", "getattr", "(", "ldap3", ",", "self", ".", "config", ".", "get", "(", "'LDAP_USER_SEARCH_SCOPE'", ")", ")", ",", "attributes", "=", "self", ".", "config", ".", "get", "(", "'LDAP_GET_USER_ATTRIBUTES'", ")", ")", "response", "=", "AuthenticationResponse", "(", ")", "if", "len", "(", "connection", ".", "response", ")", "==", "0", "or", "(", "self", ".", "config", ".", "get", "(", "'LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND'", ")", "and", "len", "(", "connection", ".", "response", ")", ">", "1", ")", ":", "# Don't allow them to log in.", "log", ".", "debug", "(", "\"Authentication was not successful for user '{0}'\"", ".", "format", "(", "username", ")", ")", "else", ":", "for", "user", "in", "connection", ".", "response", ":", "# Attempt to bind with each user we find until we can find", "# one that works.", "if", "'type'", "not", "in", "user", "or", "user", ".", "get", "(", "'type'", ")", "!=", "'searchResEntry'", ":", "# Issue #13 - Don't return non-entry results.", "continue", "user_connection", "=", "self", ".", "_make_connection", "(", "bind_user", "=", "user", "[", "'dn'", "]", ",", "bind_password", "=", "password", ")", "log", ".", "debug", "(", "\"Directly binding a connection to a server with \"", "\"user:'{0}'\"", ".", "format", "(", "user", "[", "'dn'", "]", ")", ")", "try", ":", "user_connection", ".", "bind", "(", ")", "log", ".", "debug", "(", "\"Authentication was successful for user '{0}'\"", ".", "format", "(", "username", ")", ")", "response", ".", "status", "=", "AuthenticationResponseStatus", ".", "success", "# Populate User Data", "user", "[", "'attributes'", "]", "[", "'dn'", "]", "=", "user", "[", "'dn'", "]", "response", ".", "user_info", "=", "user", "[", "'attributes'", "]", "response", ".", "user_id", "=", "username", "response", ".", "user_dn", "=", "user", "[", "'dn'", "]", "if", "self", ".", "config", ".", "get", "(", "'LDAP_SEARCH_FOR_GROUPS'", ")", ":", "response", ".", 
"user_groups", "=", "self", ".", "get_user_groups", "(", "dn", "=", "user", "[", "'dn'", "]", ",", "_connection", "=", "connection", ")", "self", ".", "destroy_connection", "(", "user_connection", ")", "break", "except", "ldap3", ".", "core", ".", "exceptions", ".", "LDAPInvalidCredentialsResult", ":", "log", ".", "debug", "(", "\"Authentication was not successful for \"", "\"user '{0}'\"", ".", "format", "(", "username", ")", ")", "response", ".", "status", "=", "AuthenticationResponseStatus", ".", "fail", "except", "Exception", "as", "e", ":", "# pragma: no cover", "# This should never happen, however in case ldap3 does ever", "# throw an error here, we catch it and log it", "log", ".", "error", "(", "e", ")", "response", ".", "status", "=", "AuthenticationResponseStatus", ".", "fail", "self", ".", "destroy_connection", "(", "user_connection", ")", "self", ".", "destroy_connection", "(", "connection", ")", "return", "response" ]
avg_line_len: 39.396552
score: 0.001067
[ "def authenticate_search_bind(self, username, password):\n", " \"\"\"\n", " Performs a search bind to authenticate a user. This is\n", " required when a the login attribute is not the same\n", " as the RDN, since we cannot string together their DN on\n", " the fly, instead we have to find it in the LDAP, then attempt\n", " to bind with their credentials.\n", "\n", " Args:\n", " username (str): Username of the user to bind (the field specified\n", " as LDAP_BIND_LOGIN_ATTR)\n", " password (str): User's password to bind with when we find their dn.\n", "\n", " Returns:\n", " AuthenticationResponse\n", "\n", " \"\"\"\n", " connection = self._make_connection(\n", " bind_user=self.config.get('LDAP_BIND_USER_DN'),\n", " bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'),\n", " )\n", "\n", " try:\n", " connection.bind()\n", " log.debug(\"Successfully bound to LDAP as '{0}' for search_bind method\".format(\n", " self.config.get('LDAP_BIND_USER_DN') or 'Anonymous'\n", " ))\n", " except Exception as e:\n", " self.destroy_connection(connection)\n", " log.error(e)\n", " return AuthenticationResponse()\n", "\n", " # Find the user in the search path.\n", " user_filter = '({search_attr}={username})'.format(\n", " search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'),\n", " username=username\n", " )\n", " search_filter = '(&{0}{1})'.format(\n", " self.config.get('LDAP_USER_OBJECT_FILTER'),\n", " user_filter,\n", " )\n", "\n", " log.debug(\n", " \"Performing an LDAP Search using filter '{0}', base '{1}', \"\n", " \"and scope '{2}'\".format(\n", " search_filter,\n", " self.full_user_search_dn,\n", " self.config.get('LDAP_USER_SEARCH_SCOPE')\n", " ))\n", "\n", " connection.search(\n", " search_base=self.full_user_search_dn,\n", " search_filter=search_filter,\n", " search_scope=getattr(\n", " ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')),\n", " attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES')\n", " )\n", "\n", " response = AuthenticationResponse()\n", "\n", " if len(connection.response) == 0 or \\\n", " (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and\n", " len(connection.response) > 1):\n", " # Don't allow them to log in.\n", " log.debug(\n", " \"Authentication was not successful for user '{0}'\".format(username))\n", "\n", " else:\n", " for user in connection.response:\n", " # Attempt to bind with each user we find until we can find\n", " # one that works.\n", "\n", " if 'type' not in user or user.get('type') != 'searchResEntry':\n", " # Issue #13 - Don't return non-entry results.\n", " continue\n", "\n", " user_connection = self._make_connection(\n", " bind_user=user['dn'],\n", " bind_password=password\n", " )\n", "\n", " log.debug(\n", " \"Directly binding a connection to a server with \"\n", " \"user:'{0}'\".format(user['dn']))\n", " try:\n", " user_connection.bind()\n", " log.debug(\n", " \"Authentication was successful for user '{0}'\".format(username))\n", " response.status = AuthenticationResponseStatus.success\n", "\n", " # Populate User Data\n", " user['attributes']['dn'] = user['dn']\n", " response.user_info = user['attributes']\n", " response.user_id = username\n", " response.user_dn = user['dn']\n", " if self.config.get('LDAP_SEARCH_FOR_GROUPS'):\n", " response.user_groups = self.get_user_groups(\n", " dn=user['dn'], _connection=connection)\n", " self.destroy_connection(user_connection)\n", " break\n", "\n", " except ldap3.core.exceptions.LDAPInvalidCredentialsResult:\n", " log.debug(\n", " \"Authentication was not successful for \"\n", " \"user 
'{0}'\".format(username))\n", " response.status = AuthenticationResponseStatus.fail\n", " except Exception as e: # pragma: no cover\n", " # This should never happen, however in case ldap3 does ever\n", " # throw an error here, we catch it and log it\n", " log.error(e)\n", " response.status = AuthenticationResponseStatus.fail\n", "\n", " self.destroy_connection(user_connection)\n", "\n", " self.destroy_connection(connection)\n", " return response" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011235955056179775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216 ]
num_lines: 116
avg_score: 0.001386
def _parse_node(node, parent_matrix, material_map, meshes, graph, resolver=None): """ Recursively parse COLLADA scene nodes. """ # Parse mesh node if isinstance(node, collada.scene.GeometryNode): geometry = node.geometry # Create local material map from material symbol to actual material local_material_map = {} for mn in node.materials: symbol = mn.symbol m = mn.target if m.id in material_map: local_material_map[symbol] = material_map[m.id] else: local_material_map[symbol] = _parse_material(m, resolver) # Iterate over primitives of geometry for i, primitive in enumerate(geometry.primitives): if isinstance(primitive, collada.polylist.Polylist): primitive = primitive.triangleset() if isinstance(primitive, collada.triangleset.TriangleSet): vertex = primitive.vertex vertex_index = primitive.vertex_index vertices = vertex[vertex_index].reshape( len(vertex_index) * 3, 3) # Get normals if present normals = None if primitive.normal is not None: normal = primitive.normal normal_index = primitive.normal_index normals = normal[normal_index].reshape( len(normal_index) * 3, 3) # Get colors if present colors = None s = primitive.sources if ('COLOR' in s and len(s['COLOR']) > 0 and len(primitive.index) > 0): color = s['COLOR'][0][4].data color_index = primitive.index[:, :, s['COLOR'][0][0]] colors = color[color_index].reshape( len(color_index) * 3, 3) faces = np.arange( vertices.shape[0]).reshape( vertices.shape[0] // 3, 3) # Get UV coordinates if possible vis = None if primitive.material in local_material_map: material = copy.copy( local_material_map[primitive.material]) uv = None if len(primitive.texcoordset) > 0: texcoord = primitive.texcoordset[0] texcoord_index = primitive.texcoord_indexset[0] uv = texcoord[texcoord_index].reshape( (len(texcoord_index) * 3, 2)) vis = visual.texture.TextureVisuals( uv=uv, material=material) primid = '{}.{}'.format(geometry.id, i) meshes[primid] = { 'vertices': vertices, 'faces': faces, 'vertex_normals': normals, 'vertex_colors': colors, 'visual': vis} graph.append({'frame_to': primid, 'matrix': parent_matrix, 'geometry': primid}) # recurse down tree for nodes with children elif isinstance(node, collada.scene.Node): if node.children is not None: for child in node.children: # create the new matrix matrix = np.dot(parent_matrix, node.matrix) # parse the child node _parse_node( node=child, parent_matrix=matrix, material_map=material_map, meshes=meshes, graph=graph, resolver=resolver) elif isinstance(node, collada.scene.CameraNode): # TODO: convert collada cameras to trimesh cameras pass elif isinstance(node, collada.scene.LightNode): # TODO: convert collada lights to trimesh lights pass
[ "def", "_parse_node", "(", "node", ",", "parent_matrix", ",", "material_map", ",", "meshes", ",", "graph", ",", "resolver", "=", "None", ")", ":", "# Parse mesh node", "if", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "GeometryNode", ")", ":", "geometry", "=", "node", ".", "geometry", "# Create local material map from material symbol to actual material", "local_material_map", "=", "{", "}", "for", "mn", "in", "node", ".", "materials", ":", "symbol", "=", "mn", ".", "symbol", "m", "=", "mn", ".", "target", "if", "m", ".", "id", "in", "material_map", ":", "local_material_map", "[", "symbol", "]", "=", "material_map", "[", "m", ".", "id", "]", "else", ":", "local_material_map", "[", "symbol", "]", "=", "_parse_material", "(", "m", ",", "resolver", ")", "# Iterate over primitives of geometry", "for", "i", ",", "primitive", "in", "enumerate", "(", "geometry", ".", "primitives", ")", ":", "if", "isinstance", "(", "primitive", ",", "collada", ".", "polylist", ".", "Polylist", ")", ":", "primitive", "=", "primitive", ".", "triangleset", "(", ")", "if", "isinstance", "(", "primitive", ",", "collada", ".", "triangleset", ".", "TriangleSet", ")", ":", "vertex", "=", "primitive", ".", "vertex", "vertex_index", "=", "primitive", ".", "vertex_index", "vertices", "=", "vertex", "[", "vertex_index", "]", ".", "reshape", "(", "len", "(", "vertex_index", ")", "*", "3", ",", "3", ")", "# Get normals if present", "normals", "=", "None", "if", "primitive", ".", "normal", "is", "not", "None", ":", "normal", "=", "primitive", ".", "normal", "normal_index", "=", "primitive", ".", "normal_index", "normals", "=", "normal", "[", "normal_index", "]", ".", "reshape", "(", "len", "(", "normal_index", ")", "*", "3", ",", "3", ")", "# Get colors if present", "colors", "=", "None", "s", "=", "primitive", ".", "sources", "if", "(", "'COLOR'", "in", "s", "and", "len", "(", "s", "[", "'COLOR'", "]", ")", ">", "0", "and", "len", "(", "primitive", ".", "index", ")", ">", "0", ")", ":", "color", "=", "s", "[", "'COLOR'", "]", "[", "0", "]", "[", "4", "]", ".", "data", "color_index", "=", "primitive", ".", "index", "[", ":", ",", ":", ",", "s", "[", "'COLOR'", "]", "[", "0", "]", "[", "0", "]", "]", "colors", "=", "color", "[", "color_index", "]", ".", "reshape", "(", "len", "(", "color_index", ")", "*", "3", ",", "3", ")", "faces", "=", "np", ".", "arange", "(", "vertices", ".", "shape", "[", "0", "]", ")", ".", "reshape", "(", "vertices", ".", "shape", "[", "0", "]", "//", "3", ",", "3", ")", "# Get UV coordinates if possible", "vis", "=", "None", "if", "primitive", ".", "material", "in", "local_material_map", ":", "material", "=", "copy", ".", "copy", "(", "local_material_map", "[", "primitive", ".", "material", "]", ")", "uv", "=", "None", "if", "len", "(", "primitive", ".", "texcoordset", ")", ">", "0", ":", "texcoord", "=", "primitive", ".", "texcoordset", "[", "0", "]", "texcoord_index", "=", "primitive", ".", "texcoord_indexset", "[", "0", "]", "uv", "=", "texcoord", "[", "texcoord_index", "]", ".", "reshape", "(", "(", "len", "(", "texcoord_index", ")", "*", "3", ",", "2", ")", ")", "vis", "=", "visual", ".", "texture", ".", "TextureVisuals", "(", "uv", "=", "uv", ",", "material", "=", "material", ")", "primid", "=", "'{}.{}'", ".", "format", "(", "geometry", ".", "id", ",", "i", ")", "meshes", "[", "primid", "]", "=", "{", "'vertices'", ":", "vertices", ",", "'faces'", ":", "faces", ",", "'vertex_normals'", ":", "normals", ",", "'vertex_colors'", ":", "colors", ",", "'visual'", ":", "vis", "}", 
"graph", ".", "append", "(", "{", "'frame_to'", ":", "primid", ",", "'matrix'", ":", "parent_matrix", ",", "'geometry'", ":", "primid", "}", ")", "# recurse down tree for nodes with children", "elif", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "Node", ")", ":", "if", "node", ".", "children", "is", "not", "None", ":", "for", "child", "in", "node", ".", "children", ":", "# create the new matrix", "matrix", "=", "np", ".", "dot", "(", "parent_matrix", ",", "node", ".", "matrix", ")", "# parse the child node", "_parse_node", "(", "node", "=", "child", ",", "parent_matrix", "=", "matrix", ",", "material_map", "=", "material_map", ",", "meshes", "=", "meshes", ",", "graph", "=", "graph", ",", "resolver", "=", "resolver", ")", "elif", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "CameraNode", ")", ":", "# TODO: convert collada cameras to trimesh cameras", "pass", "elif", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "LightNode", ")", ":", "# TODO: convert collada lights to trimesh lights", "pass" ]
avg_line_len: 38.76699
score: 0.000244
[ "def _parse_node(node,\n", " parent_matrix,\n", " material_map,\n", " meshes,\n", " graph,\n", " resolver=None):\n", " \"\"\"\n", " Recursively parse COLLADA scene nodes.\n", " \"\"\"\n", "\n", " # Parse mesh node\n", " if isinstance(node, collada.scene.GeometryNode):\n", " geometry = node.geometry\n", "\n", " # Create local material map from material symbol to actual material\n", " local_material_map = {}\n", " for mn in node.materials:\n", " symbol = mn.symbol\n", " m = mn.target\n", " if m.id in material_map:\n", " local_material_map[symbol] = material_map[m.id]\n", " else:\n", " local_material_map[symbol] = _parse_material(m, resolver)\n", "\n", " # Iterate over primitives of geometry\n", " for i, primitive in enumerate(geometry.primitives):\n", " if isinstance(primitive, collada.polylist.Polylist):\n", " primitive = primitive.triangleset()\n", " if isinstance(primitive, collada.triangleset.TriangleSet):\n", " vertex = primitive.vertex\n", " vertex_index = primitive.vertex_index\n", " vertices = vertex[vertex_index].reshape(\n", " len(vertex_index) * 3, 3)\n", "\n", " # Get normals if present\n", " normals = None\n", " if primitive.normal is not None:\n", " normal = primitive.normal\n", " normal_index = primitive.normal_index\n", " normals = normal[normal_index].reshape(\n", " len(normal_index) * 3, 3)\n", "\n", " # Get colors if present\n", " colors = None\n", " s = primitive.sources\n", " if ('COLOR' in s and len(s['COLOR'])\n", " > 0 and len(primitive.index) > 0):\n", " color = s['COLOR'][0][4].data\n", " color_index = primitive.index[:, :, s['COLOR'][0][0]]\n", " colors = color[color_index].reshape(\n", " len(color_index) * 3, 3)\n", "\n", " faces = np.arange(\n", " vertices.shape[0]).reshape(\n", " vertices.shape[0] // 3, 3)\n", "\n", " # Get UV coordinates if possible\n", " vis = None\n", " if primitive.material in local_material_map:\n", " material = copy.copy(\n", " local_material_map[primitive.material])\n", " uv = None\n", " if len(primitive.texcoordset) > 0:\n", " texcoord = primitive.texcoordset[0]\n", " texcoord_index = primitive.texcoord_indexset[0]\n", " uv = texcoord[texcoord_index].reshape(\n", " (len(texcoord_index) * 3, 2))\n", " vis = visual.texture.TextureVisuals(\n", " uv=uv, material=material)\n", "\n", " primid = '{}.{}'.format(geometry.id, i)\n", " meshes[primid] = {\n", " 'vertices': vertices,\n", " 'faces': faces,\n", " 'vertex_normals': normals,\n", " 'vertex_colors': colors,\n", " 'visual': vis}\n", "\n", " graph.append({'frame_to': primid,\n", " 'matrix': parent_matrix,\n", " 'geometry': primid})\n", "\n", " # recurse down tree for nodes with children\n", " elif isinstance(node, collada.scene.Node):\n", " if node.children is not None:\n", " for child in node.children:\n", " # create the new matrix\n", " matrix = np.dot(parent_matrix, node.matrix)\n", " # parse the child node\n", " _parse_node(\n", " node=child,\n", " parent_matrix=matrix,\n", " material_map=material_map,\n", " meshes=meshes,\n", " graph=graph,\n", " resolver=resolver)\n", "\n", " elif isinstance(node, collada.scene.CameraNode):\n", " # TODO: convert collada cameras to trimesh cameras\n", " pass\n", " elif isinstance(node, collada.scene.LightNode):\n", " # TODO: convert collada lights to trimesh lights\n", " pass" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333 ]
num_lines: 103
avg_score: 0.000809
def handle(data_type, data, data_id=None, caller=None): """ execute all data handlers on the specified data according to data type Args: data_type (str): data type handle data (dict or list): data Kwargs: data_id (str): can be used to differentiate between different data sets of the same data type. If not specified will default to the data type caller (object): if specified, holds the object or function that is trying to handle data Returns: dict or list - data after handlers have been executed on it """ if not data_id: data_id = data_type # instantiate handlers for data type if they havent been yet if data_id not in _handlers: _handlers[data_id] = dict( [(h.handle, h) for h in handlers.instantiate_for_data_type(data_type, data_id=data_id)]) for handler in list(_handlers[data_id].values()): try: data = handler(data, caller=caller) except Exception as inst: vodka.log.error("Data handler '%s' failed with error" % handler) vodka.log.error(traceback.format_exc()) return data
[ "def", "handle", "(", "data_type", ",", "data", ",", "data_id", "=", "None", ",", "caller", "=", "None", ")", ":", "if", "not", "data_id", ":", "data_id", "=", "data_type", "# instantiate handlers for data type if they havent been yet", "if", "data_id", "not", "in", "_handlers", ":", "_handlers", "[", "data_id", "]", "=", "dict", "(", "[", "(", "h", ".", "handle", ",", "h", ")", "for", "h", "in", "handlers", ".", "instantiate_for_data_type", "(", "data_type", ",", "data_id", "=", "data_id", ")", "]", ")", "for", "handler", "in", "list", "(", "_handlers", "[", "data_id", "]", ".", "values", "(", ")", ")", ":", "try", ":", "data", "=", "handler", "(", "data", ",", "caller", "=", "caller", ")", "except", "Exception", "as", "inst", ":", "vodka", ".", "log", ".", "error", "(", "\"Data handler '%s' failed with error\"", "%", "handler", ")", "vodka", ".", "log", ".", "error", "(", "traceback", ".", "format_exc", "(", ")", ")", "return", "data" ]
avg_line_len: 33.085714
score: 0.001678
[ "def handle(data_type, data, data_id=None, caller=None):\n", " \"\"\"\n", " execute all data handlers on the specified data according to data type\n", "\n", " Args:\n", " data_type (str): data type handle\n", " data (dict or list): data\n", "\n", " Kwargs:\n", " data_id (str): can be used to differentiate between different data\n", " sets of the same data type. If not specified will default to\n", " the data type\n", " caller (object): if specified, holds the object or function that\n", " is trying to handle data\n", "\n", " Returns:\n", " dict or list - data after handlers have been executed on it\n", " \"\"\"\n", "\n", " if not data_id:\n", " data_id = data_type\n", "\n", " # instantiate handlers for data type if they havent been yet\n", " if data_id not in _handlers:\n", " _handlers[data_id] = dict(\n", " [(h.handle, h) for h in handlers.instantiate_for_data_type(data_type, data_id=data_id)])\n", "\n", " for handler in list(_handlers[data_id].values()):\n", " try:\n", " data = handler(data, caller=caller)\n", " except Exception as inst:\n", " vodka.log.error(\"Data handler '%s' failed with error\" % handler)\n", " vodka.log.error(traceback.format_exc())\n", "\n", " return data" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009900990099009901, 0, 0, 0, 0, 0, 0, 0, 0, 0.06666666666666667 ]
num_lines: 35
avg_score: 0.002188
def split_box( fraction, x,y, w,h ): """Return set of two boxes where first is the fraction given""" if w >= h: new_w = int(w*fraction) if new_w: return (x,y,new_w,h),(x+new_w,y,w-new_w,h) else: return None,None else: new_h = int(h*fraction) if new_h: return (x,y,w,new_h),(x,y+new_h,w,h-new_h) else: return None,None
[ "def", "split_box", "(", "fraction", ",", "x", ",", "y", ",", "w", ",", "h", ")", ":", "if", "w", ">=", "h", ":", "new_w", "=", "int", "(", "w", "*", "fraction", ")", "if", "new_w", ":", "return", "(", "x", ",", "y", ",", "new_w", ",", "h", ")", ",", "(", "x", "+", "new_w", ",", "y", ",", "w", "-", "new_w", ",", "h", ")", "else", ":", "return", "None", ",", "None", "else", ":", "new_h", "=", "int", "(", "h", "*", "fraction", ")", "if", "new_h", ":", "return", "(", "x", ",", "y", ",", "w", ",", "new_h", ")", ",", "(", "x", ",", "y", "+", "new_h", ",", "w", ",", "h", "-", "new_h", ")", "else", ":", "return", "None", ",", "None" ]
avg_line_len: 29.428571
score: 0.049412
[ "def split_box( fraction, x,y, w,h ):\n", " \"\"\"Return set of two boxes where first is the fraction given\"\"\"\n", " if w >= h:\n", " new_w = int(w*fraction)\n", " if new_w:\n", " return (x,y,new_w,h),(x+new_w,y,w-new_w,h)\n", " else:\n", " return None,None\n", " else:\n", " new_h = int(h*fraction)\n", " if new_h:\n", " return (x,y,w,new_h),(x,y+new_h,w,h-new_h)\n", " else:\n", " return None,None" ]
[ 0.10810810810810811, 0, 0, 0, 0, 0.12727272727272726, 0, 0.034482758620689655, 0, 0, 0, 0.12727272727272726, 0, 0.07142857142857142 ]
num_lines: 14
avg_score: 0.033469
def _calc_ticks(value_range, base): """ Calculate tick marks within a range Parameters ---------- value_range: tuple Range for which to calculate ticks. Returns ------- out: tuple (major, middle, minor) tick locations """ def _minor(x, mid_idx): return np.hstack([x[1:mid_idx], x[mid_idx+1:-1]]) # * Calculate the low and high powers, # * Generate for all intervals in along the low-high power range # The intervals are in normal space # * Calculate evenly spaced breaks in normal space, then convert # them to log space. low = np.floor(value_range[0]) high = np.ceil(value_range[1]) arr = base ** np.arange(low, float(high+1)) n_ticks = base - 1 breaks = [log(np.linspace(b1, b2, n_ticks+1), base) for (b1, b2) in list(zip(arr, arr[1:]))] # Partition the breaks in the 3 groups major = np.array([x[0] for x in breaks] + [breaks[-1][-1]]) if n_ticks % 2: mid_idx = n_ticks // 2 middle = [x[mid_idx] for x in breaks] minor = np.hstack([_minor(x, mid_idx) for x in breaks]) else: middle = [] minor = np.hstack([x[1:-1] for x in breaks]) return major, middle, minor
[ "def", "_calc_ticks", "(", "value_range", ",", "base", ")", ":", "def", "_minor", "(", "x", ",", "mid_idx", ")", ":", "return", "np", ".", "hstack", "(", "[", "x", "[", "1", ":", "mid_idx", "]", ",", "x", "[", "mid_idx", "+", "1", ":", "-", "1", "]", "]", ")", "# * Calculate the low and high powers,", "# * Generate for all intervals in along the low-high power range", "# The intervals are in normal space", "# * Calculate evenly spaced breaks in normal space, then convert", "# them to log space.", "low", "=", "np", ".", "floor", "(", "value_range", "[", "0", "]", ")", "high", "=", "np", ".", "ceil", "(", "value_range", "[", "1", "]", ")", "arr", "=", "base", "**", "np", ".", "arange", "(", "low", ",", "float", "(", "high", "+", "1", ")", ")", "n_ticks", "=", "base", "-", "1", "breaks", "=", "[", "log", "(", "np", ".", "linspace", "(", "b1", ",", "b2", ",", "n_ticks", "+", "1", ")", ",", "base", ")", "for", "(", "b1", ",", "b2", ")", "in", "list", "(", "zip", "(", "arr", ",", "arr", "[", "1", ":", "]", ")", ")", "]", "# Partition the breaks in the 3 groups", "major", "=", "np", ".", "array", "(", "[", "x", "[", "0", "]", "for", "x", "in", "breaks", "]", "+", "[", "breaks", "[", "-", "1", "]", "[", "-", "1", "]", "]", ")", "if", "n_ticks", "%", "2", ":", "mid_idx", "=", "n_ticks", "//", "2", "middle", "=", "[", "x", "[", "mid_idx", "]", "for", "x", "in", "breaks", "]", "minor", "=", "np", ".", "hstack", "(", "[", "_minor", "(", "x", ",", "mid_idx", ")", "for", "x", "in", "breaks", "]", ")", "else", ":", "middle", "=", "[", "]", "minor", "=", "np", ".", "hstack", "(", "[", "x", "[", "1", ":", "-", "1", "]", "for", "x", "in", "breaks", "]", ")", "return", "major", ",", "middle", ",", "minor" ]
33.65
0.001444
[ "def _calc_ticks(value_range, base):\n", " \"\"\"\n", " Calculate tick marks within a range\n", "\n", " Parameters\n", " ----------\n", " value_range: tuple\n", " Range for which to calculate ticks.\n", "\n", " Returns\n", " -------\n", " out: tuple\n", " (major, middle, minor) tick locations\n", " \"\"\"\n", " def _minor(x, mid_idx):\n", " return np.hstack([x[1:mid_idx], x[mid_idx+1:-1]])\n", "\n", " # * Calculate the low and high powers,\n", " # * Generate for all intervals in along the low-high power range\n", " # The intervals are in normal space\n", " # * Calculate evenly spaced breaks in normal space, then convert\n", " # them to log space.\n", " low = np.floor(value_range[0])\n", " high = np.ceil(value_range[1])\n", " arr = base ** np.arange(low, float(high+1))\n", " n_ticks = base - 1\n", " breaks = [log(np.linspace(b1, b2, n_ticks+1), base)\n", " for (b1, b2) in list(zip(arr, arr[1:]))]\n", "\n", " # Partition the breaks in the 3 groups\n", " major = np.array([x[0] for x in breaks] + [breaks[-1][-1]])\n", " if n_ticks % 2:\n", " mid_idx = n_ticks // 2\n", " middle = [x[mid_idx] for x in breaks]\n", " minor = np.hstack([_minor(x, mid_idx) for x in breaks])\n", " else:\n", " middle = []\n", " minor = np.hstack([x[1:-1] for x in breaks])\n", "\n", " return major, middle, minor" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857 ]
40
0.002798
def plot_points(self, ax, legend=None, field=None, field_function=None, undefined=0, **kwargs): """ Plotting, but only for points (as opposed to intervals). """ ys = [iv.top.z for iv in self] if field is not None: f = field_function or utils.null xs = [f(iv.data.get(field, undefined)) for iv in self] else: xs = [1 for iv in self] ax.set_xlim((min(xs), max(xs))) for x, y in zip(xs, ys): ax.axhline(y, color='lightgray', zorder=0) ax.scatter(xs, ys, clip_on=False, **kwargs) return ax
[ "def", "plot_points", "(", "self", ",", "ax", ",", "legend", "=", "None", ",", "field", "=", "None", ",", "field_function", "=", "None", ",", "undefined", "=", "0", ",", "*", "*", "kwargs", ")", ":", "ys", "=", "[", "iv", ".", "top", ".", "z", "for", "iv", "in", "self", "]", "if", "field", "is", "not", "None", ":", "f", "=", "field_function", "or", "utils", ".", "null", "xs", "=", "[", "f", "(", "iv", ".", "data", ".", "get", "(", "field", ",", "undefined", ")", ")", "for", "iv", "in", "self", "]", "else", ":", "xs", "=", "[", "1", "for", "iv", "in", "self", "]", "ax", ".", "set_xlim", "(", "(", "min", "(", "xs", ")", ",", "max", "(", "xs", ")", ")", ")", "for", "x", ",", "y", "in", "zip", "(", "xs", ",", "ys", ")", ":", "ax", ".", "axhline", "(", "y", ",", "color", "=", "'lightgray'", ",", "zorder", "=", "0", ")", "ax", ".", "scatter", "(", "xs", ",", "ys", ",", "clip_on", "=", "False", ",", "*", "*", "kwargs", ")", "return", "ax" ]
27.76
0.009749
[ "def plot_points(self, ax,\n", " legend=None,\n", " field=None,\n", " field_function=None,\n", " undefined=0,\n", " **kwargs):\n", " \"\"\"\n", " Plotting, but only for points (as opposed to intervals).\n", " \"\"\"\n", "\n", " ys = [iv.top.z for iv in self]\n", "\n", " if field is not None:\n", " f = field_function or utils.null\n", " xs = [f(iv.data.get(field, undefined)) for iv in self]\n", " else:\n", " xs = [1 for iv in self]\n", "\n", " ax.set_xlim((min(xs), max(xs)))\n", " for x, y in zip(xs, ys):\n", " ax.axhline(y, color='lightgray', zorder=0)\n", "\n", " ax.scatter(xs, ys, clip_on=False, **kwargs)\n", "\n", " return ax" ]
[ 0, 0.030303030303030304, 0.03125, 0.024390243902439025, 0.030303030303030304, 0.03225806451612903, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.058823529411764705 ]
25
0.011626
def scalars_impl(self, run, tag_regex_string): """Given a tag regex and single run, return ScalarEvents. Args: run: A run string. tag_regex_string: A regular expression that captures portions of tags. Raises: ValueError: if the scalars plugin is not registered. Returns: A dictionary that is the JSON-able response. """ if not tag_regex_string: # The user provided no regex. return { _REGEX_VALID_PROPERTY: False, _TAG_TO_EVENTS_PROPERTY: {}, } # Construct the regex. try: regex = re.compile(tag_regex_string) except re.error: return { _REGEX_VALID_PROPERTY: False, _TAG_TO_EVENTS_PROPERTY: {}, } # Fetch the tags for the run. Filter for tags that match the regex. run_to_data = self._multiplexer.PluginRunToTagToContent( scalars_metadata.PLUGIN_NAME) tag_to_data = None try: tag_to_data = run_to_data[run] except KeyError: # The run could not be found. Perhaps a configuration specified a run that # TensorBoard has not read from disk yet. payload = {} if tag_to_data: scalars_plugin_instance = self._get_scalars_plugin() if not scalars_plugin_instance: raise ValueError(('Failed to respond to request for /scalars. ' 'The scalars plugin is oddly not registered.')) form = scalars_plugin.OutputFormat.JSON payload = { tag: scalars_plugin_instance.scalars_impl(tag, run, None, form)[0] for tag in tag_to_data.keys() if regex.match(tag) } return { _REGEX_VALID_PROPERTY: True, _TAG_TO_EVENTS_PROPERTY: payload, }
[ "def", "scalars_impl", "(", "self", ",", "run", ",", "tag_regex_string", ")", ":", "if", "not", "tag_regex_string", ":", "# The user provided no regex.", "return", "{", "_REGEX_VALID_PROPERTY", ":", "False", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "{", "}", ",", "}", "# Construct the regex.", "try", ":", "regex", "=", "re", ".", "compile", "(", "tag_regex_string", ")", "except", "re", ".", "error", ":", "return", "{", "_REGEX_VALID_PROPERTY", ":", "False", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "{", "}", ",", "}", "# Fetch the tags for the run. Filter for tags that match the regex.", "run_to_data", "=", "self", ".", "_multiplexer", ".", "PluginRunToTagToContent", "(", "scalars_metadata", ".", "PLUGIN_NAME", ")", "tag_to_data", "=", "None", "try", ":", "tag_to_data", "=", "run_to_data", "[", "run", "]", "except", "KeyError", ":", "# The run could not be found. Perhaps a configuration specified a run that", "# TensorBoard has not read from disk yet.", "payload", "=", "{", "}", "if", "tag_to_data", ":", "scalars_plugin_instance", "=", "self", ".", "_get_scalars_plugin", "(", ")", "if", "not", "scalars_plugin_instance", ":", "raise", "ValueError", "(", "(", "'Failed to respond to request for /scalars. '", "'The scalars plugin is oddly not registered.'", ")", ")", "form", "=", "scalars_plugin", ".", "OutputFormat", ".", "JSON", "payload", "=", "{", "tag", ":", "scalars_plugin_instance", ".", "scalars_impl", "(", "tag", ",", "run", ",", "None", ",", "form", ")", "[", "0", "]", "for", "tag", "in", "tag_to_data", ".", "keys", "(", ")", "if", "regex", ".", "match", "(", "tag", ")", "}", "return", "{", "_REGEX_VALID_PROPERTY", ":", "True", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "payload", ",", "}" ]
28.87931
0.009238
[ "def scalars_impl(self, run, tag_regex_string):\n", " \"\"\"Given a tag regex and single run, return ScalarEvents.\n", "\n", " Args:\n", " run: A run string.\n", " tag_regex_string: A regular expression that captures portions of tags.\n", "\n", " Raises:\n", " ValueError: if the scalars plugin is not registered.\n", "\n", " Returns:\n", " A dictionary that is the JSON-able response.\n", " \"\"\"\n", " if not tag_regex_string:\n", " # The user provided no regex.\n", " return {\n", " _REGEX_VALID_PROPERTY: False,\n", " _TAG_TO_EVENTS_PROPERTY: {},\n", " }\n", "\n", " # Construct the regex.\n", " try:\n", " regex = re.compile(tag_regex_string)\n", " except re.error:\n", " return {\n", " _REGEX_VALID_PROPERTY: False,\n", " _TAG_TO_EVENTS_PROPERTY: {},\n", " }\n", "\n", " # Fetch the tags for the run. Filter for tags that match the regex.\n", " run_to_data = self._multiplexer.PluginRunToTagToContent(\n", " scalars_metadata.PLUGIN_NAME)\n", "\n", " tag_to_data = None\n", " try:\n", " tag_to_data = run_to_data[run]\n", " except KeyError:\n", " # The run could not be found. Perhaps a configuration specified a run that\n", " # TensorBoard has not read from disk yet.\n", " payload = {}\n", "\n", " if tag_to_data:\n", " scalars_plugin_instance = self._get_scalars_plugin()\n", " if not scalars_plugin_instance:\n", " raise ValueError(('Failed to respond to request for /scalars. '\n", " 'The scalars plugin is oddly not registered.'))\n", "\n", " form = scalars_plugin.OutputFormat.JSON\n", " payload = {\n", " tag: scalars_plugin_instance.scalars_impl(tag, run, None, form)[0]\n", " for tag in tag_to_data.keys()\n", " if regex.match(tag)\n", " }\n", "\n", " return {\n", " _REGEX_VALID_PROPERTY: True,\n", " _TAG_TO_EVENTS_PROPERTY: payload,\n", " }" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.027777777777777776, 0.06666666666666667, 0, 0, 0, 0, 0, 0, 0.023255813953488372, 0, 0.06666666666666667, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02702702702702703, 0, 0.024691358024691357, 0.020833333333333332, 0.05263157894736842, 0, 0, 0.01694915254237288, 0.02631578947368421, 0, 0, 0, 0.021739130434782608, 0.05555555555555555, 0, 0.023809523809523808, 0.03125, 0, 0, 0, 0, 0, 0.2 ]
58
0.011813
def zSetSurfaceData(self, surfNum, radius=None, thick=None, material=None, semidia=None, conic=None, comment=None): """Sets surface data""" if self.pMode == 0: # Sequential mode surf = self.pLDE.GetSurfaceAt(surfNum) if radius is not None: surf.pRadius = radius if thick is not None: surf.pThickness = thick if material is not None: surf.pMaterial = material if semidia is not None: surf.pSemiDiameter = semidia if conic is not None: surf.pConic = conic if comment is not None: surf.pComment = comment else: raise NotImplementedError('Function not implemented for non-sequential mode')
[ "def", "zSetSurfaceData", "(", "self", ",", "surfNum", ",", "radius", "=", "None", ",", "thick", "=", "None", ",", "material", "=", "None", ",", "semidia", "=", "None", ",", "conic", "=", "None", ",", "comment", "=", "None", ")", ":", "if", "self", ".", "pMode", "==", "0", ":", "# Sequential mode", "surf", "=", "self", ".", "pLDE", ".", "GetSurfaceAt", "(", "surfNum", ")", "if", "radius", "is", "not", "None", ":", "surf", ".", "pRadius", "=", "radius", "if", "thick", "is", "not", "None", ":", "surf", ".", "pThickness", "=", "thick", "if", "material", "is", "not", "None", ":", "surf", ".", "pMaterial", "=", "material", "if", "semidia", "is", "not", "None", ":", "surf", ".", "pSemiDiameter", "=", "semidia", "if", "conic", "is", "not", "None", ":", "surf", ".", "pConic", "=", "conic", "if", "comment", "is", "not", "None", ":", "surf", ".", "pComment", "=", "comment", "else", ":", "raise", "NotImplementedError", "(", "'Function not implemented for non-sequential mode'", ")" ]
42.526316
0.008475
[ "def zSetSurfaceData(self, surfNum, radius=None, thick=None, material=None, semidia=None, \n", " conic=None, comment=None):\n", " \"\"\"Sets surface data\"\"\"\n", " if self.pMode == 0: # Sequential mode\n", " surf = self.pLDE.GetSurfaceAt(surfNum)\n", " if radius is not None:\n", " surf.pRadius = radius\n", " if thick is not None:\n", " surf.pThickness = thick\n", " if material is not None:\n", " surf.pMaterial = material\n", " if semidia is not None:\n", " surf.pSemiDiameter = semidia\n", " if conic is not None:\n", " surf.pConic = conic\n", " if comment is not None:\n", " surf.pComment = comment\n", " else:\n", " raise NotImplementedError('Function not implemented for non-sequential mode')" ]
[ 0.022222222222222223, 0.0196078431372549, 0.03125, 0.021739130434782608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02247191011235955 ]
19
0.006173
async def update_pin(**payload): """Update the onboarding welcome message after receiving a "pin_added" event from Slack. Update timestamp for welcome message as well. """ data = payload["data"] web_client = payload["web_client"] channel_id = data["channel_id"] user_id = data["user"] # Get the original tutorial sent. onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id] # Mark the pin task as completed. onboarding_tutorial.pin_task_completed = True # Get the new message payload message = onboarding_tutorial.get_message_payload() # Post the updated message in Slack updated_message = await web_client.chat_update(**message) # Update the timestamp saved on the onboarding tutorial object onboarding_tutorial.timestamp = updated_message["ts"]
[ "async", "def", "update_pin", "(", "*", "*", "payload", ")", ":", "data", "=", "payload", "[", "\"data\"", "]", "web_client", "=", "payload", "[", "\"web_client\"", "]", "channel_id", "=", "data", "[", "\"channel_id\"", "]", "user_id", "=", "data", "[", "\"user\"", "]", "# Get the original tutorial sent.", "onboarding_tutorial", "=", "onboarding_tutorials_sent", "[", "channel_id", "]", "[", "user_id", "]", "# Mark the pin task as completed.", "onboarding_tutorial", ".", "pin_task_completed", "=", "True", "# Get the new message payload", "message", "=", "onboarding_tutorial", ".", "get_message_payload", "(", ")", "# Post the updated message in Slack", "updated_message", "=", "await", "web_client", ".", "chat_update", "(", "*", "*", "message", ")", "# Update the timestamp saved on the onboarding tutorial object", "onboarding_tutorial", ".", "timestamp", "=", "updated_message", "[", "\"ts\"", "]" ]
35.26087
0.0012
[ "async def update_pin(**payload):\n", "    \"\"\"Update the onboarding welcome message after receiving a \"pin_added\"\n", "    event from Slack. Update timestamp for welcome message as well.\n", "    \"\"\"\n", "    data = payload[\"data\"]\n", "    web_client = payload[\"web_client\"]\n", "    channel_id = data[\"channel_id\"]\n", "    user_id = data[\"user\"]\n", "\n", "    # Get the original tutorial sent.\n", "    onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]\n", "\n", "    # Mark the pin task as completed.\n", "    onboarding_tutorial.pin_task_completed = True\n", "\n", "    # Get the new message payload\n", "    message = onboarding_tutorial.get_message_payload()\n", "\n", "    # Post the updated message in Slack\n", "    updated_message = await web_client.chat_update(**message)\n", "\n", "    # Update the timestamp saved on the onboarding tutorial object\n", "    onboarding_tutorial.timestamp = updated_message[\"ts\"]" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.017543859649122806 ]
23
0.000763
def display_pil_image(im): """Displayhook function for PIL Images, rendered as PNG.""" from IPython.core import display b = BytesIO() im.save(b, format='png') data = b.getvalue() ip_img = display.Image(data=data, format='png', embed=True) return ip_img._repr_png_()
[ "def", "display_pil_image", "(", "im", ")", ":", "from", "IPython", ".", "core", "import", "display", "b", "=", "BytesIO", "(", ")", "im", ".", "save", "(", "b", ",", "format", "=", "'png'", ")", "data", "=", "b", ".", "getvalue", "(", ")", "ip_img", "=", "display", ".", "Image", "(", "data", "=", "data", ",", "format", "=", "'png'", ",", "embed", "=", "True", ")", "return", "ip_img", ".", "_repr_png_", "(", ")" ]
31
0.027875
[ "def display_pil_image(im):\n", " \"\"\"Displayhook function for PIL Images, rendered as PNG.\"\"\"\n", " from IPython.core import display\n", " b = BytesIO()\n", " im.save(b, format='png')\n", " data = b.getvalue()\n", "\n", " ip_img = display.Image(data=data, format='png', embed=True)\n", " return ip_img._repr_png_()" ]
[ 0, 0.015873015873015872, 0.027777777777777776, 0.058823529411764705, 0.03571428571428571, 0.043478260869565216, 0, 0.015873015873015872, 0.06896551724137931 ]
9
0.029612
def _upload(self, archive, region): """Upload function source and return source url """ # Generate source upload url url = self.client.execute_command( 'generateUploadUrl', {'parent': 'projects/{}/locations/{}'.format( self.session.get_default_project(), region)}).get('uploadUrl') log.debug("uploading function code %s", url) http = self._get_http_client(self.client) headers, response = http.request( url, method='PUT', headers={ 'content-type': 'application/zip', 'Content-Length': '%d' % archive.size, 'x-goog-content-length-range': '0,104857600' }, body=open(archive.path, 'rb') ) log.info("function code uploaded") if headers['status'] != '200': raise RuntimeError("%s\n%s" % (headers, response)) return url
[ "def", "_upload", "(", "self", ",", "archive", ",", "region", ")", ":", "# Generate source upload url", "url", "=", "self", ".", "client", ".", "execute_command", "(", "'generateUploadUrl'", ",", "{", "'parent'", ":", "'projects/{}/locations/{}'", ".", "format", "(", "self", ".", "session", ".", "get_default_project", "(", ")", ",", "region", ")", "}", ")", ".", "get", "(", "'uploadUrl'", ")", "log", ".", "debug", "(", "\"uploading function code %s\"", ",", "url", ")", "http", "=", "self", ".", "_get_http_client", "(", "self", ".", "client", ")", "headers", ",", "response", "=", "http", ".", "request", "(", "url", ",", "method", "=", "'PUT'", ",", "headers", "=", "{", "'content-type'", ":", "'application/zip'", ",", "'Content-Length'", ":", "'%d'", "%", "archive", ".", "size", ",", "'x-goog-content-length-range'", ":", "'0,104857600'", "}", ",", "body", "=", "open", "(", "archive", ".", "path", ",", "'rb'", ")", ")", "log", ".", "info", "(", "\"function code uploaded\"", ")", "if", "headers", "[", "'status'", "]", "!=", "'200'", ":", "raise", "RuntimeError", "(", "\"%s\\n%s\"", "%", "(", "headers", ",", "response", ")", ")", "return", "url" ]
39.25
0.002073
[ "def _upload(self, archive, region):\n", " \"\"\"Upload function source and return source url\n", " \"\"\"\n", " # Generate source upload url\n", " url = self.client.execute_command(\n", " 'generateUploadUrl',\n", " {'parent': 'projects/{}/locations/{}'.format(\n", " self.session.get_default_project(),\n", " region)}).get('uploadUrl')\n", " log.debug(\"uploading function code %s\", url)\n", " http = self._get_http_client(self.client)\n", " headers, response = http.request(\n", " url, method='PUT',\n", " headers={\n", " 'content-type': 'application/zip',\n", " 'Content-Length': '%d' % archive.size,\n", " 'x-goog-content-length-range': '0,104857600'\n", " },\n", " body=open(archive.path, 'rb')\n", " )\n", " log.info(\"function code uploaded\")\n", " if headers['status'] != '200':\n", " raise RuntimeError(\"%s\\n%s\" % (headers, response))\n", " return url" ]
[ 0, 0.017857142857142856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05555555555555555 ]
24
0.003059
def copy(self, deep=True, data=None): """Returns a copy of this object. `deep` is ignored since data is stored in the form of pandas.Index, which is already immutable. Dimensions, attributes and encodings are always copied. Use `data` to create a new object with the same structure as original but entirely new data. Parameters ---------- deep : bool, optional Deep is always ignored. data : array_like, optional Data to use in the new object. Must have same shape as original. Returns ------- object : Variable New object with dimensions, attributes, encodings, and optionally data copied from original. """ if data is None: data = self._data else: data = as_compatible_data(data) if self.shape != data.shape: raise ValueError("Data shape {} must match shape of object {}" .format(data.shape, self.shape)) return type(self)(self.dims, data, self._attrs, self._encoding, fastpath=True)
[ "def", "copy", "(", "self", ",", "deep", "=", "True", ",", "data", "=", "None", ")", ":", "if", "data", "is", "None", ":", "data", "=", "self", ".", "_data", "else", ":", "data", "=", "as_compatible_data", "(", "data", ")", "if", "self", ".", "shape", "!=", "data", ".", "shape", ":", "raise", "ValueError", "(", "\"Data shape {} must match shape of object {}\"", ".", "format", "(", "data", ".", "shape", ",", "self", ".", "shape", ")", ")", "return", "type", "(", "self", ")", "(", "self", ".", "dims", ",", "data", ",", "self", ".", "_attrs", ",", "self", ".", "_encoding", ",", "fastpath", "=", "True", ")" ]
36.03125
0.001689
[ "def copy(self, deep=True, data=None):\n", " \"\"\"Returns a copy of this object.\n", "\n", " `deep` is ignored since data is stored in the form of\n", " pandas.Index, which is already immutable. Dimensions, attributes\n", " and encodings are always copied.\n", "\n", " Use `data` to create a new object with the same structure as\n", " original but entirely new data.\n", "\n", " Parameters\n", " ----------\n", " deep : bool, optional\n", " Deep is always ignored.\n", " data : array_like, optional\n", " Data to use in the new object. Must have same shape as original.\n", "\n", " Returns\n", " -------\n", " object : Variable\n", " New object with dimensions, attributes, encodings, and optionally\n", " data copied from original.\n", " \"\"\"\n", " if data is None:\n", " data = self._data\n", " else:\n", " data = as_compatible_data(data)\n", " if self.shape != data.shape:\n", " raise ValueError(\"Data shape {} must match shape of object {}\"\n", " .format(data.shape, self.shape))\n", " return type(self)(self.dims, data, self._attrs,\n", " self._encoding, fastpath=True)" ]
[ 0, 0.023809523809523808, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.017857142857142856 ]
32
0.001302
def add_output_option(parser): """output option""" parser.add_argument("-o", "--outdir", dest="outdir", type=str, default='GSEApy_reports', metavar='', action="store", help="The GSEApy output directory. Default: the current working directory") parser.add_argument("-f", "--format", dest="format", type=str, metavar='', action="store", choices=("pdf", "png", "jpeg", "eps", "svg"), default="pdf", help="File extensions supported by Matplotlib active backend,\ choose from {'pdf', 'png', 'jpeg','ps', 'eps','svg'}. Default: 'pdf'.") parser.add_argument("--fs", "--figsize", action='store', nargs=2, dest='figsize', metavar=('width', 'height'),type=float, default=(6.5, 6), help="The figsize keyword argument need two parameters to define. Default: (6.5, 6)") parser.add_argument("--graph", dest = "graph", action="store", type=int, default=20, metavar='int', help="Numbers of top graphs produced. Default: 20") parser.add_argument("--no-plot", action='store_true', dest='noplot', default=False, help="Speed up computing by suppressing the plot output."+\ "This is useful only if data are interested. Default: False.") parser.add_argument("-v", "--verbose", action="store_true", default=False, dest='verbose', help="Increase output verbosity, print out progress of your job", )
[ "def", "add_output_option", "(", "parser", ")", ":", "parser", ".", "add_argument", "(", "\"-o\"", ",", "\"--outdir\"", ",", "dest", "=", "\"outdir\"", ",", "type", "=", "str", ",", "default", "=", "'GSEApy_reports'", ",", "metavar", "=", "''", ",", "action", "=", "\"store\"", ",", "help", "=", "\"The GSEApy output directory. Default: the current working directory\"", ")", "parser", ".", "add_argument", "(", "\"-f\"", ",", "\"--format\"", ",", "dest", "=", "\"format\"", ",", "type", "=", "str", ",", "metavar", "=", "''", ",", "action", "=", "\"store\"", ",", "choices", "=", "(", "\"pdf\"", ",", "\"png\"", ",", "\"jpeg\"", ",", "\"eps\"", ",", "\"svg\"", ")", ",", "default", "=", "\"pdf\"", ",", "help", "=", "\"File extensions supported by Matplotlib active backend,\\\n choose from {'pdf', 'png', 'jpeg','ps', 'eps','svg'}. Default: 'pdf'.\"", ")", "parser", ".", "add_argument", "(", "\"--fs\"", ",", "\"--figsize\"", ",", "action", "=", "'store'", ",", "nargs", "=", "2", ",", "dest", "=", "'figsize'", ",", "metavar", "=", "(", "'width'", ",", "'height'", ")", ",", "type", "=", "float", ",", "default", "=", "(", "6.5", ",", "6", ")", ",", "help", "=", "\"The figsize keyword argument need two parameters to define. Default: (6.5, 6)\"", ")", "parser", ".", "add_argument", "(", "\"--graph\"", ",", "dest", "=", "\"graph\"", ",", "action", "=", "\"store\"", ",", "type", "=", "int", ",", "default", "=", "20", ",", "metavar", "=", "'int'", ",", "help", "=", "\"Numbers of top graphs produced. Default: 20\"", ")", "parser", ".", "add_argument", "(", "\"--no-plot\"", ",", "action", "=", "'store_true'", ",", "dest", "=", "'noplot'", ",", "default", "=", "False", ",", "help", "=", "\"Speed up computing by suppressing the plot output.\"", "+", "\"This is useful only if data are interested. Default: False.\"", ")", "parser", ".", "add_argument", "(", "\"-v\"", ",", "\"--verbose\"", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ",", "dest", "=", "'verbose'", ",", "help", "=", "\"Increase output verbosity, print out progress of your job\"", ",", ")" ]
77.95
0.013308
[ "def add_output_option(parser):\n", " \"\"\"output option\"\"\"\n", "\n", " parser.add_argument(\"-o\", \"--outdir\", dest=\"outdir\", type=str, default='GSEApy_reports',\n", " metavar='', action=\"store\",\n", " help=\"The GSEApy output directory. Default: the current working directory\")\n", " parser.add_argument(\"-f\", \"--format\", dest=\"format\", type=str, metavar='', action=\"store\",\n", " choices=(\"pdf\", \"png\", \"jpeg\", \"eps\", \"svg\"), default=\"pdf\",\n", " help=\"File extensions supported by Matplotlib active backend,\\\n", " choose from {'pdf', 'png', 'jpeg','ps', 'eps','svg'}. Default: 'pdf'.\")\n", " parser.add_argument(\"--fs\", \"--figsize\", action='store', nargs=2, dest='figsize',\n", " metavar=('width', 'height'),type=float, default=(6.5, 6),\n", " help=\"The figsize keyword argument need two parameters to define. Default: (6.5, 6)\")\n", " parser.add_argument(\"--graph\", dest = \"graph\", action=\"store\", type=int, default=20, metavar='int',\n", " help=\"Numbers of top graphs produced. Default: 20\")\n", " parser.add_argument(\"--no-plot\", action='store_true', dest='noplot', default=False,\n", " help=\"Speed up computing by suppressing the plot output.\"+\\\n", " \"This is useful only if data are interested. Default: False.\")\n", " parser.add_argument(\"-v\", \"--verbose\", action=\"store_true\", default=False, dest='verbose',\n", " help=\"Increase output verbosity, print out progress of your job\", )" ]
[ 0, 0, 0, 0.010752688172043012, 0, 0.01, 0.010526315789473684, 0.011764705882352941, 0.011494252873563218, 0.00980392156862745, 0.011627906976744186, 0.024390243902439025, 0.00909090909090909, 0.028846153846153848, 0, 0.011363636363636364, 0.03571428571428571, 0.010869565217391304, 0.010526315789473684, 0.02197802197802198 ]
20
0.011437
def cartesian_square_centred_on_point(self, point, distance, **kwargs): ''' Select earthquakes from within a square centered on a point :param point: Centre point as instance of nhlib.geo.point.Point class :param distance: Distance (km) :returns: Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue` class containing only selected events ''' point_surface = Point(point.longitude, point.latitude, 0.) # As distance is north_point = point_surface.point_at(distance, 0., 0.) east_point = point_surface.point_at(distance, 0., 90.) south_point = point_surface.point_at(distance, 0., 180.) west_point = point_surface.point_at(distance, 0., 270.) is_long = np.logical_and( self.catalogue.data['longitude'] >= west_point.longitude, self.catalogue.data['longitude'] < east_point.longitude) is_surface = np.logical_and( is_long, self.catalogue.data['latitude'] >= south_point.latitude, self.catalogue.data['latitude'] < north_point.latitude) upper_depth, lower_depth = _check_depth_limits(kwargs) is_valid = np.logical_and( is_surface, self.catalogue.data['depth'] >= upper_depth, self.catalogue.data['depth'] < lower_depth) return self.select_catalogue(is_valid)
[ "def", "cartesian_square_centred_on_point", "(", "self", ",", "point", ",", "distance", ",", "*", "*", "kwargs", ")", ":", "point_surface", "=", "Point", "(", "point", ".", "longitude", ",", "point", ".", "latitude", ",", "0.", ")", "# As distance is", "north_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "0.", ")", "east_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "90.", ")", "south_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "180.", ")", "west_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "270.", ")", "is_long", "=", "np", ".", "logical_and", "(", "self", ".", "catalogue", ".", "data", "[", "'longitude'", "]", ">=", "west_point", ".", "longitude", ",", "self", ".", "catalogue", ".", "data", "[", "'longitude'", "]", "<", "east_point", ".", "longitude", ")", "is_surface", "=", "np", ".", "logical_and", "(", "is_long", ",", "self", ".", "catalogue", ".", "data", "[", "'latitude'", "]", ">=", "south_point", ".", "latitude", ",", "self", ".", "catalogue", ".", "data", "[", "'latitude'", "]", "<", "north_point", ".", "latitude", ")", "upper_depth", ",", "lower_depth", "=", "_check_depth_limits", "(", "kwargs", ")", "is_valid", "=", "np", ".", "logical_and", "(", "is_surface", ",", "self", ".", "catalogue", ".", "data", "[", "'depth'", "]", ">=", "upper_depth", ",", "self", ".", "catalogue", ".", "data", "[", "'depth'", "]", "<", "lower_depth", ")", "return", "self", ".", "select_catalogue", "(", "is_valid", ")" ]
40.542857
0.001376
[ "def cartesian_square_centred_on_point(self, point, distance, **kwargs):\n", " '''\n", " Select earthquakes from within a square centered on a point\n", "\n", " :param point:\n", " Centre point as instance of nhlib.geo.point.Point class\n", "\n", " :param distance:\n", " Distance (km)\n", "\n", " :returns:\n", " Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`\n", " class containing only selected events\n", " '''\n", " point_surface = Point(point.longitude, point.latitude, 0.)\n", " # As distance is\n", " north_point = point_surface.point_at(distance, 0., 0.)\n", " east_point = point_surface.point_at(distance, 0., 90.)\n", " south_point = point_surface.point_at(distance, 0., 180.)\n", " west_point = point_surface.point_at(distance, 0., 270.)\n", " is_long = np.logical_and(\n", " self.catalogue.data['longitude'] >= west_point.longitude,\n", " self.catalogue.data['longitude'] < east_point.longitude)\n", " is_surface = np.logical_and(\n", " is_long,\n", " self.catalogue.data['latitude'] >= south_point.latitude,\n", " self.catalogue.data['latitude'] < north_point.latitude)\n", "\n", " upper_depth, lower_depth = _check_depth_limits(kwargs)\n", " is_valid = np.logical_and(\n", " is_surface,\n", " self.catalogue.data['depth'] >= upper_depth,\n", " self.catalogue.data['depth'] < lower_depth)\n", "\n", " return self.select_catalogue(is_valid)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608 ]
35
0.003002
def validateData(self, text): ''' Method which validates the data from each tag, to check whether it is an empty string :param text: data to be validated :return: True or False depending on the result ''' if text == "\n": return False for c in text: try: if str(c) != " ": return True except: return False return False
[ "def", "validateData", "(", "self", ",", "text", ")", ":", "if", "text", "==", "\"\\n\"", ":", "return", "False", "for", "c", "in", "text", ":", "try", ":", "if", "str", "(", "c", ")", "!=", "\" \"", ":", "return", "True", "except", ":", "return", "False", "return", "False" ]
30.4
0.008511
[ "def validateData(self, text):\n", " '''\n", " Method which validates the data from each tag, to check whether it is an empty string\n", " :param text: data to be validated\n", " :return: True or False depending on the result\n", " '''\n", " if text == \"\\n\":\n", " return False\n", " for c in text:\n", " try:\n", " if str(c) != \" \":\n", " return True\n", " except:\n", " return False\n", " return False" ]
[ 0, 0.08333333333333333, 0.010638297872340425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05, 0, 0.05 ]
15
0.012931
def renew_service(request, pk): """ renew an existing service :param request: request object :param pk: the primary key of the service to renew :type pk: int """ default_provider.load_services() service = get_object_or_404(ServicesActivated, pk=pk) service_name = str(service.name) service_object = default_provider.get_service(service_name) lets_auth = getattr(service_object, 'auth') getattr(service_object, 'reset_failed')(pk=pk) return redirect(lets_auth(request))
[ "def", "renew_service", "(", "request", ",", "pk", ")", ":", "default_provider", ".", "load_services", "(", ")", "service", "=", "get_object_or_404", "(", "ServicesActivated", ",", "pk", "=", "pk", ")", "service_name", "=", "str", "(", "service", ".", "name", ")", "service_object", "=", "default_provider", ".", "get_service", "(", "service_name", ")", "lets_auth", "=", "getattr", "(", "service_object", ",", "'auth'", ")", "getattr", "(", "service_object", ",", "'reset_failed'", ")", "(", "pk", "=", "pk", ")", "return", "redirect", "(", "lets_auth", "(", "request", ")", ")" ]
36.714286
0.001898
[ "def renew_service(request, pk):\n", "    \"\"\"\n", "    renew an existing service\n", "    :param request: request object\n", "    :param pk: the primary key of the service to renew\n", "    :type pk: int\n", "    \"\"\"\n", "    default_provider.load_services()\n", "    service = get_object_or_404(ServicesActivated, pk=pk)\n", "    service_name = str(service.name)\n", "    service_object = default_provider.get_service(service_name)\n", "    lets_auth = getattr(service_object, 'auth')\n", "    getattr(service_object, 'reset_failed')(pk=pk)\n", "    return redirect(lets_auth(request))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02564102564102564 ]
14
0.001832
def clicked(self, px, py): '''see if the image has been clicked on''' if self.hidden: return None if (abs(px - self.posx) > self.width/2 or abs(py - self.posy) > self.height/2): return None return math.sqrt((px-self.posx)**2 + (py-self.posy)**2)
[ "def", "clicked", "(", "self", ",", "px", ",", "py", ")", ":", "if", "self", ".", "hidden", ":", "return", "None", "if", "(", "abs", "(", "px", "-", "self", ".", "posx", ")", ">", "self", ".", "width", "/", "2", "or", "abs", "(", "py", "-", "self", ".", "posy", ")", ">", "self", ".", "height", "/", "2", ")", ":", "return", "None", "return", "math", ".", "sqrt", "(", "(", "px", "-", "self", ".", "posx", ")", "**", "2", "+", "(", "py", "-", "self", ".", "posy", ")", "**", "2", ")" ]
38.25
0.009585
[ "def clicked(self, px, py):\n", " '''see if the image has been clicked on'''\n", " if self.hidden:\n", " return None\n", " if (abs(px - self.posx) > self.width/2 or\n", " abs(py - self.posy) > self.height/2):\n", " return None\n", " return math.sqrt((px-self.posx)**2 + (py-self.posy)**2)" ]
[ 0, 0.0196078431372549, 0, 0, 0, 0.02, 0, 0.015873015873015872 ]
8
0.006935
def proj_l1(v, gamma, axis=None, method=None): r"""Projection operator of the :math:`\ell_1` norm. Parameters ---------- v : array_like Input array :math:`\mathbf{v}` gamma : float Parameter :math:`\gamma` axis : None or int or tuple of ints, optional (default None) Axes of `v` over which to compute the :math:`\ell_1` norm. If `None`, an entire multi-dimensional array is treated as a vector. If axes are specified, then distinct norm values are computed over the indices of the remaining axes of input array `v`. method : None or str, optional (default None) Solver method to use. If `None`, the most appropriate choice is made based on the `axis` parameter. Valid methods are - 'scalarroot' The solution is computed via the method of Sec. 6.5.2 in :cite:`parikh-2014-proximal`. - 'sortcumsum' The solution is computed via the method of :cite:`duchi-2008-efficient`. Returns ------- x : ndarray Output array """ if method is None: if axis is None: method = 'scalarroot' else: method = 'sortcumsum' if method == 'scalarroot': if axis is not None: raise ValueError('Method scalarroot only supports axis=None') return _proj_l1_scalar_root(v, gamma) elif method == 'sortcumsum': if isinstance(axis, tuple): vtr, rsi = ndto2d(v, axis) xtr = _proj_l1_sortsum(vtr, gamma, axis=1) return ndfrom2d(xtr, rsi) else: return _proj_l1_sortsum(v, gamma, axis) else: raise ValueError('Unknown solver method %s' % method)
[ "def", "proj_l1", "(", "v", ",", "gamma", ",", "axis", "=", "None", ",", "method", "=", "None", ")", ":", "if", "method", "is", "None", ":", "if", "axis", "is", "None", ":", "method", "=", "'scalarroot'", "else", ":", "method", "=", "'sortcumsum'", "if", "method", "==", "'scalarroot'", ":", "if", "axis", "is", "not", "None", ":", "raise", "ValueError", "(", "'Method scalarroot only supports axis=None'", ")", "return", "_proj_l1_scalar_root", "(", "v", ",", "gamma", ")", "elif", "method", "==", "'sortcumsum'", ":", "if", "isinstance", "(", "axis", ",", "tuple", ")", ":", "vtr", ",", "rsi", "=", "ndto2d", "(", "v", ",", "axis", ")", "xtr", "=", "_proj_l1_sortsum", "(", "vtr", ",", "gamma", ",", "axis", "=", "1", ")", "return", "ndfrom2d", "(", "xtr", ",", "rsi", ")", "else", ":", "return", "_proj_l1_sortsum", "(", "v", ",", "gamma", ",", "axis", ")", "else", ":", "raise", "ValueError", "(", "'Unknown solver method %s'", "%", "method", ")" ]
32.461538
0.000575
[ "def proj_l1(v, gamma, axis=None, method=None):\n", " r\"\"\"Projection operator of the :math:`\\ell_1` norm.\n", "\n", "\n", " Parameters\n", " ----------\n", " v : array_like\n", " Input array :math:`\\mathbf{v}`\n", " gamma : float\n", " Parameter :math:`\\gamma`\n", " axis : None or int or tuple of ints, optional (default None)\n", " Axes of `v` over which to compute the :math:`\\ell_1` norm. If\n", " `None`, an entire multi-dimensional array is treated as a\n", " vector. If axes are specified, then distinct norm values are\n", " computed over the indices of the remaining axes of input array\n", " `v`.\n", " method : None or str, optional (default None)\n", " Solver method to use. If `None`, the most appropriate choice is\n", " made based on the `axis` parameter. Valid methods are\n", "\n", " - 'scalarroot'\n", " The solution is computed via the method of Sec. 6.5.2 in\n", " :cite:`parikh-2014-proximal`.\n", " - 'sortcumsum'\n", " The solution is computed via the method of\n", " :cite:`duchi-2008-efficient`.\n", "\n", " Returns\n", " -------\n", " x : ndarray\n", " Output array\n", " \"\"\"\n", "\n", " if method is None:\n", " if axis is None:\n", " method = 'scalarroot'\n", " else:\n", " method = 'sortcumsum'\n", "\n", " if method == 'scalarroot':\n", " if axis is not None:\n", " raise ValueError('Method scalarroot only supports axis=None')\n", " return _proj_l1_scalar_root(v, gamma)\n", " elif method == 'sortcumsum':\n", " if isinstance(axis, tuple):\n", " vtr, rsi = ndto2d(v, axis)\n", " xtr = _proj_l1_sortsum(vtr, gamma, axis=1)\n", " return ndfrom2d(xtr, rsi)\n", " else:\n", " return _proj_l1_sortsum(v, gamma, axis)\n", " else:\n", " raise ValueError('Unknown solver method %s' % method)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01639344262295082 ]
52
0.000315
def setOverlayTransformTrackedDeviceRelative(self, ulOverlayHandle, unTrackedDevice): """Sets the transform to relative to the transform of the specified tracked device.""" fn = self.function_table.setOverlayTransformTrackedDeviceRelative pmatTrackedDeviceToOverlayTransform = HmdMatrix34_t() result = fn(ulOverlayHandle, unTrackedDevice, byref(pmatTrackedDeviceToOverlayTransform)) return result, pmatTrackedDeviceToOverlayTransform
[ "def", "setOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ",", "unTrackedDevice", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformTrackedDeviceRelative", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "unTrackedDevice", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "pmatTrackedDeviceToOverlayTransform" ]
66.857143
0.010549
[ "def setOverlayTransformTrackedDeviceRelative(self, ulOverlayHandle, unTrackedDevice):\n", " \"\"\"Sets the transform to relative to the transform of the specified tracked device.\"\"\"\n", "\n", " fn = self.function_table.setOverlayTransformTrackedDeviceRelative\n", " pmatTrackedDeviceToOverlayTransform = HmdMatrix34_t()\n", " result = fn(ulOverlayHandle, unTrackedDevice, byref(pmatTrackedDeviceToOverlayTransform))\n", " return result, pmatTrackedDeviceToOverlayTransform" ]
[ 0.011627906976744186, 0.021052631578947368, 0, 0, 0, 0.01020408163265306, 0.017241379310344827 ]
7
0.008589
def chunk(line, mapping={None: 'text', '${': 'escape', '#{': 'bless', '&{': 'args', '%{': 'format', '@{': 'json'}): """Chunkify and "tag" a block of text into plain text and code sections. The first delimiter is blank to represent text sections, and keep the indexes aligned with the tags. Values are yielded in the form (tag, text). """ skipping = 0 # How many closing parenthesis will we need to skip? start = None # Starting position of current match. last = 0 i = 0 text = line.line while i < len(text): if start is not None: if text[i] == '{': skipping += 1 elif text[i] == '}': if skipping: skipping -= 1 else: yield line.clone(kind=mapping[text[start-2:start]], line=text[start:i]) start = None last = i = i + 1 continue elif text[i:i+2] in mapping: if last is not None and last != i: yield line.clone(kind=mapping[None], line=text[last:i]) last = None start = i = i + 2 continue i += 1 if last < len(text): yield line.clone(kind=mapping[None], line=text[last:])
[ "def", "chunk", "(", "line", ",", "mapping", "=", "{", "None", ":", "'text'", ",", "'${'", ":", "'escape'", ",", "'#{'", ":", "'bless'", ",", "'&{'", ":", "'args'", ",", "'%{'", ":", "'format'", ",", "'@{'", ":", "'json'", "}", ")", ":", "skipping", "=", "0", "# How many closing parenthesis will we need to skip?", "start", "=", "None", "# Starting position of current match.", "last", "=", "0", "i", "=", "0", "text", "=", "line", ".", "line", "while", "i", "<", "len", "(", "text", ")", ":", "if", "start", "is", "not", "None", ":", "if", "text", "[", "i", "]", "==", "'{'", ":", "skipping", "+=", "1", "elif", "text", "[", "i", "]", "==", "'}'", ":", "if", "skipping", ":", "skipping", "-=", "1", "else", ":", "yield", "line", ".", "clone", "(", "kind", "=", "mapping", "[", "text", "[", "start", "-", "2", ":", "start", "]", "]", ",", "line", "=", "text", "[", "start", ":", "i", "]", ")", "start", "=", "None", "last", "=", "i", "=", "i", "+", "1", "continue", "elif", "text", "[", "i", ":", "i", "+", "2", "]", "in", "mapping", ":", "if", "last", "is", "not", "None", "and", "last", "!=", "i", ":", "yield", "line", ".", "clone", "(", "kind", "=", "mapping", "[", "None", "]", ",", "line", "=", "text", "[", "last", ":", "i", "]", ")", "last", "=", "None", "start", "=", "i", "=", "i", "+", "2", "continue", "i", "+=", "1", "if", "last", "<", "len", "(", "text", ")", ":", "yield", "line", ".", "clone", "(", "kind", "=", "mapping", "[", "None", "]", ",", "line", "=", "text", "[", "last", ":", "]", ")" ]
24.690476
0.05102
[ "def chunk(line, mapping={None: 'text', '${': 'escape', '#{': 'bless', '&{': 'args', '%{': 'format', '@{': 'json'}):\n", "\t\"\"\"Chunkify and \"tag\" a block of text into plain text and code sections.\n", "\t\n", "\tThe first delimiter is blank to represent text sections, and keep the indexes aligned with the tags.\n", "\t\n", "\tValues are yielded in the form (tag, text).\n", "\t\"\"\"\n", "\t\n", "\tskipping = 0 # How many closing parenthesis will we need to skip?\n", "\tstart = None # Starting position of current match.\n", "\tlast = 0\n", "\t\n", "\ti = 0\n", "\t\n", "\ttext = line.line\n", "\t\n", "\twhile i < len(text):\n", "\t\tif start is not None:\n", "\t\t\tif text[i] == '{':\n", "\t\t\t\tskipping += 1\n", "\t\t\t\n", "\t\t\telif text[i] == '}':\n", "\t\t\t\tif skipping:\n", "\t\t\t\t\tskipping -= 1\n", "\t\t\t\telse:\n", "\t\t\t\t\tyield line.clone(kind=mapping[text[start-2:start]], line=text[start:i])\n", "\t\t\t\t\tstart = None\n", "\t\t\t\t\tlast = i = i + 1\n", "\t\t\t\t\tcontinue\n", "\t\t\n", "\t\telif text[i:i+2] in mapping:\n", "\t\t\tif last is not None and last != i:\n", "\t\t\t\tyield line.clone(kind=mapping[None], line=text[last:i])\n", "\t\t\t\tlast = None\n", "\t\t\t\n", "\t\t\tstart = i = i + 2\n", "\t\t\tcontinue\n", "\t\t\n", "\t\ti += 1\n", "\t\n", "\tif last < len(text):\n", "\t\tyield line.clone(kind=mapping[None], line=text[last:])" ]
[ 0.008620689655172414, 0.013513513513513514, 1, 0.0196078431372549, 1, 0.022222222222222223, 0.2, 1, 0.014705882352941176, 0.018867924528301886, 0.1, 1, 0.14285714285714285, 1, 0.05555555555555555, 1, 0.045454545454545456, 0.041666666666666664, 0.045454545454545456, 0.05555555555555555, 0.5, 0.041666666666666664, 0.058823529411764705, 0.05263157894736842, 0.1, 0.012987012987012988, 0.05555555555555555, 0.045454545454545456, 0.07142857142857142, 0.6666666666666666, 0.03225806451612903, 0.02631578947368421, 0.016666666666666666, 0.0625, 0.5, 0.047619047619047616, 0.08333333333333333, 0.6666666666666666, 0.1111111111111111, 1, 0.045454545454545456, 0.03571428571428571 ]
42
0.262308
def show_image(self, image_id_or_slug): """ This method displays the attributes of an image. Required parameters image_id: Numeric, this is the id of the image you would like to use to rebuild your droplet with """ if not image_id_or_slug: msg = 'image_id_or_slug is required to destroy an image!' raise DOPException(msg) json = self.request('/images/%s' % image_id_or_slug, method='GET') image_json = json.get('image') status = json.get('status') if status == 'OK': image = Image.from_json(image_json) return image else: message = json.get('message') raise DOPException('[%s]: %s' % (status, message))
[ "def", "show_image", "(", "self", ",", "image_id_or_slug", ")", ":", "if", "not", "image_id_or_slug", ":", "msg", "=", "'image_id_or_slug is required to destroy an image!'", "raise", "DOPException", "(", "msg", ")", "json", "=", "self", ".", "request", "(", "'/images/%s'", "%", "image_id_or_slug", ",", "method", "=", "'GET'", ")", "image_json", "=", "json", ".", "get", "(", "'image'", ")", "status", "=", "json", ".", "get", "(", "'status'", ")", "if", "status", "==", "'OK'", ":", "image", "=", "Image", ".", "from_json", "(", "image_json", ")", "return", "image", "else", ":", "message", "=", "json", ".", "get", "(", "'message'", ")", "raise", "DOPException", "(", "'[%s]: %s'", "%", "(", "status", ",", "message", ")", ")" ]
33.869565
0.002497
[ "def show_image(self, image_id_or_slug):\n", " \"\"\"\n", " This method displays the attributes of an image.\n", "\n", " Required parameters\n", "\n", " image_id:\n", " Numeric, this is the id of the image you would like to use to\n", " rebuild your droplet with\n", " \"\"\"\n", " if not image_id_or_slug:\n", " msg = 'image_id_or_slug is required to destroy an image!'\n", " raise DOPException(msg)\n", "\n", " json = self.request('/images/%s' % image_id_or_slug, method='GET')\n", " image_json = json.get('image')\n", " status = json.get('status')\n", " if status == 'OK':\n", " image = Image.from_json(image_json)\n", " return image\n", " else:\n", " message = json.get('message')\n", " raise DOPException('[%s]: %s' % (status, message))" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016129032258064516 ]
23
0.004324
def remove_zero_normals(self): """Removes normal vectors with a zero magnitude. Note ---- This returns nothing and updates the NormalCloud in-place. """ points_of_interest = np.where(np.linalg.norm(self._data, axis=0) != 0.0)[0] self._data = self._data[:, points_of_interest]
[ "def", "remove_zero_normals", "(", "self", ")", ":", "points_of_interest", "=", "np", ".", "where", "(", "np", ".", "linalg", ".", "norm", "(", "self", ".", "_data", ",", "axis", "=", "0", ")", "!=", "0.0", ")", "[", "0", "]", "self", ".", "_data", "=", "self", ".", "_data", "[", ":", ",", "points_of_interest", "]" ]
36
0.009036
[ "def remove_zero_normals(self):\n", " \"\"\"Removes normal vectors with a zero magnitude.\n", "\n", " Note\n", " ----\n", " This returns nothing and updates the NormalCloud in-place.\n", " \"\"\"\n", " points_of_interest = np.where(np.linalg.norm(self._data, axis=0) != 0.0)[0]\n", " self._data = self._data[:, points_of_interest]" ]
[ 0, 0.017543859649122806, 0, 0, 0, 0, 0, 0.011904761904761904, 0.018518518518518517 ]
9
0.00533
def search(self, index=None, doc_type=None, body=None, **query_params): """ Make a search query on the elastic search `<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html>`_ :param index: the index name to query :param doc_type: the doc type to search in :param body: the query :param query_params: params :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_exclude: A list of fields to exclude from the returned _source field :arg _source_include: A list of fields to extract and return from the _source field :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR), default 'OR', valid choices are: 'AND', 'OR' :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'open', valid choices are: 'open', 'closed', 'none', 'all' :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg fielddata_fields: A comma-separated list of fields to return as the field data representation of a field for each hit :arg fields: A comma-separated list of fields to return as part of a hit :arg from\_: Starting offset (default: 0) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg lowercase_expanded_terms: Specify whether query terms should be lowercased :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg request_cache: Specify if request cache should be used for this request or not, defaults to index level setting :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_type: Search operation type, valid choices are: 'query_then_fetch', 'dfs_query_then_fetch' :arg size: Number of hits to return (default: 10) :arg sort: A comma-separated list of <field>:<direction> pairs :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg suggest_field: Specify which field to use for suggestions :arg suggest_mode: Specify suggest mode, default 'missing', valid choices are: 'missing', 'popular', 'always' :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. :arg timeout: Explicit operation timeout :arg track_scores: Whether to calculate and return scores even if they are not used for sorting :arg version: Specify whether to return document version as part of a hit """ path = self._es_parser.make_path(index, doc_type, EsMethods.SEARCH) result = yield self._perform_request(HttpMethod.POST, path, body=body, params=query_params) returnValue(result)
[ "def", "search", "(", "self", ",", "index", "=", "None", ",", "doc_type", "=", "None", ",", "body", "=", "None", ",", "*", "*", "query_params", ")", ":", "path", "=", "self", ".", "_es_parser", ".", "make_path", "(", "index", ",", "doc_type", ",", "EsMethods", ".", "SEARCH", ")", "result", "=", "yield", "self", ".", "_perform_request", "(", "HttpMethod", ".", "POST", ",", "path", ",", "body", "=", "body", ",", "params", "=", "query_params", ")", "returnValue", "(", "result", ")" ]
57.652778
0.001895
[ "def search(self, index=None, doc_type=None, body=None, **query_params):\n", "        \"\"\"\n", "        Make a search query on the elastic search\n", "        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html>`_\n", "\n", "        :param index: the index name to query\n", "        :param doc_type: the doc type to search in\n", "        :param body: the query\n", "        :param query_params: params\n", "        :arg _source: True or false to return the _source field or not, or a\n", "            list of fields to return\n", "        :arg _source_exclude: A list of fields to exclude from the returned\n", "            _source field\n", "        :arg _source_include: A list of fields to extract and return from the\n", "            _source field\n", "        :arg allow_no_indices: Whether to ignore if a wildcard indices\n", "            expression resolves into no concrete indices. (This includes `_all`\n", "            string or when no indices have been specified)\n", "        :arg analyze_wildcard: Specify whether wildcard and prefix queries\n", "            should be analyzed (default: false)\n", "        :arg analyzer: The analyzer to use for the query string\n", "        :arg default_operator: The default operator for query string query (AND\n", "            or OR), default 'OR', valid choices are: 'AND', 'OR'\n", "        :arg df: The field to use as default where no field prefix is given in\n", "            the query string\n", "        :arg expand_wildcards: Whether to expand wildcard expression to concrete\n", "            indices that are open, closed or both., default 'open', valid\n", "            choices are: 'open', 'closed', 'none', 'all'\n", "        :arg explain: Specify whether to return detailed information about score\n", "            computation as part of a hit\n", "        :arg fielddata_fields: A comma-separated list of fields to return as the\n", "            field data representation of a field for each hit\n", "        :arg fields: A comma-separated list of fields to return as part of a hit\n", "        :arg from\\_: Starting offset (default: 0)\n", "        :arg ignore_unavailable: Whether specified concrete indices should be\n", "            ignored when unavailable (missing or closed)\n", "        :arg lenient: Specify whether format-based query failures (such as\n", "            providing text to a numeric field) should be ignored\n", "        :arg lowercase_expanded_terms: Specify whether query terms should be\n", "            lowercased\n", "        :arg preference: Specify the node or shard the operation should be\n", "            performed on (default: random)\n", "        :arg q: Query in the Lucene query string syntax\n", "        :arg request_cache: Specify if request cache should be used for this\n", "            request or not, defaults to index level setting\n", "        :arg routing: A comma-separated list of specific routing values\n", "        :arg scroll: Specify how long a consistent view of the index should be\n", "            maintained for scrolled search\n", "        :arg search_type: Search operation type, valid choices are:\n", "            'query_then_fetch', 'dfs_query_then_fetch'\n", "        :arg size: Number of hits to return (default: 10)\n", "        :arg sort: A comma-separated list of <field>:<direction> pairs\n", "        :arg stats: Specific 'tag' of the request for logging and statistical\n", "            purposes\n", "        :arg suggest_field: Specify which field to use for suggestions\n", "        :arg suggest_mode: Specify suggest mode, default 'missing', valid\n", "            choices are: 'missing', 'popular', 'always'\n", "        :arg suggest_size: How many suggestions to return in response\n", "        :arg suggest_text: The source text for which the suggestions should be\n", "            returned\n", "        :arg terminate_after: The maximum number of documents to collect for\n", "            each shard, upon reaching which the query execution will terminate\n", "            early.\n", "        :arg timeout: Explicit operation timeout\n", "        :arg track_scores: Whether to calculate and return scores even if they\n", "            are not used for sorting\n", "        :arg version: Specify whether to return document version as part of a\n", "            hit\n", "        \"\"\"\n", "        path = self._es_parser.make_path(index, doc_type, EsMethods.SEARCH)\n", "        result = yield self._perform_request(HttpMethod.POST, path, body=body, params=query_params)\n", "        returnValue(result)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0.012345679012345678, 0, 0.012345679012345678, 0, 0.012345679012345678, 0.02, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0.037037037037037035 ]
72
0.002774
def set_static_dns(iface, *addrs): ''' Set static DNS configuration on a Windows NIC Args: iface (str): The name of the interface to set addrs (*): One or more DNS servers to be added. To clear the list of DNS servers pass an empty list (``[]``). If undefined or ``None`` no changes will be made. Returns: dict: A dictionary containing the new DNS settings CLI Example: .. code-block:: bash salt -G 'os_family:Windows' ip.set_static_dns 'Local Area Connection' '192.168.1.1' salt -G 'os_family:Windows' ip.set_static_dns 'Local Area Connection' '192.168.1.252' '192.168.1.253' ''' if addrs is () or str(addrs[0]).lower() == 'none': return {'Interface': iface, 'DNS Server': 'No Changes'} # Clear the list of DNS servers if [] is passed if str(addrs[0]).lower() == '[]': log.debug('Clearing list of DNS servers') cmd = ['netsh', 'interface', 'ip', 'set', 'dns', 'name={0}'.format(iface), 'source=static', 'address=none'] __salt__['cmd.run'](cmd, python_shell=False) return {'Interface': iface, 'DNS Server': []} addr_index = 1 for addr in addrs: if addr_index == 1: cmd = ['netsh', 'interface', 'ip', 'set', 'dns', 'name={0}'.format(iface), 'source=static', 'address={0}'.format(addr), 'register=primary'] __salt__['cmd.run'](cmd, python_shell=False) addr_index = addr_index + 1 else: cmd = ['netsh', 'interface', 'ip', 'add', 'dns', 'name={0}'.format(iface), 'address={0}'.format(addr), 'index={0}'.format(addr_index)] __salt__['cmd.run'](cmd, python_shell=False) addr_index = addr_index + 1 return {'Interface': iface, 'DNS Server': addrs}
[ "def", "set_static_dns", "(", "iface", ",", "*", "addrs", ")", ":", "if", "addrs", "is", "(", ")", "or", "str", "(", "addrs", "[", "0", "]", ")", ".", "lower", "(", ")", "==", "'none'", ":", "return", "{", "'Interface'", ":", "iface", ",", "'DNS Server'", ":", "'No Changes'", "}", "# Clear the list of DNS servers if [] is passed", "if", "str", "(", "addrs", "[", "0", "]", ")", ".", "lower", "(", ")", "==", "'[]'", ":", "log", ".", "debug", "(", "'Clearing list of DNS servers'", ")", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'set'", ",", "'dns'", ",", "'name={0}'", ".", "format", "(", "iface", ")", ",", "'source=static'", ",", "'address=none'", "]", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "return", "{", "'Interface'", ":", "iface", ",", "'DNS Server'", ":", "[", "]", "}", "addr_index", "=", "1", "for", "addr", "in", "addrs", ":", "if", "addr_index", "==", "1", ":", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'set'", ",", "'dns'", ",", "'name={0}'", ".", "format", "(", "iface", ")", ",", "'source=static'", ",", "'address={0}'", ".", "format", "(", "addr", ")", ",", "'register=primary'", "]", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "addr_index", "=", "addr_index", "+", "1", "else", ":", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'add'", ",", "'dns'", ",", "'name={0}'", ".", "format", "(", "iface", ")", ",", "'address={0}'", ".", "format", "(", "addr", ")", ",", "'index={0}'", ".", "format", "(", "addr_index", ")", "]", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "addr_index", "=", "addr_index", "+", "1", "return", "{", "'Interface'", ":", "iface", ",", "'DNS Server'", ":", "addrs", "}" ]
37.115385
0.001514
[ "def set_static_dns(iface, *addrs):\n", " '''\n", " Set static DNS configuration on a Windows NIC\n", "\n", " Args:\n", "\n", " iface (str): The name of the interface to set\n", "\n", " addrs (*):\n", " One or more DNS servers to be added. To clear the list of DNS\n", " servers pass an empty list (``[]``). If undefined or ``None`` no\n", " changes will be made.\n", "\n", " Returns:\n", " dict: A dictionary containing the new DNS settings\n", "\n", " CLI Example:\n", "\n", " .. code-block:: bash\n", "\n", " salt -G 'os_family:Windows' ip.set_static_dns 'Local Area Connection' '192.168.1.1'\n", " salt -G 'os_family:Windows' ip.set_static_dns 'Local Area Connection' '192.168.1.252' '192.168.1.253'\n", " '''\n", " if addrs is () or str(addrs[0]).lower() == 'none':\n", " return {'Interface': iface, 'DNS Server': 'No Changes'}\n", " # Clear the list of DNS servers if [] is passed\n", " if str(addrs[0]).lower() == '[]':\n", " log.debug('Clearing list of DNS servers')\n", " cmd = ['netsh', 'interface', 'ip', 'set', 'dns',\n", " 'name={0}'.format(iface),\n", " 'source=static',\n", " 'address=none']\n", " __salt__['cmd.run'](cmd, python_shell=False)\n", " return {'Interface': iface, 'DNS Server': []}\n", " addr_index = 1\n", " for addr in addrs:\n", " if addr_index == 1:\n", " cmd = ['netsh', 'interface', 'ip', 'set', 'dns',\n", " 'name={0}'.format(iface),\n", " 'source=static',\n", " 'address={0}'.format(addr),\n", " 'register=primary']\n", " __salt__['cmd.run'](cmd, python_shell=False)\n", " addr_index = addr_index + 1\n", " else:\n", " cmd = ['netsh', 'interface', 'ip', 'add', 'dns',\n", " 'name={0}'.format(iface),\n", " 'address={0}'.format(addr),\n", " 'index={0}'.format(addr_index)]\n", " __salt__['cmd.run'](cmd, python_shell=False)\n", " addr_index = addr_index + 1\n", " return {'Interface': iface, 'DNS Server': addrs}" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010869565217391304, 0.00909090909090909, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.019230769230769232 ]
52
0.000754
def unarchive(filename, output_dir='.'):
    '''unpacks the given archive into ``output_dir``'''
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    for archive in archive_formats:
        if filename.endswith(archive_formats[archive]['suffix']):
            return subprocess.call(archive_formats[archive]['command'](output_dir, filename)) == 0
    return False
[ "def", "unarchive", "(", "filename", ",", "output_dir", "=", "'.'", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "output_dir", ")", ":", "os", ".", "makedirs", "(", "output_dir", ")", "for", "archive", "in", "archive_formats", ":", "if", "filename", ".", "endswith", "(", "archive_formats", "[", "archive", "]", "[", "'suffix'", "]", ")", ":", "return", "subprocess", ".", "call", "(", "archive_formats", "[", "archive", "]", "[", "'command'", "]", "(", "output_dir", ",", "filename", ")", ")", "==", "0", "return", "False" ]
46.75
0.013123
[ "def unarchive(filename,output_dir='.'):\n", " '''unpacks the given archive into ``output_dir``'''\n", " if not os.path.exists(output_dir):\n", " os.makedirs(output_dir)\n", " for archive in archive_formats:\n", " if filename.endswith(archive_formats[archive]['suffix']):\n", " return subprocess.call(archive_formats[archive]['command'](output_dir,filename))==0\n", " return False" ]
[ 0.025, 0, 0, 0, 0, 0, 0.03125, 0.0625 ]
8
0.014844
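`unarchive` depends on a module-level `archive_formats` mapping (and the `os`/`subprocess` imports) that are not part of the snippet. Below is a minimal sketch of what such a mapping could look like; the suffixes and command lines are assumptions for illustration, not the original definitions.

```python
import subprocess  # used by unarchive() when it runs the extraction command

# Each entry pairs a filename suffix with a callable that takes
# (output_dir, filename) and returns the argv list for subprocess.call().
archive_formats = {
    'tgz': {
        'suffix': '.tar.gz',
        'command': lambda output_dir, filename:
            ['tar', '-xzf', filename, '-C', output_dir],
    },
    'zip': {
        'suffix': '.zip',
        'command': lambda output_dir, filename:
            ['unzip', '-o', filename, '-d', output_dir],
    },
}
```

With such a mapping in place, `unarchive('data.tar.gz')` would run `tar -xzf data.tar.gz -C .` and return True when the command exits with status 0.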
def convert_destination_to_id(destination_node, destination_port, nodes): """ Convert a destination to device and port ID :param str destination_node: Destination node name :param str destination_port: Destination port name :param list nodes: list of nodes from :py:meth:`generate_nodes` :return: dict containing device ID, device name and port ID :rtype: dict """ device_id = None device_name = None port_id = None if destination_node != 'NIO': for node in nodes: if destination_node == node['properties']['name']: device_id = node['id'] device_name = destination_node for port in node['ports']: if destination_port == port['name']: port_id = port['id'] break break else: for node in nodes: if node['type'] == 'Cloud': for port in node['ports']: if destination_port.lower() == port['name'].lower(): device_id = node['id'] device_name = node['properties']['name'] port_id = port['id'] break info = {'id': device_id, 'name': device_name, 'pid': port_id} return info
[ "def", "convert_destination_to_id", "(", "destination_node", ",", "destination_port", ",", "nodes", ")", ":", "device_id", "=", "None", "device_name", "=", "None", "port_id", "=", "None", "if", "destination_node", "!=", "'NIO'", ":", "for", "node", "in", "nodes", ":", "if", "destination_node", "==", "node", "[", "'properties'", "]", "[", "'name'", "]", ":", "device_id", "=", "node", "[", "'id'", "]", "device_name", "=", "destination_node", "for", "port", "in", "node", "[", "'ports'", "]", ":", "if", "destination_port", "==", "port", "[", "'name'", "]", ":", "port_id", "=", "port", "[", "'id'", "]", "break", "break", "else", ":", "for", "node", "in", "nodes", ":", "if", "node", "[", "'type'", "]", "==", "'Cloud'", ":", "for", "port", "in", "node", "[", "'ports'", "]", ":", "if", "destination_port", ".", "lower", "(", ")", "==", "port", "[", "'name'", "]", ".", "lower", "(", ")", ":", "device_id", "=", "node", "[", "'id'", "]", "device_name", "=", "node", "[", "'properties'", "]", "[", "'name'", "]", "port_id", "=", "port", "[", "'id'", "]", "break", "info", "=", "{", "'id'", ":", "device_id", ",", "'name'", ":", "device_name", ",", "'pid'", ":", "port_id", "}", "return", "info" ]
39.297297
0.001342
[ "def convert_destination_to_id(destination_node, destination_port, nodes):\n", " \"\"\"\n", " Convert a destination to device and port ID\n", "\n", " :param str destination_node: Destination node name\n", " :param str destination_port: Destination port name\n", " :param list nodes: list of nodes from :py:meth:`generate_nodes`\n", " :return: dict containing device ID, device name and port ID\n", " :rtype: dict\n", " \"\"\"\n", " device_id = None\n", " device_name = None\n", " port_id = None\n", " if destination_node != 'NIO':\n", " for node in nodes:\n", " if destination_node == node['properties']['name']:\n", " device_id = node['id']\n", " device_name = destination_node\n", " for port in node['ports']:\n", " if destination_port == port['name']:\n", " port_id = port['id']\n", " break\n", " break\n", " else:\n", " for node in nodes:\n", " if node['type'] == 'Cloud':\n", " for port in node['ports']:\n", " if destination_port.lower() == port['name'].lower():\n", " device_id = node['id']\n", " device_name = node['properties']['name']\n", " port_id = port['id']\n", " break\n", "\n", " info = {'id': device_id,\n", " 'name': device_name,\n", " 'pid': port_id}\n", " return info" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842 ]
37
0.003675
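A small worked example of the node structure `convert_destination_to_id` expects; the IDs, names and port labels below are made up for illustration.

```python
nodes = [
    {'id': 1, 'type': 'Router',
     'properties': {'name': 'R1'},
     'ports': [{'id': 10, 'name': 'FastEthernet0/0'}]},
    {'id': 2, 'type': 'Cloud',
     'properties': {'name': 'Cloud1'},
     'ports': [{'id': 20, 'name': 'nio_gen_eth:eth0'}]},
]

# Ordinary destination: matched by node name, then by port name.
convert_destination_to_id('R1', 'FastEthernet0/0', nodes)
# -> {'id': 1, 'name': 'R1', 'pid': 10}

# 'NIO' destination: matched against Cloud nodes by port name, case-insensitively.
convert_destination_to_id('NIO', 'nio_gen_eth:eth0', nodes)
# -> {'id': 2, 'name': 'Cloud1', 'pid': 20}
```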
def _match_pattern(filename, include, exclude, real, path, follow): """Match includes and excludes.""" if real: symlinks = {} if isinstance(filename, bytes): curdir = os.fsencode(os.curdir) mount = RE_BWIN_MOUNT if util.platform() == "windows" else RE_BMOUNT else: curdir = os.curdir mount = RE_WIN_MOUNT if util.platform() == "windows" else RE_MOUNT if not mount.match(filename): exists = os.path.lexists(os.path.join(curdir, filename)) else: exists = os.path.lexists(filename) if not exists: return False if path: return _match_real(filename, include, exclude, follow, symlinks) matched = False for pattern in include: if pattern.fullmatch(filename): matched = True break if not include and exclude: matched = True if matched: matched = True if exclude: for pattern in exclude: if not pattern.fullmatch(filename): matched = False break return matched
[ "def", "_match_pattern", "(", "filename", ",", "include", ",", "exclude", ",", "real", ",", "path", ",", "follow", ")", ":", "if", "real", ":", "symlinks", "=", "{", "}", "if", "isinstance", "(", "filename", ",", "bytes", ")", ":", "curdir", "=", "os", ".", "fsencode", "(", "os", ".", "curdir", ")", "mount", "=", "RE_BWIN_MOUNT", "if", "util", ".", "platform", "(", ")", "==", "\"windows\"", "else", "RE_BMOUNT", "else", ":", "curdir", "=", "os", ".", "curdir", "mount", "=", "RE_WIN_MOUNT", "if", "util", ".", "platform", "(", ")", "==", "\"windows\"", "else", "RE_MOUNT", "if", "not", "mount", ".", "match", "(", "filename", ")", ":", "exists", "=", "os", ".", "path", ".", "lexists", "(", "os", ".", "path", ".", "join", "(", "curdir", ",", "filename", ")", ")", "else", ":", "exists", "=", "os", ".", "path", ".", "lexists", "(", "filename", ")", "if", "not", "exists", ":", "return", "False", "if", "path", ":", "return", "_match_real", "(", "filename", ",", "include", ",", "exclude", ",", "follow", ",", "symlinks", ")", "matched", "=", "False", "for", "pattern", "in", "include", ":", "if", "pattern", ".", "fullmatch", "(", "filename", ")", ":", "matched", "=", "True", "break", "if", "not", "include", "and", "exclude", ":", "matched", "=", "True", "if", "matched", ":", "matched", "=", "True", "if", "exclude", ":", "for", "pattern", "in", "exclude", ":", "if", "not", "pattern", ".", "fullmatch", "(", "filename", ")", ":", "matched", "=", "False", "break", "return", "matched" ]
28.820513
0.001721
[ "def _match_pattern(filename, include, exclude, real, path, follow):\n", " \"\"\"Match includes and excludes.\"\"\"\n", "\n", " if real:\n", " symlinks = {}\n", " if isinstance(filename, bytes):\n", " curdir = os.fsencode(os.curdir)\n", " mount = RE_BWIN_MOUNT if util.platform() == \"windows\" else RE_BMOUNT\n", " else:\n", " curdir = os.curdir\n", " mount = RE_WIN_MOUNT if util.platform() == \"windows\" else RE_MOUNT\n", "\n", " if not mount.match(filename):\n", " exists = os.path.lexists(os.path.join(curdir, filename))\n", " else:\n", " exists = os.path.lexists(filename)\n", "\n", " if not exists:\n", " return False\n", " if path:\n", " return _match_real(filename, include, exclude, follow, symlinks)\n", "\n", " matched = False\n", " for pattern in include:\n", " if pattern.fullmatch(filename):\n", " matched = True\n", " break\n", "\n", " if not include and exclude:\n", " matched = True\n", "\n", " if matched:\n", " matched = True\n", " if exclude:\n", " for pattern in exclude:\n", " if not pattern.fullmatch(filename):\n", " matched = False\n", " break\n", " return matched" ]
[ 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05555555555555555 ]
39
0.001741
def get(self): '''Return a dictionary that represents the Tcl array''' value = {} for (elementname, elementvar) in self._elementvars.items(): value[elementname] = elementvar.get() return value
[ "def", "get", "(", "self", ")", ":", "value", "=", "{", "}", "for", "(", "elementname", ",", "elementvar", ")", "in", "self", ".", "_elementvars", ".", "items", "(", ")", ":", "value", "[", "elementname", "]", "=", "elementvar", ".", "get", "(", ")", "return", "value" ]
38.5
0.008475
[ "def get(self):\n", " '''Return a dictionary that represents the Tcl array'''\n", " value = {}\n", " for (elementname, elementvar) in self._elementvars.items():\n", " value[elementname] = elementvar.get()\n", " return value" ]
[ 0, 0.015625, 0, 0, 0, 0.05 ]
6
0.010938
def assert_not_visible(self, selector, testid=None, **kwargs): """Assert that the element is not visible in the dom Args: selector (str): the selector used to find the element test_id (str): the test_id or a str Kwargs: wait_until_not_visible (bool) highlight (bool) Returns: bool: True is the assertion succeed; False otherwise. """ self.info_log( "Assert not visible selector(%s) testid(%s)" % (selector, testid) ) highlight = kwargs.get( 'highlight', BROME_CONFIG['highlight']['highlight_on_assertion_failure'] ) self.debug_log("effective highlight: %s" % highlight) wait_until_not_visible = kwargs.get( 'wait_until_not_visible', BROME_CONFIG['proxy_driver']['wait_until_not_visible_before_assert_not_visible'] # noqa ) self.debug_log( "effective wait_until_not_visible: %s" % wait_until_not_visible ) if wait_until_not_visible: self.wait_until_not_visible(selector, raise_exception=False) element = self.find( selector, raise_exception=False, wait_until_visible=False, wait_until_present=False ) if element and element.is_displayed(raise_exception=False): data = self.execute_script( "return arguments[0].getBoundingClientRect();", element._element ) if highlight: element.highlight( style=BROME_CONFIG['highlight']['style_on_assertion_failure'] # noqa ) if testid is not None: self.create_test_result(testid, False, extra_data={ 'bounding_client_rect': data, 'video_x_offset': self.browser_config.get('video_x_offset', 0), # noqa 'video_y_offset': self.browser_config.get('video_y_offset', 0) # noqa }) return False else: if testid is not None: self.create_test_result(testid, True) return True
[ "def", "assert_not_visible", "(", "self", ",", "selector", ",", "testid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "info_log", "(", "\"Assert not visible selector(%s) testid(%s)\"", "%", "(", "selector", ",", "testid", ")", ")", "highlight", "=", "kwargs", ".", "get", "(", "'highlight'", ",", "BROME_CONFIG", "[", "'highlight'", "]", "[", "'highlight_on_assertion_failure'", "]", ")", "self", ".", "debug_log", "(", "\"effective highlight: %s\"", "%", "highlight", ")", "wait_until_not_visible", "=", "kwargs", ".", "get", "(", "'wait_until_not_visible'", ",", "BROME_CONFIG", "[", "'proxy_driver'", "]", "[", "'wait_until_not_visible_before_assert_not_visible'", "]", "# noqa", ")", "self", ".", "debug_log", "(", "\"effective wait_until_not_visible: %s\"", "%", "wait_until_not_visible", ")", "if", "wait_until_not_visible", ":", "self", ".", "wait_until_not_visible", "(", "selector", ",", "raise_exception", "=", "False", ")", "element", "=", "self", ".", "find", "(", "selector", ",", "raise_exception", "=", "False", ",", "wait_until_visible", "=", "False", ",", "wait_until_present", "=", "False", ")", "if", "element", "and", "element", ".", "is_displayed", "(", "raise_exception", "=", "False", ")", ":", "data", "=", "self", ".", "execute_script", "(", "\"return arguments[0].getBoundingClientRect();\"", ",", "element", ".", "_element", ")", "if", "highlight", ":", "element", ".", "highlight", "(", "style", "=", "BROME_CONFIG", "[", "'highlight'", "]", "[", "'style_on_assertion_failure'", "]", "# noqa", ")", "if", "testid", "is", "not", "None", ":", "self", ".", "create_test_result", "(", "testid", ",", "False", ",", "extra_data", "=", "{", "'bounding_client_rect'", ":", "data", ",", "'video_x_offset'", ":", "self", ".", "browser_config", ".", "get", "(", "'video_x_offset'", ",", "0", ")", ",", "# noqa", "'video_y_offset'", ":", "self", ".", "browser_config", ".", "get", "(", "'video_y_offset'", ",", "0", ")", "# noqa", "}", ")", "return", "False", "else", ":", "if", "testid", "is", "not", "None", ":", "self", ".", "create_test_result", "(", "testid", ",", "True", ")", "return", "True" ]
33.90625
0.000896
[ "def assert_not_visible(self, selector, testid=None, **kwargs):\n", " \"\"\"Assert that the element is not visible in the dom\n", "\n", " Args:\n", " selector (str): the selector used to find the element\n", " test_id (str): the test_id or a str\n", "\n", " Kwargs:\n", " wait_until_not_visible (bool)\n", " highlight (bool)\n", "\n", " Returns:\n", " bool: True is the assertion succeed; False otherwise.\n", " \"\"\"\n", " self.info_log(\n", " \"Assert not visible selector(%s) testid(%s)\" % (selector, testid)\n", " )\n", "\n", " highlight = kwargs.get(\n", " 'highlight',\n", " BROME_CONFIG['highlight']['highlight_on_assertion_failure']\n", " )\n", " self.debug_log(\"effective highlight: %s\" % highlight)\n", "\n", " wait_until_not_visible = kwargs.get(\n", " 'wait_until_not_visible',\n", " BROME_CONFIG['proxy_driver']['wait_until_not_visible_before_assert_not_visible'] # noqa\n", " )\n", " self.debug_log(\n", " \"effective wait_until_not_visible: %s\" % wait_until_not_visible\n", " )\n", "\n", " if wait_until_not_visible:\n", " self.wait_until_not_visible(selector, raise_exception=False)\n", "\n", " element = self.find(\n", " selector,\n", " raise_exception=False,\n", " wait_until_visible=False,\n", " wait_until_present=False\n", " )\n", " if element and element.is_displayed(raise_exception=False):\n", " data = self.execute_script(\n", " \"return arguments[0].getBoundingClientRect();\",\n", " element._element\n", " )\n", "\n", " if highlight:\n", " element.highlight(\n", " style=BROME_CONFIG['highlight']['style_on_assertion_failure'] # noqa\n", " )\n", " if testid is not None:\n", " self.create_test_result(testid, False, extra_data={\n", " 'bounding_client_rect': data,\n", " 'video_x_offset': self.browser_config.get('video_x_offset', 0), # noqa\n", " 'video_y_offset': self.browser_config.get('video_y_offset', 0) # noqa\n", " })\n", "\n", " return False\n", " else:\n", " if testid is not None:\n", " self.create_test_result(testid, True)\n", "\n", " return True" ]
[ 0, 0.01639344262295082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216 ]
64
0.000935
def aggregator(name, func, *args, type=None): 'Define simple aggregator `name` that calls func(values)' def _func(col, rows): # wrap builtins so they can have a .type vals = list(col.getValues(rows)) try: return func(vals, *args) except Exception as e: if len(vals) == 0: return None return e aggregators[name] = _defaggr(name, type, _func)
[ "def", "aggregator", "(", "name", ",", "func", ",", "*", "args", ",", "type", "=", "None", ")", ":", "def", "_func", "(", "col", ",", "rows", ")", ":", "# wrap builtins so they can have a .type", "vals", "=", "list", "(", "col", ".", "getValues", "(", "rows", ")", ")", "try", ":", "return", "func", "(", "vals", ",", "*", "args", ")", "except", "Exception", "as", "e", ":", "if", "len", "(", "vals", ")", "==", "0", ":", "return", "None", "return", "e", "aggregators", "[", "name", "]", "=", "_defaggr", "(", "name", ",", "type", ",", "_func", ")" ]
34.916667
0.002326
[ "def aggregator(name, func, *args, type=None):\n", " 'Define simple aggregator `name` that calls func(values)'\n", " def _func(col, rows): # wrap builtins so they can have a .type\n", " vals = list(col.getValues(rows))\n", " try:\n", " return func(vals, *args)\n", " except Exception as e:\n", " if len(vals) == 0:\n", " return None\n", " return e\n", "\n", " aggregators[name] = _defaggr(name, type, _func)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0196078431372549 ]
12
0.001634
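Example registrations using the `aggregator` helper above. The surrounding module's `aggregators` dict and `_defaggr` factory are assumed to exist as in the snippet; the names and `type` values here are illustrative, not the application's built-in set.

```python
import statistics

aggregator('min', min, type=float)
aggregator('max', max, type=float)
aggregator('median', statistics.median, type=float)

# Extra positional args are forwarded as func(vals, *args), e.g. a fixed quantile:
aggregator('q10', lambda vals, q: sorted(vals)[int(q * len(vals))], 0.1, type=float)
```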
def is_valid_nc3_name(s): """Test whether an object can be validly converted to a netCDF-3 dimension, variable or attribute name Earlier versions of the netCDF C-library reference implementation enforced a more restricted set of characters in creating new names, but permitted reading names containing arbitrary bytes. This specification extends the permitted characters in names to include multi-byte UTF-8 encoded Unicode and additional printing characters from the US-ASCII alphabet. The first character of a name must be alphanumeric, a multi-byte UTF-8 character, or '_' (reserved for special names with meaning to implementations, such as the "_FillValue" attribute). Subsequent characters may also include printing special characters, except for '/' which is not allowed in names. Names that have trailing space characters are also not permitted. """ if not isinstance(s, str): return False if not isinstance(s, str): s = s.decode('utf-8') num_bytes = len(s.encode('utf-8')) return ((unicodedata.normalize('NFC', s) == s) and (s not in _reserved_names) and (num_bytes >= 0) and ('/' not in s) and (s[-1] != ' ') and (_isalnumMUTF8(s[0]) or (s[0] == '_')) and all((_isalnumMUTF8(c) or c in _specialchars for c in s)))
[ "def", "is_valid_nc3_name", "(", "s", ")", ":", "if", "not", "isinstance", "(", "s", ",", "str", ")", ":", "return", "False", "if", "not", "isinstance", "(", "s", ",", "str", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf-8'", ")", "num_bytes", "=", "len", "(", "s", ".", "encode", "(", "'utf-8'", ")", ")", "return", "(", "(", "unicodedata", ".", "normalize", "(", "'NFC'", ",", "s", ")", "==", "s", ")", "and", "(", "s", "not", "in", "_reserved_names", ")", "and", "(", "num_bytes", ">=", "0", ")", "and", "(", "'/'", "not", "in", "s", ")", "and", "(", "s", "[", "-", "1", "]", "!=", "' '", ")", "and", "(", "_isalnumMUTF8", "(", "s", "[", "0", "]", ")", "or", "(", "s", "[", "0", "]", "==", "'_'", ")", ")", "and", "all", "(", "(", "_isalnumMUTF8", "(", "c", ")", "or", "c", "in", "_specialchars", "for", "c", "in", "s", ")", ")", ")" ]
46.896552
0.00072
[ "def is_valid_nc3_name(s):\n", " \"\"\"Test whether an object can be validly converted to a netCDF-3\n", " dimension, variable or attribute name\n", "\n", " Earlier versions of the netCDF C-library reference implementation\n", " enforced a more restricted set of characters in creating new names,\n", " but permitted reading names containing arbitrary bytes. This\n", " specification extends the permitted characters in names to include\n", " multi-byte UTF-8 encoded Unicode and additional printing characters\n", " from the US-ASCII alphabet. The first character of a name must be\n", " alphanumeric, a multi-byte UTF-8 character, or '_' (reserved for\n", " special names with meaning to implementations, such as the\n", " \"_FillValue\" attribute). Subsequent characters may also include\n", " printing special characters, except for '/' which is not allowed in\n", " names. Names that have trailing space characters are also not\n", " permitted.\n", " \"\"\"\n", " if not isinstance(s, str):\n", " return False\n", " if not isinstance(s, str):\n", " s = s.decode('utf-8')\n", " num_bytes = len(s.encode('utf-8'))\n", " return ((unicodedata.normalize('NFC', s) == s) and\n", " (s not in _reserved_names) and\n", " (num_bytes >= 0) and\n", " ('/' not in s) and\n", " (s[-1] != ' ') and\n", " (_isalnumMUTF8(s[0]) or (s[0] == '_')) and\n", " all((_isalnumMUTF8(c) or c in _specialchars for c in s)))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014492753623188406 ]
29
0.0005
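A few concrete cases implied by the checks in `is_valid_nc3_name`, assuming `_reserved_names` holds the CDL type keywords and that `_isalnumMUTF8`/`_specialchars` behave as their names suggest (those helpers are not shown here):

```python
assert is_valid_nc3_name('temperature')       # plain alphanumeric name
assert is_valid_nc3_name('_FillValue')        # leading underscore is permitted
assert not is_valid_nc3_name('bad/name')      # '/' is never allowed
assert not is_valid_nc3_name('trailing ')     # trailing space is rejected
assert not is_valid_nc3_name(b'not-a-str')    # non-str input returns False
```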
def question_default_add_related_pks(self, obj): """Add related primary keys to a Question instance.""" if not hasattr(obj, '_choice_pks'): obj._choice_pks = list(obj.choices.values_list('pk', flat=True))
[ "def", "question_default_add_related_pks", "(", "self", ",", "obj", ")", ":", "if", "not", "hasattr", "(", "obj", ",", "'_choice_pks'", ")", ":", "obj", ".", "_choice_pks", "=", "list", "(", "obj", ".", "choices", ".", "values_list", "(", "'pk'", ",", "flat", "=", "True", ")", ")" ]
57.25
0.008621
[ "def question_default_add_related_pks(self, obj):\n", " \"\"\"Add related primary keys to a Question instance.\"\"\"\n", " if not hasattr(obj, '_choice_pks'):\n", " obj._choice_pks = list(obj.choices.values_list('pk', flat=True))" ]
[ 0, 0.015873015873015872, 0, 0.013157894736842105 ]
4
0.007258
def run_async(self): """ Spawns a new thread that runs the message loop until the Pebble disconnects. ``run_async`` will call :meth:`fetch_watch_info` on your behalf, and block until it receives a response. """ thread = threading.Thread(target=self.run_sync) thread.daemon = True thread.name = "PebbleConnection" thread.start() self.fetch_watch_info()
[ "def", "run_async", "(", "self", ")", ":", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "run_sync", ")", "thread", ".", "daemon", "=", "True", "thread", ".", "name", "=", "\"PebbleConnection\"", "thread", ".", "start", "(", ")", "self", ".", "fetch_watch_info", "(", ")" ]
41.4
0.009456
[ "def run_async(self):\n", " \"\"\"\n", " Spawns a new thread that runs the message loop until the Pebble disconnects.\n", " ``run_async`` will call :meth:`fetch_watch_info` on your behalf, and block until it receives a response.\n", " \"\"\"\n", " thread = threading.Thread(target=self.run_sync)\n", " thread.daemon = True\n", " thread.name = \"PebbleConnection\"\n", " thread.start()\n", " self.fetch_watch_info()" ]
[ 0, 0.08333333333333333, 0.011764705882352941, 0.008849557522123894, 0, 0, 0, 0, 0, 0.03225806451612903 ]
10
0.013621
def ordered_symbols(self): """ :return: list of all symbols in this model, topologically sorted so they can be evaluated in the correct order. Within each group of equal priority symbols, we sort by the order of the derivative. """ key_func = lambda s: [isinstance(s, sympy.Derivative), isinstance(s, sympy.Derivative) and s.derivative_count] symbols = [] for symbol in toposort(self.connectivity_mapping): symbols.extend(sorted(symbol, key=key_func)) return symbols
[ "def", "ordered_symbols", "(", "self", ")", ":", "key_func", "=", "lambda", "s", ":", "[", "isinstance", "(", "s", ",", "sympy", ".", "Derivative", ")", ",", "isinstance", "(", "s", ",", "sympy", ".", "Derivative", ")", "and", "s", ".", "derivative_count", "]", "symbols", "=", "[", "]", "for", "symbol", "in", "toposort", "(", "self", ".", "connectivity_mapping", ")", ":", "symbols", ".", "extend", "(", "sorted", "(", "symbol", ",", "key", "=", "key_func", ")", ")", "return", "symbols" ]
39
0.011686
[ "def ordered_symbols(self):\n", " \"\"\"\n", " :return: list of all symbols in this model, topologically sorted so they\n", " can be evaluated in the correct order.\n", "\n", " Within each group of equal priority symbols, we sort by the order of\n", " the derivative.\n", " \"\"\"\n", " key_func = lambda s: [isinstance(s, sympy.Derivative),\n", " isinstance(s, sympy.Derivative) and s.derivative_count]\n", " symbols = []\n", " for symbol in toposort(self.connectivity_mapping):\n", " symbols.extend(sorted(symbol, key=key_func))\n", "\n", " return symbols" ]
[ 0, 0.08333333333333333, 0.012345679012345678, 0, 0, 0.012345679012345678, 0, 0, 0.015873015873015872, 0.024096385542168676, 0, 0, 0, 0, 0.045454545454545456 ]
15
0.012897
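`ordered_symbols` relies on `toposort` grouping `connectivity_mapping` into dependency layers. A sketch of that grouping, assuming the behaviour of the `toposort` package (each yielded set contains items whose dependencies have already appeared):

```python
from toposort import toposort

# Hypothetical dependency graph: y is computed from a, b and x.
connectivity_mapping = {'y': {'a', 'b', 'x'}}

list(toposort(connectivity_mapping))
# -> [{'a', 'b', 'x'}, {'y'}]  independent symbols first, dependents afterwards
```

Within each yielded group the method then places plain symbols before `sympy.Derivative` instances and sorts derivatives by their `derivative_count`.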
def print_usage(self, hint=None): """Usage format should be like: Lineno | Content 1 | Script description (__doc__) 2 | Usage: {script name} [COMMAND] [ARGUMENTS] 3 | \n 4 | Commands: 5 | cmd1 cmd1 description. 6 | cmd2isverylong cmd2 description, and it is also 7 | long as shit. 7 | cmd3 cmd3 description. """ buf = [] # Description if __doc__: buf.append(__doc__) # Usage script_name = sys.argv[0] buf.append('Usage: %s [COMMAND] [ARGUMENTS]' % script_name) buf.append('') buf.append('Commands:') # Commands indent_size = 2 tab_size = 4 doc_width = 50 grid_len = max(len(i) for i in list(self._commands.keys())) + tab_size for name in self._commands_list: command = self._commands[name] line = ' ' * indent_size + name + ' ' * (grid_len - len(name)) doc = command.doc pieces = [doc[i:i + doc_width] for i in range(0, len(doc), doc_width)] line += pieces[0] if len(pieces) > 1: line += '\n' line += '\n'.join(' ' * (grid_len + 2) + i for i in pieces[1:]) buf.append(line) print('\n'.join(buf))
[ "def", "print_usage", "(", "self", ",", "hint", "=", "None", ")", ":", "buf", "=", "[", "]", "# Description", "if", "__doc__", ":", "buf", ".", "append", "(", "__doc__", ")", "# Usage", "script_name", "=", "sys", ".", "argv", "[", "0", "]", "buf", ".", "append", "(", "'Usage: %s [COMMAND] [ARGUMENTS]'", "%", "script_name", ")", "buf", ".", "append", "(", "''", ")", "buf", ".", "append", "(", "'Commands:'", ")", "# Commands", "indent_size", "=", "2", "tab_size", "=", "4", "doc_width", "=", "50", "grid_len", "=", "max", "(", "len", "(", "i", ")", "for", "i", "in", "list", "(", "self", ".", "_commands", ".", "keys", "(", ")", ")", ")", "+", "tab_size", "for", "name", "in", "self", ".", "_commands_list", ":", "command", "=", "self", ".", "_commands", "[", "name", "]", "line", "=", "' '", "*", "indent_size", "+", "name", "+", "' '", "*", "(", "grid_len", "-", "len", "(", "name", ")", ")", "doc", "=", "command", ".", "doc", "pieces", "=", "[", "doc", "[", "i", ":", "i", "+", "doc_width", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "doc", ")", ",", "doc_width", ")", "]", "line", "+=", "pieces", "[", "0", "]", "if", "len", "(", "pieces", ")", ">", "1", ":", "line", "+=", "'\\n'", "line", "+=", "'\\n'", ".", "join", "(", "' '", "*", "(", "grid_len", "+", "2", ")", "+", "i", "for", "i", "in", "pieces", "[", "1", ":", "]", ")", "buf", ".", "append", "(", "line", ")", "print", "(", "'\\n'", ".", "join", "(", "buf", ")", ")" ]
31.681818
0.002088
[ "def print_usage(self, hint=None):\n", " \"\"\"Usage format should be like:\n", " Lineno | Content\n", " 1 | Script description (__doc__)\n", " 2 | Usage: {script name} [COMMAND] [ARGUMENTS]\n", " 3 | \\n\n", " 4 | Commands:\n", " 5 | cmd1 cmd1 description.\n", " 6 | cmd2isverylong cmd2 description, and it is also\n", " 7 | long as shit.\n", " 7 | cmd3 cmd3 description.\n", " \"\"\"\n", " buf = []\n", "\n", " # Description\n", " if __doc__:\n", " buf.append(__doc__)\n", "\n", " # Usage\n", " script_name = sys.argv[0]\n", " buf.append('Usage: %s [COMMAND] [ARGUMENTS]' % script_name)\n", "\n", " buf.append('')\n", " buf.append('Commands:')\n", "\n", " # Commands\n", " indent_size = 2\n", " tab_size = 4\n", " doc_width = 50\n", " grid_len = max(len(i) for i in list(self._commands.keys())) + tab_size\n", "\n", " for name in self._commands_list:\n", " command = self._commands[name]\n", " line = ' ' * indent_size + name + ' ' * (grid_len - len(name))\n", " doc = command.doc\n", " pieces = [doc[i:i + doc_width] for i in range(0, len(doc), doc_width)]\n", " line += pieces[0]\n", " if len(pieces) > 1:\n", " line += '\\n'\n", " line += '\\n'.join(' ' * (grid_len + 2) + i for i in pieces[1:])\n", "\n", " buf.append(line)\n", "\n", " print('\\n'.join(buf))" ]
[ 0, 0.025, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0.034482758620689655 ]
44
0.001626
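To make the column layout in `print_usage` concrete, here is the arithmetic for the two commands used in its own docstring:

```python
# tab_size = 4, indent_size = 2, commands 'cmd1' and 'cmd2isverylong'
grid_len = max(len('cmd1'), len('cmd2isverylong')) + 4   # 14 + 4 = 18
# The first description line starts at indent_size + grid_len = 2 + 18 = 20,
# and wrapped continuation lines are indented by grid_len + 2 = 20 columns,
# so every line of a description ends up in the same column.
```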
def convert_to_sympy_matrix(expr, full_space=None): """Convert a QNET expression to an explicit ``n x n`` instance of `sympy.Matrix`, where ``n`` is the dimension of `full_space`. The entries of the matrix may contain symbols. Parameters: expr: a QNET expression full_space (qnet.algebra.hilbert_space_algebra.HilbertSpace): The Hilbert space in which `expr` is defined. If not given, ``expr.space`` is used. The Hilbert space must have a well-defined basis. Raises: qnet.algebra.hilbert_space_algebra.BasisNotSetError: if `full_space` does not have a defined basis ValueError: if `expr` is not in `full_space`, or if `expr` cannot be converted. """ if full_space is None: full_space = expr.space if not expr.space.is_tensor_factor_of(full_space): raise ValueError("expr must be in full_space") if expr is IdentityOperator: return sympy.eye(full_space.dimension) elif expr is ZeroOperator: return 0 elif isinstance(expr, LocalOperator): n = full_space.dimension if full_space != expr.space: all_spaces = full_space.local_factors own_space_index = all_spaces.index(expr.space) factors = [sympy.eye(s.dimension) for s in all_spaces[:own_space_index]] factors.append(convert_to_sympy_matrix(expr, expr.space)) factors.extend([sympy.eye(s.dimension) for s in all_spaces[own_space_index + 1:]]) return tensor(*factors) if isinstance(expr, (Create, Jz, Jplus)): return SympyCreate(n) elif isinstance(expr, (Destroy, Jminus)): return SympyCreate(n).H elif isinstance(expr, Phase): phi = expr.phase result = sympy.zeros(n) for i in range(n): result[i, i] = sympy.exp(sympy.I * i * phi) return result elif isinstance(expr, Displace): alpha = expr.operands[1] a = SympyCreate(n) return (alpha * a - alpha.conjugate() * a.H).exp() elif isinstance(expr, Squeeze): eta = expr.operands[1] a = SympyCreate(n) return ((eta/2) * a**2 - (eta.conjugate()/2) * (a.H)**2).exp() elif isinstance(expr, LocalSigma): ket = basis_state(expr.index_j, n) bra = basis_state(expr.index_k, n).H return ket * bra else: raise ValueError("Cannot convert '%s' of type %s" % (str(expr), type(expr))) elif (isinstance(expr, Operator) and isinstance(expr, Operation)): if isinstance(expr, OperatorPlus): s = convert_to_sympy_matrix(expr.operands[0], full_space) for op in expr.operands[1:]: s += convert_to_sympy_matrix(op, full_space) return s elif isinstance(expr, OperatorTimes): # if any factor acts non-locally, we need to expand distributively. 
if any(len(op.space) > 1 for op in expr.operands): se = expr.expand() if se == expr: raise ValueError("Cannot represent as sympy matrix: %s" % expr) return convert_to_sympy_matrix(se, full_space) all_spaces = full_space.local_factors by_space = [] ck = 0 for ls in all_spaces: # group factors by associated local space ls_ops = [convert_to_sympy_matrix(o, o.space) for o in expr.operands if o.space == ls] if len(ls_ops): # compute factor associated with local space by_space.append(ls_ops[0]) for ls_op in ls_ops[1:]: by_space[-1] *= ls_op ck += len(ls_ops) else: # if trivial action, take identity matrix by_space.append(sympy.eye(ls.dimension)) assert ck == len(expr.operands) # combine local factors in tensor product if len(by_space) == 1: return by_space[0] else: return tensor(*by_space) elif isinstance(expr, Adjoint): return convert_to_sympy_matrix(expr.operand, full_space).H elif isinstance(expr, PseudoInverse): raise NotImplementedError( 'Cannot convert PseudoInverse to sympy matrix') elif isinstance(expr, NullSpaceProjector): raise NotImplementedError( 'Cannot convert NullSpaceProjector to sympy') elif isinstance(expr, ScalarTimesOperator): return expr.coeff * convert_to_sympy_matrix(expr.term, full_space) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr))) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr)))
[ "def", "convert_to_sympy_matrix", "(", "expr", ",", "full_space", "=", "None", ")", ":", "if", "full_space", "is", "None", ":", "full_space", "=", "expr", ".", "space", "if", "not", "expr", ".", "space", ".", "is_tensor_factor_of", "(", "full_space", ")", ":", "raise", "ValueError", "(", "\"expr must be in full_space\"", ")", "if", "expr", "is", "IdentityOperator", ":", "return", "sympy", ".", "eye", "(", "full_space", ".", "dimension", ")", "elif", "expr", "is", "ZeroOperator", ":", "return", "0", "elif", "isinstance", "(", "expr", ",", "LocalOperator", ")", ":", "n", "=", "full_space", ".", "dimension", "if", "full_space", "!=", "expr", ".", "space", ":", "all_spaces", "=", "full_space", ".", "local_factors", "own_space_index", "=", "all_spaces", ".", "index", "(", "expr", ".", "space", ")", "factors", "=", "[", "sympy", ".", "eye", "(", "s", ".", "dimension", ")", "for", "s", "in", "all_spaces", "[", ":", "own_space_index", "]", "]", "factors", ".", "append", "(", "convert_to_sympy_matrix", "(", "expr", ",", "expr", ".", "space", ")", ")", "factors", ".", "extend", "(", "[", "sympy", ".", "eye", "(", "s", ".", "dimension", ")", "for", "s", "in", "all_spaces", "[", "own_space_index", "+", "1", ":", "]", "]", ")", "return", "tensor", "(", "*", "factors", ")", "if", "isinstance", "(", "expr", ",", "(", "Create", ",", "Jz", ",", "Jplus", ")", ")", ":", "return", "SympyCreate", "(", "n", ")", "elif", "isinstance", "(", "expr", ",", "(", "Destroy", ",", "Jminus", ")", ")", ":", "return", "SympyCreate", "(", "n", ")", ".", "H", "elif", "isinstance", "(", "expr", ",", "Phase", ")", ":", "phi", "=", "expr", ".", "phase", "result", "=", "sympy", ".", "zeros", "(", "n", ")", "for", "i", "in", "range", "(", "n", ")", ":", "result", "[", "i", ",", "i", "]", "=", "sympy", ".", "exp", "(", "sympy", ".", "I", "*", "i", "*", "phi", ")", "return", "result", "elif", "isinstance", "(", "expr", ",", "Displace", ")", ":", "alpha", "=", "expr", ".", "operands", "[", "1", "]", "a", "=", "SympyCreate", "(", "n", ")", "return", "(", "alpha", "*", "a", "-", "alpha", ".", "conjugate", "(", ")", "*", "a", ".", "H", ")", ".", "exp", "(", ")", "elif", "isinstance", "(", "expr", ",", "Squeeze", ")", ":", "eta", "=", "expr", ".", "operands", "[", "1", "]", "a", "=", "SympyCreate", "(", "n", ")", "return", "(", "(", "eta", "/", "2", ")", "*", "a", "**", "2", "-", "(", "eta", ".", "conjugate", "(", ")", "/", "2", ")", "*", "(", "a", ".", "H", ")", "**", "2", ")", ".", "exp", "(", ")", "elif", "isinstance", "(", "expr", ",", "LocalSigma", ")", ":", "ket", "=", "basis_state", "(", "expr", ".", "index_j", ",", "n", ")", "bra", "=", "basis_state", "(", "expr", ".", "index_k", ",", "n", ")", ".", "H", "return", "ket", "*", "bra", "else", ":", "raise", "ValueError", "(", "\"Cannot convert '%s' of type %s\"", "%", "(", "str", "(", "expr", ")", ",", "type", "(", "expr", ")", ")", ")", "elif", "(", "isinstance", "(", "expr", ",", "Operator", ")", "and", "isinstance", "(", "expr", ",", "Operation", ")", ")", ":", "if", "isinstance", "(", "expr", ",", "OperatorPlus", ")", ":", "s", "=", "convert_to_sympy_matrix", "(", "expr", ".", "operands", "[", "0", "]", ",", "full_space", ")", "for", "op", "in", "expr", ".", "operands", "[", "1", ":", "]", ":", "s", "+=", "convert_to_sympy_matrix", "(", "op", ",", "full_space", ")", "return", "s", "elif", "isinstance", "(", "expr", ",", "OperatorTimes", ")", ":", "# if any factor acts non-locally, we need to expand distributively.", "if", "any", "(", "len", "(", "op", ".", "space", ")", ">", 
"1", "for", "op", "in", "expr", ".", "operands", ")", ":", "se", "=", "expr", ".", "expand", "(", ")", "if", "se", "==", "expr", ":", "raise", "ValueError", "(", "\"Cannot represent as sympy matrix: %s\"", "%", "expr", ")", "return", "convert_to_sympy_matrix", "(", "se", ",", "full_space", ")", "all_spaces", "=", "full_space", ".", "local_factors", "by_space", "=", "[", "]", "ck", "=", "0", "for", "ls", "in", "all_spaces", ":", "# group factors by associated local space", "ls_ops", "=", "[", "convert_to_sympy_matrix", "(", "o", ",", "o", ".", "space", ")", "for", "o", "in", "expr", ".", "operands", "if", "o", ".", "space", "==", "ls", "]", "if", "len", "(", "ls_ops", ")", ":", "# compute factor associated with local space", "by_space", ".", "append", "(", "ls_ops", "[", "0", "]", ")", "for", "ls_op", "in", "ls_ops", "[", "1", ":", "]", ":", "by_space", "[", "-", "1", "]", "*=", "ls_op", "ck", "+=", "len", "(", "ls_ops", ")", "else", ":", "# if trivial action, take identity matrix", "by_space", ".", "append", "(", "sympy", ".", "eye", "(", "ls", ".", "dimension", ")", ")", "assert", "ck", "==", "len", "(", "expr", ".", "operands", ")", "# combine local factors in tensor product", "if", "len", "(", "by_space", ")", "==", "1", ":", "return", "by_space", "[", "0", "]", "else", ":", "return", "tensor", "(", "*", "by_space", ")", "elif", "isinstance", "(", "expr", ",", "Adjoint", ")", ":", "return", "convert_to_sympy_matrix", "(", "expr", ".", "operand", ",", "full_space", ")", ".", "H", "elif", "isinstance", "(", "expr", ",", "PseudoInverse", ")", ":", "raise", "NotImplementedError", "(", "'Cannot convert PseudoInverse to sympy matrix'", ")", "elif", "isinstance", "(", "expr", ",", "NullSpaceProjector", ")", ":", "raise", "NotImplementedError", "(", "'Cannot convert NullSpaceProjector to sympy'", ")", "elif", "isinstance", "(", "expr", ",", "ScalarTimesOperator", ")", ":", "return", "expr", ".", "coeff", "*", "convert_to_sympy_matrix", "(", "expr", ".", "term", ",", "full_space", ")", "else", ":", "raise", "ValueError", "(", "\"Cannot convert '%s' of type %s\"", "%", "(", "str", "(", "expr", ")", ",", "type", "(", "expr", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Cannot convert '%s' of type %s\"", "%", "(", "str", "(", "expr", ")", ",", "type", "(", "expr", ")", ")", ")" ]
44.122807
0.000194
[ "def convert_to_sympy_matrix(expr, full_space=None):\n", " \"\"\"Convert a QNET expression to an explicit ``n x n`` instance of\n", " `sympy.Matrix`, where ``n`` is the dimension of `full_space`. The entries\n", " of the matrix may contain symbols.\n", "\n", " Parameters:\n", " expr: a QNET expression\n", " full_space (qnet.algebra.hilbert_space_algebra.HilbertSpace): The\n", " Hilbert space in which `expr` is defined. If not given,\n", " ``expr.space`` is used. The Hilbert space must have a well-defined\n", " basis.\n", "\n", " Raises:\n", " qnet.algebra.hilbert_space_algebra.BasisNotSetError: if `full_space`\n", " does not have a defined basis\n", " ValueError: if `expr` is not in `full_space`, or if `expr` cannot be\n", " converted.\n", " \"\"\"\n", " if full_space is None:\n", " full_space = expr.space\n", " if not expr.space.is_tensor_factor_of(full_space):\n", " raise ValueError(\"expr must be in full_space\")\n", " if expr is IdentityOperator:\n", " return sympy.eye(full_space.dimension)\n", " elif expr is ZeroOperator:\n", " return 0\n", " elif isinstance(expr, LocalOperator):\n", " n = full_space.dimension\n", " if full_space != expr.space:\n", " all_spaces = full_space.local_factors\n", " own_space_index = all_spaces.index(expr.space)\n", " factors = [sympy.eye(s.dimension)\n", " for s in all_spaces[:own_space_index]]\n", " factors.append(convert_to_sympy_matrix(expr, expr.space))\n", " factors.extend([sympy.eye(s.dimension)\n", " for s in all_spaces[own_space_index + 1:]])\n", " return tensor(*factors)\n", " if isinstance(expr, (Create, Jz, Jplus)):\n", " return SympyCreate(n)\n", " elif isinstance(expr, (Destroy, Jminus)):\n", " return SympyCreate(n).H\n", " elif isinstance(expr, Phase):\n", " phi = expr.phase\n", " result = sympy.zeros(n)\n", " for i in range(n):\n", " result[i, i] = sympy.exp(sympy.I * i * phi)\n", " return result\n", " elif isinstance(expr, Displace):\n", " alpha = expr.operands[1]\n", " a = SympyCreate(n)\n", " return (alpha * a - alpha.conjugate() * a.H).exp()\n", " elif isinstance(expr, Squeeze):\n", " eta = expr.operands[1]\n", " a = SympyCreate(n)\n", " return ((eta/2) * a**2 - (eta.conjugate()/2) * (a.H)**2).exp()\n", " elif isinstance(expr, LocalSigma):\n", " ket = basis_state(expr.index_j, n)\n", " bra = basis_state(expr.index_k, n).H\n", " return ket * bra\n", " else:\n", " raise ValueError(\"Cannot convert '%s' of type %s\"\n", " % (str(expr), type(expr)))\n", " elif (isinstance(expr, Operator) and isinstance(expr, Operation)):\n", " if isinstance(expr, OperatorPlus):\n", " s = convert_to_sympy_matrix(expr.operands[0], full_space)\n", " for op in expr.operands[1:]:\n", " s += convert_to_sympy_matrix(op, full_space)\n", " return s\n", " elif isinstance(expr, OperatorTimes):\n", " # if any factor acts non-locally, we need to expand distributively.\n", " if any(len(op.space) > 1 for op in expr.operands):\n", " se = expr.expand()\n", " if se == expr:\n", " raise ValueError(\"Cannot represent as sympy matrix: %s\"\n", " % expr)\n", " return convert_to_sympy_matrix(se, full_space)\n", " all_spaces = full_space.local_factors\n", " by_space = []\n", " ck = 0\n", " for ls in all_spaces:\n", " # group factors by associated local space\n", " ls_ops = [convert_to_sympy_matrix(o, o.space)\n", " for o in expr.operands if o.space == ls]\n", " if len(ls_ops):\n", " # compute factor associated with local space\n", " by_space.append(ls_ops[0])\n", " for ls_op in ls_ops[1:]:\n", " by_space[-1] *= ls_op\n", " ck += len(ls_ops)\n", " else:\n", " # if trivial action, 
take identity matrix\n", " by_space.append(sympy.eye(ls.dimension))\n", " assert ck == len(expr.operands)\n", " # combine local factors in tensor product\n", " if len(by_space) == 1:\n", " return by_space[0]\n", " else:\n", " return tensor(*by_space)\n", " elif isinstance(expr, Adjoint):\n", " return convert_to_sympy_matrix(expr.operand, full_space).H\n", " elif isinstance(expr, PseudoInverse):\n", " raise NotImplementedError(\n", " 'Cannot convert PseudoInverse to sympy matrix')\n", " elif isinstance(expr, NullSpaceProjector):\n", " raise NotImplementedError(\n", " 'Cannot convert NullSpaceProjector to sympy')\n", " elif isinstance(expr, ScalarTimesOperator):\n", " return expr.coeff * convert_to_sympy_matrix(expr.term, full_space)\n", " else:\n", " raise ValueError(\n", " \"Cannot convert '%s' of type %s\" % (str(expr), type(expr)))\n", " else:\n", " raise ValueError(\n", " \"Cannot convert '%s' of type %s\" % (str(expr), type(expr)))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014084507042253521 ]
114
0.000124
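`convert_to_sympy_matrix` leans on three helpers that are not included in the snippet: `basis_state`, `SympyCreate` and `tensor`. The sketches below are minimal versions consistent with how they are used above; the real implementations may differ in detail.

```python
import sympy
from sympy.physics.quantum import TensorProduct


def basis_state(i, n):
    """Column vector |i> in an n-dimensional truncated space."""
    v = sympy.zeros(n, 1)
    v[i, 0] = 1
    return v


def SympyCreate(n):
    """Bosonic creation operator truncated to an n x n sympy matrix."""
    a = sympy.zeros(n, n)
    for i in range(1, n):
        a[i, i - 1] = sympy.sqrt(i)
    return a


def tensor(*matrices):
    """Kronecker product of explicit sympy matrices."""
    result = matrices[0]
    for m in matrices[1:]:
        result = TensorProduct(result, m)
    return result
```

Here `tensor` is the Kronecker product that stitches single-space operators into the full Hilbert space, matching how the converter pads local operators with identity matrices.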
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: WorkspaceCumulativeStatisticsContext for this WorkspaceCumulativeStatisticsInstance :rtype: twilio.rest.taskrouter.v1.workspace.workspace_cumulative_statistics.WorkspaceCumulativeStatisticsContext """ if self._context is None: self._context = WorkspaceCumulativeStatisticsContext( self._version, workspace_sid=self._solution['workspace_sid'], ) return self._context
[ "def", "_proxy", "(", "self", ")", ":", "if", "self", ".", "_context", "is", "None", ":", "self", ".", "_context", "=", "WorkspaceCumulativeStatisticsContext", "(", "self", ".", "_version", ",", "workspace_sid", "=", "self", ".", "_solution", "[", "'workspace_sid'", "]", ",", ")", "return", "self", ".", "_context" ]
46.785714
0.008982
[ "def _proxy(self):\n", " \"\"\"\n", " Generate an instance context for the instance, the context is capable of\n", " performing various actions. All instance actions are proxied to the context\n", "\n", " :returns: WorkspaceCumulativeStatisticsContext for this WorkspaceCumulativeStatisticsInstance\n", " :rtype: twilio.rest.taskrouter.v1.workspace.workspace_cumulative_statistics.WorkspaceCumulativeStatisticsContext\n", " \"\"\"\n", " if self._context is None:\n", " self._context = WorkspaceCumulativeStatisticsContext(\n", " self._version,\n", " workspace_sid=self._solution['workspace_sid'],\n", " )\n", " return self._context" ]
[ 0, 0.08333333333333333, 0.012345679012345678, 0.011764705882352941, 0, 0.00980392156862745, 0.008264462809917356, 0, 0, 0, 0, 0, 0, 0.03571428571428571 ]
14
0.011516
def build_uri(endpoint, api_version, uri_parts, uri_args={}): """ Build the URL using the endpoint, the api version, the uri parts and the args. :param dict uri_args: parameters to include in the URL. :param tuple uri_parts: url encoded and `uri_parts` too. :return: A string that represents the absolute URL of the request :rtype : str The resulting uri is as follows: {endpoint}/{api_version}/{uri_part1}/.../{uri_partn}?{uri_args} The `uri_args` and the `uri_parts` are url encoded. """ # to unicode uri_parts = [unicode(x) for x in uri_parts] # and encoded uri_parts = [urllib.quote(x) for x in uri_parts] # Add enpoint and version all_uri_parts = [endpoint, api_version, ] + uri_parts # join parts url_to_call = "/".join(all_uri_parts) # add params if any if uri_args: url_to_call = "{}?{}".format(url_to_call, urllib.urlencode(uri_args)) # return return url_to_call
[ "def", "build_uri", "(", "endpoint", ",", "api_version", ",", "uri_parts", ",", "uri_args", "=", "{", "}", ")", ":", "# to unicode", "uri_parts", "=", "[", "unicode", "(", "x", ")", "for", "x", "in", "uri_parts", "]", "# and encoded ", "uri_parts", "=", "[", "urllib", ".", "quote", "(", "x", ")", "for", "x", "in", "uri_parts", "]", "# Add enpoint and version ", "all_uri_parts", "=", "[", "endpoint", ",", "api_version", ",", "]", "+", "uri_parts", "# join parts", "url_to_call", "=", "\"/\"", ".", "join", "(", "all_uri_parts", ")", "# add params if any", "if", "uri_args", ":", "url_to_call", "=", "\"{}?{}\"", ".", "format", "(", "url_to_call", ",", "urllib", ".", "urlencode", "(", "uri_args", ")", ")", "# return", "return", "url_to_call" ]
36.740741
0.008841
[ "def build_uri(endpoint, api_version, uri_parts, uri_args={}):\n", " \"\"\" Build the URL using the endpoint, the api version, the uri parts and the args.\n", " \n", " :param dict uri_args: parameters to include in the URL. \n", " :param tuple uri_parts: url encoded and `uri_parts` too.\n", " :return: A string that represents the absolute URL of the request\n", " :rtype : str\n", " \n", " The resulting uri is as follows:\n", " \n", " {endpoint}/{api_version}/{uri_part1}/.../{uri_partn}?{uri_args}\n", " \n", " The `uri_args` and the `uri_parts` are url encoded.\n", " \"\"\"\n", " # to unicode\n", " uri_parts = [unicode(x) for x in uri_parts]\n", " # and encoded \n", " uri_parts = [urllib.quote(x) for x in uri_parts]\n", " # Add enpoint and version \n", " all_uri_parts = [endpoint, api_version, ] + uri_parts\n", " # join parts\n", " url_to_call = \"/\".join(all_uri_parts)\n", " # add params if any\n", " if uri_args:\n", " url_to_call = \"{}?{}\".format(url_to_call, urllib.urlencode(uri_args))\n", " # return\n", " return url_to_call" ]
[ 0, 0.011494252873563218, 0.2, 0.015384615384615385, 0, 0, 0, 0.2, 0, 0.2, 0, 0.1111111111111111, 0, 0, 0, 0, 0.05263157894736842, 0, 0.03225806451612903, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456 ]
27
0.032161
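An example call for `build_uri` above, assuming Python 2 (matching its use of `unicode` and `urllib.quote`); the endpoint is a made-up example:

```python
build_uri('https://api.example.com', 'v2', ('users', 'john doe'), {'page': 2})
# -> 'https://api.example.com/v2/users/john%20doe?page=2'
```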
def _run_dnb_normalization(self, dnb_data, sza_data): """Scale the DNB data using a adaptive histogram equalization method. Args: dnb_data (ndarray): Day/Night Band data array sza_data (ndarray): Solar Zenith Angle data array """ # convert dask arrays to DataArray objects dnb_data = xr.DataArray(dnb_data, dims=('y', 'x')) sza_data = xr.DataArray(sza_data, dims=('y', 'x')) good_mask = ~(dnb_data.isnull() | sza_data.isnull()) # good_mask = ~(dnb_data.mask | sza_data.mask) output_dataset = dnb_data.where(good_mask) # we only need the numpy array output_dataset = output_dataset.values.copy() dnb_data = dnb_data.values sza_data = sza_data.values day_mask, mixed_mask, night_mask = make_day_night_masks( sza_data, good_mask.values, self.high_angle_cutoff, self.low_angle_cutoff, stepsDegrees=self.mixed_degree_step) did_equalize = False has_multi_times = len(mixed_mask) > 0 if day_mask.any(): did_equalize = True if self.adaptive_day == "always" or ( has_multi_times and self.adaptive_day == "multiple"): LOG.debug("Adaptive histogram equalizing DNB day data...") local_histogram_equalization( dnb_data, day_mask, valid_data_mask=good_mask.values, local_radius_px=self.day_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB day data...") histogram_equalization(dnb_data, day_mask, out=output_dataset) if mixed_mask: for mask in mixed_mask: if mask.any(): did_equalize = True if self.adaptive_mixed == "always" or ( has_multi_times and self.adaptive_mixed == "multiple"): LOG.debug( "Adaptive histogram equalizing DNB mixed data...") local_histogram_equalization( dnb_data, mask, valid_data_mask=good_mask.values, local_radius_px=self.mixed_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB mixed data...") histogram_equalization(dnb_data, day_mask, out=output_dataset) if night_mask.any(): did_equalize = True if self.adaptive_night == "always" or ( has_multi_times and self.adaptive_night == "multiple"): LOG.debug("Adaptive histogram equalizing DNB night data...") local_histogram_equalization( dnb_data, night_mask, valid_data_mask=good_mask.values, local_radius_px=self.night_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB night data...") histogram_equalization(dnb_data, night_mask, out=output_dataset) if not did_equalize: raise RuntimeError("No valid data found to histogram equalize") return output_dataset
[ "def", "_run_dnb_normalization", "(", "self", ",", "dnb_data", ",", "sza_data", ")", ":", "# convert dask arrays to DataArray objects", "dnb_data", "=", "xr", ".", "DataArray", "(", "dnb_data", ",", "dims", "=", "(", "'y'", ",", "'x'", ")", ")", "sza_data", "=", "xr", ".", "DataArray", "(", "sza_data", ",", "dims", "=", "(", "'y'", ",", "'x'", ")", ")", "good_mask", "=", "~", "(", "dnb_data", ".", "isnull", "(", ")", "|", "sza_data", ".", "isnull", "(", ")", ")", "# good_mask = ~(dnb_data.mask | sza_data.mask)", "output_dataset", "=", "dnb_data", ".", "where", "(", "good_mask", ")", "# we only need the numpy array", "output_dataset", "=", "output_dataset", ".", "values", ".", "copy", "(", ")", "dnb_data", "=", "dnb_data", ".", "values", "sza_data", "=", "sza_data", ".", "values", "day_mask", ",", "mixed_mask", ",", "night_mask", "=", "make_day_night_masks", "(", "sza_data", ",", "good_mask", ".", "values", ",", "self", ".", "high_angle_cutoff", ",", "self", ".", "low_angle_cutoff", ",", "stepsDegrees", "=", "self", ".", "mixed_degree_step", ")", "did_equalize", "=", "False", "has_multi_times", "=", "len", "(", "mixed_mask", ")", ">", "0", "if", "day_mask", ".", "any", "(", ")", ":", "did_equalize", "=", "True", "if", "self", ".", "adaptive_day", "==", "\"always\"", "or", "(", "has_multi_times", "and", "self", ".", "adaptive_day", "==", "\"multiple\"", ")", ":", "LOG", ".", "debug", "(", "\"Adaptive histogram equalizing DNB day data...\"", ")", "local_histogram_equalization", "(", "dnb_data", ",", "day_mask", ",", "valid_data_mask", "=", "good_mask", ".", "values", ",", "local_radius_px", "=", "self", ".", "day_radius_pixels", ",", "out", "=", "output_dataset", ")", "else", ":", "LOG", ".", "debug", "(", "\"Histogram equalizing DNB day data...\"", ")", "histogram_equalization", "(", "dnb_data", ",", "day_mask", ",", "out", "=", "output_dataset", ")", "if", "mixed_mask", ":", "for", "mask", "in", "mixed_mask", ":", "if", "mask", ".", "any", "(", ")", ":", "did_equalize", "=", "True", "if", "self", ".", "adaptive_mixed", "==", "\"always\"", "or", "(", "has_multi_times", "and", "self", ".", "adaptive_mixed", "==", "\"multiple\"", ")", ":", "LOG", ".", "debug", "(", "\"Adaptive histogram equalizing DNB mixed data...\"", ")", "local_histogram_equalization", "(", "dnb_data", ",", "mask", ",", "valid_data_mask", "=", "good_mask", ".", "values", ",", "local_radius_px", "=", "self", ".", "mixed_radius_pixels", ",", "out", "=", "output_dataset", ")", "else", ":", "LOG", ".", "debug", "(", "\"Histogram equalizing DNB mixed data...\"", ")", "histogram_equalization", "(", "dnb_data", ",", "day_mask", ",", "out", "=", "output_dataset", ")", "if", "night_mask", ".", "any", "(", ")", ":", "did_equalize", "=", "True", "if", "self", ".", "adaptive_night", "==", "\"always\"", "or", "(", "has_multi_times", "and", "self", ".", "adaptive_night", "==", "\"multiple\"", ")", ":", "LOG", ".", "debug", "(", "\"Adaptive histogram equalizing DNB night data...\"", ")", "local_histogram_equalization", "(", "dnb_data", ",", "night_mask", ",", "valid_data_mask", "=", "good_mask", ".", "values", ",", "local_radius_px", "=", "self", ".", "night_radius_pixels", ",", "out", "=", "output_dataset", ")", "else", ":", "LOG", ".", "debug", "(", "\"Histogram equalizing DNB night data...\"", ")", "histogram_equalization", "(", "dnb_data", ",", "night_mask", ",", "out", "=", "output_dataset", ")", "if", "not", "did_equalize", ":", "raise", "RuntimeError", "(", "\"No valid data found to histogram equalize\"", ")", "return", 
"output_dataset" ]
42.837209
0.000531
[ "def _run_dnb_normalization(self, dnb_data, sza_data):\n", " \"\"\"Scale the DNB data using a adaptive histogram equalization method.\n", "\n", " Args:\n", " dnb_data (ndarray): Day/Night Band data array\n", " sza_data (ndarray): Solar Zenith Angle data array\n", "\n", " \"\"\"\n", " # convert dask arrays to DataArray objects\n", " dnb_data = xr.DataArray(dnb_data, dims=('y', 'x'))\n", " sza_data = xr.DataArray(sza_data, dims=('y', 'x'))\n", "\n", " good_mask = ~(dnb_data.isnull() | sza_data.isnull())\n", " # good_mask = ~(dnb_data.mask | sza_data.mask)\n", " output_dataset = dnb_data.where(good_mask)\n", " # we only need the numpy array\n", " output_dataset = output_dataset.values.copy()\n", " dnb_data = dnb_data.values\n", " sza_data = sza_data.values\n", "\n", " day_mask, mixed_mask, night_mask = make_day_night_masks(\n", " sza_data,\n", " good_mask.values,\n", " self.high_angle_cutoff,\n", " self.low_angle_cutoff,\n", " stepsDegrees=self.mixed_degree_step)\n", "\n", " did_equalize = False\n", " has_multi_times = len(mixed_mask) > 0\n", " if day_mask.any():\n", " did_equalize = True\n", " if self.adaptive_day == \"always\" or (\n", " has_multi_times and self.adaptive_day == \"multiple\"):\n", " LOG.debug(\"Adaptive histogram equalizing DNB day data...\")\n", " local_histogram_equalization(\n", " dnb_data,\n", " day_mask,\n", " valid_data_mask=good_mask.values,\n", " local_radius_px=self.day_radius_pixels,\n", " out=output_dataset)\n", " else:\n", " LOG.debug(\"Histogram equalizing DNB day data...\")\n", " histogram_equalization(dnb_data,\n", " day_mask,\n", " out=output_dataset)\n", " if mixed_mask:\n", " for mask in mixed_mask:\n", " if mask.any():\n", " did_equalize = True\n", " if self.adaptive_mixed == \"always\" or (\n", " has_multi_times and\n", " self.adaptive_mixed == \"multiple\"):\n", " LOG.debug(\n", " \"Adaptive histogram equalizing DNB mixed data...\")\n", " local_histogram_equalization(\n", " dnb_data,\n", " mask,\n", " valid_data_mask=good_mask.values,\n", " local_radius_px=self.mixed_radius_pixels,\n", " out=output_dataset)\n", " else:\n", " LOG.debug(\"Histogram equalizing DNB mixed data...\")\n", " histogram_equalization(dnb_data,\n", " day_mask,\n", " out=output_dataset)\n", " if night_mask.any():\n", " did_equalize = True\n", " if self.adaptive_night == \"always\" or (\n", " has_multi_times and self.adaptive_night == \"multiple\"):\n", " LOG.debug(\"Adaptive histogram equalizing DNB night data...\")\n", " local_histogram_equalization(\n", " dnb_data,\n", " night_mask,\n", " valid_data_mask=good_mask.values,\n", " local_radius_px=self.night_radius_pixels,\n", " out=output_dataset)\n", " else:\n", " LOG.debug(\"Histogram equalizing DNB night data...\")\n", " histogram_equalization(dnb_data,\n", " night_mask,\n", " out=output_dataset)\n", "\n", " if not did_equalize:\n", " raise RuntimeError(\"No valid data found to histogram equalize\")\n", "\n", " return output_dataset" ]
[ 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.034482758620689655 ]
86
0.00055
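_run_dnb_normalization above splits Day/Night Band pixels into day, mixed (twilight) and night regions by solar zenith angle before equalizing each region separately. A minimal NumPy sketch of that masking step, assuming cutoffs of 88 and 100 degrees; the real make_day_night_masks helper is not shown above, so the function name and defaults here are illustrative only.

import numpy as np

def day_night_masks(sza, good_mask, low_cutoff=88.0, high_cutoff=100.0):
    """Split valid pixels into day / mixed / night by solar zenith angle (deg)."""
    day = good_mask & (sza < low_cutoff)       # sun well above the horizon
    night = good_mask & (sza >= high_cutoff)   # sun well below the horizon
    mixed = good_mask & ~day & ~night          # twilight band between the cutoffs
    return day, mixed, night

sza = np.array([[45.0, 89.0], [95.0, 120.0]])
good = np.isfinite(sza)
day, mixed, night = day_night_masks(sza, good)
print(day, mixed, night, sep="\n")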
def values(self): """ Return all values as numpy-array (mean, var, min, max, num). """ return np.array([self.mean, self.var, self.min, self.max, self.num])
[ "def", "values", "(", "self", ")", ":", "return", "np", ".", "array", "(", "[", "self", ".", "mean", ",", "self", ".", "var", ",", "self", ".", "min", ",", "self", ".", "max", ",", "self", ".", "num", "]", ")" ]
36.6
0.010695
[ "def values(self):\n", " \"\"\"\n", " Return all values as numpy-array (mean, var, min, max, num).\n", " \"\"\"\n", " return np.array([self.mean, self.var, self.min, self.max, self.num])" ]
[ 0, 0.08333333333333333, 0, 0, 0.013157894736842105 ]
5
0.019298
def _check_no_current_table(new_obj, current_table): """ Raises exception if we try to add a relation or a column with no current table. """ if current_table is None: msg = 'Cannot add {} before adding table' if isinstance(new_obj, Relation): raise NoCurrentTableException(msg.format('relation')) if isinstance(new_obj, Column): raise NoCurrentTableException(msg.format('column'))
[ "def", "_check_no_current_table", "(", "new_obj", ",", "current_table", ")", ":", "if", "current_table", "is", "None", ":", "msg", "=", "'Cannot add {} before adding table'", "if", "isinstance", "(", "new_obj", ",", "Relation", ")", ":", "raise", "NoCurrentTableException", "(", "msg", ".", "format", "(", "'relation'", ")", ")", "if", "isinstance", "(", "new_obj", ",", "Column", ")", ":", "raise", "NoCurrentTableException", "(", "msg", ".", "format", "(", "'column'", ")", ")" ]
48
0.002273
[ "def _check_no_current_table(new_obj, current_table):\n", " \"\"\" Raises exception if we try to add a relation or a column\n", " with no current table. \"\"\"\n", " if current_table is None:\n", " msg = 'Cannot add {} before adding table'\n", " if isinstance(new_obj, Relation):\n", " raise NoCurrentTableException(msg.format('relation'))\n", " if isinstance(new_obj, Column):\n", " raise NoCurrentTableException(msg.format('column'))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0.015873015873015872 ]
9
0.001764
def start(self): """ Begins the job by kicking off all tasks with no dependencies. """ logger.info('Job {0} starting job run'.format(self.name)) if not self.state.allow_start: raise DagobahError('job cannot be started in its current state; ' + 'it is probably already running') self.initialize_snapshot() # don't increment if the job was run manually if self.cron_iter and datetime.utcnow() > self.next_run: self.next_run = self.cron_iter.get_next(datetime) self.run_log = {'job_id': self.job_id, 'name': self.name, 'parent_id': self.parent.dagobah_id, 'log_id': self.backend.get_new_log_id(), 'start_time': datetime.utcnow(), 'tasks': {}} self._set_status('running') logger.debug('Job {0} resetting all tasks prior to start'.format(self.name)) for task in self.tasks.itervalues(): task.reset() logger.debug('Job {0} seeding run logs'.format(self.name)) for task_name in self.ind_nodes(self.snapshot): self._put_task_in_run_log(task_name) self.tasks[task_name].start() self._commit_run_log()
[ "def", "start", "(", "self", ")", ":", "logger", ".", "info", "(", "'Job {0} starting job run'", ".", "format", "(", "self", ".", "name", ")", ")", "if", "not", "self", ".", "state", ".", "allow_start", ":", "raise", "DagobahError", "(", "'job cannot be started in its current state; '", "+", "'it is probably already running'", ")", "self", ".", "initialize_snapshot", "(", ")", "# don't increment if the job was run manually", "if", "self", ".", "cron_iter", "and", "datetime", ".", "utcnow", "(", ")", ">", "self", ".", "next_run", ":", "self", ".", "next_run", "=", "self", ".", "cron_iter", ".", "get_next", "(", "datetime", ")", "self", ".", "run_log", "=", "{", "'job_id'", ":", "self", ".", "job_id", ",", "'name'", ":", "self", ".", "name", ",", "'parent_id'", ":", "self", ".", "parent", ".", "dagobah_id", ",", "'log_id'", ":", "self", ".", "backend", ".", "get_new_log_id", "(", ")", ",", "'start_time'", ":", "datetime", ".", "utcnow", "(", ")", ",", "'tasks'", ":", "{", "}", "}", "self", ".", "_set_status", "(", "'running'", ")", "logger", ".", "debug", "(", "'Job {0} resetting all tasks prior to start'", ".", "format", "(", "self", ".", "name", ")", ")", "for", "task", "in", "self", ".", "tasks", ".", "itervalues", "(", ")", ":", "task", ".", "reset", "(", ")", "logger", ".", "debug", "(", "'Job {0} seeding run logs'", ".", "format", "(", "self", ".", "name", ")", ")", "for", "task_name", "in", "self", ".", "ind_nodes", "(", "self", ".", "snapshot", ")", ":", "self", ".", "_put_task_in_run_log", "(", "task_name", ")", "self", ".", "tasks", "[", "task_name", "]", ".", "start", "(", ")", "self", ".", "_commit_run_log", "(", ")" ]
40.0625
0.002285
[ "def start(self):\n", " \"\"\" Begins the job by kicking off all tasks with no dependencies. \"\"\"\n", "\n", " logger.info('Job {0} starting job run'.format(self.name))\n", " if not self.state.allow_start:\n", " raise DagobahError('job cannot be started in its current state; ' +\n", " 'it is probably already running')\n", "\n", " self.initialize_snapshot()\n", "\n", " # don't increment if the job was run manually\n", " if self.cron_iter and datetime.utcnow() > self.next_run:\n", " self.next_run = self.cron_iter.get_next(datetime)\n", "\n", " self.run_log = {'job_id': self.job_id,\n", " 'name': self.name,\n", " 'parent_id': self.parent.dagobah_id,\n", " 'log_id': self.backend.get_new_log_id(),\n", " 'start_time': datetime.utcnow(),\n", " 'tasks': {}}\n", " self._set_status('running')\n", "\n", " logger.debug('Job {0} resetting all tasks prior to start'.format(self.name))\n", " for task in self.tasks.itervalues():\n", " task.reset()\n", "\n", " logger.debug('Job {0} seeding run logs'.format(self.name))\n", " for task_name in self.ind_nodes(self.snapshot):\n", " self._put_task_in_run_log(task_name)\n", " self.tasks[task_name].start()\n", "\n", " self._commit_run_log()" ]
[ 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0.03333333333333333 ]
32
0.00181
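Job.start above seeds the run log with the graph's independent nodes (tasks that nothing depends on upstream) and starts only those; downstream tasks are triggered later as their parents complete. A small illustration of picking out such nodes from a plain adjacency-dict DAG; the dict layout and task names are hypothetical, not Dagobah's internal representation.

def independent_nodes(graph):
    """Return nodes with no incoming edges, i.e. no upstream dependencies."""
    has_upstream = {child for children in graph.values() for child in children}
    return [node for node in graph if node not in has_upstream]

# edges point downstream: extract -> transform -> load
dag = {"extract": ["transform"], "transform": ["load"], "load": []}
print(independent_nodes(dag))  # ['extract']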
def validate_args(api_key, *, rate="informers", **kwargs): "Проверяет и формирует аргументы для запроса" rate = Rate.validate(rate) headers = {"X-Yandex-API-Key": api_key} url = "https://api.weather.yandex.ru/v1/{}".format(rate) if rate == "informers": params = ARGS_SCHEMA(kwargs) else: params = ARGS_FORECAST_SCHEMA(kwargs) return (url,), {"headers": headers, "params": params}
[ "def", "validate_args", "(", "api_key", ",", "*", ",", "rate", "=", "\"informers\"", ",", "*", "*", "kwargs", ")", ":", "rate", "=", "Rate", ".", "validate", "(", "rate", ")", "headers", "=", "{", "\"X-Yandex-API-Key\"", ":", "api_key", "}", "url", "=", "\"https://api.weather.yandex.ru/v1/{}\"", ".", "format", "(", "rate", ")", "if", "rate", "==", "\"informers\"", ":", "params", "=", "ARGS_SCHEMA", "(", "kwargs", ")", "else", ":", "params", "=", "ARGS_FORECAST_SCHEMA", "(", "kwargs", ")", "return", "(", "url", ",", ")", ",", "{", "\"headers\"", ":", "headers", ",", "\"params\"", ":", "params", "}" ]
41.4
0.002364
[ "def validate_args(api_key, *, rate=\"informers\", **kwargs):\n", " \"Проверяет и формирует аргументы для запроса\"\n", " rate = Rate.validate(rate)\n", " headers = {\"X-Yandex-API-Key\": api_key}\n", " url = \"https://api.weather.yandex.ru/v1/{}\".format(rate)\n", " if rate == \"informers\":\n", " params = ARGS_SCHEMA(kwargs)\n", " else:\n", " params = ARGS_FORECAST_SCHEMA(kwargs)\n", " return (url,), {\"headers\": headers, \"params\": params}" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.017543859649122806 ]
10
0.001754
def _encrypted_data_keys_hash(hasher, encrypted_data_keys): """Generates the expected hash for the provided encrypted data keys. :param hasher: Existing hasher to use :type hasher: cryptography.hazmat.primitives.hashes.Hash :param iterable encrypted_data_keys: Encrypted data keys to hash :returns: Concatenated, sorted, list of all hashes :rtype: bytes """ hashed_keys = [] for edk in encrypted_data_keys: serialized_edk = serialize_encrypted_data_key(edk) _hasher = hasher.copy() _hasher.update(serialized_edk) hashed_keys.append(_hasher.finalize()) return b"".join(sorted(hashed_keys))
[ "def", "_encrypted_data_keys_hash", "(", "hasher", ",", "encrypted_data_keys", ")", ":", "hashed_keys", "=", "[", "]", "for", "edk", "in", "encrypted_data_keys", ":", "serialized_edk", "=", "serialize_encrypted_data_key", "(", "edk", ")", "_hasher", "=", "hasher", ".", "copy", "(", ")", "_hasher", ".", "update", "(", "serialized_edk", ")", "hashed_keys", ".", "append", "(", "_hasher", ".", "finalize", "(", ")", ")", "return", "b\"\"", ".", "join", "(", "sorted", "(", "hashed_keys", ")", ")" ]
40.375
0.001513
[ "def _encrypted_data_keys_hash(hasher, encrypted_data_keys):\n", " \"\"\"Generates the expected hash for the provided encrypted data keys.\n", "\n", " :param hasher: Existing hasher to use\n", " :type hasher: cryptography.hazmat.primitives.hashes.Hash\n", " :param iterable encrypted_data_keys: Encrypted data keys to hash\n", " :returns: Concatenated, sorted, list of all hashes\n", " :rtype: bytes\n", " \"\"\"\n", " hashed_keys = []\n", " for edk in encrypted_data_keys:\n", " serialized_edk = serialize_encrypted_data_key(edk)\n", " _hasher = hasher.copy()\n", " _hasher.update(serialized_edk)\n", " hashed_keys.append(_hasher.finalize())\n", " return b\"\".join(sorted(hashed_keys))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.025 ]
16
0.001563
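_encrypted_data_keys_hash above copies a partially updated hasher once per key, finalizes each copy, and joins the digests in sorted order so the result does not depend on key ordering. A rough standalone equivalent using hashlib, with byte strings standing in for serialized encrypted data keys and an assumed shared prefix:

import hashlib

def data_keys_hash(serialized_keys, shared_prefix=b"context"):
    base = hashlib.sha384()
    base.update(shared_prefix)          # state shared by every per-key copy
    digests = []
    for serialized in serialized_keys:
        h = base.copy()                 # copy() preserves the prefix state
        h.update(serialized)
        digests.append(h.digest())
    return b"".join(sorted(digests))    # sorting makes the result order-independent

print(data_keys_hash([b"edk-two", b"edk-one"]).hex()[:16])
print(data_keys_hash([b"edk-one", b"edk-two"]).hex()[:16])  # identical output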
def parse_info(response): "Parse the result of Redis's INFO command into a Python dict" info = {} response = nativestr(response) def get_value(value): if ',' not in value or '=' not in value: try: if '.' in value: return float(value) else: return int(value) except ValueError: return value else: sub_dict = {} for item in value.split(','): k, v = item.rsplit('=', 1) sub_dict[k] = get_value(v) return sub_dict for line in response.splitlines(): if line and not line.startswith('#'): if line.find(':') != -1: key, value = line.split(':', 1) info[key] = get_value(value) else: # if the line isn't splittable, append it to the "__raw__" key info.setdefault('__raw__', []).append(line) return info
[ "def", "parse_info", "(", "response", ")", ":", "info", "=", "{", "}", "response", "=", "nativestr", "(", "response", ")", "def", "get_value", "(", "value", ")", ":", "if", "','", "not", "in", "value", "or", "'='", "not", "in", "value", ":", "try", ":", "if", "'.'", "in", "value", ":", "return", "float", "(", "value", ")", "else", ":", "return", "int", "(", "value", ")", "except", "ValueError", ":", "return", "value", "else", ":", "sub_dict", "=", "{", "}", "for", "item", "in", "value", ".", "split", "(", "','", ")", ":", "k", ",", "v", "=", "item", ".", "rsplit", "(", "'='", ",", "1", ")", "sub_dict", "[", "k", "]", "=", "get_value", "(", "v", ")", "return", "sub_dict", "for", "line", "in", "response", ".", "splitlines", "(", ")", ":", "if", "line", "and", "not", "line", ".", "startswith", "(", "'#'", ")", ":", "if", "line", ".", "find", "(", "':'", ")", "!=", "-", "1", ":", "key", ",", "value", "=", "line", ".", "split", "(", "':'", ",", "1", ")", "info", "[", "key", "]", "=", "get_value", "(", "value", ")", "else", ":", "# if the line isn't splittable, append it to the \"__raw__\" key", "info", ".", "setdefault", "(", "'__raw__'", ",", "[", "]", ")", ".", "append", "(", "line", ")", "return", "info" ]
31.677419
0.000988
[ "def parse_info(response):\n", " \"Parse the result of Redis's INFO command into a Python dict\"\n", " info = {}\n", " response = nativestr(response)\n", "\n", " def get_value(value):\n", " if ',' not in value or '=' not in value:\n", " try:\n", " if '.' in value:\n", " return float(value)\n", " else:\n", " return int(value)\n", " except ValueError:\n", " return value\n", " else:\n", " sub_dict = {}\n", " for item in value.split(','):\n", " k, v = item.rsplit('=', 1)\n", " sub_dict[k] = get_value(v)\n", " return sub_dict\n", "\n", " for line in response.splitlines():\n", " if line and not line.startswith('#'):\n", " if line.find(':') != -1:\n", " key, value = line.split(':', 1)\n", " info[key] = get_value(value)\n", " else:\n", " # if the line isn't splittable, append it to the \"__raw__\" key\n", " info.setdefault('__raw__', []).append(line)\n", "\n", " return info" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.06666666666666667 ]
31
0.002151
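parse_info above turns the colon-separated lines of a Redis INFO reply into a dict, recursing into comma/equals sub-fields and collecting unsplittable lines under a __raw__ key. A compact standalone demonstration of the same parsing idea (not redis-py itself):

SAMPLE = """# Server
redis_version:5.0.7
uptime_in_seconds:3600
db0:keys=42,expires=3,avg_ttl=0
Loading dump file"""

def parse_info_lines(text):
    info = {}
    for line in text.splitlines():
        if not line or line.startswith("#"):
            continue                                  # skip section headers
        if ":" not in line:
            info.setdefault("__raw__", []).append(line)
            continue
        key, value = line.split(":", 1)
        if "=" in value and "," in value:             # e.g. db0:keys=42,expires=3
            info[key] = dict(item.split("=", 1) for item in value.split(","))
        else:
            info[key] = value
    return info

print(parse_info_lines(SAMPLE))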
def _rpt_unused_sections(self, prt): """Report unused sections.""" sections_unused = set(self.sections_seen).difference(self.section2goids.keys()) for sec in sections_unused: prt.write(" UNUSED SECTION: {SEC}\n".format(SEC=sec))
[ "def", "_rpt_unused_sections", "(", "self", ",", "prt", ")", ":", "sections_unused", "=", "set", "(", "self", ".", "sections_seen", ")", ".", "difference", "(", "self", ".", "section2goids", ".", "keys", "(", ")", ")", "for", "sec", "in", "sections_unused", ":", "prt", ".", "write", "(", "\" UNUSED SECTION: {SEC}\\n\"", ".", "format", "(", "SEC", "=", "sec", ")", ")" ]
52.2
0.011321
[ "def _rpt_unused_sections(self, prt):\n", " \"\"\"Report unused sections.\"\"\"\n", " sections_unused = set(self.sections_seen).difference(self.section2goids.keys())\n", " for sec in sections_unused:\n", " prt.write(\" UNUSED SECTION: {SEC}\\n\".format(SEC=sec))" ]
[ 0, 0.02631578947368421, 0.011363636363636364, 0, 0.015151515151515152 ]
5
0.010566
def _field_accessor(name, docstring=None, min_cftime_version='0.0'): """Adapted from pandas.tseries.index._field_accessor""" def f(self, min_cftime_version=min_cftime_version): import cftime version = cftime.__version__ if LooseVersion(version) >= LooseVersion(min_cftime_version): return get_date_field(self._data, name) else: raise ImportError('The {!r} accessor requires a minimum ' 'version of cftime of {}. Found an ' 'installed version of {}.'.format( name, min_cftime_version, version)) f.__name__ = name f.__doc__ = docstring return property(f)
[ "def", "_field_accessor", "(", "name", ",", "docstring", "=", "None", ",", "min_cftime_version", "=", "'0.0'", ")", ":", "def", "f", "(", "self", ",", "min_cftime_version", "=", "min_cftime_version", ")", ":", "import", "cftime", "version", "=", "cftime", ".", "__version__", "if", "LooseVersion", "(", "version", ")", ">=", "LooseVersion", "(", "min_cftime_version", ")", ":", "return", "get_date_field", "(", "self", ".", "_data", ",", "name", ")", "else", ":", "raise", "ImportError", "(", "'The {!r} accessor requires a minimum '", "'version of cftime of {}. Found an '", "'installed version of {}.'", ".", "format", "(", "name", ",", "min_cftime_version", ",", "version", ")", ")", "f", ".", "__name__", "=", "name", "f", ".", "__doc__", "=", "docstring", "return", "property", "(", "f", ")" ]
37.263158
0.001377
[ "def _field_accessor(name, docstring=None, min_cftime_version='0.0'):\n", " \"\"\"Adapted from pandas.tseries.index._field_accessor\"\"\"\n", "\n", " def f(self, min_cftime_version=min_cftime_version):\n", " import cftime\n", "\n", " version = cftime.__version__\n", "\n", " if LooseVersion(version) >= LooseVersion(min_cftime_version):\n", " return get_date_field(self._data, name)\n", " else:\n", " raise ImportError('The {!r} accessor requires a minimum '\n", " 'version of cftime of {}. Found an '\n", " 'installed version of {}.'.format(\n", " name, min_cftime_version, version))\n", "\n", " f.__name__ = name\n", " f.__doc__ = docstring\n", " return property(f)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456 ]
19
0.002392
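_field_accessor above is a property factory: it closes over a field name, builds a getter, renames it, and returns property(f) so that many near-identical accessors can be attached to a class in a loop or at class-definition time. A generic sketch of the pattern, unrelated to cftime:

def make_accessor(name, docstring=None):
    def f(self):
        return self._data[name]
    f.__name__ = name
    f.__doc__ = docstring
    return property(f)

class Record:
    def __init__(self, data):
        self._data = data
    year = make_accessor("year", "The year stored in the record.")
    month = make_accessor("month", "The month stored in the record.")

r = Record({"year": 2024, "month": 5})
print(r.year, r.month)  # 2024 5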
def verify_checksum(*lines): """Verify the checksum of one or more TLE lines. Raises `ValueError` if any of the lines fails its checksum, and includes the failing line in the error message. """ for line in lines: checksum = line[68:69] if not checksum.isdigit(): continue checksum = int(checksum) computed = compute_checksum(line) if checksum != computed: complaint = ('TLE line gives its checksum as {}' ' but in fact tallies to {}:\n{}') raise ValueError(complaint.format(checksum, computed, line))
[ "def", "verify_checksum", "(", "*", "lines", ")", ":", "for", "line", "in", "lines", ":", "checksum", "=", "line", "[", "68", ":", "69", "]", "if", "not", "checksum", ".", "isdigit", "(", ")", ":", "continue", "checksum", "=", "int", "(", "checksum", ")", "computed", "=", "compute_checksum", "(", "line", ")", "if", "checksum", "!=", "computed", ":", "complaint", "=", "(", "'TLE line gives its checksum as {}'", "' but in fact tallies to {}:\\n{}'", ")", "raise", "ValueError", "(", "complaint", ".", "format", "(", "checksum", ",", "computed", ",", "line", ")", ")" ]
35.705882
0.001605
[ "def verify_checksum(*lines):\n", " \"\"\"Verify the checksum of one or more TLE lines.\n", "\n", " Raises `ValueError` if any of the lines fails its checksum, and\n", " includes the failing line in the error message.\n", "\n", " \"\"\"\n", " for line in lines:\n", " checksum = line[68:69]\n", " if not checksum.isdigit():\n", " continue\n", " checksum = int(checksum)\n", " computed = compute_checksum(line)\n", " if checksum != computed:\n", " complaint = ('TLE line gives its checksum as {}'\n", " ' but in fact tallies to {}:\\n{}')\n", " raise ValueError(complaint.format(checksum, computed, line))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013888888888888888 ]
17
0.000817
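verify_checksum above compares the final digit of each TLE line against a value recomputed by compute_checksum, which is not shown in the entry. The standard TLE checksum sums every digit in the first 68 columns, counts each minus sign as 1, ignores everything else, and takes the result modulo 10; a minimal version of that rule:

def tle_checksum(line):
    """Sum digits in columns 1-68, count '-' as 1, return the sum modulo 10."""
    total = 0
    for ch in line[:68]:
        if ch.isdigit():
            total += int(ch)
        elif ch == "-":
            total += 1
    return total % 10

line1 = "1 25544U 98067A   08264.51782528 -.00002182  00000-0 -11606-4 0  2927"
print(tle_checksum(line1), line1[-1])  # computed checksum and the line's own digit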
def otherrole(self, otherrole):
        """
        Set the ``OTHERROLE`` attribute value.
        """
        if otherrole is not None:
            self._el.set('ROLE', 'OTHER')
            self._el.set('OTHERROLE', otherrole)
[ "def", "otherrole", "(", "self", ",", "otherrole", ")", ":", "if", "otherrole", "is", "not", "None", ":", "self", ".", "_el", ".", "set", "(", "'ROLE'", ",", "'OTHER'", ")", "self", ".", "_el", ".", "set", "(", "'OTHERROLE'", ",", "otherrole", ")" ]
31.571429
0.008811
[ "def otherrole(self, otherrole):\n", " \"\"\"\n", " Get the ``OTHERROLE`` attribute value.\n", " \"\"\"\n", " if otherrole is not None:\n", " self._el.set('ROLE', 'OTHER')\n", " self._el.set('OTHERROLE', otherrole)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0.020833333333333332 ]
7
0.014881
def rcm_chip_order(machine): """A generator which iterates over a set of chips in a machine in Reverse-Cuthill-McKee order. For use as a chip ordering for the sequential placer. """ # Convert the Machine description into a placement-problem-style-graph # where the vertices are chip coordinate tuples (x, y) and each net # represents the links leaving each chip. This allows us to re-use the # rcm_vertex_order function above to generate an RCM ordering of chips in # the machine. vertices = list(machine) nets = [] for (x, y) in vertices: neighbours = [] for link in Links: if (x, y, link) in machine: dx, dy = link.to_vector() neighbour = ((x + dx) % machine.width, (y + dy) % machine.height) # In principle if the link to chip is marked as working, that # chip should be working. In practice this might not be the # case (especially for carelessly hand-defined Machine # objects). if neighbour in machine: neighbours.append(neighbour) nets.append(Net((x, y), neighbours)) return rcm_vertex_order(vertices, nets)
[ "def", "rcm_chip_order", "(", "machine", ")", ":", "# Convert the Machine description into a placement-problem-style-graph", "# where the vertices are chip coordinate tuples (x, y) and each net", "# represents the links leaving each chip. This allows us to re-use the", "# rcm_vertex_order function above to generate an RCM ordering of chips in", "# the machine.", "vertices", "=", "list", "(", "machine", ")", "nets", "=", "[", "]", "for", "(", "x", ",", "y", ")", "in", "vertices", ":", "neighbours", "=", "[", "]", "for", "link", "in", "Links", ":", "if", "(", "x", ",", "y", ",", "link", ")", "in", "machine", ":", "dx", ",", "dy", "=", "link", ".", "to_vector", "(", ")", "neighbour", "=", "(", "(", "x", "+", "dx", ")", "%", "machine", ".", "width", ",", "(", "y", "+", "dy", ")", "%", "machine", ".", "height", ")", "# In principle if the link to chip is marked as working, that", "# chip should be working. In practice this might not be the", "# case (especially for carelessly hand-defined Machine", "# objects).", "if", "neighbour", "in", "machine", ":", "neighbours", ".", "append", "(", "neighbour", ")", "nets", ".", "append", "(", "Net", "(", "(", "x", ",", "y", ")", ",", "neighbours", ")", ")", "return", "rcm_vertex_order", "(", "vertices", ",", "nets", ")" ]
41.233333
0.00079
[ "def rcm_chip_order(machine):\n", " \"\"\"A generator which iterates over a set of chips in a machine in\n", " Reverse-Cuthill-McKee order.\n", "\n", " For use as a chip ordering for the sequential placer.\n", " \"\"\"\n", " # Convert the Machine description into a placement-problem-style-graph\n", " # where the vertices are chip coordinate tuples (x, y) and each net\n", " # represents the links leaving each chip. This allows us to re-use the\n", " # rcm_vertex_order function above to generate an RCM ordering of chips in\n", " # the machine.\n", " vertices = list(machine)\n", " nets = []\n", " for (x, y) in vertices:\n", " neighbours = []\n", " for link in Links:\n", " if (x, y, link) in machine:\n", " dx, dy = link.to_vector()\n", " neighbour = ((x + dx) % machine.width,\n", " (y + dy) % machine.height)\n", "\n", " # In principle if the link to chip is marked as working, that\n", " # chip should be working. In practice this might not be the\n", " # case (especially for carelessly hand-defined Machine\n", " # objects).\n", " if neighbour in machine:\n", " neighbours.append(neighbour)\n", " nets.append(Net((x, y), neighbours))\n", "\n", " return rcm_vertex_order(vertices, nets)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.023255813953488372 ]
30
0.000775
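rcm_chip_order above recasts the chip grid as vertices plus nets of working neighbour links so that a generic Reverse-Cuthill-McKee ordering (rcm_vertex_order, not shown) can be reused. The core of RCM is a breadth-first traversal that starts from a low-degree vertex, visits neighbours in increasing degree order, and finally reverses the visit order; a compact standalone version for an undirected adjacency dict, independent of the library's own implementation:

from collections import deque

def rcm_order(adj):
    """Reverse-Cuthill-McKee ordering of an undirected adjacency-dict graph."""
    degree = {v: len(ns) for v, ns in adj.items()}
    visited, order = set(), []
    for start in sorted(adj, key=degree.get):          # lowest-degree starts first
        if start in visited:
            continue
        queue = deque([start])
        visited.add(start)
        while queue:
            v = queue.popleft()
            order.append(v)
            for n in sorted(adj[v], key=degree.get):   # neighbours by degree
                if n not in visited:
                    visited.add(n)
                    queue.append(n)
    return order[::-1]                                 # reverse for RCM

grid = {(0, 0): [(0, 1), (1, 0)], (0, 1): [(0, 0), (1, 1)],
        (1, 0): [(0, 0), (1, 1)], (1, 1): [(0, 1), (1, 0)]}
print(rcm_order(grid))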
def add_user(username, deployment_name, token_manager=None, app_url=defaults.APP_URL): """ add user to deployment """ deployment_id = get_deployment_id(deployment_name, token_manager=token_manager, app_url=app_url) account_id = accounts.get_account_id(username, token_manager=token_manager, app_url=app_url) headers = token_manager.get_access_token_headers() deployment_url = environment.get_deployment_url(app_url=app_url) response = requests.put('%s/api/v1/deployments/%s/accounts/%s' % (deployment_url, deployment_id, account_id), headers=headers) if response.status_code == 204: return response.text else: raise JutException('Error %s: %s' % (response.status_code, response.text))
[ "def", "add_user", "(", "username", ",", "deployment_name", ",", "token_manager", "=", "None", ",", "app_url", "=", "defaults", ".", "APP_URL", ")", ":", "deployment_id", "=", "get_deployment_id", "(", "deployment_name", ",", "token_manager", "=", "token_manager", ",", "app_url", "=", "app_url", ")", "account_id", "=", "accounts", ".", "get_account_id", "(", "username", ",", "token_manager", "=", "token_manager", ",", "app_url", "=", "app_url", ")", "headers", "=", "token_manager", ".", "get_access_token_headers", "(", ")", "deployment_url", "=", "environment", ".", "get_deployment_url", "(", "app_url", "=", "app_url", ")", "response", "=", "requests", ".", "put", "(", "'%s/api/v1/deployments/%s/accounts/%s'", "%", "(", "deployment_url", ",", "deployment_id", ",", "account_id", ")", ",", "headers", "=", "headers", ")", "if", "response", ".", "status_code", "==", "204", ":", "return", "response", ".", "text", "else", ":", "raise", "JutException", "(", "'Error %s: %s'", "%", "(", "response", ".", "status_code", ",", "response", ".", "text", ")", ")" ]
37.384615
0.002006
[ "def add_user(username,\n", " deployment_name,\n", " token_manager=None,\n", " app_url=defaults.APP_URL):\n", " \"\"\"\n", " add user to deployment\n", "\n", " \"\"\"\n", " deployment_id = get_deployment_id(deployment_name,\n", " token_manager=token_manager,\n", " app_url=app_url)\n", "\n", " account_id = accounts.get_account_id(username,\n", " token_manager=token_manager,\n", " app_url=app_url)\n", "\n", " headers = token_manager.get_access_token_headers()\n", " deployment_url = environment.get_deployment_url(app_url=app_url)\n", " response = requests.put('%s/api/v1/deployments/%s/accounts/%s' %\n", " (deployment_url, deployment_id, account_id),\n", " headers=headers)\n", "\n", " if response.status_code == 204:\n", " return response.text\n", " else:\n", " raise JutException('Error %s: %s' % (response.status_code, response.text))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025 ]
26
0.000938
def plotcdf(x,xmin,alpha): """ Plots CDF and powerlaw """ x=sort(x) n=len(x) xcdf = arange(n,0,-1,dtype='float')/float(n) q = x[x>=xmin] fcdf = (q/xmin)**(1-alpha) nc = xcdf[argmax(x>=xmin)] fcdf_norm = nc*fcdf loglog(x,xcdf) loglog(q,fcdf_norm)
[ "def", "plotcdf", "(", "x", ",", "xmin", ",", "alpha", ")", ":", "x", "=", "sort", "(", "x", ")", "n", "=", "len", "(", "x", ")", "xcdf", "=", "arange", "(", "n", ",", "0", ",", "-", "1", ",", "dtype", "=", "'float'", ")", "/", "float", "(", "n", ")", "q", "=", "x", "[", "x", ">=", "xmin", "]", "fcdf", "=", "(", "q", "/", "xmin", ")", "**", "(", "1", "-", "alpha", ")", "nc", "=", "xcdf", "[", "argmax", "(", "x", ">=", "xmin", ")", "]", "fcdf_norm", "=", "nc", "*", "fcdf", "loglog", "(", "x", ",", "xcdf", ")", "loglog", "(", "q", ",", "fcdf_norm", ")" ]
17.5625
0.040541
[ "def plotcdf(x,xmin,alpha):\n", " \"\"\"\n", " Plots CDF and powerlaw\n", " \"\"\"\n", "\n", " x=sort(x)\n", " n=len(x)\n", " xcdf = arange(n,0,-1,dtype='float')/float(n)\n", "\n", " q = x[x>=xmin]\n", " fcdf = (q/xmin)**(1-alpha)\n", " nc = xcdf[argmax(x>=xmin)]\n", " fcdf_norm = nc*fcdf\n", "\n", " loglog(x,xcdf)\n", " loglog(q,fcdf_norm)" ]
[ 0.07407407407407407, 0, 0, 0, 0, 0.07142857142857142, 0.07692307692307693, 0.061224489795918366, 0, 0.05263157894736842, 0, 0.03225806451612903, 0, 0, 0.05263157894736842, 0.08695652173913043 ]
16
0.031758
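plotcdf above overlays the empirical complementary CDF of the data with the analytic power-law tail P(X >= x) = (x / xmin) ** (1 - alpha), rescaled so the model curve meets the data at xmin. The same quantities without the plotting calls, in explicit NumPy; the Pareto sample is only a stand-in for real data:

import numpy as np

def powerlaw_ccdf(x, xmin, alpha):
    x = np.sort(x)
    n = len(x)
    ccdf = np.arange(n, 0, -1) / n              # empirical P(X >= x_i)
    tail = x[x >= xmin]
    model = (tail / xmin) ** (1.0 - alpha)      # analytic tail shape
    scale = ccdf[np.argmax(x >= xmin)]          # match the data at xmin
    return x, ccdf, tail, scale * model

rng = np.random.default_rng(0)
samples = rng.pareto(1.5, size=1000) + 1.0      # pdf ~ x**-2.5 above x = 1
x, ccdf, tail, fit = powerlaw_ccdf(samples, xmin=1.0, alpha=2.5)
print(ccdf[:3], fit[:3])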
def unlock_wallet(self, *args, **kwargs): """ Unlock the library internal wallet """ self.blockchain.wallet.unlock(*args, **kwargs) return self
[ "def", "unlock_wallet", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "blockchain", ".", "wallet", ".", "unlock", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self" ]
34.2
0.011429
[ "def unlock_wallet(self, *args, **kwargs):\n", " \"\"\" Unlock the library internal wallet\n", " \"\"\"\n", " self.blockchain.wallet.unlock(*args, **kwargs)\n", " return self" ]
[ 0, 0.02127659574468085, 0, 0, 0.05263157894736842 ]
5
0.014782
def _check_stop_conditions(self, sensor_graph): """Check if any of our stop conditions are met. Args: sensor_graph (SensorGraph): The sensor graph we are currently simulating Returns: bool: True if we should stop the simulation """ for stop in self.stop_conditions: if stop.should_stop(self.tick_count, self.tick_count - self._start_tick, sensor_graph): return True return False
[ "def", "_check_stop_conditions", "(", "self", ",", "sensor_graph", ")", ":", "for", "stop", "in", "self", ".", "stop_conditions", ":", "if", "stop", ".", "should_stop", "(", "self", ".", "tick_count", ",", "self", ".", "tick_count", "-", "self", ".", "_start_tick", ",", "sensor_graph", ")", ":", "return", "True", "return", "False" ]
31.2
0.008299
[ "def _check_stop_conditions(self, sensor_graph):\n", " \"\"\"Check if any of our stop conditions are met.\n", "\n", " Args:\n", " sensor_graph (SensorGraph): The sensor graph we are currently simulating\n", "\n", " Returns:\n", " bool: True if we should stop the simulation\n", " \"\"\"\n", "\n", " for stop in self.stop_conditions:\n", " if stop.should_stop(self.tick_count, self.tick_count - self._start_tick, sensor_graph):\n", " return True\n", "\n", " return False" ]
[ 0, 0.017857142857142856, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0.05 ]
15
0.005975
def local_2d_halo_exchange(k, v, num_h_blocks, h_dim, num_w_blocks, w_dim, mask_right): """Halo exchange for keys and values for Local 2D attention.""" for blocks_dim, block_size_dim, halo_size in [ (num_h_blocks, h_dim, h_dim.size), (num_w_blocks, w_dim, w_dim.size)]: # shape of k is [num_h_blocks, num_w_blocks, h_dim, w_dim, kv_channels] if halo_size > 0: if blocks_dim is not None: if mask_right: k = mtf.left_halo_exchange(k, blocks_dim, block_size_dim, halo_size) v = mtf.left_halo_exchange(v, blocks_dim, block_size_dim, halo_size) else: k = mtf.halo_exchange(k, blocks_dim, block_size_dim, halo_size) v = mtf.halo_exchange(v, blocks_dim, block_size_dim, halo_size) else: if mask_right: k = mtf.pad(k, [halo_size, None], block_size_dim.name) v = mtf.pad(v, [halo_size, None], block_size_dim.name) else: k = mtf.pad(k, [halo_size, halo_size], block_size_dim.name) v = mtf.pad(v, [halo_size, halo_size], block_size_dim.name) return k, v
[ "def", "local_2d_halo_exchange", "(", "k", ",", "v", ",", "num_h_blocks", ",", "h_dim", ",", "num_w_blocks", ",", "w_dim", ",", "mask_right", ")", ":", "for", "blocks_dim", ",", "block_size_dim", ",", "halo_size", "in", "[", "(", "num_h_blocks", ",", "h_dim", ",", "h_dim", ".", "size", ")", ",", "(", "num_w_blocks", ",", "w_dim", ",", "w_dim", ".", "size", ")", "]", ":", "# shape of k is [num_h_blocks, num_w_blocks, h_dim, w_dim, kv_channels]", "if", "halo_size", ">", "0", ":", "if", "blocks_dim", "is", "not", "None", ":", "if", "mask_right", ":", "k", "=", "mtf", ".", "left_halo_exchange", "(", "k", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "v", "=", "mtf", ".", "left_halo_exchange", "(", "v", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "else", ":", "k", "=", "mtf", ".", "halo_exchange", "(", "k", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "v", "=", "mtf", ".", "halo_exchange", "(", "v", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "else", ":", "if", "mask_right", ":", "k", "=", "mtf", ".", "pad", "(", "k", ",", "[", "halo_size", ",", "None", "]", ",", "block_size_dim", ".", "name", ")", "v", "=", "mtf", ".", "pad", "(", "v", ",", "[", "halo_size", ",", "None", "]", ",", "block_size_dim", ".", "name", ")", "else", ":", "k", "=", "mtf", ".", "pad", "(", "k", ",", "[", "halo_size", ",", "halo_size", "]", ",", "block_size_dim", ".", "name", ")", "v", "=", "mtf", ".", "pad", "(", "v", ",", "[", "halo_size", ",", "halo_size", "]", ",", "block_size_dim", ".", "name", ")", "return", "k", ",", "v" ]
47.695652
0.013405
[ "def local_2d_halo_exchange(k, v, num_h_blocks, h_dim,\n", " num_w_blocks, w_dim, mask_right):\n", " \"\"\"Halo exchange for keys and values for Local 2D attention.\"\"\"\n", " for blocks_dim, block_size_dim, halo_size in [\n", " (num_h_blocks, h_dim, h_dim.size),\n", " (num_w_blocks, w_dim, w_dim.size)]:\n", " # shape of k is [num_h_blocks, num_w_blocks, h_dim, w_dim, kv_channels]\n", " if halo_size > 0:\n", " if blocks_dim is not None:\n", " if mask_right:\n", " k = mtf.left_halo_exchange(k, blocks_dim, block_size_dim, halo_size)\n", " v = mtf.left_halo_exchange(v, blocks_dim, block_size_dim, halo_size)\n", " else:\n", " k = mtf.halo_exchange(k, blocks_dim, block_size_dim, halo_size)\n", " v = mtf.halo_exchange(v, blocks_dim, block_size_dim, halo_size)\n", " else:\n", " if mask_right:\n", " k = mtf.pad(k, [halo_size, None], block_size_dim.name)\n", " v = mtf.pad(v, [halo_size, None], block_size_dim.name)\n", " else:\n", " k = mtf.pad(k, [halo_size, halo_size], block_size_dim.name)\n", " v = mtf.pad(v, [halo_size, halo_size], block_size_dim.name)\n", " return k, v" ]
[ 0, 0, 0.015151515151515152, 0.02040816326530612, 0, 0.023809523809523808, 0, 0, 0.030303030303030304, 0, 0.012658227848101266, 0.012658227848101266, 0, 0.013513513513513514, 0.013513513513513514, 0.08333333333333333, 0, 0.015384615384615385, 0.015384615384615385, 0, 0.014285714285714285, 0.014285714285714285, 0.15384615384615385 ]
23
0.019067
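local_2d_halo_exchange above widens each attention block with a halo of keys and values taken from the neighbouring block, wrapping around when the blocks dimension is actually split and padding otherwise. A small NumPy sketch of the wrap-around "left halo" case along one blocked axis; real Mesh TensorFlow tensors and its halo-exchange collectives are not involved here:

import numpy as np

def left_halo_exchange(blocks, halo):
    """Prepend the last `halo` rows of each block's left neighbour (wrapping).

    blocks has shape (num_blocks, block_size, channels).
    """
    left_neighbour = np.roll(blocks, shift=1, axis=0)   # block i-1, with wrap-around
    halo_part = left_neighbour[:, -halo:, :]
    return np.concatenate([halo_part, blocks], axis=1)

blocks = np.arange(2 * 4 * 1).reshape(2, 4, 1)
print(left_halo_exchange(blocks, halo=2)[..., 0])
# block 0 is prefixed with [6, 7] from block 1, block 1 with [2, 3] from block 0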
def from_json(graph_json_dict: Mapping[str, Any], check_version=True) -> BELGraph: """Build a graph from Node-Link JSON Object.""" graph = node_link_graph(graph_json_dict) return ensure_version(graph, check_version=check_version)
[ "def", "from_json", "(", "graph_json_dict", ":", "Mapping", "[", "str", ",", "Any", "]", ",", "check_version", "=", "True", ")", "->", "BELGraph", ":", "graph", "=", "node_link_graph", "(", "graph_json_dict", ")", "return", "ensure_version", "(", "graph", ",", "check_version", "=", "check_version", ")" ]
59.5
0.008299
[ "def from_json(graph_json_dict: Mapping[str, Any], check_version=True) -> BELGraph:\n", " \"\"\"Build a graph from Node-Link JSON Object.\"\"\"\n", " graph = node_link_graph(graph_json_dict)\n", " return ensure_version(graph, check_version=check_version)" ]
[ 0.012048192771084338, 0, 0, 0.01639344262295082 ]
4
0.00711
def _get_uniprot_id(agent): """Return the UniProt ID for an agent, looking up in HGNC if necessary. If the UniProt ID is a list then return the first ID by default. """ up_id = agent.db_refs.get('UP') hgnc_id = agent.db_refs.get('HGNC') if up_id is None: if hgnc_id is None: # If both UniProt and HGNC refs are missing we can't # sequence check and so don't report a failure. return None # Try to get UniProt ID from HGNC up_id = hgnc_client.get_uniprot_id(hgnc_id) # If this fails, again, we can't sequence check if up_id is None: return None # If the UniProt ID is a list then choose the first one. if not isinstance(up_id, basestring) and \ isinstance(up_id[0], basestring): up_id = up_id[0] return up_id
[ "def", "_get_uniprot_id", "(", "agent", ")", ":", "up_id", "=", "agent", ".", "db_refs", ".", "get", "(", "'UP'", ")", "hgnc_id", "=", "agent", ".", "db_refs", ".", "get", "(", "'HGNC'", ")", "if", "up_id", "is", "None", ":", "if", "hgnc_id", "is", "None", ":", "# If both UniProt and HGNC refs are missing we can't", "# sequence check and so don't report a failure.", "return", "None", "# Try to get UniProt ID from HGNC", "up_id", "=", "hgnc_client", ".", "get_uniprot_id", "(", "hgnc_id", ")", "# If this fails, again, we can't sequence check", "if", "up_id", "is", "None", ":", "return", "None", "# If the UniProt ID is a list then choose the first one.", "if", "not", "isinstance", "(", "up_id", ",", "basestring", ")", "and", "isinstance", "(", "up_id", "[", "0", "]", ",", "basestring", ")", ":", "up_id", "=", "up_id", "[", "0", "]", "return", "up_id" ]
37.545455
0.001181
[ "def _get_uniprot_id(agent):\n", " \"\"\"Return the UniProt ID for an agent, looking up in HGNC if necessary.\n", "\n", " If the UniProt ID is a list then return the first ID by default.\n", " \"\"\"\n", " up_id = agent.db_refs.get('UP')\n", " hgnc_id = agent.db_refs.get('HGNC')\n", " if up_id is None:\n", " if hgnc_id is None:\n", " # If both UniProt and HGNC refs are missing we can't\n", " # sequence check and so don't report a failure.\n", " return None\n", " # Try to get UniProt ID from HGNC\n", " up_id = hgnc_client.get_uniprot_id(hgnc_id)\n", " # If this fails, again, we can't sequence check\n", " if up_id is None:\n", " return None\n", " # If the UniProt ID is a list then choose the first one.\n", " if not isinstance(up_id, basestring) and \\\n", " isinstance(up_id[0], basestring):\n", " up_id = up_id[0]\n", " return up_id" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625 ]
22
0.002841
def purge_url(self, host, path): """Purge an individual URL.""" content = self._fetch(path, method="PURGE", headers={ "Host": host }) return FastlyPurge(self, content)
[ "def", "purge_url", "(", "self", ",", "host", ",", "path", ")", ":", "content", "=", "self", ".", "_fetch", "(", "path", ",", "method", "=", "\"PURGE\"", ",", "headers", "=", "{", "\"Host\"", ":", "host", "}", ")", "return", "FastlyPurge", "(", "self", ",", "content", ")" ]
42.75
0.051724
[ "def\tpurge_url(self, host, path):\n", "\t\t\"\"\"Purge an individual URL.\"\"\"\n", "\t\tcontent = self._fetch(path, method=\"PURGE\", headers={ \"Host\": host }) \n", "\t\treturn FastlyPurge(self, content)" ]
[ 0.030303030303030304, 0.06060606060606061, 0.0547945205479452, 0.05714285714285714 ]
4
0.050712
def transform_position_array(array, pos, euler, is_normal, reverse=False): """ Transform any Nx3 position array by translating to a center-of-mass 'pos' and applying an euler transformation :parameter array array: numpy array of Nx3 positions in the original (star) coordinate frame :parameter array pos: numpy array with length 3 giving cartesian coordinates to offset all positions :parameter array euler: euler angles (etheta, elongan, eincl) in radians :parameter bool is_normal: whether each entry is a normal vector rather than position vector. If true, the quantities won't be offset by 'pos' :return: new positions array with same shape as 'array'. """ trans_matrix = euler_trans_matrix(*euler) if not reverse: trans_matrix = trans_matrix.T if isinstance(array, ComputedColumn): array = array.for_computations if is_normal: # then we don't do an offset by the position return np.dot(np.asarray(array), trans_matrix) else: return np.dot(np.asarray(array), trans_matrix) + np.asarray(pos)
[ "def", "transform_position_array", "(", "array", ",", "pos", ",", "euler", ",", "is_normal", ",", "reverse", "=", "False", ")", ":", "trans_matrix", "=", "euler_trans_matrix", "(", "*", "euler", ")", "if", "not", "reverse", ":", "trans_matrix", "=", "trans_matrix", ".", "T", "if", "isinstance", "(", "array", ",", "ComputedColumn", ")", ":", "array", "=", "array", ".", "for_computations", "if", "is_normal", ":", "# then we don't do an offset by the position", "return", "np", ".", "dot", "(", "np", ".", "asarray", "(", "array", ")", ",", "trans_matrix", ")", "else", ":", "return", "np", ".", "dot", "(", "np", ".", "asarray", "(", "array", ")", ",", "trans_matrix", ")", "+", "np", ".", "asarray", "(", "pos", ")" ]
39.285714
0.000887
[ "def transform_position_array(array, pos, euler, is_normal, reverse=False):\n", " \"\"\"\n", " Transform any Nx3 position array by translating to a center-of-mass 'pos'\n", " and applying an euler transformation\n", "\n", " :parameter array array: numpy array of Nx3 positions in the original (star)\n", " coordinate frame\n", " :parameter array pos: numpy array with length 3 giving cartesian\n", " coordinates to offset all positions\n", " :parameter array euler: euler angles (etheta, elongan, eincl) in radians\n", " :parameter bool is_normal: whether each entry is a normal vector rather\n", " than position vector. If true, the quantities won't be offset by\n", " 'pos'\n", " :return: new positions array with same shape as 'array'.\n", " \"\"\"\n", " trans_matrix = euler_trans_matrix(*euler)\n", "\n", " if not reverse:\n", " trans_matrix = trans_matrix.T\n", "\n", " if isinstance(array, ComputedColumn):\n", " array = array.for_computations\n", "\n", " if is_normal:\n", " # then we don't do an offset by the position\n", " return np.dot(np.asarray(array), trans_matrix)\n", " else:\n", " return np.dot(np.asarray(array), trans_matrix) + np.asarray(pos)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013888888888888888 ]
28
0.000496
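transform_position_array above applies a 3x3 Euler rotation to an Nx3 array of positions and then offsets by a centre-of-mass vector, skipping the offset for normal vectors. The exact angle convention lives in the unshown euler_trans_matrix, so the z-x-z order below is only an assumption; the sketch uses SciPy's Rotation to show the overall pattern:

import numpy as np
from scipy.spatial.transform import Rotation

def transform_positions(array, pos, euler, is_normal=False):
    etheta, elongan, eincl = euler
    rot = Rotation.from_euler("zxz", [elongan, eincl, etheta])   # assumed order
    rotated = rot.apply(np.asarray(array, dtype=float))
    # normals are direction vectors, so they rotate but are never translated
    return rotated if is_normal else rotated + np.asarray(pos, dtype=float)

points = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
print(transform_positions(points, pos=[0.0, 0.0, 1.0],
                          euler=(np.pi / 2, 0.0, 0.0)))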
def _find_by(self, key): """Find devices.""" by_path = glob.glob('/dev/input/by-{key}/*-event-*'.format(key=key)) for device_path in by_path: self._parse_device_path(device_path)
[ "def", "_find_by", "(", "self", ",", "key", ")", ":", "by_path", "=", "glob", ".", "glob", "(", "'/dev/input/by-{key}/*-event-*'", ".", "format", "(", "key", "=", "key", ")", ")", "for", "device_path", "in", "by_path", ":", "self", ".", "_parse_device_path", "(", "device_path", ")" ]
42
0.009346
[ "def _find_by(self, key):\n", " \"\"\"Find devices.\"\"\"\n", " by_path = glob.glob('/dev/input/by-{key}/*-event-*'.format(key=key))\n", " for device_path in by_path:\n", " self._parse_device_path(device_path)" ]
[ 0, 0.03571428571428571, 0, 0, 0.020833333333333332 ]
5
0.01131
def download(self, force=False, silent=False): """Download from URL.""" def _download(): if self.url.startswith("http"): self._download_http(silent=silent) elif self.url.startswith("ftp"): self._download_ftp(silent=silent) else: raise ValueError("Invalid URL %s" % self.url) logger.debug("Moving %s to %s" % ( self._temp_file_name, self.destination)) shutil.move(self._temp_file_name, self.destination) logger.debug("Successfully downloaded %s" % self.url) try: is_already_downloaded = os.path.isfile(self.destination) if is_already_downloaded: if force: try: os.remove(self.destination) except Exception: logger.error("Cannot delete %s" % self.destination) logger.info( "Downloading %s to %s" % (self.url, self.destination)) logger.debug( "Downloading %s to %s" % (self.url, self._temp_file_name)) _download() else: logger.info(("File %s already exist. Use force=True if you" " would like to overwrite it.") % self.destination) else: _download() finally: try: os.remove(self._temp_file_name) except OSError: pass
[ "def", "download", "(", "self", ",", "force", "=", "False", ",", "silent", "=", "False", ")", ":", "def", "_download", "(", ")", ":", "if", "self", ".", "url", ".", "startswith", "(", "\"http\"", ")", ":", "self", ".", "_download_http", "(", "silent", "=", "silent", ")", "elif", "self", ".", "url", ".", "startswith", "(", "\"ftp\"", ")", ":", "self", ".", "_download_ftp", "(", "silent", "=", "silent", ")", "else", ":", "raise", "ValueError", "(", "\"Invalid URL %s\"", "%", "self", ".", "url", ")", "logger", ".", "debug", "(", "\"Moving %s to %s\"", "%", "(", "self", ".", "_temp_file_name", ",", "self", ".", "destination", ")", ")", "shutil", ".", "move", "(", "self", ".", "_temp_file_name", ",", "self", ".", "destination", ")", "logger", ".", "debug", "(", "\"Successfully downloaded %s\"", "%", "self", ".", "url", ")", "try", ":", "is_already_downloaded", "=", "os", ".", "path", ".", "isfile", "(", "self", ".", "destination", ")", "if", "is_already_downloaded", ":", "if", "force", ":", "try", ":", "os", ".", "remove", "(", "self", ".", "destination", ")", "except", "Exception", ":", "logger", ".", "error", "(", "\"Cannot delete %s\"", "%", "self", ".", "destination", ")", "logger", ".", "info", "(", "\"Downloading %s to %s\"", "%", "(", "self", ".", "url", ",", "self", ".", "destination", ")", ")", "logger", ".", "debug", "(", "\"Downloading %s to %s\"", "%", "(", "self", ".", "url", ",", "self", ".", "_temp_file_name", ")", ")", "_download", "(", ")", "else", ":", "logger", ".", "info", "(", "(", "\"File %s already exist. Use force=True if you\"", "\" would like to overwrite it.\"", ")", "%", "self", ".", "destination", ")", "else", ":", "_download", "(", ")", "finally", ":", "try", ":", "os", ".", "remove", "(", "self", ".", "_temp_file_name", ")", "except", "OSError", ":", "pass" ]
40.75
0.001198
[ "def download(self, force=False, silent=False):\n", " \"\"\"Download from URL.\"\"\"\n", " def _download():\n", " if self.url.startswith(\"http\"):\n", " self._download_http(silent=silent)\n", " elif self.url.startswith(\"ftp\"):\n", " self._download_ftp(silent=silent)\n", " else:\n", " raise ValueError(\"Invalid URL %s\" % self.url)\n", " logger.debug(\"Moving %s to %s\" % (\n", " self._temp_file_name,\n", " self.destination))\n", " shutil.move(self._temp_file_name, self.destination)\n", " logger.debug(\"Successfully downloaded %s\" % self.url)\n", "\n", " try:\n", " is_already_downloaded = os.path.isfile(self.destination)\n", " if is_already_downloaded:\n", " if force:\n", " try:\n", " os.remove(self.destination)\n", " except Exception:\n", " logger.error(\"Cannot delete %s\" % self.destination)\n", " logger.info(\n", " \"Downloading %s to %s\" % (self.url, self.destination))\n", " logger.debug(\n", " \"Downloading %s to %s\" % (self.url,\n", " self._temp_file_name))\n", " _download()\n", " else:\n", " logger.info((\"File %s already exist. Use force=True if you\"\n", " \" would like to overwrite it.\") %\n", " self.destination)\n", " else:\n", " _download()\n", " finally:\n", " try:\n", " os.remove(self._temp_file_name)\n", " except OSError:\n", " pass" ]
[ 0, 0.030303030303030304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05 ]
40
0.002008
def parse_line(self, statement, element, mode): """As part of real-time update, parses the statement and adjusts the attributes of the specified CustomType instance to reflect the changes. :arg statement: the lines of code that was added/removed/changed on the element after it had alread been parsed. The lines together form a single continuous code statement. :arg element: the CustomType instance to update. :arg mode: 'insert', or 'delete'. """ if element.incomplete: #We need to check for the end_token so we can close up the incomplete #status for the instance. if element.end_token in statement: element.incomplete = False return #This method deals with updating the *body* of the type declaration. The only #possible entries in the body are member variable declarations and type #executable definitions. self._process_execs_contents(statement, element.module.name, element, mode) self._rt_parse_members(statement, element, mode)
[ "def", "parse_line", "(", "self", ",", "statement", ",", "element", ",", "mode", ")", ":", "if", "element", ".", "incomplete", ":", "#We need to check for the end_token so we can close up the incomplete", "#status for the instance.", "if", "element", ".", "end_token", "in", "statement", ":", "element", ".", "incomplete", "=", "False", "return", "#This method deals with updating the *body* of the type declaration. The only", "#possible entries in the body are member variable declarations and type", "#executable definitions.", "self", ".", "_process_execs_contents", "(", "statement", ",", "element", ".", "module", ".", "name", ",", "element", ",", "mode", ")", "self", ".", "_rt_parse_members", "(", "statement", ",", "element", ",", "mode", ")" ]
50.363636
0.0124
[ "def parse_line(self, statement, element, mode):\n", " \"\"\"As part of real-time update, parses the statement and adjusts the attributes\n", " of the specified CustomType instance to reflect the changes.\n", "\n", " :arg statement: the lines of code that was added/removed/changed on the \n", " element after it had alread been parsed. The lines together form a single\n", " continuous code statement.\n", " :arg element: the CustomType instance to update.\n", " :arg mode: 'insert', or 'delete'.\n", " \"\"\"\n", " if element.incomplete:\n", " #We need to check for the end_token so we can close up the incomplete\n", " #status for the instance.\n", " if element.end_token in statement:\n", " element.incomplete = False\n", " return\n", "\n", " #This method deals with updating the *body* of the type declaration. The only\n", " #possible entries in the body are member variable declarations and type\n", " #executable definitions.\n", " self._process_execs_contents(statement, element.module.name, element, mode) \n", " self._rt_parse_members(statement, element, mode)" ]
[ 0, 0.022727272727272728, 0, 0, 0.012345679012345678, 0.011904761904761904, 0, 0, 0, 0, 0, 0.024390243902439025, 0.02631578947368421, 0, 0, 0, 0, 0.023255813953488372, 0.0125, 0.030303030303030304, 0.022222222222222223, 0.017857142857142856 ]
22
0.009265
def envs(self): ''' Check the refs and return a list of the ones which can be used as salt environments. ''' ref_paths = [x.path for x in self.repo.refs] return self._get_envs_from_ref_paths(ref_paths)
[ "def", "envs", "(", "self", ")", ":", "ref_paths", "=", "[", "x", ".", "path", "for", "x", "in", "self", ".", "repo", ".", "refs", "]", "return", "self", ".", "_get_envs_from_ref_paths", "(", "ref_paths", ")" ]
34.714286
0.008032
[ "def envs(self):\n", " '''\n", " Check the refs and return a list of the ones which can be used as salt\n", " environments.\n", " '''\n", " ref_paths = [x.path for x in self.repo.refs]\n", " return self._get_envs_from_ref_paths(ref_paths)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0.01818181818181818 ]
7
0.014502
def render(self): """Render the axes data into the dict data""" for opt,values in self.data.items(): if opt == 'ticks': self['chxtc'] = '|'.join(values) else: self['chx%s'%opt[0]] = '|'.join(values) return self
[ "def", "render", "(", "self", ")", ":", "for", "opt", ",", "values", "in", "self", ".", "data", ".", "items", "(", ")", ":", "if", "opt", "==", "'ticks'", ":", "self", "[", "'chxtc'", "]", "=", "'|'", ".", "join", "(", "values", ")", "else", ":", "self", "[", "'chx%s'", "%", "opt", "[", "0", "]", "]", "=", "'|'", ".", "join", "(", "values", ")", "return", "self" ]
35.375
0.013793
[ "def render(self):\n", " \"\"\"Render the axes data into the dict data\"\"\"\n", " for opt,values in self.data.items():\n", " if opt == 'ticks':\n", " self['chxtc'] = '|'.join(values)\n", " else:\n", " self['chx%s'%opt[0]] = '|'.join(values)\n", " return self" ]
[ 0, 0.018518518518518517, 0.022222222222222223, 0, 0, 0, 0.017857142857142856, 0.05263157894736842 ]
8
0.013904
def s_to_ev(offset_us, source_to_detector_m, array): """convert time (s) to energy (eV) Parameters: =========== numpy array of time in s offset_us: float. Delay of detector in us source_to_detector_m: float. Distance source to detector in m Returns: ======== numpy array of energy in eV """ lambda_a = 3956. * (array + offset_us * 1e-6) / source_to_detector_m return (81.787 / pow(lambda_a, 2)) / 1000.
[ "def", "s_to_ev", "(", "offset_us", ",", "source_to_detector_m", ",", "array", ")", ":", "lambda_a", "=", "3956.", "*", "(", "array", "+", "offset_us", "*", "1e-6", ")", "/", "source_to_detector_m", "return", "(", "81.787", "/", "pow", "(", "lambda_a", ",", "2", ")", ")", "/", "1000." ]
31.285714
0.002217
[ "def s_to_ev(offset_us, source_to_detector_m, array):\n", " \"\"\"convert time (s) to energy (eV)\n", " Parameters:\n", " ===========\n", " numpy array of time in s\n", " offset_us: float. Delay of detector in us\n", " source_to_detector_m: float. Distance source to detector in m\n", "\n", " Returns:\n", " ========\n", " numpy array of energy in eV\n", " \"\"\"\n", " lambda_a = 3956. * (array + offset_us * 1e-6) / source_to_detector_m\n", " return (81.787 / pow(lambda_a, 2)) / 1000." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608 ]
14
0.001553
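s_to_ev above chains two standard neutron conversions: wavelength in Angstrom is 3956 * t / L for time of flight t in seconds over a flight path L in metres, and 81.787 / lambda**2 then gives the energy in meV, which the final division by 1000 turns into eV. A worked number with assumed instrument values:

def s_to_ev(offset_us, source_to_detector_m, t_s):
    lambda_angstrom = 3956.0 * (t_s + offset_us * 1e-6) / source_to_detector_m
    return (81.787 / lambda_angstrom ** 2) / 1000.0   # eV

# e.g. a 16 m flight path, 2.7 us detector delay, 10 ms time of flight
print(s_to_ev(offset_us=2.7, source_to_detector_m=16.0, t_s=0.010))
# roughly 0.013 eV (about 13 meV), i.e. a thermal neutron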
def get_uaa(self): """ Returns an insstance of the UAA Service. """ import predix.security.uaa uaa = predix.security.uaa.UserAccountAuthentication() return uaa
[ "def", "get_uaa", "(", "self", ")", ":", "import", "predix", ".", "security", ".", "uaa", "uaa", "=", "predix", ".", "security", ".", "uaa", ".", "UserAccountAuthentication", "(", ")", "return", "uaa" ]
28.714286
0.009662
[ "def get_uaa(self):\n", " \"\"\"\n", " Returns an insstance of the UAA Service.\n", " \"\"\"\n", " import predix.security.uaa\n", " uaa = predix.security.uaa.UserAccountAuthentication()\n", " return uaa" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0.05555555555555555 ]
7
0.019841
def transform(line, known_fields=ENRICHED_EVENT_FIELD_TYPES, add_geolocation_data=True): """ Convert a Snowplow enriched event TSV into a JSON """ return jsonify_good_event(line.split('\t'), known_fields, add_geolocation_data)
[ "def", "transform", "(", "line", ",", "known_fields", "=", "ENRICHED_EVENT_FIELD_TYPES", ",", "add_geolocation_data", "=", "True", ")", ":", "return", "jsonify_good_event", "(", "line", ".", "split", "(", "'\\t'", ")", ",", "known_fields", ",", "add_geolocation_data", ")" ]
47.6
0.012397
[ "def transform(line, known_fields=ENRICHED_EVENT_FIELD_TYPES, add_geolocation_data=True):\n", " \"\"\"\n", " Convert a Snowplow enriched event TSV into a JSON\n", " \"\"\"\n", " return jsonify_good_event(line.split('\\t'), known_fields, add_geolocation_data)" ]
[ 0.011235955056179775, 0, 0, 0, 0.024096385542168676 ]
5
0.007066
def load_config(path=None, defaults=None): """ Loads and parses an INI style configuration file using Python's built-in configparser module. If path is specified, load it. If ``defaults`` (a list of strings) is given, try to load each entry as a file, without throwing any error if the operation fails. If ``defaults`` is not given, the following locations listed in the DEFAULT_FILES constant are tried. To completely disable defaults loading, pass in an empty list or ``False``. Returns the SafeConfigParser instance used to load and parse the files. """ if defaults is None: defaults = DEFAULT_FILES config = ConfigParser(allow_no_value=True) if defaults: config.read(defaults) if path: with open(path) as fh: config.read_file(fh) return config
[ "def", "load_config", "(", "path", "=", "None", ",", "defaults", "=", "None", ")", ":", "if", "defaults", "is", "None", ":", "defaults", "=", "DEFAULT_FILES", "config", "=", "ConfigParser", "(", "allow_no_value", "=", "True", ")", "if", "defaults", ":", "config", ".", "read", "(", "defaults", ")", "if", "path", ":", "with", "open", "(", "path", ")", "as", "fh", ":", "config", ".", "read_file", "(", "fh", ")", "return", "config" ]
32.96
0.001179
[ "def load_config(path=None, defaults=None):\n", " \"\"\"\n", " Loads and parses an INI style configuration file using Python's built-in\n", " configparser module. If path is specified, load it.\n", " If ``defaults`` (a list of strings) is given, try to load each entry as a\n", " file, without throwing any error if the operation fails.\n", " If ``defaults`` is not given, the following locations listed in the\n", " DEFAULT_FILES constant are tried.\n", " To completely disable defaults loading, pass in an empty list or ``False``.\n", " Returns the SafeConfigParser instance used to load and parse the files.\n", " \"\"\"\n", "\n", " if defaults is None:\n", " defaults = DEFAULT_FILES\n", "\n", " config = ConfigParser(allow_no_value=True)\n", "\n", " if defaults:\n", " config.read(defaults)\n", "\n", " if path:\n", " with open(path) as fh:\n", " config.read_file(fh)\n", "\n", " return config" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.058823529411764705 ]
25
0.002353
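A minimal, runnable sketch of the same ConfigParser pattern, using a throwaway INI file in place of a real path or the DEFAULT_FILES list.

from configparser import ConfigParser
import os
import tempfile

# Write a temporary INI file standing in for an explicit config path.
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as fh:
    fh.write("[server]\nhost = localhost\nport = 8080\n")
    path = fh.name

# Mirrors load_config(path=...): an explicit path is opened and parsed directly.
config = ConfigParser(allow_no_value=True)
with open(path) as fh:
    config.read_file(fh)

print(config.get("server", "port"))  # -> 8080
os.unlink(path)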
def _iter_from_annotations_dict(graph: BELGraph, annotations_dict: AnnotationsDict, ) -> Iterable[Tuple[str, Set[str]]]: """Iterate over the key/value pairs in this edge data dictionary normalized to their source URLs.""" for key, names in annotations_dict.items(): if key in graph.annotation_url: url = graph.annotation_url[key] elif key in graph.annotation_list: continue # skip those elif key in graph.annotation_pattern: log.debug('pattern annotation in database not implemented yet') # FIXME continue else: raise ValueError('Graph resources does not contain keyword: {}'.format(key)) yield url, set(names)
[ "def", "_iter_from_annotations_dict", "(", "graph", ":", "BELGraph", ",", "annotations_dict", ":", "AnnotationsDict", ",", ")", "->", "Iterable", "[", "Tuple", "[", "str", ",", "Set", "[", "str", "]", "]", "]", ":", "for", "key", ",", "names", "in", "annotations_dict", ".", "items", "(", ")", ":", "if", "key", "in", "graph", ".", "annotation_url", ":", "url", "=", "graph", ".", "annotation_url", "[", "key", "]", "elif", "key", "in", "graph", ".", "annotation_list", ":", "continue", "# skip those", "elif", "key", "in", "graph", ".", "annotation_pattern", ":", "log", ".", "debug", "(", "'pattern annotation in database not implemented yet not implemented'", ")", "# FIXME", "continue", "else", ":", "raise", "ValueError", "(", "'Graph resources does not contain keyword: {}'", ".", "format", "(", "key", ")", ")", "yield", "url", ",", "set", "(", "names", ")" ]
52.625
0.008168
[ "def _iter_from_annotations_dict(graph: BELGraph,\n", " annotations_dict: AnnotationsDict,\n", " ) -> Iterable[Tuple[str, Set[str]]]:\n", " \"\"\"Iterate over the key/value pairs in this edge data dictionary normalized to their source URLs.\"\"\"\n", " for key, names in annotations_dict.items():\n", " if key in graph.annotation_url:\n", " url = graph.annotation_url[key]\n", " elif key in graph.annotation_list:\n", " continue # skip those\n", " elif key in graph.annotation_pattern:\n", " log.debug('pattern annotation in database not implemented yet not implemented') # FIXME\n", " continue\n", " else:\n", " raise ValueError('Graph resources does not contain keyword: {}'.format(key))\n", "\n", " yield url, set(names)" ]
[ 0, 0.014084507042253521, 0.0136986301369863, 0.01834862385321101, 0, 0, 0, 0, 0, 0, 0.009523809523809525, 0, 0, 0.010752688172043012, 0, 0.030303030303030304 ]
16
0.006044
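A toy illustration of the key-to-URL normalization above, using plain dicts in place of a BELGraph; the resource URL is made up, and keys without a URL are simply skipped here rather than being split into list, pattern, and error cases.

from typing import Dict, Iterable, Set, Tuple

def iter_annotations(annotation_url: Dict[str, str],
                     annotations: Dict[str, Set[str]],
                     ) -> Iterable[Tuple[str, Set[str]]]:
    # Keys backed by a known resource URL are normalized to that URL.
    for key, names in annotations.items():
        if key in annotation_url:
            yield annotation_url[key], set(names)

urls = {"Disease": "https://example.org/disease.belanno"}  # hypothetical resource URL
print(list(iter_annotations(urls, {"Disease": {"asthma"}})))
# -> [('https://example.org/disease.belanno', {'asthma'})]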
def CopyToDict(self): """Copies the path specification to a dictionary. Returns: dict[str, object]: path specification attributes. """ path_spec_dict = {} for attribute_name, attribute_value in iter(self.__dict__.items()): if attribute_value is None: continue if attribute_name == 'parent': attribute_value = attribute_value.CopyToDict() path_spec_dict[attribute_name] = attribute_value return path_spec_dict
[ "def", "CopyToDict", "(", "self", ")", ":", "path_spec_dict", "=", "{", "}", "for", "attribute_name", ",", "attribute_value", "in", "iter", "(", "self", ".", "__dict__", ".", "items", "(", ")", ")", ":", "if", "attribute_value", "is", "None", ":", "continue", "if", "attribute_name", "==", "'parent'", ":", "attribute_value", "=", "attribute_value", ".", "CopyToDict", "(", ")", "path_spec_dict", "[", "attribute_name", "]", "=", "attribute_value", "return", "path_spec_dict" ]
27.058824
0.008403
[ "def CopyToDict(self):\n", " \"\"\"Copies the path specification to a dictionary.\n", "\n", " Returns:\n", " dict[str, object]: path specification attributes.\n", " \"\"\"\n", " path_spec_dict = {}\n", " for attribute_name, attribute_value in iter(self.__dict__.items()):\n", " if attribute_value is None:\n", " continue\n", "\n", " if attribute_name == 'parent':\n", " attribute_value = attribute_value.CopyToDict()\n", "\n", " path_spec_dict[attribute_name] = attribute_value\n", "\n", " return path_spec_dict" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0.029411764705882353, 0, 0, 0.02702702702702703, 0, 0, 0.01818181818181818, 0, 0.04 ]
17
0.006742
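A standalone sketch of the recursive __dict__-copy pattern shown above; Node is a hypothetical stand-in for a path specification with a 'parent' attribute.

class Node(object):
    def __init__(self, location, parent=None):
        self.location = location
        self.parent = parent

    def CopyToDict(self):
        # Copy every non-None attribute, serializing the parent chain recursively.
        result = {}
        for name, value in iter(self.__dict__.items()):
            if value is None:
                continue
            if name == 'parent':
                value = value.CopyToDict()
            result[name] = value
        return result

leaf = Node('/tmp/file.txt', parent=Node('/dev/sda1'))
print(leaf.CopyToDict())
# -> {'location': '/tmp/file.txt', 'parent': {'location': '/dev/sda1'}}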
def from_string(cls, s): """Create an instance from string s containing a YAML dictionary.""" stream = cStringIO(s) stream.seek(0) return cls(**yaml.safe_load(stream))
[ "def", "from_string", "(", "cls", ",", "s", ")", ":", "stream", "=", "cStringIO", "(", "s", ")", "stream", ".", "seek", "(", "0", ")", "return", "cls", "(", "*", "*", "yaml", ".", "safe_load", "(", "stream", ")", ")" ]
38.8
0.010101
[ "def from_string(cls, s):\n", " \"\"\"Create an istance from string s containing a YAML dictionary.\"\"\"\n", " stream = cStringIO(s)\n", " stream.seek(0)\n", " return cls(**yaml.safe_load(stream))" ]
[ 0, 0.013157894736842105, 0, 0, 0.022727272727272728 ]
5
0.007177
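A sketch of the same classmethod pattern under Python 3, assuming PyYAML is installed and using io.StringIO in place of cStringIO; Settings is a hypothetical consumer class.

from io import StringIO
import yaml

class Settings(object):
    def __init__(self, host=None, port=None):
        self.host, self.port = host, port

    @classmethod
    def from_string(cls, s):
        # Parse the YAML mapping and expand it into constructor keyword arguments.
        stream = StringIO(s)
        stream.seek(0)
        return cls(**yaml.safe_load(stream))

s = Settings.from_string("host: localhost\nport: 8080\n")
print(s.host, s.port)  # -> localhost 8080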
def read_config(config_path): """read config_path and return options as dictionary""" result = {} with open(config_path, 'r') as fd: for line in fd.readlines(): if '=' in line: key, value = line.split('=', 1) try: result[key] = json.loads(value) except ValueError: result[key] = value.rstrip('\n') return result
[ "def", "read_config", "(", "config_path", ")", ":", "result", "=", "{", "}", "with", "open", "(", "config_path", ",", "'r'", ")", "as", "fd", ":", "for", "line", "in", "fd", ".", "readlines", "(", ")", ":", "if", "'='", "in", "line", ":", "key", ",", "value", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "try", ":", "result", "[", "key", "]", "=", "json", ".", "loads", "(", "value", ")", "except", "ValueError", ":", "result", "[", "key", "]", "=", "value", ".", "rstrip", "(", "'\\n'", ")", "return", "result" ]
35.333333
0.002299
[ "def read_config(config_path):\n", " \"\"\"read config_path and return options as dictionary\"\"\"\n", " result = {}\n", " with open(config_path, 'r') as fd:\n", " for line in fd.readlines():\n", " if '=' in line:\n", " key, value = line.split('=', 1)\n", " try:\n", " result[key] = json.loads(value)\n", " except ValueError:\n", " result[key] = value.rstrip('\\n')\n", " return result" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.058823529411764705 ]
12
0.004902
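A minimal sketch of the key=value parsing with its JSON fallback, applied to in-memory lines instead of a file path.

import json

def parse_lines(lines):
    # Values that parse as JSON keep their type; everything else stays a string.
    result = {}
    for line in lines:
        if '=' in line:
            key, value = line.split('=', 1)
            try:
                result[key] = json.loads(value)
            except ValueError:
                result[key] = value.rstrip('\n')
    return result

print(parse_lines(['retries=3\n', 'name=worker\n', 'tags=["a", "b"]\n']))
# -> {'retries': 3, 'name': 'worker', 'tags': ['a', 'b']}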
def assign_hosting_device_to_cfg_agent(self, context, cfg_agent_id, hosting_device_id): """Make config agent handle an (unassigned) hosting device.""" hd_db = self._get_hosting_device(context, hosting_device_id) if hd_db.cfg_agent_id: if hd_db.cfg_agent_id == cfg_agent_id: return LOG.debug('Hosting device %(hd_id)s has already been assigned to ' 'Cisco cfg agent %(agent_id)s', {'hd_id': hosting_device_id, 'agent_id': cfg_agent_id}) raise ciscocfgagentscheduler.HostingDeviceAssignedToCfgAgent( hosting_device_id=hosting_device_id, agent_id=cfg_agent_id) cfg_agent_db = get_agent_db_obj(self._get_agent(context, cfg_agent_id)) if (cfg_agent_db.agent_type != c_constants.AGENT_TYPE_CFG or cfg_agent_db.admin_state_up is not True): raise ciscocfgagentscheduler.InvalidCfgAgent(agent_id=cfg_agent_id) self._bind_hosting_device_to_cfg_agent(context, hd_db, cfg_agent_db) cfg_notifier = self.agent_notifiers.get(c_constants.AGENT_TYPE_CFG) if cfg_notifier: cfg_notifier.hosting_devices_assigned_to_cfg_agent( context, [hosting_device_id], cfg_agent_db.host)
[ "def", "assign_hosting_device_to_cfg_agent", "(", "self", ",", "context", ",", "cfg_agent_id", ",", "hosting_device_id", ")", ":", "hd_db", "=", "self", ".", "_get_hosting_device", "(", "context", ",", "hosting_device_id", ")", "if", "hd_db", ".", "cfg_agent_id", ":", "if", "hd_db", ".", "cfg_agent_id", "==", "cfg_agent_id", ":", "return", "LOG", ".", "debug", "(", "'Hosting device %(hd_id)s has already been assigned to '", "'Cisco cfg agent %(agent_id)s'", ",", "{", "'hd_id'", ":", "hosting_device_id", ",", "'agent_id'", ":", "cfg_agent_id", "}", ")", "raise", "ciscocfgagentscheduler", ".", "HostingDeviceAssignedToCfgAgent", "(", "hosting_device_id", "=", "hosting_device_id", ",", "agent_id", "=", "cfg_agent_id", ")", "cfg_agent_db", "=", "get_agent_db_obj", "(", "self", ".", "_get_agent", "(", "context", ",", "cfg_agent_id", ")", ")", "if", "(", "cfg_agent_db", ".", "agent_type", "!=", "c_constants", ".", "AGENT_TYPE_CFG", "or", "cfg_agent_db", ".", "admin_state_up", "is", "not", "True", ")", ":", "raise", "ciscocfgagentscheduler", ".", "InvalidCfgAgent", "(", "agent_id", "=", "cfg_agent_id", ")", "self", ".", "_bind_hosting_device_to_cfg_agent", "(", "context", ",", "hd_db", ",", "cfg_agent_db", ")", "cfg_notifier", "=", "self", ".", "agent_notifiers", ".", "get", "(", "c_constants", ".", "AGENT_TYPE_CFG", ")", "if", "cfg_notifier", ":", "cfg_notifier", ".", "hosting_devices_assigned_to_cfg_agent", "(", "context", ",", "[", "hosting_device_id", "]", ",", "cfg_agent_db", ".", "host", ")" ]
62.380952
0.002256
[ "def assign_hosting_device_to_cfg_agent(self, context, cfg_agent_id,\n", " hosting_device_id):\n", " \"\"\"Make config agent handle an (unassigned) hosting device.\"\"\"\n", " hd_db = self._get_hosting_device(context, hosting_device_id)\n", " if hd_db.cfg_agent_id:\n", " if hd_db.cfg_agent_id == cfg_agent_id:\n", " return\n", " LOG.debug('Hosting device %(hd_id)s has already been assigned to '\n", " 'Cisco cfg agent %(agent_id)s',\n", " {'hd_id': hosting_device_id, 'agent_id': cfg_agent_id})\n", " raise ciscocfgagentscheduler.HostingDeviceAssignedToCfgAgent(\n", " hosting_device_id=hosting_device_id, agent_id=cfg_agent_id)\n", " cfg_agent_db = get_agent_db_obj(self._get_agent(context, cfg_agent_id))\n", " if (cfg_agent_db.agent_type != c_constants.AGENT_TYPE_CFG or\n", " cfg_agent_db.admin_state_up is not True):\n", " raise ciscocfgagentscheduler.InvalidCfgAgent(agent_id=cfg_agent_id)\n", " self._bind_hosting_device_to_cfg_agent(context, hd_db, cfg_agent_db)\n", " cfg_notifier = self.agent_notifiers.get(c_constants.AGENT_TYPE_CFG)\n", " if cfg_notifier:\n", " cfg_notifier.hosting_devices_assigned_to_cfg_agent(\n", " context, [hosting_device_id], cfg_agent_db.host)" ]
[ 0, 0.015873015873015872, 0.014084507042253521, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015625 ]
21
0.002171
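A toy illustration of the assign-with-guards flow above, outside of Neutron: return silently on an idempotent request, refuse a conflicting reassignment, validate the agent, then bind. The exception names and the in-memory dicts standing in for the database are hypothetical.

class AlreadyAssigned(Exception): pass
class InvalidAgent(Exception): pass

def assign(devices, agents, device_id, agent_id):
    device = devices[device_id]
    if device.get("agent_id"):
        if device["agent_id"] == agent_id:
            return  # idempotent: already bound to this agent
        raise AlreadyAssigned(device_id)
    agent = agents.get(agent_id)
    if not agent or not agent.get("admin_state_up"):
        raise InvalidAgent(agent_id)
    device["agent_id"] = agent_id  # bind; a notification to the agent would follow here

devices = {"hd1": {"agent_id": None}}
agents = {"a1": {"admin_state_up": True}}
assign(devices, agents, "hd1", "a1")
print(devices)  # -> {'hd1': {'agent_id': 'a1'}}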
def _run_bubbletree(vcf_csv, cnv_csv, data, wide_lrr=False, do_plots=True, handle_failures=True): """Create R script and run on input data BubbleTree has some internal hardcoded parameters that assume a smaller distribution of log2 scores. This is not true for tumor-only calls, so if we specify wide_lrr we scale the calculations to actually get calls. Need a better long term solution with flexible parameters. """ lrr_scale = 10.0 if wide_lrr else 1.0 local_sitelib = utils.R_sitelib() base = utils.splitext_plus(vcf_csv)[0] r_file = "%s-run.R" % base bubbleplot_out = "%s-bubbleplot.pdf" % base trackplot_out = "%s-trackplot.pdf" % base calls_out = "%s-calls.rds" % base freqs_out = "%s-bubbletree_prevalence.txt" % base sample = dd.get_sample_name(data) do_plots = "yes" if do_plots else "no" with open(r_file, "w") as out_handle: out_handle.write(_script.format(**locals())) if not utils.file_exists(freqs_out): cmd = "%s && %s --no-environ %s" % (utils.get_R_exports(), utils.Rscript_cmd(), r_file) try: do.run(cmd, "Assess heterogeneity with BubbleTree") except subprocess.CalledProcessError as msg: if handle_failures and _allowed_bubbletree_errorstates(str(msg)): with open(freqs_out, "w") as out_handle: out_handle.write('bubbletree failed:\n %s"\n' % (str(msg))) else: logger.exception() raise return {"caller": "bubbletree", "report": freqs_out, "plot": {"bubble": bubbleplot_out, "track": trackplot_out}}
[ "def", "_run_bubbletree", "(", "vcf_csv", ",", "cnv_csv", ",", "data", ",", "wide_lrr", "=", "False", ",", "do_plots", "=", "True", ",", "handle_failures", "=", "True", ")", ":", "lrr_scale", "=", "10.0", "if", "wide_lrr", "else", "1.0", "local_sitelib", "=", "utils", ".", "R_sitelib", "(", ")", "base", "=", "utils", ".", "splitext_plus", "(", "vcf_csv", ")", "[", "0", "]", "r_file", "=", "\"%s-run.R\"", "%", "base", "bubbleplot_out", "=", "\"%s-bubbleplot.pdf\"", "%", "base", "trackplot_out", "=", "\"%s-trackplot.pdf\"", "%", "base", "calls_out", "=", "\"%s-calls.rds\"", "%", "base", "freqs_out", "=", "\"%s-bubbletree_prevalence.txt\"", "%", "base", "sample", "=", "dd", ".", "get_sample_name", "(", "data", ")", "do_plots", "=", "\"yes\"", "if", "do_plots", "else", "\"no\"", "with", "open", "(", "r_file", ",", "\"w\"", ")", "as", "out_handle", ":", "out_handle", ".", "write", "(", "_script", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ")", "if", "not", "utils", ".", "file_exists", "(", "freqs_out", ")", ":", "cmd", "=", "\"%s && %s --no-environ %s\"", "%", "(", "utils", ".", "get_R_exports", "(", ")", ",", "utils", ".", "Rscript_cmd", "(", ")", ",", "r_file", ")", "try", ":", "do", ".", "run", "(", "cmd", ",", "\"Assess heterogeneity with BubbleTree\"", ")", "except", "subprocess", ".", "CalledProcessError", "as", "msg", ":", "if", "handle_failures", "and", "_allowed_bubbletree_errorstates", "(", "str", "(", "msg", ")", ")", ":", "with", "open", "(", "freqs_out", ",", "\"w\"", ")", "as", "out_handle", ":", "out_handle", ".", "write", "(", "'bubbletree failed:\\n %s\"\\n'", "%", "(", "str", "(", "msg", ")", ")", ")", "else", ":", "logger", ".", "exception", "(", ")", "raise", "return", "{", "\"caller\"", ":", "\"bubbletree\"", ",", "\"report\"", ":", "freqs_out", ",", "\"plot\"", ":", "{", "\"bubble\"", ":", "bubbleplot_out", ",", "\"track\"", ":", "trackplot_out", "}", "}" ]
46.857143
0.001195
[ "def _run_bubbletree(vcf_csv, cnv_csv, data, wide_lrr=False, do_plots=True,\n", " handle_failures=True):\n", " \"\"\"Create R script and run on input data\n", "\n", " BubbleTree has some internal hardcoded paramters that assume a smaller\n", " distribution of log2 scores. This is not true for tumor-only calls, so if\n", " we specify wide_lrr we scale the calculations to actually get calls. Need a\n", " better long term solution with flexible parameters.\n", " \"\"\"\n", " lrr_scale = 10.0 if wide_lrr else 1.0\n", " local_sitelib = utils.R_sitelib()\n", " base = utils.splitext_plus(vcf_csv)[0]\n", " r_file = \"%s-run.R\" % base\n", " bubbleplot_out = \"%s-bubbleplot.pdf\" % base\n", " trackplot_out = \"%s-trackplot.pdf\" % base\n", " calls_out = \"%s-calls.rds\" % base\n", " freqs_out = \"%s-bubbletree_prevalence.txt\" % base\n", " sample = dd.get_sample_name(data)\n", " do_plots = \"yes\" if do_plots else \"no\"\n", " with open(r_file, \"w\") as out_handle:\n", " out_handle.write(_script.format(**locals()))\n", " if not utils.file_exists(freqs_out):\n", " cmd = \"%s && %s --no-environ %s\" % (utils.get_R_exports(), utils.Rscript_cmd(), r_file)\n", " try:\n", " do.run(cmd, \"Assess heterogeneity with BubbleTree\")\n", " except subprocess.CalledProcessError as msg:\n", " if handle_failures and _allowed_bubbletree_errorstates(str(msg)):\n", " with open(freqs_out, \"w\") as out_handle:\n", " out_handle.write('bubbletree failed:\\n %s\"\\n' % (str(msg)))\n", " else:\n", " logger.exception()\n", " raise\n", " return {\"caller\": \"bubbletree\",\n", " \"report\": freqs_out,\n", " \"plot\": {\"bubble\": bubbleplot_out, \"track\": trackplot_out}}" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014084507042253521 ]
35
0.0007
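A stripped-down sketch of the write-template-then-run pattern above, keeping the record-the-failure-instead-of-crashing behaviour; the one-line R script, file names, and argument names are placeholders, and Rscript must be on PATH for the happy path.

import subprocess

# Placeholder one-line R script; the real _script template is far larger.
SCRIPT = 'cat("sample={sample} lrr_scale={lrr_scale}\\n")\n'

def run_r(sample, lrr_scale=1.0, r_file="run.R", report="prevalence.txt"):
    with open(r_file, "w") as fh:
        fh.write(SCRIPT.format(sample=sample, lrr_scale=lrr_scale))
    try:
        subprocess.check_call(["Rscript", "--no-environ", r_file])
    except (subprocess.CalledProcessError, OSError) as msg:
        # Record the failure in the report file rather than aborting the pipeline.
        with open(report, "w") as fh:
            fh.write("run failed:\n %s\n" % msg)

run_r("tumor_sample", lrr_scale=10.0)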
def plot_slippage_sweep(returns, positions, transactions, slippage_params=(3, 8, 10, 12, 15, 20, 50), ax=None, **kwargs): """ Plots equity curves at different per-dollar slippage assumptions. Parameters ---------- returns : pd.Series Timeseries of portfolio returns to be adjusted for various degrees of slippage. positions : pd.DataFrame Daily net position values. - See full explanation in tears.create_full_tear_sheet. transactions : pd.DataFrame Prices and amounts of executed trades. One row per trade. - See full explanation in tears.create_full_tear_sheet. slippage_params: tuple Slippage parameters to apply to the return time series (in basis points). ax : matplotlib.Axes, optional Axes upon which to plot. **kwargs, optional Passed to seaborn plotting function. Returns ------- ax : matplotlib.Axes The axes that were plotted on. """ if ax is None: ax = plt.gca() slippage_sweep = pd.DataFrame() for bps in slippage_params: adj_returns = txn.adjust_returns_for_slippage(returns, positions, transactions, bps) label = str(bps) + " bps" slippage_sweep[label] = ep.cum_returns(adj_returns, 1) slippage_sweep.plot(alpha=1.0, lw=0.5, ax=ax) ax.set_title('Cumulative returns given additional per-dollar slippage') ax.set_ylabel('') ax.legend(loc='center left', frameon=True, framealpha=0.5) return ax
[ "def", "plot_slippage_sweep", "(", "returns", ",", "positions", ",", "transactions", ",", "slippage_params", "=", "(", "3", ",", "8", ",", "10", ",", "12", ",", "15", ",", "20", ",", "50", ")", ",", "ax", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "ax", "is", "None", ":", "ax", "=", "plt", ".", "gca", "(", ")", "slippage_sweep", "=", "pd", ".", "DataFrame", "(", ")", "for", "bps", "in", "slippage_params", ":", "adj_returns", "=", "txn", ".", "adjust_returns_for_slippage", "(", "returns", ",", "positions", ",", "transactions", ",", "bps", ")", "label", "=", "str", "(", "bps", ")", "+", "\" bps\"", "slippage_sweep", "[", "label", "]", "=", "ep", ".", "cum_returns", "(", "adj_returns", ",", "1", ")", "slippage_sweep", ".", "plot", "(", "alpha", "=", "1.0", ",", "lw", "=", "0.5", ",", "ax", "=", "ax", ")", "ax", ".", "set_title", "(", "'Cumulative returns given additional per-dollar slippage'", ")", "ax", ".", "set_ylabel", "(", "''", ")", "ax", ".", "legend", "(", "loc", "=", "'center left'", ",", "frameon", "=", "True", ",", "framealpha", "=", "0.5", ")", "return", "ax" ]
32.142857
0.000616
[ "def plot_slippage_sweep(returns, positions, transactions,\n", " slippage_params=(3, 8, 10, 12, 15, 20, 50),\n", " ax=None, **kwargs):\n", " \"\"\"\n", " Plots equity curves at different per-dollar slippage assumptions.\n", "\n", " Parameters\n", " ----------\n", " returns : pd.Series\n", " Timeseries of portfolio returns to be adjusted for various\n", " degrees of slippage.\n", " positions : pd.DataFrame\n", " Daily net position values.\n", " - See full explanation in tears.create_full_tear_sheet.\n", " transactions : pd.DataFrame\n", " Prices and amounts of executed trades. One row per trade.\n", " - See full explanation in tears.create_full_tear_sheet.\n", " slippage_params: tuple\n", " Slippage pameters to apply to the return time series (in\n", " basis points).\n", " ax : matplotlib.Axes, optional\n", " Axes upon which to plot.\n", " **kwargs, optional\n", " Passed to seaborn plotting function.\n", "\n", " Returns\n", " -------\n", " ax : matplotlib.Axes\n", " The axes that were plotted on.\n", " \"\"\"\n", "\n", " if ax is None:\n", " ax = plt.gca()\n", "\n", " slippage_sweep = pd.DataFrame()\n", " for bps in slippage_params:\n", " adj_returns = txn.adjust_returns_for_slippage(returns, positions,\n", " transactions, bps)\n", " label = str(bps) + \" bps\"\n", " slippage_sweep[label] = ep.cum_returns(adj_returns, 1)\n", "\n", " slippage_sweep.plot(alpha=1.0, lw=0.5, ax=ax)\n", "\n", " ax.set_title('Cumulative returns given additional per-dollar slippage')\n", " ax.set_ylabel('')\n", "\n", " ax.legend(loc='center left', frameon=True, framealpha=0.5)\n", "\n", " return ax" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07692307692307693 ]
49
0.00157
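A pandas-only sketch of the slippage sweep idea: penalize returns by a flat per-period cost at several basis-point levels and compare the cumulative curves. The flat cost is a simplification; the real adjust_returns_for_slippage derives the penalty from the actual positions and transactions.

import pandas as pd

returns = pd.Series([0.010, -0.005, 0.007, 0.002])

# Build one cumulative-growth column per assumed slippage level.
sweep = pd.DataFrame()
for bps in (0, 10, 50):
    adjusted = returns - bps / 10000.0  # flat per-period cost, a deliberate simplification
    sweep["%d bps" % bps] = (1 + adjusted).cumprod()

print(sweep.round(4))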