text
stringlengths
75
104k
code_tokens
sequence
avg_line_len
float64
7.91
980
score
float64
0
0.18
texts
sequence
scores
sequence
num_lines
int64
3
2.77k
avg_score
float64
0
0.37
def add_item(self,item): """Add an item to `self`. :Parameters: - `item`: the item to add. :Types: - `item`: `MucItemBase` """ if not isinstance(item,MucItemBase): raise TypeError("Bad item type for muc#user") item.as_xml(self.xmlnode)
[ "def", "add_item", "(", "self", ",", "item", ")", ":", "if", "not", "isinstance", "(", "item", ",", "MucItemBase", ")", ":", "raise", "TypeError", "(", "\"Bad item type for muc#user\"", ")", "item", ".", "as_xml", "(", "self", ".", "xmlnode", ")" ]
28.181818
0.0125
[ "def add_item(self,item):\n", " \"\"\"Add an item to `self`.\n", "\n", " :Parameters:\n", " - `item`: the item to add.\n", " :Types:\n", " - `item`: `MucItemBase`\n", " \"\"\"\n", " if not isinstance(item,MucItemBase):\n", " raise TypeError(\"Bad item type for muc#user\")\n", " item.as_xml(self.xmlnode)" ]
[ 0.04, 0.029411764705882353, 0, 0, 0, 0, 0, 0, 0.022222222222222223, 0, 0.030303030303030304 ]
11
0.011085
def Nu_vertical_plate_Churchill(Pr, Gr): r'''Calculates Nusselt number for natural convection around a vertical plate according to the Churchill-Chu [1]_ correlation, also presented in [2]_. Plate must be isothermal; an alternate expression exists for constant heat flux. .. math:: Nu_{L}=\left[0.825+\frac{0.387Ra_{L}^{1/6}} {[1+(0.492/Pr)^{9/16}]^{8/27}}\right]^2 Parameters ---------- Pr : float Prandtl number [-] Gr : float Grashof number [-] Returns ------- Nu : float Nusselt number, [-] Notes ----- Although transition from laminar to turbulent is discrete in reality, this equation provides a smooth transition in value from laminar to turbulent. Checked with the original source. Can be applied to vertical cylinders as well, subject to the criteria below: .. math:: \frac{D}{L}\ge \frac{35}{Gr_L^{1/4}} Examples -------- From [2]_, Example 9.2, matches: >>> Nu_vertical_plate_Churchill(0.69, 2.63E9) 147.16185223770603 References ---------- .. [1] Churchill, Stuart W., and Humbert H. S. Chu. "Correlating Equations for Laminar and Turbulent Free Convection from a Vertical Plate." International Journal of Heat and Mass Transfer 18, no. 11 (November 1, 1975): 1323-29. doi:10.1016/0017-9310(75)90243-4. .. [2] Bergman, Theodore L., Adrienne S. Lavine, Frank P. Incropera, and David P. DeWitt. Introduction to Heat Transfer. 6E. Hoboken, NJ: Wiley, 2011. ''' Ra = Pr * Gr Nu = (0.825 + (0.387*Ra**(1/6.)/(1 + (0.492/Pr)**(9/16.))**(8/27.)))**2 return Nu
[ "def", "Nu_vertical_plate_Churchill", "(", "Pr", ",", "Gr", ")", ":", "Ra", "=", "Pr", "*", "Gr", "Nu", "=", "(", "0.825", "+", "(", "0.387", "*", "Ra", "**", "(", "1", "/", "6.", ")", "/", "(", "1", "+", "(", "0.492", "/", "Pr", ")", "**", "(", "9", "/", "16.", ")", ")", "**", "(", "8", "/", "27.", ")", ")", ")", "**", "2", "return", "Nu" ]
30.792453
0.001188
[ "def Nu_vertical_plate_Churchill(Pr, Gr):\n", " r'''Calculates Nusselt number for natural convection around a vertical\n", " plate according to the Churchill-Chu [1]_ correlation, also presented in\n", " [2]_. Plate must be isothermal; an alternate expression exists for constant\n", " heat flux.\n", "\n", " .. math::\n", " Nu_{L}=\\left[0.825+\\frac{0.387Ra_{L}^{1/6}}\n", " {[1+(0.492/Pr)^{9/16}]^{8/27}}\\right]^2\n", "\n", " Parameters\n", " ----------\n", " Pr : float\n", " Prandtl number [-]\n", " Gr : float\n", " Grashof number [-]\n", "\n", " Returns\n", " -------\n", " Nu : float\n", " Nusselt number, [-]\n", "\n", " Notes\n", " -----\n", " Although transition from laminar to turbulent is discrete in reality, this\n", " equation provides a smooth transition in value from laminar to turbulent.\n", " Checked with the original source.\n", "\n", " Can be applied to vertical cylinders as well, subject to the criteria below:\n", "\n", " .. math::\n", " \\frac{D}{L}\\ge \\frac{35}{Gr_L^{1/4}}\n", "\n", " Examples\n", " --------\n", " From [2]_, Example 9.2, matches:\n", "\n", " >>> Nu_vertical_plate_Churchill(0.69, 2.63E9)\n", " 147.16185223770603\n", "\n", " References\n", " ----------\n", " .. [1] Churchill, Stuart W., and Humbert H. S. Chu. \"Correlating Equations\n", " for Laminar and Turbulent Free Convection from a Vertical Plate.\"\n", " International Journal of Heat and Mass Transfer 18, no. 11\n", " (November 1, 1975): 1323-29. doi:10.1016/0017-9310(75)90243-4.\n", " .. [2] Bergman, Theodore L., Adrienne S. Lavine, Frank P. Incropera, and\n", " David P. DeWitt. Introduction to Heat Transfer. 6E. Hoboken, NJ:\n", " Wiley, 2011.\n", " '''\n", " Ra = Pr * Gr\n", " Nu = (0.825 + (0.387*Ra**(1/6.)/(1 + (0.492/Pr)**(9/16.))**(8/27.)))**2\n", " return Nu" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07692307692307693 ]
53
0.001684
def control_group(action, action_space, control_group_act, control_group_id): """Act on a control group, selecting, setting, etc.""" del action_space select = action.action_ui.control_group select.action = control_group_act select.control_group_index = control_group_id
[ "def", "control_group", "(", "action", ",", "action_space", ",", "control_group_act", ",", "control_group_id", ")", ":", "del", "action_space", "select", "=", "action", ".", "action_ui", ".", "control_group", "select", ".", "action", "=", "control_group_act", "select", ".", "control_group_index", "=", "control_group_id" ]
45.666667
0.021505
[ "def control_group(action, action_space, control_group_act, control_group_id):\n", " \"\"\"Act on a control group, selecting, setting, etc.\"\"\"\n", " del action_space\n", " select = action.action_ui.control_group\n", " select.action = control_group_act\n", " select.control_group_index = control_group_id" ]
[ 0, 0.017543859649122806, 0.05263157894736842, 0.023809523809523808, 0.027777777777777776, 0.0425531914893617 ]
6
0.027386
def create_process_worker(self, cmd_list, environ=None): """Create a new process worker instance.""" worker = ProcessWorker(cmd_list, environ=environ) self._create_worker(worker) return worker
[ "def", "create_process_worker", "(", "self", ",", "cmd_list", ",", "environ", "=", "None", ")", ":", "worker", "=", "ProcessWorker", "(", "cmd_list", ",", "environ", "=", "environ", ")", "self", ".", "_create_worker", "(", "worker", ")", "return", "worker" ]
44
0.008929
[ "def create_process_worker(self, cmd_list, environ=None):\n", " \"\"\"Create a new process worker instance.\"\"\"\n", " worker = ProcessWorker(cmd_list, environ=environ)\n", " self._create_worker(worker)\n", " return worker" ]
[ 0, 0.019230769230769232, 0, 0, 0.047619047619047616 ]
5
0.01337
def parse_atom(tokens, options): """atom ::= '(' expr ')' | '[' expr ']' | 'options' | long | shorts | argument | command ; """ token = tokens.current() result = [] if token == '(': tokens.move() result = [Required(*parse_expr(tokens, options))] if tokens.move() != ')': raise tokens.error("Unmatched '('") return result elif token == '[': tokens.move() result = [Optional(*parse_expr(tokens, options))] if tokens.move() != ']': raise tokens.error("Unmatched '['") return result elif token == 'options': tokens.move() return [AnyOptions()] elif token.startswith('--') and token != '--': return parse_long(tokens, options) elif token.startswith('-') and token not in ('-', '--'): return parse_shorts(tokens, options) elif token.startswith('<') and token.endswith('>') or token.isupper(): return [Argument(tokens.move())] else: return [Command(tokens.move())]
[ "def", "parse_atom", "(", "tokens", ",", "options", ")", ":", "token", "=", "tokens", ".", "current", "(", ")", "result", "=", "[", "]", "if", "token", "==", "'('", ":", "tokens", ".", "move", "(", ")", "result", "=", "[", "Required", "(", "*", "parse_expr", "(", "tokens", ",", "options", ")", ")", "]", "if", "tokens", ".", "move", "(", ")", "!=", "')'", ":", "raise", "tokens", ".", "error", "(", "\"Unmatched '('\"", ")", "return", "result", "elif", "token", "==", "'['", ":", "tokens", ".", "move", "(", ")", "result", "=", "[", "Optional", "(", "*", "parse_expr", "(", "tokens", ",", "options", ")", ")", "]", "if", "tokens", ".", "move", "(", ")", "!=", "']'", ":", "raise", "tokens", ".", "error", "(", "\"Unmatched '['\"", ")", "return", "result", "elif", "token", "==", "'options'", ":", "tokens", ".", "move", "(", ")", "return", "[", "AnyOptions", "(", ")", "]", "elif", "token", ".", "startswith", "(", "'--'", ")", "and", "token", "!=", "'--'", ":", "return", "parse_long", "(", "tokens", ",", "options", ")", "elif", "token", ".", "startswith", "(", "'-'", ")", "and", "token", "not", "in", "(", "'-'", ",", "'--'", ")", ":", "return", "parse_shorts", "(", "tokens", ",", "options", ")", "elif", "token", ".", "startswith", "(", "'<'", ")", "and", "token", ".", "endswith", "(", "'>'", ")", "or", "token", ".", "isupper", "(", ")", ":", "return", "[", "Argument", "(", "tokens", ".", "move", "(", ")", ")", "]", "else", ":", "return", "[", "Command", "(", "tokens", ".", "move", "(", ")", ")", "]" ]
35.241379
0.000952
[ "def parse_atom(tokens, options):\n", " \"\"\"atom ::= '(' expr ')' | '[' expr ']' | 'options'\n", " | long | shorts | argument | command ;\n", " \"\"\"\n", " token = tokens.current()\n", " result = []\n", " if token == '(':\n", " tokens.move()\n", " result = [Required(*parse_expr(tokens, options))]\n", " if tokens.move() != ')':\n", " raise tokens.error(\"Unmatched '('\")\n", " return result\n", " elif token == '[':\n", " tokens.move()\n", " result = [Optional(*parse_expr(tokens, options))]\n", " if tokens.move() != ']':\n", " raise tokens.error(\"Unmatched '['\")\n", " return result\n", " elif token == 'options':\n", " tokens.move()\n", " return [AnyOptions()]\n", " elif token.startswith('--') and token != '--':\n", " return parse_long(tokens, options)\n", " elif token.startswith('-') and token not in ('-', '--'):\n", " return parse_shorts(tokens, options)\n", " elif token.startswith('<') and token.endswith('>') or token.isupper():\n", " return [Argument(tokens.move())]\n", " else:\n", " return [Command(tokens.move())]" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02564102564102564 ]
29
0.000884
def _read_mac_addr(self): """Read MAC address.""" _byte = self._read_fileng(6) _addr = '-'.join(textwrap.wrap(_byte.hex(), 2)) return _addr
[ "def", "_read_mac_addr", "(", "self", ")", ":", "_byte", "=", "self", ".", "_read_fileng", "(", "6", ")", "_addr", "=", "'-'", ".", "join", "(", "textwrap", ".", "wrap", "(", "_byte", ".", "hex", "(", ")", ",", "2", ")", ")", "return", "_addr" ]
33.4
0.011696
[ "def _read_mac_addr(self):\n", " \"\"\"Read MAC address.\"\"\"\n", " _byte = self._read_fileng(6)\n", " _addr = '-'.join(textwrap.wrap(_byte.hex(), 2))\n", " return _addr" ]
[ 0, 0.03125, 0, 0, 0.05 ]
5
0.01625
def subj_pred_idx_to_uri(s: URIRef, p: URIRef, idx: Optional[int] = None) -> URIRef: """ Convert FHIR subject, predicate and entry index into a URI. The resulting element can be substituted for the name of the target BNODE :param s: Subject URI (e.g. "fhir:Patient/f201", "fhir:Patient/f201.Patient.identifier_0", ...) :param p: Predicate URI (e.g. "fhir:Patient.identifier", "fhir.Identifier.use :param idx: Relative position of BNODE if in a list :return: URI that can replace the BNODE (e.g. "fhir:Patient/f201 """ return URIRef(str(s) + '.' + str(p).rsplit('/', 1)[1] + ("_{}".format(idx) if idx is not None else ''))
[ "def", "subj_pred_idx_to_uri", "(", "s", ":", "URIRef", ",", "p", ":", "URIRef", ",", "idx", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "URIRef", ":", "return", "URIRef", "(", "str", "(", "s", ")", "+", "'.'", "+", "str", "(", "p", ")", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "1", "]", "+", "(", "\"_{}\"", ".", "format", "(", "idx", ")", "if", "idx", "is", "not", "None", "else", "''", ")", ")" ]
71.777778
0.009174
[ "def subj_pred_idx_to_uri(s: URIRef, p: URIRef, idx: Optional[int] = None) -> URIRef:\n", " \"\"\" Convert FHIR subject, predicate and entry index into a URI. The resulting element can be substituted\n", " for the name of the target BNODE\n", " :param s: Subject URI (e.g. \"fhir:Patient/f201\", \"fhir:Patient/f201.Patient.identifier_0\", ...)\n", " :param p: Predicate URI (e.g. \"fhir:Patient.identifier\", \"fhir.Identifier.use\n", " :param idx: Relative position of BNODE if in a list\n", " :return: URI that can replace the BNODE (e.g. \"fhir:Patient/f201\n", " \"\"\"\n", " return URIRef(str(s) + '.' + str(p).rsplit('/', 1)[1] + (\"_{}\".format(idx) if idx is not None else ''))" ]
[ 0.011764705882352941, 0.00909090909090909, 0, 0.01, 0.012195121951219513, 0, 0, 0, 0.018691588785046728 ]
9
0.00686
def ystep(self): r"""Minimise Augmented Lagrangian with respect to :math:`\mathbf{y}`. """ self.Y = self.Pcn(self.AX + self.U)
[ "def", "ystep", "(", "self", ")", ":", "self", ".", "Y", "=", "self", ".", "Pcn", "(", "self", ".", "AX", "+", "self", ".", "U", ")" ]
25.666667
0.012579
[ "def ystep(self):\n", " r\"\"\"Minimise Augmented Lagrangian with respect to\n", " :math:`\\mathbf{y}`.\n", " \"\"\"\n", "\n", " self.Y = self.Pcn(self.AX + self.U)" ]
[ 0, 0.017241379310344827, 0, 0, 0, 0.023255813953488372 ]
6
0.00675
def main(arguments=None): """ *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command* """ # setup the command-line util settings su = tools( arguments=arguments, docString=__doc__, logLevel="DEBUG", options_first=False, projectName="picaxe" ) arguments, settings, log, dbConn = su.setup() startTime = times.get_now_sql_datetime() # unpack remaining cl arguments using `exec` to setup the variable names # automatically for arg, val in arguments.iteritems(): if arg[0] == "-": varname = arg.replace("-", "") + "Flag" else: varname = arg.replace("<", "").replace(">", "") if isinstance(val, str) or isinstance(val, unicode): exec(varname + " = '%s'" % (val,)) else: exec(varname + " = %s" % (val,)) if arg == "--dbConn": dbConn = val log.debug('%s = %s' % (varname, val,)) if init: from os.path import expanduser home = expanduser("~") filepath = home + "/.config/picaxe/picaxe.yaml" try: cmd = """open %(filepath)s""" % locals() p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) except: pass try: cmd = """start %(filepath)s""" % locals() p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) except: pass if auth: from picaxe import picaxe client = picaxe( log=log, settings=settings, pathToSettingsFile=pathToSettingsFile ) client.authenticate() if md: from picaxe import picaxe Flickr = picaxe( log=log, settings=settings ) if not width: width = "original" mdLink = Flickr.md( url=urlOrPhotoid, # [75, 100, 150, 240, 320, 500, 640, 800, 1024, 1600, 2048] width=width ) print mdLink if albums: from picaxe import picaxe flickr = picaxe( log=log, settings=settings ) albumList = flickr.list_album_titles() for a in albumList: print a if upload: from picaxe import picaxe flickr = picaxe( log=log, settings=settings ) imageType = "photo" if screenGrabFlag: imageType = "screengrab" elif imageFlag: imageType = "image" album = "inbox" if albumFlag: album = albumFlag photoid = flickr.upload( imagePath=imagePath, title=titleFlag, private=publicFlag, tags=tagsFlag, description=descFlag, imageType=imageType, # image|screengrab|photo album=albumFlag, openInBrowser=openFlag ) print photoid if grab: # for k, v in locals().iteritems(): # print k, v # return try: os.remove("/tmp/screengrab.png") except: pass if delayFlag: time.sleep(int(delayFlag)) from subprocess import Popen, PIPE, STDOUT cmd = """screencapture -i /tmp/screengrab.png""" % locals() p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) stdout, stderr = p.communicate() log.debug('output: %(stdout)s' % locals()) exists = os.path.exists("/tmp/screengrab.png") if exists: from picaxe import picaxe flickr = picaxe( log=log, settings=settings ) if not albumFlag: albumFlag = "screengrabs" photoid = flickr.upload( imagePath="/tmp/screengrab.png", title=titleFlag, private=publicFlag, tags=tagsFlag, description=descFlag, imageType="screengrab", # image|screengrab|photo album=albumFlag, openInBrowser=openFlag ) mdLink = flickr.md( url=photoid, # [75, 100, 150, 240, 320, 500, 640, 800, 1024, 1600, 2048] width="original" ) print mdLink # CALL FUNCTIONS/OBJECTS if "dbConn" in locals() and dbConn: dbConn.commit() dbConn.close() ## FINISH LOGGING ## endTime = times.get_now_sql_datetime() runningTime = times.calculate_time_difference(startTime, endTime) log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (endTime, runningTime, )) return
[ "def", "main", "(", "arguments", "=", "None", ")", ":", "# setup the command-line util settings", "su", "=", "tools", "(", "arguments", "=", "arguments", ",", "docString", "=", "__doc__", ",", "logLevel", "=", "\"DEBUG\"", ",", "options_first", "=", "False", ",", "projectName", "=", "\"picaxe\"", ")", "arguments", ",", "settings", ",", "log", ",", "dbConn", "=", "su", ".", "setup", "(", ")", "startTime", "=", "times", ".", "get_now_sql_datetime", "(", ")", "# unpack remaining cl arguments using `exec` to setup the variable names", "# automatically", "for", "arg", ",", "val", "in", "arguments", ".", "iteritems", "(", ")", ":", "if", "arg", "[", "0", "]", "==", "\"-\"", ":", "varname", "=", "arg", ".", "replace", "(", "\"-\"", ",", "\"\"", ")", "+", "\"Flag\"", "else", ":", "varname", "=", "arg", ".", "replace", "(", "\"<\"", ",", "\"\"", ")", ".", "replace", "(", "\">\"", ",", "\"\"", ")", "if", "isinstance", "(", "val", ",", "str", ")", "or", "isinstance", "(", "val", ",", "unicode", ")", ":", "exec", "(", "varname", "+", "\" = '%s'\"", "%", "(", "val", ",", ")", ")", "else", ":", "exec", "(", "varname", "+", "\" = %s\"", "%", "(", "val", ",", ")", ")", "if", "arg", "==", "\"--dbConn\"", ":", "dbConn", "=", "val", "log", ".", "debug", "(", "'%s = %s'", "%", "(", "varname", ",", "val", ",", ")", ")", "if", "init", ":", "from", "os", ".", "path", "import", "expanduser", "home", "=", "expanduser", "(", "\"~\"", ")", "filepath", "=", "home", "+", "\"/.config/picaxe/picaxe.yaml\"", "try", ":", "cmd", "=", "\"\"\"open %(filepath)s\"\"\"", "%", "locals", "(", ")", "p", "=", "Popen", "(", "cmd", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "True", ")", "except", ":", "pass", "try", ":", "cmd", "=", "\"\"\"start %(filepath)s\"\"\"", "%", "locals", "(", ")", "p", "=", "Popen", "(", "cmd", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "True", ")", "except", ":", "pass", "if", "auth", ":", "from", "picaxe", "import", "picaxe", "client", "=", "picaxe", "(", "log", "=", "log", ",", "settings", "=", "settings", ",", "pathToSettingsFile", "=", "pathToSettingsFile", ")", "client", ".", "authenticate", "(", ")", "if", "md", ":", "from", "picaxe", "import", "picaxe", "Flickr", "=", "picaxe", "(", "log", "=", "log", ",", "settings", "=", "settings", ")", "if", "not", "width", ":", "width", "=", "\"original\"", "mdLink", "=", "Flickr", ".", "md", "(", "url", "=", "urlOrPhotoid", ",", "# [75, 100, 150, 240, 320, 500, 640, 800, 1024, 1600, 2048]", "width", "=", "width", ")", "print", "mdLink", "if", "albums", ":", "from", "picaxe", "import", "picaxe", "flickr", "=", "picaxe", "(", "log", "=", "log", ",", "settings", "=", "settings", ")", "albumList", "=", "flickr", ".", "list_album_titles", "(", ")", "for", "a", "in", "albumList", ":", "print", "a", "if", "upload", ":", "from", "picaxe", "import", "picaxe", "flickr", "=", "picaxe", "(", "log", "=", "log", ",", "settings", "=", "settings", ")", "imageType", "=", "\"photo\"", "if", "screenGrabFlag", ":", "imageType", "=", "\"screengrab\"", "elif", "imageFlag", ":", "imageType", "=", "\"image\"", "album", "=", "\"inbox\"", "if", "albumFlag", ":", "album", "=", "albumFlag", "photoid", "=", "flickr", ".", "upload", "(", "imagePath", "=", "imagePath", ",", "title", "=", "titleFlag", ",", "private", "=", "publicFlag", ",", "tags", "=", "tagsFlag", ",", "description", "=", "descFlag", ",", "imageType", "=", "imageType", ",", "# image|screengrab|photo", "album", "=", "albumFlag", ",", "openInBrowser", "=", "openFlag", ")", "print", "photoid", "if", "grab", ":", "# for k, v in locals().iteritems():", "# print k, v", "# return", "try", ":", "os", ".", "remove", "(", "\"/tmp/screengrab.png\"", ")", "except", ":", "pass", "if", "delayFlag", ":", "time", ".", "sleep", "(", "int", "(", "delayFlag", ")", ")", "from", "subprocess", "import", "Popen", ",", "PIPE", ",", "STDOUT", "cmd", "=", "\"\"\"screencapture -i /tmp/screengrab.png\"\"\"", "%", "locals", "(", ")", "p", "=", "Popen", "(", "cmd", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "True", ")", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", "log", ".", "debug", "(", "'output: %(stdout)s'", "%", "locals", "(", ")", ")", "exists", "=", "os", ".", "path", ".", "exists", "(", "\"/tmp/screengrab.png\"", ")", "if", "exists", ":", "from", "picaxe", "import", "picaxe", "flickr", "=", "picaxe", "(", "log", "=", "log", ",", "settings", "=", "settings", ")", "if", "not", "albumFlag", ":", "albumFlag", "=", "\"screengrabs\"", "photoid", "=", "flickr", ".", "upload", "(", "imagePath", "=", "\"/tmp/screengrab.png\"", ",", "title", "=", "titleFlag", ",", "private", "=", "publicFlag", ",", "tags", "=", "tagsFlag", ",", "description", "=", "descFlag", ",", "imageType", "=", "\"screengrab\"", ",", "# image|screengrab|photo", "album", "=", "albumFlag", ",", "openInBrowser", "=", "openFlag", ")", "mdLink", "=", "flickr", ".", "md", "(", "url", "=", "photoid", ",", "# [75, 100, 150, 240, 320, 500, 640, 800, 1024, 1600, 2048]", "width", "=", "\"original\"", ")", "print", "mdLink", "# CALL FUNCTIONS/OBJECTS", "if", "\"dbConn\"", "in", "locals", "(", ")", "and", "dbConn", ":", "dbConn", ".", "commit", "(", ")", "dbConn", ".", "close", "(", ")", "## FINISH LOGGING ##", "endTime", "=", "times", ".", "get_now_sql_datetime", "(", ")", "runningTime", "=", "times", ".", "calculate_time_difference", "(", "startTime", ",", "endTime", ")", "log", ".", "info", "(", "'-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --'", "%", "(", "endTime", ",", "runningTime", ",", ")", ")", "return" ]
27.467456
0.001455
[ "def main(arguments=None):\n", " \"\"\"\n", " *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*\n", " \"\"\"\n", " # setup the command-line util settings\n", " su = tools(\n", " arguments=arguments,\n", " docString=__doc__,\n", " logLevel=\"DEBUG\",\n", " options_first=False,\n", " projectName=\"picaxe\"\n", " )\n", " arguments, settings, log, dbConn = su.setup()\n", "\n", " startTime = times.get_now_sql_datetime()\n", "\n", " # unpack remaining cl arguments using `exec` to setup the variable names\n", " # automatically\n", " for arg, val in arguments.iteritems():\n", " if arg[0] == \"-\":\n", " varname = arg.replace(\"-\", \"\") + \"Flag\"\n", " else:\n", " varname = arg.replace(\"<\", \"\").replace(\">\", \"\")\n", " if isinstance(val, str) or isinstance(val, unicode):\n", " exec(varname + \" = '%s'\" % (val,))\n", " else:\n", " exec(varname + \" = %s\" % (val,))\n", " if arg == \"--dbConn\":\n", " dbConn = val\n", " log.debug('%s = %s' % (varname, val,))\n", "\n", " if init:\n", " from os.path import expanduser\n", " home = expanduser(\"~\")\n", " filepath = home + \"/.config/picaxe/picaxe.yaml\"\n", " try:\n", " cmd = \"\"\"open %(filepath)s\"\"\" % locals()\n", " p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)\n", " except:\n", " pass\n", " try:\n", " cmd = \"\"\"start %(filepath)s\"\"\" % locals()\n", " p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)\n", " except:\n", " pass\n", "\n", " if auth:\n", " from picaxe import picaxe\n", " client = picaxe(\n", " log=log,\n", " settings=settings,\n", " pathToSettingsFile=pathToSettingsFile\n", " )\n", " client.authenticate()\n", "\n", " if md:\n", " from picaxe import picaxe\n", " Flickr = picaxe(\n", " log=log,\n", " settings=settings\n", " )\n", " if not width:\n", " width = \"original\"\n", " mdLink = Flickr.md(\n", " url=urlOrPhotoid,\n", " # [75, 100, 150, 240, 320, 500, 640, 800, 1024, 1600, 2048]\n", " width=width\n", " )\n", " print mdLink\n", "\n", " if albums:\n", " from picaxe import picaxe\n", " flickr = picaxe(\n", " log=log,\n", " settings=settings\n", " )\n", " albumList = flickr.list_album_titles()\n", " for a in albumList:\n", " print a\n", "\n", " if upload:\n", " from picaxe import picaxe\n", " flickr = picaxe(\n", " log=log,\n", " settings=settings\n", " )\n", "\n", " imageType = \"photo\"\n", " if screenGrabFlag:\n", " imageType = \"screengrab\"\n", " elif imageFlag:\n", " imageType = \"image\"\n", "\n", " album = \"inbox\"\n", " if albumFlag:\n", " album = albumFlag\n", "\n", " photoid = flickr.upload(\n", " imagePath=imagePath,\n", " title=titleFlag,\n", " private=publicFlag,\n", " tags=tagsFlag,\n", " description=descFlag,\n", " imageType=imageType, # image|screengrab|photo\n", " album=albumFlag,\n", " openInBrowser=openFlag\n", " )\n", " print photoid\n", "\n", " if grab:\n", "\n", " # for k, v in locals().iteritems():\n", " # print k, v\n", " # return\n", " try:\n", " os.remove(\"/tmp/screengrab.png\")\n", " except:\n", " pass\n", "\n", " if delayFlag:\n", "\n", " time.sleep(int(delayFlag))\n", "\n", " from subprocess import Popen, PIPE, STDOUT\n", " cmd = \"\"\"screencapture -i /tmp/screengrab.png\"\"\" % locals()\n", " p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)\n", " stdout, stderr = p.communicate()\n", " log.debug('output: %(stdout)s' % locals())\n", "\n", " exists = os.path.exists(\"/tmp/screengrab.png\")\n", " if exists:\n", " from picaxe import picaxe\n", " flickr = picaxe(\n", " log=log,\n", " settings=settings\n", " )\n", "\n", " if not albumFlag:\n", " albumFlag = \"screengrabs\"\n", "\n", " photoid = flickr.upload(\n", " imagePath=\"/tmp/screengrab.png\",\n", " title=titleFlag,\n", " private=publicFlag,\n", " tags=tagsFlag,\n", " description=descFlag,\n", " imageType=\"screengrab\", # image|screengrab|photo\n", " album=albumFlag,\n", " openInBrowser=openFlag\n", " )\n", " mdLink = flickr.md(\n", " url=photoid,\n", " # [75, 100, 150, 240, 320, 500, 640, 800, 1024, 1600, 2048]\n", " width=\"original\"\n", " )\n", " print mdLink\n", "\n", " # CALL FUNCTIONS/OBJECTS\n", "\n", " if \"dbConn\" in locals() and dbConn:\n", " dbConn.commit()\n", " dbConn.close()\n", " ## FINISH LOGGING ##\n", " endTime = times.get_now_sql_datetime()\n", " runningTime = times.calculate_time_difference(startTime, endTime)\n", " log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %\n", " (endTime, runningTime, ))\n", "\n", " return" ]
[ 0, 0, 0.008130081300813009, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0.012048192771084338, 0, 0, 0.1 ]
169
0.002057
def inspect_node(node): """ This function accept a `elasticluster.cluster.Node` class, connects to a node and tries to discover the kind of batch system installed, and some other information. """ node_information = {} ssh = node.connect() if not ssh: log.error("Unable to connect to node %s", node.name) return (_in, _out, _err) = ssh.exec_command("(type >& /dev/null -a srun && echo slurm) \ || (type >& /dev/null -a qconf && echo sge) \ || (type >& /dev/null -a pbsnodes && echo pbs) \ || echo UNKNOWN") node_information['type'] = _out.read().strip() (_in, _out, _err) = ssh.exec_command("arch") node_information['architecture'] = _out.read().strip() if node_information['type'] == 'slurm': inspect_slurm_cluster(ssh, node_information) elif node_information['type'] == 'sge': inspect_sge_cluster(ssh, node_information) ssh.close() return node_information
[ "def", "inspect_node", "(", "node", ")", ":", "node_information", "=", "{", "}", "ssh", "=", "node", ".", "connect", "(", ")", "if", "not", "ssh", ":", "log", ".", "error", "(", "\"Unable to connect to node %s\"", ",", "node", ".", "name", ")", "return", "(", "_in", ",", "_out", ",", "_err", ")", "=", "ssh", ".", "exec_command", "(", "\"(type >& /dev/null -a srun && echo slurm) \\\n || (type >& /dev/null -a qconf && echo sge) \\\n || (type >& /dev/null -a pbsnodes && echo pbs) \\\n || echo UNKNOWN\"", ")", "node_information", "[", "'type'", "]", "=", "_out", ".", "read", "(", ")", ".", "strip", "(", ")", "(", "_in", ",", "_out", ",", "_err", ")", "=", "ssh", ".", "exec_command", "(", "\"arch\"", ")", "node_information", "[", "'architecture'", "]", "=", "_out", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "node_information", "[", "'type'", "]", "==", "'slurm'", ":", "inspect_slurm_cluster", "(", "ssh", ",", "node_information", ")", "elif", "node_information", "[", "'type'", "]", "==", "'sge'", ":", "inspect_sge_cluster", "(", "ssh", ",", "node_information", ")", "ssh", ".", "close", "(", ")", "return", "node_information" ]
36.851852
0.001959
[ "def inspect_node(node):\n", " \"\"\"\n", " This function accept a `elasticluster.cluster.Node` class,\n", " connects to a node and tries to discover the kind of batch system\n", " installed, and some other information.\n", " \"\"\"\n", " node_information = {}\n", " ssh = node.connect()\n", " if not ssh:\n", " log.error(\"Unable to connect to node %s\", node.name)\n", " return\n", "\n", " (_in, _out, _err) = ssh.exec_command(\"(type >& /dev/null -a srun && echo slurm) \\\n", " || (type >& /dev/null -a qconf && echo sge) \\\n", " || (type >& /dev/null -a pbsnodes && echo pbs) \\\n", " || echo UNKNOWN\")\n", " node_information['type'] = _out.read().strip()\n", "\n", " (_in, _out, _err) = ssh.exec_command(\"arch\")\n", " node_information['architecture'] = _out.read().strip()\n", "\n", " if node_information['type'] == 'slurm':\n", " inspect_slurm_cluster(ssh, node_information)\n", " elif node_information['type'] == 'sge':\n", " inspect_sge_cluster(ssh, node_information)\n", " ssh.close()\n", " return node_information" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035 ]
27
0.001802
def set_global_defaults(**kwargs): """Set global defaults for the options passed to the icon painter.""" valid_options = [ 'active', 'selected', 'disabled', 'on', 'off', 'on_active', 'on_selected', 'on_disabled', 'off_active', 'off_selected', 'off_disabled', 'color', 'color_on', 'color_off', 'color_active', 'color_selected', 'color_disabled', 'color_on_selected', 'color_on_active', 'color_on_disabled', 'color_off_selected', 'color_off_active', 'color_off_disabled', 'animation', 'offset', 'scale_factor', ] for kw in kwargs: if kw in valid_options: _default_options[kw] = kwargs[kw] else: error = "Invalid option '{0}'".format(kw) raise KeyError(error)
[ "def", "set_global_defaults", "(", "*", "*", "kwargs", ")", ":", "valid_options", "=", "[", "'active'", ",", "'selected'", ",", "'disabled'", ",", "'on'", ",", "'off'", ",", "'on_active'", ",", "'on_selected'", ",", "'on_disabled'", ",", "'off_active'", ",", "'off_selected'", ",", "'off_disabled'", ",", "'color'", ",", "'color_on'", ",", "'color_off'", ",", "'color_active'", ",", "'color_selected'", ",", "'color_disabled'", ",", "'color_on_selected'", ",", "'color_on_active'", ",", "'color_on_disabled'", ",", "'color_off_selected'", ",", "'color_off_active'", ",", "'color_off_disabled'", ",", "'animation'", ",", "'offset'", ",", "'scale_factor'", ",", "]", "for", "kw", "in", "kwargs", ":", "if", "kw", "in", "valid_options", ":", "_default_options", "[", "kw", "]", "=", "kwargs", "[", "kw", "]", "else", ":", "error", "=", "\"Invalid option '{0}'\"", ".", "format", "(", "kw", ")", "raise", "KeyError", "(", "error", ")" ]
38.75
0.001259
[ "def set_global_defaults(**kwargs):\n", " \"\"\"Set global defaults for the options passed to the icon painter.\"\"\"\n", "\n", " valid_options = [\n", " 'active', 'selected', 'disabled', 'on', 'off',\n", " 'on_active', 'on_selected', 'on_disabled',\n", " 'off_active', 'off_selected', 'off_disabled',\n", " 'color', 'color_on', 'color_off',\n", " 'color_active', 'color_selected', 'color_disabled',\n", " 'color_on_selected', 'color_on_active', 'color_on_disabled',\n", " 'color_off_selected', 'color_off_active', 'color_off_disabled',\n", " 'animation', 'offset', 'scale_factor',\n", " ]\n", "\n", " for kw in kwargs:\n", " if kw in valid_options:\n", " _default_options[kw] = kwargs[kw]\n", " else:\n", " error = \"Invalid option '{0}'\".format(kw)\n", " raise KeyError(error)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.030303030303030304 ]
20
0.001515
def IDIV(cpu, src): """ Signed divide. Divides (signed) the value in the AL, AX, or EAX register by the source operand and stores the result in the AX, DX:AX, or EDX:EAX registers. The source operand can be a general-purpose register or a memory location. The action of this instruction depends on the operand size.:: IF SRC = 0 THEN #DE; (* divide error *) FI; IF OpernadSize = 8 (* word/byte operation *) THEN temp = AX / SRC; (* signed division *) IF (temp > 7FH) Operators.OR(temp < 80H) (* if a positive result is greater than 7FH or a negative result is less than 80H *) THEN #DE; (* divide error *) ; ELSE AL = temp; AH = AX SignedModulus SRC; FI; ELSE IF OpernadSize = 16 (* doubleword/word operation *) THEN temp = DX:AX / SRC; (* signed division *) IF (temp > 7FFFH) Operators.OR(temp < 8000H) (* if a positive result is greater than 7FFFH *) (* or a negative result is less than 8000H *) THEN #DE; (* divide error *) ; ELSE AX = temp; DX = DX:AX SignedModulus SRC; FI; ELSE (* quadword/doubleword operation *) temp = EDX:EAX / SRC; (* signed division *) IF (temp > 7FFFFFFFH) Operators.OR(temp < 80000000H) (* if a positive result is greater than 7FFFFFFFH *) (* or a negative result is less than 80000000H *) THEN #DE; (* divide error *) ; ELSE EAX = temp; EDX = EDX:EAX SignedModulus SRC; FI; FI; FI; :param cpu: current CPU. :param src: source operand. """ reg_name_h = {8: 'AH', 16: 'DX', 32: 'EDX', 64: 'RDX'}[src.size] reg_name_l = {8: 'AL', 16: 'AX', 32: 'EAX', 64: 'RAX'}[src.size] dividend = Operators.CONCAT(src.size * 2, cpu.read_register(reg_name_h), cpu.read_register(reg_name_l)) divisor = src.read() if isinstance(divisor, int) and divisor == 0: raise DivideByZeroError() dst_size = src.size * 2 divisor = Operators.SEXTEND(divisor, src.size, dst_size) mask = (1 << dst_size) - 1 sign_mask = 1 << (dst_size - 1) dividend_sign = (dividend & sign_mask) != 0 divisor_sign = (divisor & sign_mask) != 0 if isinstance(divisor, int): if divisor_sign: divisor = ((~divisor) + 1) & mask divisor = -divisor if isinstance(dividend, int): if dividend_sign: dividend = ((~dividend) + 1) & mask dividend = -dividend quotient = Operators.SDIV(dividend, divisor) if (isinstance(dividend, int) and isinstance(dividend, int)): # handle the concrete case remainder = dividend - (quotient * divisor) else: # symbolic case -- optimize via SREM remainder = Operators.SREM(dividend, divisor) cpu.write_register(reg_name_l, Operators.EXTRACT(quotient, 0, src.size)) cpu.write_register(reg_name_h, Operators.EXTRACT(remainder, 0, src.size))
[ "def", "IDIV", "(", "cpu", ",", "src", ")", ":", "reg_name_h", "=", "{", "8", ":", "'AH'", ",", "16", ":", "'DX'", ",", "32", ":", "'EDX'", ",", "64", ":", "'RDX'", "}", "[", "src", ".", "size", "]", "reg_name_l", "=", "{", "8", ":", "'AL'", ",", "16", ":", "'AX'", ",", "32", ":", "'EAX'", ",", "64", ":", "'RAX'", "}", "[", "src", ".", "size", "]", "dividend", "=", "Operators", ".", "CONCAT", "(", "src", ".", "size", "*", "2", ",", "cpu", ".", "read_register", "(", "reg_name_h", ")", ",", "cpu", ".", "read_register", "(", "reg_name_l", ")", ")", "divisor", "=", "src", ".", "read", "(", ")", "if", "isinstance", "(", "divisor", ",", "int", ")", "and", "divisor", "==", "0", ":", "raise", "DivideByZeroError", "(", ")", "dst_size", "=", "src", ".", "size", "*", "2", "divisor", "=", "Operators", ".", "SEXTEND", "(", "divisor", ",", "src", ".", "size", ",", "dst_size", ")", "mask", "=", "(", "1", "<<", "dst_size", ")", "-", "1", "sign_mask", "=", "1", "<<", "(", "dst_size", "-", "1", ")", "dividend_sign", "=", "(", "dividend", "&", "sign_mask", ")", "!=", "0", "divisor_sign", "=", "(", "divisor", "&", "sign_mask", ")", "!=", "0", "if", "isinstance", "(", "divisor", ",", "int", ")", ":", "if", "divisor_sign", ":", "divisor", "=", "(", "(", "~", "divisor", ")", "+", "1", ")", "&", "mask", "divisor", "=", "-", "divisor", "if", "isinstance", "(", "dividend", ",", "int", ")", ":", "if", "dividend_sign", ":", "dividend", "=", "(", "(", "~", "dividend", ")", "+", "1", ")", "&", "mask", "dividend", "=", "-", "dividend", "quotient", "=", "Operators", ".", "SDIV", "(", "dividend", ",", "divisor", ")", "if", "(", "isinstance", "(", "dividend", ",", "int", ")", "and", "isinstance", "(", "dividend", ",", "int", ")", ")", ":", "# handle the concrete case", "remainder", "=", "dividend", "-", "(", "quotient", "*", "divisor", ")", "else", ":", "# symbolic case -- optimize via SREM", "remainder", "=", "Operators", ".", "SREM", "(", "dividend", ",", "divisor", ")", "cpu", ".", "write_register", "(", "reg_name_l", ",", "Operators", ".", "EXTRACT", "(", "quotient", ",", "0", ",", "src", ".", "size", ")", ")", "cpu", ".", "write_register", "(", "reg_name_h", ",", "Operators", ".", "EXTRACT", "(", "remainder", ",", "0", ",", "src", ".", "size", ")", ")" ]
37.225806
0.001125
[ "def IDIV(cpu, src):\n", " \"\"\"\n", " Signed divide.\n", "\n", " Divides (signed) the value in the AL, AX, or EAX register by the source\n", " operand and stores the result in the AX, DX:AX, or EDX:EAX registers.\n", " The source operand can be a general-purpose register or a memory\n", " location. The action of this instruction depends on the operand size.::\n", "\n", " IF SRC = 0\n", " THEN #DE; (* divide error *)\n", " FI;\n", " IF OpernadSize = 8 (* word/byte operation *)\n", " THEN\n", " temp = AX / SRC; (* signed division *)\n", " IF (temp > 7FH) Operators.OR(temp < 80H)\n", " (* if a positive result is greater than 7FH or a negative result is\n", " less than 80H *)\n", " THEN #DE; (* divide error *) ;\n", " ELSE\n", " AL = temp;\n", " AH = AX SignedModulus SRC;\n", " FI;\n", " ELSE\n", " IF OpernadSize = 16 (* doubleword/word operation *)\n", " THEN\n", " temp = DX:AX / SRC; (* signed division *)\n", " IF (temp > 7FFFH) Operators.OR(temp < 8000H)\n", " (* if a positive result is greater than 7FFFH *)\n", " (* or a negative result is less than 8000H *)\n", " THEN #DE; (* divide error *) ;\n", " ELSE\n", " AX = temp;\n", " DX = DX:AX SignedModulus SRC;\n", " FI;\n", " ELSE (* quadword/doubleword operation *)\n", " temp = EDX:EAX / SRC; (* signed division *)\n", " IF (temp > 7FFFFFFFH) Operators.OR(temp < 80000000H)\n", " (* if a positive result is greater than 7FFFFFFFH *)\n", " (* or a negative result is less than 80000000H *)\n", " THEN #DE; (* divide error *) ;\n", " ELSE\n", " EAX = temp;\n", " EDX = EDX:EAX SignedModulus SRC;\n", " FI;\n", " FI;\n", " FI;\n", "\n", " :param cpu: current CPU.\n", " :param src: source operand.\n", " \"\"\"\n", "\n", " reg_name_h = {8: 'AH', 16: 'DX', 32: 'EDX', 64: 'RDX'}[src.size]\n", " reg_name_l = {8: 'AL', 16: 'AX', 32: 'EAX', 64: 'RAX'}[src.size]\n", "\n", " dividend = Operators.CONCAT(src.size * 2,\n", " cpu.read_register(reg_name_h),\n", " cpu.read_register(reg_name_l))\n", "\n", " divisor = src.read()\n", " if isinstance(divisor, int) and divisor == 0:\n", " raise DivideByZeroError()\n", "\n", " dst_size = src.size * 2\n", "\n", " divisor = Operators.SEXTEND(divisor, src.size, dst_size)\n", " mask = (1 << dst_size) - 1\n", " sign_mask = 1 << (dst_size - 1)\n", "\n", " dividend_sign = (dividend & sign_mask) != 0\n", " divisor_sign = (divisor & sign_mask) != 0\n", "\n", " if isinstance(divisor, int):\n", " if divisor_sign:\n", " divisor = ((~divisor) + 1) & mask\n", " divisor = -divisor\n", "\n", " if isinstance(dividend, int):\n", " if dividend_sign:\n", " dividend = ((~dividend) + 1) & mask\n", " dividend = -dividend\n", "\n", " quotient = Operators.SDIV(dividend, divisor)\n", " if (isinstance(dividend, int) and\n", " isinstance(dividend, int)):\n", " # handle the concrete case\n", " remainder = dividend - (quotient * divisor)\n", " else:\n", " # symbolic case -- optimize via SREM\n", " remainder = Operators.SREM(dividend, divisor)\n", "\n", " cpu.write_register(reg_name_l, Operators.EXTRACT(quotient, 0, src.size))\n", " cpu.write_register(reg_name_h, Operators.EXTRACT(remainder, 0, src.size))" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0.024691358024691357 ]
93
0.001294
def delete_by_user_name(user_name): ''' Delete user in the database by `user_name`. ''' try: del_count = TabMember.delete().where(TabMember.user_name == user_name) del_count.execute() return True except: return False
[ "def", "delete_by_user_name", "(", "user_name", ")", ":", "try", ":", "del_count", "=", "TabMember", ".", "delete", "(", ")", ".", "where", "(", "TabMember", ".", "user_name", "==", "user_name", ")", "del_count", ".", "execute", "(", ")", "return", "True", "except", ":", "return", "False" ]
29.5
0.013158
[ "def delete_by_user_name(user_name):\n", " '''\n", " Delete user in the database by `user_name`.\n", " '''\n", " try:\n", " del_count = TabMember.delete().where(TabMember.user_name == user_name)\n", " del_count.execute()\n", " return True\n", " except:\n", " return False" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0.012048192771084338, 0, 0, 0.0625, 0.041666666666666664 ]
10
0.019955
def relative(self): """Identify if this URI is relative to some "current context". For example, if the protocol is missing, it's protocol-relative. If the host is missing, it's host-relative, etc. """ scheme = self.scheme if not scheme: return True return scheme.is_relative(self)
[ "def", "relative", "(", "self", ")", ":", "scheme", "=", "self", ".", "scheme", "if", "not", "scheme", ":", "return", "True", "return", "scheme", ".", "is_relative", "(", "self", ")" ]
24.666667
0.058632
[ "def relative(self):\n", "\t\t\"\"\"Identify if this URI is relative to some \"current context\".\n", "\t\t\n", "\t\tFor example, if the protocol is missing, it's protocol-relative. If the host is missing, it's host-relative, etc.\n", "\t\t\"\"\"\n", "\t\t\n", "\t\tscheme = self.scheme\n", "\t\t\n", "\t\tif not scheme:\n", "\t\t\treturn True\n", "\t\t\n", "\t\treturn scheme.is_relative(self)" ]
[ 0, 0.03076923076923077, 0.6666666666666666, 0.017241379310344827, 0.16666666666666666, 0.6666666666666666, 0.043478260869565216, 0.6666666666666666, 0.058823529411764705, 0.06666666666666667, 0.6666666666666666, 0.06060606060606061 ]
12
0.259243
def mesh(faces, coordinates, meta_data=None, properties=None): ''' mesh(faces, coordinates) yields a mesh with the given face and coordinate matrices. ''' return Mesh(faces, coordinates, meta_data=meta_data, properties=properties)
[ "def", "mesh", "(", "faces", ",", "coordinates", ",", "meta_data", "=", "None", ",", "properties", "=", "None", ")", ":", "return", "Mesh", "(", "faces", ",", "coordinates", ",", "meta_data", "=", "meta_data", ",", "properties", "=", "properties", ")" ]
48.4
0.00813
[ "def mesh(faces, coordinates, meta_data=None, properties=None):\n", " '''\n", " mesh(faces, coordinates) yields a mesh with the given face and coordinate matrices.\n", " '''\n", " return Mesh(faces, coordinates, meta_data=meta_data, properties=properties)" ]
[ 0, 0, 0.011363636363636364, 0, 0.012658227848101266 ]
5
0.004804
def extract_frames(self, bpf_buffer): """Extract all frames from the buffer and stored them in the received list.""" # noqa: E501 # Ensure that the BPF buffer contains at least the header len_bb = len(bpf_buffer) if len_bb < 20: # Note: 20 == sizeof(struct bfp_hdr) return # Extract useful information from the BPF header if FREEBSD or NETBSD: # struct bpf_xhdr or struct bpf_hdr32 bh_tstamp_offset = 16 else: # struct bpf_hdr bh_tstamp_offset = 8 # Parse the BPF header bh_caplen = struct.unpack('I', bpf_buffer[bh_tstamp_offset:bh_tstamp_offset + 4])[0] # noqa: E501 next_offset = bh_tstamp_offset + 4 bh_datalen = struct.unpack('I', bpf_buffer[next_offset:next_offset + 4])[0] # noqa: E501 next_offset += 4 bh_hdrlen = struct.unpack('H', bpf_buffer[next_offset:next_offset + 2])[0] # noqa: E501 if bh_datalen == 0: return # Get and store the Scapy object frame_str = bpf_buffer[bh_hdrlen:bh_hdrlen + bh_caplen] try: pkt = self.guessed_cls(frame_str) except Exception: if conf.debug_dissector: raise pkt = conf.raw_layer(frame_str) self.received_frames.append(pkt) # Extract the next frame end = self.bpf_align(bh_hdrlen, bh_caplen) if (len_bb - end) >= 20: self.extract_frames(bpf_buffer[end:])
[ "def", "extract_frames", "(", "self", ",", "bpf_buffer", ")", ":", "# noqa: E501", "# Ensure that the BPF buffer contains at least the header", "len_bb", "=", "len", "(", "bpf_buffer", ")", "if", "len_bb", "<", "20", ":", "# Note: 20 == sizeof(struct bfp_hdr)", "return", "# Extract useful information from the BPF header", "if", "FREEBSD", "or", "NETBSD", ":", "# struct bpf_xhdr or struct bpf_hdr32", "bh_tstamp_offset", "=", "16", "else", ":", "# struct bpf_hdr", "bh_tstamp_offset", "=", "8", "# Parse the BPF header", "bh_caplen", "=", "struct", ".", "unpack", "(", "'I'", ",", "bpf_buffer", "[", "bh_tstamp_offset", ":", "bh_tstamp_offset", "+", "4", "]", ")", "[", "0", "]", "# noqa: E501", "next_offset", "=", "bh_tstamp_offset", "+", "4", "bh_datalen", "=", "struct", ".", "unpack", "(", "'I'", ",", "bpf_buffer", "[", "next_offset", ":", "next_offset", "+", "4", "]", ")", "[", "0", "]", "# noqa: E501", "next_offset", "+=", "4", "bh_hdrlen", "=", "struct", ".", "unpack", "(", "'H'", ",", "bpf_buffer", "[", "next_offset", ":", "next_offset", "+", "2", "]", ")", "[", "0", "]", "# noqa: E501", "if", "bh_datalen", "==", "0", ":", "return", "# Get and store the Scapy object", "frame_str", "=", "bpf_buffer", "[", "bh_hdrlen", ":", "bh_hdrlen", "+", "bh_caplen", "]", "try", ":", "pkt", "=", "self", ".", "guessed_cls", "(", "frame_str", ")", "except", "Exception", ":", "if", "conf", ".", "debug_dissector", ":", "raise", "pkt", "=", "conf", ".", "raw_layer", "(", "frame_str", ")", "self", ".", "received_frames", ".", "append", "(", "pkt", ")", "# Extract the next frame", "end", "=", "self", ".", "bpf_align", "(", "bh_hdrlen", ",", "bh_caplen", ")", "if", "(", "len_bb", "-", "end", ")", ">=", "20", ":", "self", ".", "extract_frames", "(", "bpf_buffer", "[", "end", ":", "]", ")" ]
37.974359
0.001317
[ "def extract_frames(self, bpf_buffer):\n", " \"\"\"Extract all frames from the buffer and stored them in the received list.\"\"\" # noqa: E501\n", "\n", " # Ensure that the BPF buffer contains at least the header\n", " len_bb = len(bpf_buffer)\n", " if len_bb < 20: # Note: 20 == sizeof(struct bfp_hdr)\n", " return\n", "\n", " # Extract useful information from the BPF header\n", " if FREEBSD or NETBSD:\n", " # struct bpf_xhdr or struct bpf_hdr32\n", " bh_tstamp_offset = 16\n", " else:\n", " # struct bpf_hdr\n", " bh_tstamp_offset = 8\n", "\n", " # Parse the BPF header\n", " bh_caplen = struct.unpack('I', bpf_buffer[bh_tstamp_offset:bh_tstamp_offset + 4])[0] # noqa: E501\n", " next_offset = bh_tstamp_offset + 4\n", " bh_datalen = struct.unpack('I', bpf_buffer[next_offset:next_offset + 4])[0] # noqa: E501\n", " next_offset += 4\n", " bh_hdrlen = struct.unpack('H', bpf_buffer[next_offset:next_offset + 2])[0] # noqa: E501\n", " if bh_datalen == 0:\n", " return\n", "\n", " # Get and store the Scapy object\n", " frame_str = bpf_buffer[bh_hdrlen:bh_hdrlen + bh_caplen]\n", " try:\n", " pkt = self.guessed_cls(frame_str)\n", " except Exception:\n", " if conf.debug_dissector:\n", " raise\n", " pkt = conf.raw_layer(frame_str)\n", " self.received_frames.append(pkt)\n", "\n", " # Extract the next frame\n", " end = self.bpf_align(bh_hdrlen, bh_caplen)\n", " if (len_bb - end) >= 20:\n", " self.extract_frames(bpf_buffer[end:])" ]
[ 0, 0.009900990099009901, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02040816326530612 ]
39
0.000777
def conf_matrix(p,labels,names=['1','0'],threshold=.5,show=True): """ Returns error rate and true/false positives in a binary classification problem - Actual classes are displayed by column. - Predicted classes are displayed by row. :param p: array of class '1' probabilities. :param labels: array of actual classes. :param names: list of class names, defaults to ['1','0']. :param threshold: probability value used to decide the class. :param show: whether the matrix should be shown or not :type show: False|True """ assert p.size == labels.size, "Arrays p and labels have different dimensions." decision = np.ones((labels.size,1)) decision[p<threshold] = 0 diff = decision - labels false_0 = diff[diff == -1].size false_1 = diff[diff == 1].size true_1 = np.sum(decision[diff ==0]) true_0 = labels.size - true_1 - false_0 - false_1 error = (false_1 + false_0)/np.float(labels.size) if show: print(100. - error * 100,'% instances correctly classified') print('%-10s| %-10s| %-10s| ' % ('',names[0],names[1])) print('----------|------------|------------|') print('%-10s| %-10s| %-10s| ' % (names[0],true_1,false_0)) print('%-10s| %-10s| %-10s| ' % (names[1],false_1,true_0)) return error,true_1, false_1, true_0, false_0
[ "def", "conf_matrix", "(", "p", ",", "labels", ",", "names", "=", "[", "'1'", ",", "'0'", "]", ",", "threshold", "=", ".5", ",", "show", "=", "True", ")", ":", "assert", "p", ".", "size", "==", "labels", ".", "size", ",", "\"Arrays p and labels have different dimensions.\"", "decision", "=", "np", ".", "ones", "(", "(", "labels", ".", "size", ",", "1", ")", ")", "decision", "[", "p", "<", "threshold", "]", "=", "0", "diff", "=", "decision", "-", "labels", "false_0", "=", "diff", "[", "diff", "==", "-", "1", "]", ".", "size", "false_1", "=", "diff", "[", "diff", "==", "1", "]", ".", "size", "true_1", "=", "np", ".", "sum", "(", "decision", "[", "diff", "==", "0", "]", ")", "true_0", "=", "labels", ".", "size", "-", "true_1", "-", "false_0", "-", "false_1", "error", "=", "(", "false_1", "+", "false_0", ")", "/", "np", ".", "float", "(", "labels", ".", "size", ")", "if", "show", ":", "print", "(", "100.", "-", "error", "*", "100", ",", "'% instances correctly classified'", ")", "print", "(", "'%-10s| %-10s| %-10s| '", "%", "(", "''", ",", "names", "[", "0", "]", ",", "names", "[", "1", "]", ")", ")", "print", "(", "'----------|------------|------------|'", ")", "print", "(", "'%-10s| %-10s| %-10s| '", "%", "(", "names", "[", "0", "]", ",", "true_1", ",", "false_0", ")", ")", "print", "(", "'%-10s| %-10s| %-10s| '", "%", "(", "names", "[", "1", "]", ",", "false_1", ",", "true_0", ")", ")", "return", "error", ",", "true_1", ",", "false_1", ",", "true_0", ",", "false_0" ]
45.758621
0.014022
[ "def conf_matrix(p,labels,names=['1','0'],threshold=.5,show=True):\n", " \"\"\"\n", " Returns error rate and true/false positives in a binary classification problem\n", " - Actual classes are displayed by column.\n", " - Predicted classes are displayed by row.\n", "\n", " :param p: array of class '1' probabilities.\n", " :param labels: array of actual classes.\n", " :param names: list of class names, defaults to ['1','0'].\n", " :param threshold: probability value used to decide the class.\n", " :param show: whether the matrix should be shown or not\n", " :type show: False|True\n", " \"\"\"\n", " assert p.size == labels.size, \"Arrays p and labels have different dimensions.\"\n", " decision = np.ones((labels.size,1))\n", " decision[p<threshold] = 0\n", " diff = decision - labels\n", " false_0 = diff[diff == -1].size\n", " false_1 = diff[diff == 1].size\n", " true_1 = np.sum(decision[diff ==0])\n", " true_0 = labels.size - true_1 - false_0 - false_1\n", " error = (false_1 + false_0)/np.float(labels.size)\n", " if show:\n", " print(100. - error * 100,'% instances correctly classified')\n", " print('%-10s| %-10s| %-10s| ' % ('',names[0],names[1]))\n", " print('----------|------------|------------|')\n", " print('%-10s| %-10s| %-10s| ' % (names[0],true_1,false_0))\n", " print('%-10s| %-10s| %-10s| ' % (names[1],false_1,true_0))\n", " return error,true_1, false_1, true_0, false_0" ]
[ 0.07575757575757576, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0.025, 0.03333333333333333, 0, 0, 0, 0.025, 0, 0, 0, 0.014492753623188406, 0.030303030303030304, 0, 0.028985507246376812, 0.028985507246376812, 0.04081632653061224 ]
29
0.011268
def dirs(self): """Get an iter of VenvDirs within the directory.""" contents = self.paths contents = (VenvDir(path.path) for path in contents if path.is_dir) return contents
[ "def", "dirs", "(", "self", ")", ":", "contents", "=", "self", ".", "paths", "contents", "=", "(", "VenvDir", "(", "path", ".", "path", ")", "for", "path", "in", "contents", "if", "path", ".", "is_dir", ")", "return", "contents" ]
40.2
0.009756
[ "def dirs(self):\n", " \"\"\"Get an iter of VenvDirs within the directory.\"\"\"\n", " contents = self.paths\n", " contents = (VenvDir(path.path) for path in contents if path.is_dir)\n", " return contents" ]
[ 0, 0.016666666666666666, 0, 0, 0.043478260869565216 ]
5
0.012029
def uavionix_adsb_out_cfg_send(self, ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect, force_mavlink1=False): ''' Static data to configure the ADS-B transponder (send within 10 sec of a POR and every 10 sec thereafter) ICAO : Vehicle address (24 bit) (uint32_t) callsign : Vehicle identifier (8 characters, null terminated, valid characters are A-Z, 0-9, " " only) (char) emitterType : Transmitting vehicle type. See ADSB_EMITTER_TYPE enum (uint8_t) aircraftSize : Aircraft length and width encoding (table 2-35 of DO-282B) (uint8_t) gpsOffsetLat : GPS antenna lateral offset (table 2-36 of DO-282B) (uint8_t) gpsOffsetLon : GPS antenna longitudinal offset from nose [if non-zero, take position (in meters) divide by 2 and add one] (table 2-37 DO-282B) (uint8_t) stallSpeed : Aircraft stall speed in cm/s (uint16_t) rfSelect : ADS-B transponder reciever and transmit enable flags (uint8_t) ''' return self.send(self.uavionix_adsb_out_cfg_encode(ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect), force_mavlink1=force_mavlink1)
[ "def", "uavionix_adsb_out_cfg_send", "(", "self", ",", "ICAO", ",", "callsign", ",", "emitterType", ",", "aircraftSize", ",", "gpsOffsetLat", ",", "gpsOffsetLon", ",", "stallSpeed", ",", "rfSelect", ",", "force_mavlink1", "=", "False", ")", ":", "return", "self", ".", "send", "(", "self", ".", "uavionix_adsb_out_cfg_encode", "(", "ICAO", ",", "callsign", ",", "emitterType", ",", "aircraftSize", ",", "gpsOffsetLat", ",", "gpsOffsetLon", ",", "stallSpeed", ",", "rfSelect", ")", ",", "force_mavlink1", "=", "force_mavlink1", ")" ]
89.375
0.008304
[ "def uavionix_adsb_out_cfg_send(self, ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect, force_mavlink1=False):\n", " '''\n", " Static data to configure the ADS-B transponder (send within 10 sec of\n", " a POR and every 10 sec thereafter)\n", "\n", " ICAO : Vehicle address (24 bit) (uint32_t)\n", " callsign : Vehicle identifier (8 characters, null terminated, valid characters are A-Z, 0-9, \" \" only) (char)\n", " emitterType : Transmitting vehicle type. See ADSB_EMITTER_TYPE enum (uint8_t)\n", " aircraftSize : Aircraft length and width encoding (table 2-35 of DO-282B) (uint8_t)\n", " gpsOffsetLat : GPS antenna lateral offset (table 2-36 of DO-282B) (uint8_t)\n", " gpsOffsetLon : GPS antenna longitudinal offset from nose [if non-zero, take position (in meters) divide by 2 and add one] (table 2-37 DO-282B) (uint8_t)\n", " stallSpeed : Aircraft stall speed in cm/s (uint16_t)\n", " rfSelect : ADS-B transponder reciever and transmit enable flags (uint8_t)\n", "\n", " '''\n", " return self.send(self.uavionix_adsb_out_cfg_encode(ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect), force_mavlink1=force_mavlink1)" ]
[ 0.006535947712418301, 0.05, 0.011627906976744186, 0, 0, 0, 0.006993006993006993, 0.009259259259259259, 0.008849557522123894, 0.009523809523809525, 0.005494505494505495, 0.011904761904761904, 0.009345794392523364, 0, 0, 0.010471204188481676 ]
16
0.00875
def strip_secrets(qp, matcher, kwlist): """ This function will scrub the secrets from a query param string based on the passed in matcher and kwlist. blah=1&secret=password&valid=true will result in blah=1&secret=<redacted>&valid=true You can even pass in path query combinations: /signup?blah=1&secret=password&valid=true will result in /signup?blah=1&secret=<redacted>&valid=true :param qp: a string representing the query params in URL form (unencoded) :param matcher: the matcher to use :param kwlist: the list of keywords to match :return: a scrubbed query param string """ path = None try: if qp is None: return '' if type(kwlist) is not list: logger.debug("strip_secrets: bad keyword list") return qp # If there are no key=values, then just return if not '=' in qp: return qp if '?' in qp: path, query = qp.split('?') else: query = qp params = parse.parse_qsl(query, keep_blank_values=True) redacted = ['<redacted>'] if matcher == 'equals-ignore-case': for keyword in kwlist: for index, kv in enumerate(params): if kv[0].lower() == keyword.lower(): params[index] = (kv[0], redacted) elif matcher == 'equals': for keyword in kwlist: for index, kv in enumerate(params): if kv[0] == keyword: params[index] = (kv[0], redacted) elif matcher == 'contains-ignore-case': for keyword in kwlist: for index, kv in enumerate(params): if keyword.lower() in kv[0].lower(): params[index] = (kv[0], redacted) elif matcher == 'contains': for keyword in kwlist: for index, kv in enumerate(params): if keyword in kv[0]: params[index] = (kv[0], redacted) elif matcher == 'regex': for regexp in kwlist: for index, kv in enumerate(params): if re.match(regexp, kv[0]): params[index] = (kv[0], redacted) else: logger.debug("strip_secrets: unknown matcher") return qp if sys.version_info < (3, 0): result = urllib.urlencode(params, doseq=True) else: result = parse.urlencode(params, doseq=True) query = parse.unquote(result) if path: query = path + '?' + query return query except: logger.debug("strip_secrets", exc_info=True)
[ "def", "strip_secrets", "(", "qp", ",", "matcher", ",", "kwlist", ")", ":", "path", "=", "None", "try", ":", "if", "qp", "is", "None", ":", "return", "''", "if", "type", "(", "kwlist", ")", "is", "not", "list", ":", "logger", ".", "debug", "(", "\"strip_secrets: bad keyword list\"", ")", "return", "qp", "# If there are no key=values, then just return", "if", "not", "'='", "in", "qp", ":", "return", "qp", "if", "'?'", "in", "qp", ":", "path", ",", "query", "=", "qp", ".", "split", "(", "'?'", ")", "else", ":", "query", "=", "qp", "params", "=", "parse", ".", "parse_qsl", "(", "query", ",", "keep_blank_values", "=", "True", ")", "redacted", "=", "[", "'<redacted>'", "]", "if", "matcher", "==", "'equals-ignore-case'", ":", "for", "keyword", "in", "kwlist", ":", "for", "index", ",", "kv", "in", "enumerate", "(", "params", ")", ":", "if", "kv", "[", "0", "]", ".", "lower", "(", ")", "==", "keyword", ".", "lower", "(", ")", ":", "params", "[", "index", "]", "=", "(", "kv", "[", "0", "]", ",", "redacted", ")", "elif", "matcher", "==", "'equals'", ":", "for", "keyword", "in", "kwlist", ":", "for", "index", ",", "kv", "in", "enumerate", "(", "params", ")", ":", "if", "kv", "[", "0", "]", "==", "keyword", ":", "params", "[", "index", "]", "=", "(", "kv", "[", "0", "]", ",", "redacted", ")", "elif", "matcher", "==", "'contains-ignore-case'", ":", "for", "keyword", "in", "kwlist", ":", "for", "index", ",", "kv", "in", "enumerate", "(", "params", ")", ":", "if", "keyword", ".", "lower", "(", ")", "in", "kv", "[", "0", "]", ".", "lower", "(", ")", ":", "params", "[", "index", "]", "=", "(", "kv", "[", "0", "]", ",", "redacted", ")", "elif", "matcher", "==", "'contains'", ":", "for", "keyword", "in", "kwlist", ":", "for", "index", ",", "kv", "in", "enumerate", "(", "params", ")", ":", "if", "keyword", "in", "kv", "[", "0", "]", ":", "params", "[", "index", "]", "=", "(", "kv", "[", "0", "]", ",", "redacted", ")", "elif", "matcher", "==", "'regex'", ":", "for", "regexp", "in", "kwlist", ":", "for", "index", ",", "kv", "in", "enumerate", "(", "params", ")", ":", "if", "re", ".", "match", "(", "regexp", ",", "kv", "[", "0", "]", ")", ":", "params", "[", "index", "]", "=", "(", "kv", "[", "0", "]", ",", "redacted", ")", "else", ":", "logger", ".", "debug", "(", "\"strip_secrets: unknown matcher\"", ")", "return", "qp", "if", "sys", ".", "version_info", "<", "(", "3", ",", "0", ")", ":", "result", "=", "urllib", ".", "urlencode", "(", "params", ",", "doseq", "=", "True", ")", "else", ":", "result", "=", "parse", ".", "urlencode", "(", "params", ",", "doseq", "=", "True", ")", "query", "=", "parse", ".", "unquote", "(", "result", ")", "if", "path", ":", "query", "=", "path", "+", "'?'", "+", "query", "return", "query", "except", ":", "logger", ".", "debug", "(", "\"strip_secrets\"", ",", "exc_info", "=", "True", ")" ]
33.948718
0.002202
[ "def strip_secrets(qp, matcher, kwlist):\n", " \"\"\"\n", " This function will scrub the secrets from a query param string based on the passed in matcher and kwlist.\n", "\n", " blah=1&secret=password&valid=true will result in blah=1&secret=<redacted>&valid=true\n", "\n", " You can even pass in path query combinations:\n", "\n", " /signup?blah=1&secret=password&valid=true will result in /signup?blah=1&secret=<redacted>&valid=true\n", "\n", " :param qp: a string representing the query params in URL form (unencoded)\n", " :param matcher: the matcher to use\n", " :param kwlist: the list of keywords to match\n", " :return: a scrubbed query param string\n", " \"\"\"\n", " path = None\n", "\n", " try:\n", " if qp is None:\n", " return ''\n", "\n", " if type(kwlist) is not list:\n", " logger.debug(\"strip_secrets: bad keyword list\")\n", " return qp\n", "\n", " # If there are no key=values, then just return\n", " if not '=' in qp:\n", " return qp\n", "\n", " if '?' in qp:\n", " path, query = qp.split('?')\n", " else:\n", " query = qp\n", "\n", " params = parse.parse_qsl(query, keep_blank_values=True)\n", " redacted = ['<redacted>']\n", "\n", " if matcher == 'equals-ignore-case':\n", " for keyword in kwlist:\n", " for index, kv in enumerate(params):\n", " if kv[0].lower() == keyword.lower():\n", " params[index] = (kv[0], redacted)\n", " elif matcher == 'equals':\n", " for keyword in kwlist:\n", " for index, kv in enumerate(params):\n", " if kv[0] == keyword:\n", " params[index] = (kv[0], redacted)\n", " elif matcher == 'contains-ignore-case':\n", " for keyword in kwlist:\n", " for index, kv in enumerate(params):\n", " if keyword.lower() in kv[0].lower():\n", " params[index] = (kv[0], redacted)\n", " elif matcher == 'contains':\n", " for keyword in kwlist:\n", " for index, kv in enumerate(params):\n", " if keyword in kv[0]:\n", " params[index] = (kv[0], redacted)\n", " elif matcher == 'regex':\n", " for regexp in kwlist:\n", " for index, kv in enumerate(params):\n", " if re.match(regexp, kv[0]):\n", " params[index] = (kv[0], redacted)\n", " else:\n", " logger.debug(\"strip_secrets: unknown matcher\")\n", " return qp\n", "\n", " if sys.version_info < (3, 0):\n", " result = urllib.urlencode(params, doseq=True)\n", " else:\n", " result = parse.urlencode(params, doseq=True)\n", " query = parse.unquote(result)\n", "\n", " if path:\n", " query = path + '?' + query\n", "\n", " return query\n", " except:\n", " logger.debug(\"strip_secrets\", exc_info=True)" ]
[ 0, 0, 0.00909090909090909, 0, 0.011235955056179775, 0, 0, 0, 0.009523809523809525, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0.019230769230769232 ]
78
0.002191
def loadFromURL(self, url): """Load an xml file from a URL and return a DOM document.""" if isfile(url) is True: file = open(url, 'r') else: file = urlopen(url) try: result = self.loadDocument(file) except Exception, ex: file.close() raise ParseError(('Failed to load document %s' %url,) + ex.args) else: file.close() return result
[ "def", "loadFromURL", "(", "self", ",", "url", ")", ":", "if", "isfile", "(", "url", ")", "is", "True", ":", "file", "=", "open", "(", "url", ",", "'r'", ")", "else", ":", "file", "=", "urlopen", "(", "url", ")", "try", ":", "result", "=", "self", ".", "loadDocument", "(", "file", ")", "except", "Exception", ",", "ex", ":", "file", ".", "close", "(", ")", "raise", "ParseError", "(", "(", "'Failed to load document %s'", "%", "url", ",", ")", "+", "ex", ".", "args", ")", "else", ":", "file", ".", "close", "(", ")", "return", "result" ]
30.066667
0.008602
[ "def loadFromURL(self, url):\n", " \"\"\"Load an xml file from a URL and return a DOM document.\"\"\"\n", " if isfile(url) is True:\n", " file = open(url, 'r')\n", " else:\n", " file = urlopen(url)\n", "\n", " try: \n", " result = self.loadDocument(file)\n", " except Exception, ex:\n", " file.close()\n", " raise ParseError(('Failed to load document %s' %url,) + ex.args)\n", " else:\n", " file.close()\n", " return result" ]
[ 0, 0.014492753623188406, 0, 0, 0, 0, 0, 0.05555555555555555, 0, 0, 0, 0.012987012987012988, 0, 0, 0.047619047619047616 ]
15
0.00871
def build(self, paths, tags=None, wheel_version=None): """ Build a wheel from files in specified paths, and use any specified tags when determining the name of the wheel. """ if tags is None: tags = {} libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] if libkey == 'platlib': is_pure = 'false' default_pyver = [IMPVER] default_abi = [ABI] default_arch = [ARCH] else: is_pure = 'true' default_pyver = [PYVER] default_abi = ['none'] default_arch = ['any'] self.pyver = tags.get('pyver', default_pyver) self.abi = tags.get('abi', default_abi) self.arch = tags.get('arch', default_arch) libdir = paths[libkey] name_ver = '%s-%s' % (self.name, self.version) data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver archive_paths = [] # First, stuff which is not in site-packages for key in ('data', 'headers', 'scripts'): if key not in paths: continue path = paths[key] if os.path.isdir(path): for root, dirs, files in os.walk(path): for fn in files: p = fsdecode(os.path.join(root, fn)) rp = os.path.relpath(p, path) ap = to_posix(os.path.join(data_dir, key, rp)) archive_paths.append((ap, p)) if key == 'scripts' and not p.endswith('.exe'): with open(p, 'rb') as f: data = f.read() data = self.process_shebang(data) with open(p, 'wb') as f: f.write(data) # Now, stuff which is in site-packages, other than the # distinfo stuff. path = libdir distinfo = None for root, dirs, files in os.walk(path): if root == path: # At the top level only, save distinfo for later # and skip it for now for i, dn in enumerate(dirs): dn = fsdecode(dn) if dn.endswith('.dist-info'): distinfo = os.path.join(root, dn) del dirs[i] break assert distinfo, '.dist-info directory expected, not found' for fn in files: # comment out next suite to leave .pyc files in if fsdecode(fn).endswith(('.pyc', '.pyo')): continue p = os.path.join(root, fn) rp = to_posix(os.path.relpath(p, path)) archive_paths.append((rp, p)) # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. files = os.listdir(distinfo) for fn in files: if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): p = fsdecode(os.path.join(distinfo, fn)) ap = to_posix(os.path.join(info_dir, fn)) archive_paths.append((ap, p)) wheel_metadata = [ 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), 'Generator: distlib %s' % __version__, 'Root-Is-Purelib: %s' % is_pure, ] for pyver, abi, arch in self.tags: wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) p = os.path.join(distinfo, 'WHEEL') with open(p, 'w') as f: f.write('\n'.join(wheel_metadata)) ap = to_posix(os.path.join(info_dir, 'WHEEL')) archive_paths.append((ap, p)) # Now, at last, RECORD. # Paths in here are archive paths - nothing else makes sense. self.write_records((distinfo, info_dir), libdir, archive_paths) # Now, ready to build the zip file pathname = os.path.join(self.dirname, self.filename) self.build_zip(pathname, archive_paths) return pathname
[ "def", "build", "(", "self", ",", "paths", ",", "tags", "=", "None", ",", "wheel_version", "=", "None", ")", ":", "if", "tags", "is", "None", ":", "tags", "=", "{", "}", "libkey", "=", "list", "(", "filter", "(", "lambda", "o", ":", "o", "in", "paths", ",", "(", "'purelib'", ",", "'platlib'", ")", ")", ")", "[", "0", "]", "if", "libkey", "==", "'platlib'", ":", "is_pure", "=", "'false'", "default_pyver", "=", "[", "IMPVER", "]", "default_abi", "=", "[", "ABI", "]", "default_arch", "=", "[", "ARCH", "]", "else", ":", "is_pure", "=", "'true'", "default_pyver", "=", "[", "PYVER", "]", "default_abi", "=", "[", "'none'", "]", "default_arch", "=", "[", "'any'", "]", "self", ".", "pyver", "=", "tags", ".", "get", "(", "'pyver'", ",", "default_pyver", ")", "self", ".", "abi", "=", "tags", ".", "get", "(", "'abi'", ",", "default_abi", ")", "self", ".", "arch", "=", "tags", ".", "get", "(", "'arch'", ",", "default_arch", ")", "libdir", "=", "paths", "[", "libkey", "]", "name_ver", "=", "'%s-%s'", "%", "(", "self", ".", "name", ",", "self", ".", "version", ")", "data_dir", "=", "'%s.data'", "%", "name_ver", "info_dir", "=", "'%s.dist-info'", "%", "name_ver", "archive_paths", "=", "[", "]", "# First, stuff which is not in site-packages", "for", "key", "in", "(", "'data'", ",", "'headers'", ",", "'scripts'", ")", ":", "if", "key", "not", "in", "paths", ":", "continue", "path", "=", "paths", "[", "key", "]", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "path", ")", ":", "for", "fn", "in", "files", ":", "p", "=", "fsdecode", "(", "os", ".", "path", ".", "join", "(", "root", ",", "fn", ")", ")", "rp", "=", "os", ".", "path", ".", "relpath", "(", "p", ",", "path", ")", "ap", "=", "to_posix", "(", "os", ".", "path", ".", "join", "(", "data_dir", ",", "key", ",", "rp", ")", ")", "archive_paths", ".", "append", "(", "(", "ap", ",", "p", ")", ")", "if", "key", "==", "'scripts'", "and", "not", "p", ".", "endswith", "(", "'.exe'", ")", ":", "with", "open", "(", "p", ",", "'rb'", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", ")", "data", "=", "self", ".", "process_shebang", "(", "data", ")", "with", "open", "(", "p", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "data", ")", "# Now, stuff which is in site-packages, other than the", "# distinfo stuff.", "path", "=", "libdir", "distinfo", "=", "None", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "path", ")", ":", "if", "root", "==", "path", ":", "# At the top level only, save distinfo for later", "# and skip it for now", "for", "i", ",", "dn", "in", "enumerate", "(", "dirs", ")", ":", "dn", "=", "fsdecode", "(", "dn", ")", "if", "dn", ".", "endswith", "(", "'.dist-info'", ")", ":", "distinfo", "=", "os", ".", "path", ".", "join", "(", "root", ",", "dn", ")", "del", "dirs", "[", "i", "]", "break", "assert", "distinfo", ",", "'.dist-info directory expected, not found'", "for", "fn", "in", "files", ":", "# comment out next suite to leave .pyc files in", "if", "fsdecode", "(", "fn", ")", ".", "endswith", "(", "(", "'.pyc'", ",", "'.pyo'", ")", ")", ":", "continue", "p", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fn", ")", "rp", "=", "to_posix", "(", "os", ".", "path", ".", "relpath", "(", "p", ",", "path", ")", ")", "archive_paths", ".", "append", "(", "(", "rp", ",", "p", ")", ")", "# Now distinfo. Assumed to be flat, i.e. os.listdir is enough.", "files", "=", "os", ".", "listdir", "(", "distinfo", ")", "for", "fn", "in", "files", ":", "if", "fn", "not", "in", "(", "'RECORD'", ",", "'INSTALLER'", ",", "'SHARED'", ",", "'WHEEL'", ")", ":", "p", "=", "fsdecode", "(", "os", ".", "path", ".", "join", "(", "distinfo", ",", "fn", ")", ")", "ap", "=", "to_posix", "(", "os", ".", "path", ".", "join", "(", "info_dir", ",", "fn", ")", ")", "archive_paths", ".", "append", "(", "(", "ap", ",", "p", ")", ")", "wheel_metadata", "=", "[", "'Wheel-Version: %d.%d'", "%", "(", "wheel_version", "or", "self", ".", "wheel_version", ")", ",", "'Generator: distlib %s'", "%", "__version__", ",", "'Root-Is-Purelib: %s'", "%", "is_pure", ",", "]", "for", "pyver", ",", "abi", ",", "arch", "in", "self", ".", "tags", ":", "wheel_metadata", ".", "append", "(", "'Tag: %s-%s-%s'", "%", "(", "pyver", ",", "abi", ",", "arch", ")", ")", "p", "=", "os", ".", "path", ".", "join", "(", "distinfo", ",", "'WHEEL'", ")", "with", "open", "(", "p", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'\\n'", ".", "join", "(", "wheel_metadata", ")", ")", "ap", "=", "to_posix", "(", "os", ".", "path", ".", "join", "(", "info_dir", ",", "'WHEEL'", ")", ")", "archive_paths", ".", "append", "(", "(", "ap", ",", "p", ")", ")", "# Now, at last, RECORD.", "# Paths in here are archive paths - nothing else makes sense.", "self", ".", "write_records", "(", "(", "distinfo", ",", "info_dir", ")", ",", "libdir", ",", "archive_paths", ")", "# Now, ready to build the zip file", "pathname", "=", "os", ".", "path", ".", "join", "(", "self", ".", "dirname", ",", "self", ".", "filename", ")", "self", ".", "build_zip", "(", "pathname", ",", "archive_paths", ")", "return", "pathname" ]
39.126214
0.000484
[ "def build(self, paths, tags=None, wheel_version=None):\n", " \"\"\"\n", " Build a wheel from files in specified paths, and use any specified tags\n", " when determining the name of the wheel.\n", " \"\"\"\n", " if tags is None:\n", " tags = {}\n", "\n", " libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]\n", " if libkey == 'platlib':\n", " is_pure = 'false'\n", " default_pyver = [IMPVER]\n", " default_abi = [ABI]\n", " default_arch = [ARCH]\n", " else:\n", " is_pure = 'true'\n", " default_pyver = [PYVER]\n", " default_abi = ['none']\n", " default_arch = ['any']\n", "\n", " self.pyver = tags.get('pyver', default_pyver)\n", " self.abi = tags.get('abi', default_abi)\n", " self.arch = tags.get('arch', default_arch)\n", "\n", " libdir = paths[libkey]\n", "\n", " name_ver = '%s-%s' % (self.name, self.version)\n", " data_dir = '%s.data' % name_ver\n", " info_dir = '%s.dist-info' % name_ver\n", "\n", " archive_paths = []\n", "\n", " # First, stuff which is not in site-packages\n", " for key in ('data', 'headers', 'scripts'):\n", " if key not in paths:\n", " continue\n", " path = paths[key]\n", " if os.path.isdir(path):\n", " for root, dirs, files in os.walk(path):\n", " for fn in files:\n", " p = fsdecode(os.path.join(root, fn))\n", " rp = os.path.relpath(p, path)\n", " ap = to_posix(os.path.join(data_dir, key, rp))\n", " archive_paths.append((ap, p))\n", " if key == 'scripts' and not p.endswith('.exe'):\n", " with open(p, 'rb') as f:\n", " data = f.read()\n", " data = self.process_shebang(data)\n", " with open(p, 'wb') as f:\n", " f.write(data)\n", "\n", " # Now, stuff which is in site-packages, other than the\n", " # distinfo stuff.\n", " path = libdir\n", " distinfo = None\n", " for root, dirs, files in os.walk(path):\n", " if root == path:\n", " # At the top level only, save distinfo for later\n", " # and skip it for now\n", " for i, dn in enumerate(dirs):\n", " dn = fsdecode(dn)\n", " if dn.endswith('.dist-info'):\n", " distinfo = os.path.join(root, dn)\n", " del dirs[i]\n", " break\n", " assert distinfo, '.dist-info directory expected, not found'\n", "\n", " for fn in files:\n", " # comment out next suite to leave .pyc files in\n", " if fsdecode(fn).endswith(('.pyc', '.pyo')):\n", " continue\n", " p = os.path.join(root, fn)\n", " rp = to_posix(os.path.relpath(p, path))\n", " archive_paths.append((rp, p))\n", "\n", " # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.\n", " files = os.listdir(distinfo)\n", " for fn in files:\n", " if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):\n", " p = fsdecode(os.path.join(distinfo, fn))\n", " ap = to_posix(os.path.join(info_dir, fn))\n", " archive_paths.append((ap, p))\n", "\n", " wheel_metadata = [\n", " 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),\n", " 'Generator: distlib %s' % __version__,\n", " 'Root-Is-Purelib: %s' % is_pure,\n", " ]\n", " for pyver, abi, arch in self.tags:\n", " wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))\n", " p = os.path.join(distinfo, 'WHEEL')\n", " with open(p, 'w') as f:\n", " f.write('\\n'.join(wheel_metadata))\n", " ap = to_posix(os.path.join(info_dir, 'WHEEL'))\n", " archive_paths.append((ap, p))\n", "\n", " # Now, at last, RECORD.\n", " # Paths in here are archive paths - nothing else makes sense.\n", " self.write_records((distinfo, info_dir), libdir, archive_paths)\n", " # Now, ready to build the zip file\n", " pathname = os.path.join(self.dirname, self.filename)\n", " self.build_zip(pathname, archive_paths)\n", " return pathname" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216 ]
103
0.001231
def remix(self, remix_dictionary=None, num_output_channels=None): '''Remix the channels of an audio file. Note: volume options are not yet implemented Parameters ---------- remix_dictionary : dict or None Dictionary mapping output channel to list of input channel(s). Empty lists indicate the corresponding output channel should be empty. If None, mixes all channels down to a single mono file. num_output_channels : int or None The number of channels in the output file. If None, the number of output channels is equal to the largest key in remix_dictionary. If remix_dictionary is None, this variable is ignored. Examples -------- Remix a 4-channel input file. The output file will have input channel 2 in channel 1, a mixdown of input channels 1 an 3 in channel 2, an empty channel 3, and a copy of input channel 4 in channel 4. >>> import sox >>> tfm = sox.Transformer() >>> remix_dictionary = {1: [2], 2: [1, 3], 4: [4]} >>> tfm.remix(remix_dictionary) ''' if not (isinstance(remix_dictionary, dict) or remix_dictionary is None): raise ValueError("remix_dictionary must be a dictionary or None.") if remix_dictionary is not None: if not all([isinstance(i, int) and i > 0 for i in remix_dictionary.keys()]): raise ValueError( "remix dictionary must have positive integer keys." ) if not all([isinstance(v, list) for v in remix_dictionary.values()]): raise ValueError("remix dictionary values must be lists.") for v_list in remix_dictionary.values(): if not all([isinstance(v, int) and v > 0 for v in v_list]): raise ValueError( "elements of remix dictionary values must " "be positive integers" ) if not ((isinstance(num_output_channels, int) and num_output_channels > 0) or num_output_channels is None): raise ValueError( "num_output_channels must be a positive integer or None." ) effect_args = ['remix'] if remix_dictionary is None: effect_args.append('-') else: if num_output_channels is None: num_output_channels = max(remix_dictionary.keys()) for channel in range(1, num_output_channels + 1): if channel in remix_dictionary.keys(): out_channel = ','.join( [str(i) for i in remix_dictionary[channel]] ) else: out_channel = '0' effect_args.append(out_channel) self.effects.extend(effect_args) self.effects_log.append('remix') return self
[ "def", "remix", "(", "self", ",", "remix_dictionary", "=", "None", ",", "num_output_channels", "=", "None", ")", ":", "if", "not", "(", "isinstance", "(", "remix_dictionary", ",", "dict", ")", "or", "remix_dictionary", "is", "None", ")", ":", "raise", "ValueError", "(", "\"remix_dictionary must be a dictionary or None.\"", ")", "if", "remix_dictionary", "is", "not", "None", ":", "if", "not", "all", "(", "[", "isinstance", "(", "i", ",", "int", ")", "and", "i", ">", "0", "for", "i", "in", "remix_dictionary", ".", "keys", "(", ")", "]", ")", ":", "raise", "ValueError", "(", "\"remix dictionary must have positive integer keys.\"", ")", "if", "not", "all", "(", "[", "isinstance", "(", "v", ",", "list", ")", "for", "v", "in", "remix_dictionary", ".", "values", "(", ")", "]", ")", ":", "raise", "ValueError", "(", "\"remix dictionary values must be lists.\"", ")", "for", "v_list", "in", "remix_dictionary", ".", "values", "(", ")", ":", "if", "not", "all", "(", "[", "isinstance", "(", "v", ",", "int", ")", "and", "v", ">", "0", "for", "v", "in", "v_list", "]", ")", ":", "raise", "ValueError", "(", "\"elements of remix dictionary values must \"", "\"be positive integers\"", ")", "if", "not", "(", "(", "isinstance", "(", "num_output_channels", ",", "int", ")", "and", "num_output_channels", ">", "0", ")", "or", "num_output_channels", "is", "None", ")", ":", "raise", "ValueError", "(", "\"num_output_channels must be a positive integer or None.\"", ")", "effect_args", "=", "[", "'remix'", "]", "if", "remix_dictionary", "is", "None", ":", "effect_args", ".", "append", "(", "'-'", ")", "else", ":", "if", "num_output_channels", "is", "None", ":", "num_output_channels", "=", "max", "(", "remix_dictionary", ".", "keys", "(", ")", ")", "for", "channel", "in", "range", "(", "1", ",", "num_output_channels", "+", "1", ")", ":", "if", "channel", "in", "remix_dictionary", ".", "keys", "(", ")", ":", "out_channel", "=", "','", ".", "join", "(", "[", "str", "(", "i", ")", "for", "i", "in", "remix_dictionary", "[", "channel", "]", "]", ")", "else", ":", "out_channel", "=", "'0'", "effect_args", ".", "append", "(", "out_channel", ")", "self", ".", "effects", ".", "extend", "(", "effect_args", ")", "self", ".", "effects_log", ".", "append", "(", "'remix'", ")", "return", "self" ]
37.810127
0.000653
[ "def remix(self, remix_dictionary=None, num_output_channels=None):\n", " '''Remix the channels of an audio file.\n", "\n", " Note: volume options are not yet implemented\n", "\n", " Parameters\n", " ----------\n", " remix_dictionary : dict or None\n", " Dictionary mapping output channel to list of input channel(s).\n", " Empty lists indicate the corresponding output channel should be\n", " empty. If None, mixes all channels down to a single mono file.\n", " num_output_channels : int or None\n", " The number of channels in the output file. If None, the number of\n", " output channels is equal to the largest key in remix_dictionary.\n", " If remix_dictionary is None, this variable is ignored.\n", "\n", " Examples\n", " --------\n", " Remix a 4-channel input file. The output file will have\n", " input channel 2 in channel 1, a mixdown of input channels 1 an 3 in\n", " channel 2, an empty channel 3, and a copy of input channel 4 in\n", " channel 4.\n", "\n", " >>> import sox\n", " >>> tfm = sox.Transformer()\n", " >>> remix_dictionary = {1: [2], 2: [1, 3], 4: [4]}\n", " >>> tfm.remix(remix_dictionary)\n", "\n", " '''\n", " if not (isinstance(remix_dictionary, dict) or\n", " remix_dictionary is None):\n", " raise ValueError(\"remix_dictionary must be a dictionary or None.\")\n", "\n", " if remix_dictionary is not None:\n", "\n", " if not all([isinstance(i, int) and i > 0 for i\n", " in remix_dictionary.keys()]):\n", " raise ValueError(\n", " \"remix dictionary must have positive integer keys.\"\n", " )\n", "\n", " if not all([isinstance(v, list) for v\n", " in remix_dictionary.values()]):\n", " raise ValueError(\"remix dictionary values must be lists.\")\n", "\n", " for v_list in remix_dictionary.values():\n", " if not all([isinstance(v, int) and v > 0 for v in v_list]):\n", " raise ValueError(\n", " \"elements of remix dictionary values must \"\n", " \"be positive integers\"\n", " )\n", "\n", " if not ((isinstance(num_output_channels, int) and\n", " num_output_channels > 0) or num_output_channels is None):\n", " raise ValueError(\n", " \"num_output_channels must be a positive integer or None.\"\n", " )\n", "\n", " effect_args = ['remix']\n", " if remix_dictionary is None:\n", " effect_args.append('-')\n", " else:\n", " if num_output_channels is None:\n", " num_output_channels = max(remix_dictionary.keys())\n", "\n", " for channel in range(1, num_output_channels + 1):\n", " if channel in remix_dictionary.keys():\n", " out_channel = ','.join(\n", " [str(i) for i in remix_dictionary[channel]]\n", " )\n", " else:\n", " out_channel = '0'\n", "\n", " effect_args.append(out_channel)\n", "\n", " self.effects.extend(effect_args)\n", " self.effects_log.append('remix')\n", "\n", " return self" ]
[ 0, 0.020833333333333332, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842 ]
79
0.00093
def alphafilter(request, queryset, template): """ Render the template with the filtered queryset """ qs_filter = {} for key in list(request.GET.keys()): if '__istartswith' in key: qs_filter[str(key)] = request.GET[key] break return render_to_response( template, {'objects': queryset.filter(**qs_filter), 'unfiltered_objects': queryset}, context_instance=RequestContext(request) )
[ "def", "alphafilter", "(", "request", ",", "queryset", ",", "template", ")", ":", "qs_filter", "=", "{", "}", "for", "key", "in", "list", "(", "request", ".", "GET", ".", "keys", "(", ")", ")", ":", "if", "'__istartswith'", "in", "key", ":", "qs_filter", "[", "str", "(", "key", ")", "]", "=", "request", ".", "GET", "[", "key", "]", "break", "return", "render_to_response", "(", "template", ",", "{", "'objects'", ":", "queryset", ".", "filter", "(", "*", "*", "qs_filter", ")", ",", "'unfiltered_objects'", ":", "queryset", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")" ]
26.941176
0.00211
[ "def alphafilter(request, queryset, template):\n", " \"\"\"\n", " Render the template with the filtered queryset\n", " \"\"\"\n", "\n", " qs_filter = {}\n", " for key in list(request.GET.keys()):\n", " if '__istartswith' in key:\n", " qs_filter[str(key)] = request.GET[key]\n", " break\n", "\n", " return render_to_response(\n", " template,\n", " {'objects': queryset.filter(**qs_filter),\n", " 'unfiltered_objects': queryset},\n", " context_instance=RequestContext(request)\n", " )" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2 ]
17
0.011765
def _dot_product(self, imgs_to_decode): """ Decoding using the dot product. """ return np.dot(imgs_to_decode.T, self.feature_images).T
[ "def", "_dot_product", "(", "self", ",", "imgs_to_decode", ")", ":", "return", "np", ".", "dot", "(", "imgs_to_decode", ".", "T", ",", "self", ".", "feature_images", ")", ".", "T" ]
38.75
0.012658
[ "def _dot_product(self, imgs_to_decode):\n", " \"\"\" Decoding using the dot product.\n", " \"\"\"\n", " return np.dot(imgs_to_decode.T, self.feature_images).T" ]
[ 0, 0.022727272727272728, 0, 0.016129032258064516 ]
4
0.009714
def p_duration_information_speed(self, p): 'duration : information AT speed' logger.debug('duration = information %s at speed %s', p[1], p[3]) p[0] = p[1].at_speed(p[3])
[ "def", "p_duration_information_speed", "(", "self", ",", "p", ")", ":", "logger", ".", "debug", "(", "'duration = information %s at speed %s'", ",", "p", "[", "1", "]", ",", "p", "[", "3", "]", ")", "p", "[", "0", "]", "=", "p", "[", "1", "]", ".", "at_speed", "(", "p", "[", "3", "]", ")" ]
47.5
0.010363
[ "def p_duration_information_speed(self, p):\n", " 'duration : information AT speed'\n", " logger.debug('duration = information %s at speed %s', p[1], p[3])\n", " p[0] = p[1].at_speed(p[3])" ]
[ 0, 0.023809523809523808, 0, 0.029411764705882353 ]
4
0.013305
def show(self, ax:plt.Axes=None, figsize:tuple=(3,3), title:Optional[str]=None, hide_axis:bool=True, cmap:str=None, y:Any=None, **kwargs): "Show image on `ax` with `title`, using `cmap` if single-channel, overlaid with optional `y`" cmap = ifnone(cmap, defaults.cmap) ax = show_image(self, ax=ax, hide_axis=hide_axis, cmap=cmap, figsize=figsize) if y is not None: y.show(ax=ax, **kwargs) if title is not None: ax.set_title(title)
[ "def", "show", "(", "self", ",", "ax", ":", "plt", ".", "Axes", "=", "None", ",", "figsize", ":", "tuple", "=", "(", "3", ",", "3", ")", ",", "title", ":", "Optional", "[", "str", "]", "=", "None", ",", "hide_axis", ":", "bool", "=", "True", ",", "cmap", ":", "str", "=", "None", ",", "y", ":", "Any", "=", "None", ",", "*", "*", "kwargs", ")", ":", "cmap", "=", "ifnone", "(", "cmap", ",", "defaults", ".", "cmap", ")", "ax", "=", "show_image", "(", "self", ",", "ax", "=", "ax", ",", "hide_axis", "=", "hide_axis", ",", "cmap", "=", "cmap", ",", "figsize", "=", "figsize", ")", "if", "y", "is", "not", "None", ":", "y", ".", "show", "(", "ax", "=", "ax", ",", "*", "*", "kwargs", ")", "if", "title", "is", "not", "None", ":", "ax", ".", "set_title", "(", "title", ")" ]
68.142857
0.055901
[ "def show(self, ax:plt.Axes=None, figsize:tuple=(3,3), title:Optional[str]=None, hide_axis:bool=True,\n", " cmap:str=None, y:Any=None, **kwargs):\n", " \"Show image on `ax` with `title`, using `cmap` if single-channel, overlaid with optional `y`\"\n", " cmap = ifnone(cmap, defaults.cmap)\n", " ax = show_image(self, ax=ax, hide_axis=hide_axis, cmap=cmap, figsize=figsize)\n", " if y is not None: y.show(ax=ax, **kwargs)\n", " if title is not None: ax.set_title(title)" ]
[ 0.13861386138613863, 0.1346153846153846, 0.0196078431372549, 0, 0.011627906976744186, 0.02, 0.04081632653061224 ]
7
0.052183
def remove_dependency(id=None, name=None, dependency_id=None, dependency_name=None): """ Remove a BuildConfiguration from the dependency list of another BuildConfiguration """ data = remove_dependency_raw(id, name, dependency_id, dependency_name) if data: return utils.format_json_list(data)
[ "def", "remove_dependency", "(", "id", "=", "None", ",", "name", "=", "None", ",", "dependency_id", "=", "None", ",", "dependency_name", "=", "None", ")", ":", "data", "=", "remove_dependency_raw", "(", "id", ",", "name", ",", "dependency_id", ",", "dependency_name", ")", "if", "data", ":", "return", "utils", ".", "format_json_list", "(", "data", ")" ]
44.714286
0.009404
[ "def remove_dependency(id=None, name=None, dependency_id=None, dependency_name=None):\n", " \"\"\"\n", " Remove a BuildConfiguration from the dependency list of another BuildConfiguration\n", " \"\"\"\n", " data = remove_dependency_raw(id, name, dependency_id, dependency_name)\n", " if data:\n", " return utils.format_json_list(data)" ]
[ 0.011764705882352941, 0, 0.011494252873563218, 0, 0, 0, 0.023255813953488372 ]
7
0.006645
def create_role_from_templates(role_name=None, role_path=None, project_name=None, description=None): """ Create a new role with initial files from templates. :param role_name: Name of the role :param role_path: Full path to the role :param project_name: Name of the project, or the base path name. :param description: One line description of the role. :return: None """ context = locals() templates_path = os.path.join(conductor_dir, 'templates', 'role') timestamp = datetime.now().strftime('%Y%m%d%H%M%s') logger.debug('Role template location', path=templates_path) for rel_path, templates in [(os.path.relpath(path, templates_path), files) for (path, _, files) in os.walk(templates_path)]: target_dir = os.path.join(role_path, rel_path) dir_util.mkpath(target_dir) for template in templates: template_rel_path = os.path.join(rel_path, template) target_name = template.replace('.j2', '') target_path = os.path.join(target_dir, target_name) if os.path.exists(target_path): backup_path = u'%s_%s' % (target_path, timestamp) logger.debug(u'Found existing file. Backing target to backup', target=target_path, backup=backup_path) os.rename(target_path, backup_path) logger.debug("Rendering template for %s/%s" % (target_dir, template)) jinja_render_to_temp(templates_path, template_rel_path, target_dir, target_name, **context) new_file_name = "main_{}.yml".format(datetime.today().strftime('%y%m%d%H%M%S')) new_tasks_file = os.path.join(role_path, 'tasks', new_file_name) tasks_file = os.path.join(role_path, 'tasks', 'main.yml') if os.path.exists(tasks_file): os.rename(tasks_file, new_tasks_file)
[ "def", "create_role_from_templates", "(", "role_name", "=", "None", ",", "role_path", "=", "None", ",", "project_name", "=", "None", ",", "description", "=", "None", ")", ":", "context", "=", "locals", "(", ")", "templates_path", "=", "os", ".", "path", ".", "join", "(", "conductor_dir", ",", "'templates'", ",", "'role'", ")", "timestamp", "=", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%Y%m%d%H%M%s'", ")", "logger", ".", "debug", "(", "'Role template location'", ",", "path", "=", "templates_path", ")", "for", "rel_path", ",", "templates", "in", "[", "(", "os", ".", "path", ".", "relpath", "(", "path", ",", "templates_path", ")", ",", "files", ")", "for", "(", "path", ",", "_", ",", "files", ")", "in", "os", ".", "walk", "(", "templates_path", ")", "]", ":", "target_dir", "=", "os", ".", "path", ".", "join", "(", "role_path", ",", "rel_path", ")", "dir_util", ".", "mkpath", "(", "target_dir", ")", "for", "template", "in", "templates", ":", "template_rel_path", "=", "os", ".", "path", ".", "join", "(", "rel_path", ",", "template", ")", "target_name", "=", "template", ".", "replace", "(", "'.j2'", ",", "''", ")", "target_path", "=", "os", ".", "path", ".", "join", "(", "target_dir", ",", "target_name", ")", "if", "os", ".", "path", ".", "exists", "(", "target_path", ")", ":", "backup_path", "=", "u'%s_%s'", "%", "(", "target_path", ",", "timestamp", ")", "logger", ".", "debug", "(", "u'Found existing file. Backing target to backup'", ",", "target", "=", "target_path", ",", "backup", "=", "backup_path", ")", "os", ".", "rename", "(", "target_path", ",", "backup_path", ")", "logger", ".", "debug", "(", "\"Rendering template for %s/%s\"", "%", "(", "target_dir", ",", "template", ")", ")", "jinja_render_to_temp", "(", "templates_path", ",", "template_rel_path", ",", "target_dir", ",", "target_name", ",", "*", "*", "context", ")", "new_file_name", "=", "\"main_{}.yml\"", ".", "format", "(", "datetime", ".", "today", "(", ")", ".", "strftime", "(", "'%y%m%d%H%M%S'", ")", ")", "new_tasks_file", "=", "os", ".", "path", ".", "join", "(", "role_path", ",", "'tasks'", ",", "new_file_name", ")", "tasks_file", "=", "os", ".", "path", ".", "join", "(", "role_path", ",", "'tasks'", ",", "'main.yml'", ")", "if", "os", ".", "path", ".", "exists", "(", "tasks_file", ")", ":", "os", ".", "rename", "(", "tasks_file", ",", "new_tasks_file", ")" ]
48.609756
0.002459
[ "def create_role_from_templates(role_name=None, role_path=None,\n", " project_name=None, description=None):\n", " \"\"\"\n", " Create a new role with initial files from templates.\n", " :param role_name: Name of the role\n", " :param role_path: Full path to the role\n", " :param project_name: Name of the project, or the base path name.\n", " :param description: One line description of the role.\n", " :return: None\n", " \"\"\"\n", " context = locals()\n", " templates_path = os.path.join(conductor_dir, 'templates', 'role')\n", " timestamp = datetime.now().strftime('%Y%m%d%H%M%s')\n", "\n", " logger.debug('Role template location', path=templates_path)\n", " for rel_path, templates in [(os.path.relpath(path, templates_path), files)\n", " for (path, _, files) in os.walk(templates_path)]:\n", " target_dir = os.path.join(role_path, rel_path)\n", " dir_util.mkpath(target_dir)\n", " for template in templates:\n", " template_rel_path = os.path.join(rel_path, template)\n", " target_name = template.replace('.j2', '')\n", " target_path = os.path.join(target_dir, target_name)\n", " if os.path.exists(target_path):\n", " backup_path = u'%s_%s' % (target_path, timestamp)\n", " logger.debug(u'Found existing file. Backing target to backup',\n", " target=target_path, backup=backup_path)\n", " os.rename(target_path, backup_path)\n", " logger.debug(\"Rendering template for %s/%s\" % (target_dir, template))\n", " jinja_render_to_temp(templates_path,\n", " template_rel_path,\n", " target_dir,\n", " target_name,\n", " **context)\n", "\n", " new_file_name = \"main_{}.yml\".format(datetime.today().strftime('%y%m%d%H%M%S'))\n", " new_tasks_file = os.path.join(role_path, 'tasks', new_file_name)\n", " tasks_file = os.path.join(role_path, 'tasks', 'main.yml')\n", "\n", " if os.path.exists(tasks_file):\n", " os.rename(tasks_file, new_tasks_file)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016666666666666666, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0, 0.022222222222222223 ]
41
0.001834
def find_file(self, path, tgt_env): ''' Find the specified file in the specified environment ''' tree = self.get_tree(tgt_env) if not tree: # Branch/tag/SHA not found in repo return None, None, None blob = None mode = None depth = 0 while True: depth += 1 if depth > SYMLINK_RECURSE_DEPTH: blob = None break try: entry = tree[path] mode = entry.filemode if stat.S_ISLNK(mode): # Path is a symlink. The blob data corresponding to this # path's object ID will be the target of the symlink. Follow # the symlink and set path to the location indicated # in the blob data. link_tgt = self.repo[entry.oid].data path = salt.utils.path.join( os.path.dirname(path), link_tgt, use_posixpath=True) else: blob = self.repo[entry.oid] if isinstance(blob, pygit2.Tree): # Path is a directory, not a file. blob = None break except KeyError: blob = None break if isinstance(blob, pygit2.Blob): return blob, blob.hex, mode return None, None, None
[ "def", "find_file", "(", "self", ",", "path", ",", "tgt_env", ")", ":", "tree", "=", "self", ".", "get_tree", "(", "tgt_env", ")", "if", "not", "tree", ":", "# Branch/tag/SHA not found in repo", "return", "None", ",", "None", ",", "None", "blob", "=", "None", "mode", "=", "None", "depth", "=", "0", "while", "True", ":", "depth", "+=", "1", "if", "depth", ">", "SYMLINK_RECURSE_DEPTH", ":", "blob", "=", "None", "break", "try", ":", "entry", "=", "tree", "[", "path", "]", "mode", "=", "entry", ".", "filemode", "if", "stat", ".", "S_ISLNK", "(", "mode", ")", ":", "# Path is a symlink. The blob data corresponding to this", "# path's object ID will be the target of the symlink. Follow", "# the symlink and set path to the location indicated", "# in the blob data.", "link_tgt", "=", "self", ".", "repo", "[", "entry", ".", "oid", "]", ".", "data", "path", "=", "salt", ".", "utils", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ",", "link_tgt", ",", "use_posixpath", "=", "True", ")", "else", ":", "blob", "=", "self", ".", "repo", "[", "entry", ".", "oid", "]", "if", "isinstance", "(", "blob", ",", "pygit2", ".", "Tree", ")", ":", "# Path is a directory, not a file.", "blob", "=", "None", "break", "except", "KeyError", ":", "blob", "=", "None", "break", "if", "isinstance", "(", "blob", ",", "pygit2", ".", "Blob", ")", ":", "return", "blob", ",", "blob", ".", "hex", ",", "mode", "return", "None", ",", "None", ",", "None" ]
36.974359
0.002027
[ "def find_file(self, path, tgt_env):\n", " '''\n", " Find the specified file in the specified environment\n", " '''\n", " tree = self.get_tree(tgt_env)\n", " if not tree:\n", " # Branch/tag/SHA not found in repo\n", " return None, None, None\n", " blob = None\n", " mode = None\n", " depth = 0\n", " while True:\n", " depth += 1\n", " if depth > SYMLINK_RECURSE_DEPTH:\n", " blob = None\n", " break\n", " try:\n", " entry = tree[path]\n", " mode = entry.filemode\n", " if stat.S_ISLNK(mode):\n", " # Path is a symlink. The blob data corresponding to this\n", " # path's object ID will be the target of the symlink. Follow\n", " # the symlink and set path to the location indicated\n", " # in the blob data.\n", " link_tgt = self.repo[entry.oid].data\n", " path = salt.utils.path.join(\n", " os.path.dirname(path), link_tgt, use_posixpath=True)\n", " else:\n", " blob = self.repo[entry.oid]\n", " if isinstance(blob, pygit2.Tree):\n", " # Path is a directory, not a file.\n", " blob = None\n", " break\n", " except KeyError:\n", " blob = None\n", " break\n", " if isinstance(blob, pygit2.Blob):\n", " return blob, blob.hex, mode\n", " return None, None, None" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903 ]
39
0.00328
def state(name, path=None): ''' Returns the state of a container. path path to the container parent directory (default: /var/lib/lxc) .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' lxc.state name ''' # Don't use _ensure_exists() here, it will mess with _change_state() cachekey = 'lxc.state.{0}{1}'.format(name, path) try: return __context__[cachekey] except KeyError: if not exists(name, path=path): __context__[cachekey] = None else: cmd = 'lxc-info' if path: cmd += ' -P {0}'.format(pipes.quote(path)) cmd += ' -n {0}'.format(name) ret = __salt__['cmd.run_all'](cmd, python_shell=False) if ret['retcode'] != 0: _clear_context() raise CommandExecutionError( 'Unable to get state of container \'{0}\''.format(name) ) c_infos = ret['stdout'].splitlines() c_state = None for c_info in c_infos: stat = c_info.split(':') if stat[0].lower() == 'state': c_state = stat[1].strip().lower() break __context__[cachekey] = c_state return __context__[cachekey]
[ "def", "state", "(", "name", ",", "path", "=", "None", ")", ":", "# Don't use _ensure_exists() here, it will mess with _change_state()", "cachekey", "=", "'lxc.state.{0}{1}'", ".", "format", "(", "name", ",", "path", ")", "try", ":", "return", "__context__", "[", "cachekey", "]", "except", "KeyError", ":", "if", "not", "exists", "(", "name", ",", "path", "=", "path", ")", ":", "__context__", "[", "cachekey", "]", "=", "None", "else", ":", "cmd", "=", "'lxc-info'", "if", "path", ":", "cmd", "+=", "' -P {0}'", ".", "format", "(", "pipes", ".", "quote", "(", "path", ")", ")", "cmd", "+=", "' -n {0}'", ".", "format", "(", "name", ")", "ret", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "if", "ret", "[", "'retcode'", "]", "!=", "0", ":", "_clear_context", "(", ")", "raise", "CommandExecutionError", "(", "'Unable to get state of container \\'{0}\\''", ".", "format", "(", "name", ")", ")", "c_infos", "=", "ret", "[", "'stdout'", "]", ".", "splitlines", "(", ")", "c_state", "=", "None", "for", "c_info", "in", "c_infos", ":", "stat", "=", "c_info", ".", "split", "(", "':'", ")", "if", "stat", "[", "0", "]", ".", "lower", "(", ")", "==", "'state'", ":", "c_state", "=", "stat", "[", "1", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "break", "__context__", "[", "cachekey", "]", "=", "c_state", "return", "__context__", "[", "cachekey", "]" ]
30.325581
0.000743
[ "def state(name, path=None):\n", " '''\n", " Returns the state of a container.\n", "\n", " path\n", " path to the container parent directory (default: /var/lib/lxc)\n", "\n", " .. versionadded:: 2015.8.0\n", "\n", " CLI Example:\n", "\n", " .. code-block:: bash\n", "\n", " salt '*' lxc.state name\n", " '''\n", " # Don't use _ensure_exists() here, it will mess with _change_state()\n", "\n", " cachekey = 'lxc.state.{0}{1}'.format(name, path)\n", " try:\n", " return __context__[cachekey]\n", " except KeyError:\n", " if not exists(name, path=path):\n", " __context__[cachekey] = None\n", " else:\n", " cmd = 'lxc-info'\n", " if path:\n", " cmd += ' -P {0}'.format(pipes.quote(path))\n", " cmd += ' -n {0}'.format(name)\n", " ret = __salt__['cmd.run_all'](cmd, python_shell=False)\n", " if ret['retcode'] != 0:\n", " _clear_context()\n", " raise CommandExecutionError(\n", " 'Unable to get state of container \\'{0}\\''.format(name)\n", " )\n", " c_infos = ret['stdout'].splitlines()\n", " c_state = None\n", " for c_info in c_infos:\n", " stat = c_info.split(':')\n", " if stat[0].lower() == 'state':\n", " c_state = stat[1].strip().lower()\n", " break\n", " __context__[cachekey] = c_state\n", " return __context__[cachekey]" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03125 ]
43
0.000727
def func_interpolate_na(interpolator, x, y, **kwargs): '''helper function to apply interpolation along 1 dimension''' # it would be nice if this wasn't necessary, works around: # "ValueError: assignment destination is read-only" in assignment below out = y.copy() nans = pd.isnull(y) nonans = ~nans # fast track for no-nans and all-nans cases n_nans = nans.sum() if n_nans == 0 or n_nans == len(y): return y f = interpolator(x[nonans], y[nonans], **kwargs) out[nans] = f(x[nans]) return out
[ "def", "func_interpolate_na", "(", "interpolator", ",", "x", ",", "y", ",", "*", "*", "kwargs", ")", ":", "# it would be nice if this wasn't necessary, works around:", "# \"ValueError: assignment destination is read-only\" in assignment below", "out", "=", "y", ".", "copy", "(", ")", "nans", "=", "pd", ".", "isnull", "(", "y", ")", "nonans", "=", "~", "nans", "# fast track for no-nans and all-nans cases", "n_nans", "=", "nans", ".", "sum", "(", ")", "if", "n_nans", "==", "0", "or", "n_nans", "==", "len", "(", "y", ")", ":", "return", "y", "f", "=", "interpolator", "(", "x", "[", "nonans", "]", ",", "y", "[", "nonans", "]", ",", "*", "*", "kwargs", ")", "out", "[", "nans", "]", "=", "f", "(", "x", "[", "nans", "]", ")", "return", "out" ]
31.352941
0.001821
[ "def func_interpolate_na(interpolator, x, y, **kwargs):\n", " '''helper function to apply interpolation along 1 dimension'''\n", " # it would be nice if this wasn't necessary, works around:\n", " # \"ValueError: assignment destination is read-only\" in assignment below\n", " out = y.copy()\n", "\n", " nans = pd.isnull(y)\n", " nonans = ~nans\n", "\n", " # fast track for no-nans and all-nans cases\n", " n_nans = nans.sum()\n", " if n_nans == 0 or n_nans == len(y):\n", " return y\n", "\n", " f = interpolator(x[nonans], y[nonans], **kwargs)\n", " out[nans] = f(x[nans])\n", " return out" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142 ]
17
0.004202
def logparse(*args, **kwargs): """ Parse access log on the terminal application. If list of files are given, parse each file. Otherwise, parse standard input. :param args: supporting functions after processed raw log line :type: list of callables :rtype: tuple of (statistics, key/value report) """ from clitool.cli import clistream from clitool.processor import SimpleDictReporter lst = [parse] + args reporter = SimpleDictReporter() stats = clistream(reporter, *lst, **kwargs) return stats, reporter.report()
[ "def", "logparse", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", "clitool", ".", "cli", "import", "clistream", "from", "clitool", ".", "processor", "import", "SimpleDictReporter", "lst", "=", "[", "parse", "]", "+", "args", "reporter", "=", "SimpleDictReporter", "(", ")", "stats", "=", "clistream", "(", "reporter", ",", "*", "lst", ",", "*", "*", "kwargs", ")", "return", "stats", ",", "reporter", ".", "report", "(", ")" ]
34.3125
0.001773
[ "def logparse(*args, **kwargs):\n", " \"\"\" Parse access log on the terminal application.\n", " If list of files are given, parse each file. Otherwise, parse standard\n", " input.\n", "\n", " :param args: supporting functions after processed raw log line\n", " :type: list of callables\n", " :rtype: tuple of (statistics, key/value report)\n", " \"\"\"\n", " from clitool.cli import clistream\n", " from clitool.processor import SimpleDictReporter\n", "\n", " lst = [parse] + args\n", " reporter = SimpleDictReporter()\n", " stats = clistream(reporter, *lst, **kwargs)\n", " return stats, reporter.report()" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857 ]
16
0.001786
def call_historic(self, result_callback=None, kwargs=None, proc=None): """Call the hook with given ``kwargs`` for all registered plugins and for all plugins which will be registered afterwards. If ``result_callback`` is not ``None`` it will be called for for each non-None result obtained from a hook implementation. .. note:: The ``proc`` argument is now deprecated. """ if proc is not None: warnings.warn( "Support for `proc` argument is now deprecated and will be" "removed in an upcoming release.", DeprecationWarning, ) result_callback = proc self._call_history.append((kwargs or {}, result_callback)) # historizing hooks don't return results res = self._hookexec(self, self.get_hookimpls(), kwargs) if result_callback is None: return # XXX: remember firstresult isn't compat with historic for x in res or []: result_callback(x)
[ "def", "call_historic", "(", "self", ",", "result_callback", "=", "None", ",", "kwargs", "=", "None", ",", "proc", "=", "None", ")", ":", "if", "proc", "is", "not", "None", ":", "warnings", ".", "warn", "(", "\"Support for `proc` argument is now deprecated and will be\"", "\"removed in an upcoming release.\"", ",", "DeprecationWarning", ",", ")", "result_callback", "=", "proc", "self", ".", "_call_history", ".", "append", "(", "(", "kwargs", "or", "{", "}", ",", "result_callback", ")", ")", "# historizing hooks don't return results", "res", "=", "self", ".", "_hookexec", "(", "self", ",", "self", ".", "get_hookimpls", "(", ")", ",", "kwargs", ")", "if", "result_callback", "is", "None", ":", "return", "# XXX: remember firstresult isn't compat with historic", "for", "x", "in", "res", "or", "[", "]", ":", "result_callback", "(", "x", ")" ]
39.807692
0.001887
[ "def call_historic(self, result_callback=None, kwargs=None, proc=None):\n", " \"\"\"Call the hook with given ``kwargs`` for all registered plugins and\n", " for all plugins which will be registered afterwards.\n", "\n", " If ``result_callback`` is not ``None`` it will be called for for each\n", " non-None result obtained from a hook implementation.\n", "\n", " .. note::\n", " The ``proc`` argument is now deprecated.\n", " \"\"\"\n", " if proc is not None:\n", " warnings.warn(\n", " \"Support for `proc` argument is now deprecated and will be\"\n", " \"removed in an upcoming release.\",\n", " DeprecationWarning,\n", " )\n", " result_callback = proc\n", "\n", " self._call_history.append((kwargs or {}, result_callback))\n", " # historizing hooks don't return results\n", " res = self._hookexec(self, self.get_hookimpls(), kwargs)\n", " if result_callback is None:\n", " return\n", " # XXX: remember firstresult isn't compat with historic\n", " for x in res or []:\n", " result_callback(x)" ]
[ 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03333333333333333 ]
26
0.001775
def delete(self, key): """ Remove a key from the cache. """ if key in self.cache: self.cache.pop(key, None)
[ "def", "delete", "(", "self", ",", "key", ")", ":", "if", "key", "in", "self", ".", "cache", ":", "self", ".", "cache", ".", "pop", "(", "key", ",", "None", ")" ]
24.333333
0.013245
[ "def delete(self, key):\n", " \"\"\"\n", " Remove a key from the cache.\n", " \"\"\"\n", " if key in self.cache:\n", " self.cache.pop(key, None)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0.02702702702702703 ]
6
0.018393
def guest_reboot(self, userid): """Reboot a guest vm.""" LOG.info("Begin to reboot vm %s", userid) self._smtclient.guest_reboot(userid) LOG.info("Complete reboot vm %s", userid)
[ "def", "guest_reboot", "(", "self", ",", "userid", ")", ":", "LOG", ".", "info", "(", "\"Begin to reboot vm %s\"", ",", "userid", ")", "self", ".", "_smtclient", ".", "guest_reboot", "(", "userid", ")", "LOG", ".", "info", "(", "\"Complete reboot vm %s\"", ",", "userid", ")" ]
41
0.009569
[ "def guest_reboot(self, userid):\n", " \"\"\"Reboot a guest vm.\"\"\"\n", " LOG.info(\"Begin to reboot vm %s\", userid)\n", " self._smtclient.guest_reboot(userid)\n", " LOG.info(\"Complete reboot vm %s\", userid)" ]
[ 0, 0.030303030303030304, 0, 0, 0.02040816326530612 ]
5
0.010142
def partition(iterable, chunk_size, pad_none=False): """adapted from Toolz. Breaks an iterable into n iterables up to the certain chunk size, padding with Nones if availble. Example: >>> from searchtweets.utils import partition >>> iter_ = range(10) >>> list(partition(iter_, 3)) [(0, 1, 2), (3, 4, 5), (6, 7, 8)] >>> list(partition(iter_, 3, pad_none=True)) [(0, 1, 2), (3, 4, 5), (6, 7, 8), (9, None, None)] """ args = [iter(iterable)] * chunk_size if not pad_none: return zip(*args) else: return it.zip_longest(*args)
[ "def", "partition", "(", "iterable", ",", "chunk_size", ",", "pad_none", "=", "False", ")", ":", "args", "=", "[", "iter", "(", "iterable", ")", "]", "*", "chunk_size", "if", "not", "pad_none", ":", "return", "zip", "(", "*", "args", ")", "else", ":", "return", "it", ".", "zip_longest", "(", "*", "args", ")" ]
35.117647
0.001631
[ "def partition(iterable, chunk_size, pad_none=False):\n", " \"\"\"adapted from Toolz. Breaks an iterable into n iterables up to the\n", " certain chunk size, padding with Nones if availble.\n", "\n", " Example:\n", " >>> from searchtweets.utils import partition\n", " >>> iter_ = range(10)\n", " >>> list(partition(iter_, 3))\n", " [(0, 1, 2), (3, 4, 5), (6, 7, 8)]\n", " >>> list(partition(iter_, 3, pad_none=True))\n", " [(0, 1, 2), (3, 4, 5), (6, 7, 8), (9, None, None)]\n", " \"\"\"\n", " args = [iter(iterable)] * chunk_size\n", " if not pad_none:\n", " return zip(*args)\n", " else:\n", " return it.zip_longest(*args)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.027777777777777776 ]
17
0.001634
def interp3d_core(x,y,t,Z,xout,yout,tout,**kwargs): """ INTERP3D : Interpolate values from a 3D matrix along a 1D trajectory @param x: 1st dimension vector of size NX @param y: 2nd dimension vector of size NY @param t: 3rd dimension vector of size NT @author: Renaud DUSSURGET, LER/PAC, Ifremer La Seyne """ #this below can take a very LOOOOOONG time gx = np.reshape(np.repeat(x,y.size*t.size),(x.size,y.size,t.size)) gy = np.reshape(np.repeat(y,x.size*t.size),(y.size,x.size,t.size)).transpose((1,0,2)) gt = np.reshape(np.repeat(t,x.size*y.size),(t.size,x.size,y.size)).transpose((1,2,0)) gz = Z.flatten() points = zip(*(gx.flatten(),gy.flatten(),gt.flatten())) xi = zip(*(xout,yout,tout)) Zout = scipy.interpolate.griddata(points, gz, xi, **kwargs) return Zout
[ "def", "interp3d_core", "(", "x", ",", "y", ",", "t", ",", "Z", ",", "xout", ",", "yout", ",", "tout", ",", "*", "*", "kwargs", ")", ":", "#this below can take a very LOOOOOONG time\r", "gx", "=", "np", ".", "reshape", "(", "np", ".", "repeat", "(", "x", ",", "y", ".", "size", "*", "t", ".", "size", ")", ",", "(", "x", ".", "size", ",", "y", ".", "size", ",", "t", ".", "size", ")", ")", "gy", "=", "np", ".", "reshape", "(", "np", ".", "repeat", "(", "y", ",", "x", ".", "size", "*", "t", ".", "size", ")", ",", "(", "y", ".", "size", ",", "x", ".", "size", ",", "t", ".", "size", ")", ")", ".", "transpose", "(", "(", "1", ",", "0", ",", "2", ")", ")", "gt", "=", "np", ".", "reshape", "(", "np", ".", "repeat", "(", "t", ",", "x", ".", "size", "*", "y", ".", "size", ")", ",", "(", "t", ".", "size", ",", "x", ".", "size", ",", "y", ".", "size", ")", ")", ".", "transpose", "(", "(", "1", ",", "2", ",", "0", ")", ")", "gz", "=", "Z", ".", "flatten", "(", ")", "points", "=", "zip", "(", "*", "(", "gx", ".", "flatten", "(", ")", ",", "gy", ".", "flatten", "(", ")", ",", "gt", ".", "flatten", "(", ")", ")", ")", "xi", "=", "zip", "(", "*", "(", "xout", ",", "yout", ",", "tout", ")", ")", "Zout", "=", "scipy", ".", "interpolate", ".", "griddata", "(", "points", ",", "gz", ",", "xi", ",", "*", "*", "kwargs", ")", "return", "Zout" ]
37.304348
0.043182
[ "def interp3d_core(x,y,t,Z,xout,yout,tout,**kwargs):\r\n", " \"\"\" \r\n", " INTERP3D : Interpolate values from a 3D matrix along a 1D trajectory\r\n", " \r\n", " @param x: 1st dimension vector of size NX\r\n", " @param y: 2nd dimension vector of size NY\r\n", " @param t: 3rd dimension vector of size NT\r\n", " \r\n", " @author: Renaud DUSSURGET, LER/PAC, Ifremer La Seyne\r\n", " \"\"\"\r\n", " \r\n", " #this below can take a very LOOOOOONG time\r\n", " gx = np.reshape(np.repeat(x,y.size*t.size),(x.size,y.size,t.size))\r\n", " gy = np.reshape(np.repeat(y,x.size*t.size),(y.size,x.size,t.size)).transpose((1,0,2))\r\n", " gt = np.reshape(np.repeat(t,x.size*y.size),(t.size,x.size,y.size)).transpose((1,2,0))\r\n", "\r\n", " gz = Z.flatten()\r\n", " \r\n", " points = zip(*(gx.flatten(),gy.flatten(),gt.flatten())) \r\n", " xi = zip(*(xout,yout,tout))\r\n", " \r\n", " Zout = scipy.interpolate.griddata(points, gz, xi, **kwargs)\r\n", " return Zout" ]
[ 0.1320754716981132, 0.07692307692307693, 0, 0.16666666666666666, 0, 0, 0, 0.16666666666666666, 0, 0, 0.16666666666666666, 0.020833333333333332, 0.05555555555555555, 0.07692307692307693, 0.07692307692307693, 0, 0, 0.14285714285714285, 0.04838709677419355, 0.06060606060606061, 0.16666666666666666, 0, 0.06666666666666667 ]
23
0.061931
def Cache(fn): """ Function cache decorator """ def fnCache(*args, **kwargs): """ Cache function """ key = (args and tuple(args) or None, kwargs and frozenset(kwargs.items()) or None) if key not in fn.__cached__: fn.__cached__[key] = cache = fn(*args, **kwargs) else: cache = fn.__cached__[key] return cache def ResetCache(): """ Reset cache """ fn.__cached__ = {} setattr(fn, "__cached__", {}) setattr(fn, "__resetcache__", ResetCache) fnCache.__name__ = fn.__name__ fnCache.__doc__ = fn.__doc__ fnCache.__dict__.update(fn.__dict__) return fnCache
[ "def", "Cache", "(", "fn", ")", ":", "def", "fnCache", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\" Cache function \"\"\"", "key", "=", "(", "args", "and", "tuple", "(", "args", ")", "or", "None", ",", "kwargs", "and", "frozenset", "(", "kwargs", ".", "items", "(", ")", ")", "or", "None", ")", "if", "key", "not", "in", "fn", ".", "__cached__", ":", "fn", ".", "__cached__", "[", "key", "]", "=", "cache", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "cache", "=", "fn", ".", "__cached__", "[", "key", "]", "return", "cache", "def", "ResetCache", "(", ")", ":", "\"\"\" Reset cache \"\"\"", "fn", ".", "__cached__", "=", "{", "}", "setattr", "(", "fn", ",", "\"__cached__\"", ",", "{", "}", ")", "setattr", "(", "fn", ",", "\"__resetcache__\"", ",", "ResetCache", ")", "fnCache", ".", "__name__", "=", "fn", ".", "__name__", "fnCache", ".", "__doc__", "=", "fn", ".", "__doc__", "fnCache", ".", "__dict__", ".", "update", "(", "fn", ".", "__dict__", ")", "return", "fnCache" ]
28.590909
0.029231
[ "def Cache(fn):\n", " \"\"\" Function cache decorator \"\"\"\n", " def fnCache(*args, **kwargs):\n", " \"\"\" Cache function \"\"\"\n", " key = (args and tuple(args) or None,\n", " kwargs and frozenset(kwargs.items()) or None)\n", " if key not in fn.__cached__:\n", " fn.__cached__[key] = cache = fn(*args, **kwargs)\n", " else:\n", " cache = fn.__cached__[key]\n", " return cache\n", "\n", " def ResetCache():\n", " \"\"\" Reset cache \"\"\"\n", " fn.__cached__ = {}\n", "\n", " setattr(fn, \"__cached__\", {})\n", " setattr(fn, \"__resetcache__\", ResetCache)\n", " fnCache.__name__ = fn.__name__\n", " fnCache.__doc__ = fn.__doc__\n", " fnCache.__dict__.update(fn.__dict__)\n", " return fnCache" ]
[ 0, 0.027777777777777776, 0.030303030303030304, 0.034482758620689655, 0.023255813953488372, 0, 0.02857142857142857, 0.017241379310344827, 0.08333333333333333, 0.027777777777777776, 0.05263157894736842, 0, 0.047619047619047616, 0.038461538461538464, 0.04, 0, 0.030303030303030304, 0.022222222222222223, 0.029411764705882353, 0.03125, 0.025, 0.11764705882352941 ]
22
0.03215
def parse_int_list(string): """ Parses a string of numbers and ranges into a list of integers. Ranges are separated by dashes and inclusive of both the start and end number. Example: parse_int_list("8 9 10,11-13") == [8,9,10,11,12,13] """ integers = [] for comma_part in string.split(","): for substring in comma_part.split(" "): if len(substring) == 0: continue if "-" in substring: left, right = substring.split("-") left_val = int(left.strip()) right_val = int(right.strip()) integers.extend(range(left_val, right_val + 1)) else: integers.append(int(substring.strip())) return integers
[ "def", "parse_int_list", "(", "string", ")", ":", "integers", "=", "[", "]", "for", "comma_part", "in", "string", ".", "split", "(", "\",\"", ")", ":", "for", "substring", "in", "comma_part", ".", "split", "(", "\" \"", ")", ":", "if", "len", "(", "substring", ")", "==", "0", ":", "continue", "if", "\"-\"", "in", "substring", ":", "left", ",", "right", "=", "substring", ".", "split", "(", "\"-\"", ")", "left_val", "=", "int", "(", "left", ".", "strip", "(", ")", ")", "right_val", "=", "int", "(", "right", ".", "strip", "(", ")", ")", "integers", ".", "extend", "(", "range", "(", "left_val", ",", "right_val", "+", "1", ")", ")", "else", ":", "integers", ".", "append", "(", "int", "(", "substring", ".", "strip", "(", ")", ")", ")", "return", "integers" ]
35.666667
0.0013
[ "def parse_int_list(string):\n", " \"\"\"\n", " Parses a string of numbers and ranges into a list of integers. Ranges\n", " are separated by dashes and inclusive of both the start and end number.\n", "\n", " Example:\n", " parse_int_list(\"8 9 10,11-13\") == [8,9,10,11,12,13]\n", " \"\"\"\n", " integers = []\n", " for comma_part in string.split(\",\"):\n", " for substring in comma_part.split(\" \"):\n", " if len(substring) == 0:\n", " continue\n", " if \"-\" in substring:\n", " left, right = substring.split(\"-\")\n", " left_val = int(left.strip())\n", " right_val = int(right.strip())\n", " integers.extend(range(left_val, right_val + 1))\n", " else:\n", " integers.append(int(substring.strip()))\n", " return integers" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842 ]
21
0.002506
def hailstone(n): """Return the 'hailstone sequence' from n to 1 n: The starting point of the hailstone sequence """ sequence = [n] while n > 1: if n%2 != 0: n = 3*n + 1 else: n = int(n/2) sequence.append(n) return sequence
[ "def", "hailstone", "(", "n", ")", ":", "sequence", "=", "[", "n", "]", "while", "n", ">", "1", ":", "if", "n", "%", "2", "!=", "0", ":", "n", "=", "3", "*", "n", "+", "1", "else", ":", "n", "=", "int", "(", "n", "/", "2", ")", "sequence", ".", "append", "(", "n", ")", "return", "sequence" ]
19.384615
0.034091
[ "def hailstone(n):\n", " \"\"\"Return the 'hailstone sequence' from n to 1\n", " n: The starting point of the hailstone sequence\n", " \"\"\"\n", "\n", " sequence = [n]\n", " while n > 1:\n", " if n%2 != 0:\n", " n = 3*n + 1\n", " else: \n", " n = int(n/2)\n", " sequence.append(n)\n", " return sequence" ]
[ 0, 0.02040816326530612, 0, 0, 0, 0.058823529411764705, 0.06666666666666667, 0.058823529411764705, 0.05555555555555555, 0.09090909090909091, 0.05263157894736842, 0, 0.11764705882352941 ]
13
0.040113
def create_role_config_group(self, name, display_name, role_type): """ Create a role config group. @param name: The name of the new group. @param display_name: The display name of the new group. @param role_type: The role type of the new group. @return: New ApiRoleConfigGroup object. @since: API v3 """ return role_config_groups.create_role_config_group( self._get_resource_root(), self.name, name, display_name, role_type, self._get_cluster_name())
[ "def", "create_role_config_group", "(", "self", ",", "name", ",", "display_name", ",", "role_type", ")", ":", "return", "role_config_groups", ".", "create_role_config_group", "(", "self", ".", "_get_resource_root", "(", ")", ",", "self", ".", "name", ",", "name", ",", "display_name", ",", "role_type", ",", "self", ".", "_get_cluster_name", "(", ")", ")" ]
37.769231
0.001988
[ "def create_role_config_group(self, name, display_name, role_type):\n", " \"\"\"\n", " Create a role config group.\n", "\n", " @param name: The name of the new group.\n", " @param display_name: The display name of the new group.\n", " @param role_type: The role type of the new group.\n", " @return: New ApiRoleConfigGroup object.\n", " @since: API v3\n", " \"\"\"\n", " return role_config_groups.create_role_config_group(\n", " self._get_resource_root(), self.name, name, display_name, role_type,\n", " self._get_cluster_name())" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.030303030303030304 ]
13
0.002331
def initialTrendSmoothingFactors(self, timeSeries): """ Calculate the initial Trend smoothing Factor b0. Explanation: http://en.wikipedia.org/wiki/Exponential_smoothing#Triple_exponential_smoothing :return: Returns the initial trend smoothing factor b0 """ result = 0.0 seasonLength = self.get_parameter("seasonLength") k = min(len(timeSeries) - seasonLength, seasonLength) #In case of only one full season, use average trend of the months that we have twice for i in xrange(0, k): result += (timeSeries[seasonLength + i][1] - timeSeries[i][1]) / seasonLength return result / k
[ "def", "initialTrendSmoothingFactors", "(", "self", ",", "timeSeries", ")", ":", "result", "=", "0.0", "seasonLength", "=", "self", ".", "get_parameter", "(", "\"seasonLength\"", ")", "k", "=", "min", "(", "len", "(", "timeSeries", ")", "-", "seasonLength", ",", "seasonLength", ")", "#In case of only one full season, use average trend of the months that we have twice", "for", "i", "in", "xrange", "(", "0", ",", "k", ")", ":", "result", "+=", "(", "timeSeries", "[", "seasonLength", "+", "i", "]", "[", "1", "]", "-", "timeSeries", "[", "i", "]", "[", "1", "]", ")", "/", "seasonLength", "return", "result", "/", "k" ]
44.266667
0.00885
[ "def initialTrendSmoothingFactors(self, timeSeries):\n", " \"\"\" Calculate the initial Trend smoothing Factor b0.\n", "\n", " Explanation:\n", " http://en.wikipedia.org/wiki/Exponential_smoothing#Triple_exponential_smoothing\n", "\n", " :return: Returns the initial trend smoothing factor b0\n", " \"\"\"\n", "\n", " result = 0.0\n", " seasonLength = self.get_parameter(\"seasonLength\")\n", " k = min(len(timeSeries) - seasonLength, seasonLength) #In case of only one full season, use average trend of the months that we have twice\n", " for i in xrange(0, k):\n", " result += (timeSeries[seasonLength + i][1] - timeSeries[i][1]) / seasonLength\n", " return result / k" ]
[ 0, 0.01639344262295082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02040816326530612, 0, 0.011111111111111112, 0.04 ]
15
0.005861
def FixedUnPooling(x, shape, unpool_mat=None, data_format='channels_last'): """ Unpool the input with a fixed matrix to perform kronecker product with. Args: x (tf.Tensor): a 4D image tensor shape: int or (h, w) tuple unpool_mat: a tf.Tensor or np.ndarray 2D matrix with size=shape. If is None, will use a matrix with 1 at top-left corner. Returns: tf.Tensor: a 4D image tensor. """ data_format = get_data_format(data_format, keras_mode=False) shape = shape2d(shape) output_shape = StaticDynamicShape(x) output_shape.apply(1 if data_format == 'NHWC' else 2, lambda x: x * shape[0]) output_shape.apply(2 if data_format == 'NHWC' else 3, lambda x: x * shape[1]) # a faster implementation for this special case if shape[0] == 2 and shape[1] == 2 and unpool_mat is None and data_format == 'NHWC': ret = UnPooling2x2ZeroFilled(x) else: # check unpool_mat if unpool_mat is None: mat = np.zeros(shape, dtype='float32') mat[0][0] = 1 unpool_mat = tf.constant(mat, name='unpool_mat') elif isinstance(unpool_mat, np.ndarray): unpool_mat = tf.constant(unpool_mat, name='unpool_mat') assert unpool_mat.shape.as_list() == list(shape) if data_format == 'NHWC': x = tf.transpose(x, [0, 3, 1, 2]) # perform a tensor-matrix kronecker product x = tf.expand_dims(x, -1) # bchwx1 mat = tf.expand_dims(unpool_mat, 0) # 1xshxsw ret = tf.tensordot(x, mat, axes=1) # bxcxhxwxshxsw if data_format == 'NHWC': ret = tf.transpose(ret, [0, 2, 4, 3, 5, 1]) else: ret = tf.transpose(ret, [0, 1, 2, 4, 3, 5]) shape3_dyn = [output_shape.get_dynamic(k) for k in range(1, 4)] ret = tf.reshape(ret, tf.stack([-1] + shape3_dyn)) ret.set_shape(tf.TensorShape(output_shape.get_static())) return ret
[ "def", "FixedUnPooling", "(", "x", ",", "shape", ",", "unpool_mat", "=", "None", ",", "data_format", "=", "'channels_last'", ")", ":", "data_format", "=", "get_data_format", "(", "data_format", ",", "keras_mode", "=", "False", ")", "shape", "=", "shape2d", "(", "shape", ")", "output_shape", "=", "StaticDynamicShape", "(", "x", ")", "output_shape", ".", "apply", "(", "1", "if", "data_format", "==", "'NHWC'", "else", "2", ",", "lambda", "x", ":", "x", "*", "shape", "[", "0", "]", ")", "output_shape", ".", "apply", "(", "2", "if", "data_format", "==", "'NHWC'", "else", "3", ",", "lambda", "x", ":", "x", "*", "shape", "[", "1", "]", ")", "# a faster implementation for this special case", "if", "shape", "[", "0", "]", "==", "2", "and", "shape", "[", "1", "]", "==", "2", "and", "unpool_mat", "is", "None", "and", "data_format", "==", "'NHWC'", ":", "ret", "=", "UnPooling2x2ZeroFilled", "(", "x", ")", "else", ":", "# check unpool_mat", "if", "unpool_mat", "is", "None", ":", "mat", "=", "np", ".", "zeros", "(", "shape", ",", "dtype", "=", "'float32'", ")", "mat", "[", "0", "]", "[", "0", "]", "=", "1", "unpool_mat", "=", "tf", ".", "constant", "(", "mat", ",", "name", "=", "'unpool_mat'", ")", "elif", "isinstance", "(", "unpool_mat", ",", "np", ".", "ndarray", ")", ":", "unpool_mat", "=", "tf", ".", "constant", "(", "unpool_mat", ",", "name", "=", "'unpool_mat'", ")", "assert", "unpool_mat", ".", "shape", ".", "as_list", "(", ")", "==", "list", "(", "shape", ")", "if", "data_format", "==", "'NHWC'", ":", "x", "=", "tf", ".", "transpose", "(", "x", ",", "[", "0", ",", "3", ",", "1", ",", "2", "]", ")", "# perform a tensor-matrix kronecker product", "x", "=", "tf", ".", "expand_dims", "(", "x", ",", "-", "1", ")", "# bchwx1", "mat", "=", "tf", ".", "expand_dims", "(", "unpool_mat", ",", "0", ")", "# 1xshxsw", "ret", "=", "tf", ".", "tensordot", "(", "x", ",", "mat", ",", "axes", "=", "1", ")", "# bxcxhxwxshxsw", "if", "data_format", "==", "'NHWC'", ":", "ret", "=", "tf", ".", "transpose", "(", "ret", ",", "[", "0", ",", "2", ",", "4", ",", "3", ",", "5", ",", "1", "]", ")", "else", ":", "ret", "=", "tf", ".", "transpose", "(", "ret", ",", "[", "0", ",", "1", ",", "2", ",", "4", ",", "3", ",", "5", "]", ")", "shape3_dyn", "=", "[", "output_shape", ".", "get_dynamic", "(", "k", ")", "for", "k", "in", "range", "(", "1", ",", "4", ")", "]", "ret", "=", "tf", ".", "reshape", "(", "ret", ",", "tf", ".", "stack", "(", "[", "-", "1", "]", "+", "shape3_dyn", ")", ")", "ret", ".", "set_shape", "(", "tf", ".", "TensorShape", "(", "output_shape", ".", "get_static", "(", ")", ")", ")", "return", "ret" ]
38.52
0.002025
[ "def FixedUnPooling(x, shape, unpool_mat=None, data_format='channels_last'):\n", " \"\"\"\n", " Unpool the input with a fixed matrix to perform kronecker product with.\n", "\n", " Args:\n", " x (tf.Tensor): a 4D image tensor\n", " shape: int or (h, w) tuple\n", " unpool_mat: a tf.Tensor or np.ndarray 2D matrix with size=shape.\n", " If is None, will use a matrix with 1 at top-left corner.\n", "\n", " Returns:\n", " tf.Tensor: a 4D image tensor.\n", " \"\"\"\n", " data_format = get_data_format(data_format, keras_mode=False)\n", " shape = shape2d(shape)\n", "\n", " output_shape = StaticDynamicShape(x)\n", " output_shape.apply(1 if data_format == 'NHWC' else 2, lambda x: x * shape[0])\n", " output_shape.apply(2 if data_format == 'NHWC' else 3, lambda x: x * shape[1])\n", "\n", " # a faster implementation for this special case\n", " if shape[0] == 2 and shape[1] == 2 and unpool_mat is None and data_format == 'NHWC':\n", " ret = UnPooling2x2ZeroFilled(x)\n", " else:\n", " # check unpool_mat\n", " if unpool_mat is None:\n", " mat = np.zeros(shape, dtype='float32')\n", " mat[0][0] = 1\n", " unpool_mat = tf.constant(mat, name='unpool_mat')\n", " elif isinstance(unpool_mat, np.ndarray):\n", " unpool_mat = tf.constant(unpool_mat, name='unpool_mat')\n", " assert unpool_mat.shape.as_list() == list(shape)\n", "\n", " if data_format == 'NHWC':\n", " x = tf.transpose(x, [0, 3, 1, 2])\n", " # perform a tensor-matrix kronecker product\n", " x = tf.expand_dims(x, -1) # bchwx1\n", " mat = tf.expand_dims(unpool_mat, 0) # 1xshxsw\n", " ret = tf.tensordot(x, mat, axes=1) # bxcxhxwxshxsw\n", "\n", " if data_format == 'NHWC':\n", " ret = tf.transpose(ret, [0, 2, 4, 3, 5, 1])\n", " else:\n", " ret = tf.transpose(ret, [0, 1, 2, 4, 3, 5])\n", "\n", " shape3_dyn = [output_shape.get_dynamic(k) for k in range(1, 4)]\n", " ret = tf.reshape(ret, tf.stack([-1] + shape3_dyn))\n", "\n", " ret.set_shape(tf.TensorShape(output_shape.get_static()))\n", " return ret" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0.012195121951219513, 0, 0, 0.011235955056179775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142 ]
50
0.002141
def run(name, onlyif=None, unless=None, creates=None, cwd=None, root=None, runas=None, shell=None, env=None, prepend_path=None, stateful=False, umask=None, output_loglevel='debug', hide_output=False, timeout=None, ignore_timeout=False, use_vt=False, success_retcodes=None, success_stdout=None, success_stderr=None, **kwargs): ''' Run a command if certain circumstances are met. Use ``cmd.wait`` if you want to use the ``watch`` requisite. name The command to execute, remember that the command will execute with the path and permissions of the salt-minion. onlyif A command to run as a check, run the named command only if the command passed to the ``onlyif`` option returns a zero exit status unless A command to run as a check, only run the named command if the command passed to the ``unless`` option returns a non-zero exit status cwd The current working directory to execute the command in, defaults to /root root Path to the root of the jail to use. If this parameter is set, the command will run inside a chroot runas The user name to run the command as shell The shell to use for execution, defaults to the shell grain env A list of environment variables to be set prior to execution. Example: .. code-block:: yaml script-foo: cmd.run: - env: - BATCH: 'yes' .. warning:: The above illustrates a common PyYAML pitfall, that **yes**, **no**, **on**, **off**, **true**, and **false** are all loaded as boolean ``True`` and ``False`` values, and must be enclosed in quotes to be used as strings. More info on this (and other) PyYAML idiosyncrasies can be found :ref:`here <yaml-idiosyncrasies>`. Variables as values are not evaluated. So $PATH in the following example is a literal '$PATH': .. code-block:: yaml script-bar: cmd.run: - env: "PATH=/some/path:$PATH" One can still use the existing $PATH by using a bit of Jinja: .. code-block:: jinja {% set current_path = salt['environ.get']('PATH', '/bin:/usr/bin') %} mycommand: cmd.run: - name: ls -l / - env: - PATH: {{ [current_path, '/my/special/bin']|join(':') }} prepend_path $PATH segment to prepend (trailing ':' not necessary) to $PATH. This is an easier alternative to the Jinja workaround. .. versionadded:: 2018.3.0 stateful The command being executed is expected to return data about executing a state. For more information, see the :ref:`stateful-argument` section. umask The umask (in octal) to use when running the command. output_loglevel : debug Control the loglevel at which the output from the command is logged to the minion log. .. note:: The command being run will still be logged at the ``debug`` loglevel regardless, unless ``quiet`` is used for this value. hide_output : False Suppress stdout and stderr in the state's results. .. note:: This is separate from ``output_loglevel``, which only handles how Salt logs to the minion log. .. versionadded:: 2018.3.0 quiet This option no longer has any functionality and will be removed, please set ``output_loglevel`` to ``quiet`` to suppress logging of the command. .. deprecated:: 2014.1.0 timeout If the command has not terminated after timeout seconds, send the subprocess sigterm, and if sigterm is ignored, follow up with sigkill ignore_timeout Ignore the timeout of commands, which is useful for running nohup processes. .. versionadded:: 2015.8.0 creates Only run if the file specified by ``creates`` do not exist. If you specify a list of files then this state will only run if **any** of the files does not exist. .. versionadded:: 2014.7.0 use_vt : False Use VT utils (saltstack) to stream the command output more interactively to the console and the logs. This is experimental. bg : False If ``True``, run command in background and do not await or deliver its results. .. versionadded:: 2016.3.6 success_retcodes: This parameter will be allow a list of non-zero return codes that should be considered a success. If the return code returned from the run matches any in the provided list, the return code will be overridden with zero. .. versionadded:: 2019.2.0 success_stdout: This parameter will be allow a list of strings that when found in standard out should be considered a success. If stdout returned from the run matches any in the provided list, the return code will be overridden with zero. .. versionadded:: Neon success_stderr: This parameter will be allow a list of strings that when found in standard error should be considered a success. If stderr returned from the run matches any in the provided list, the return code will be overridden with zero. .. versionadded:: Neon .. note:: cmd.run supports the usage of ``reload_modules``. This functionality allows you to force Salt to reload all modules. You should only use ``reload_modules`` if your cmd.run does some sort of installation (such as ``pip``), if you do not reload the modules future items in your state which rely on the software being installed will fail. .. code-block:: yaml getpip: cmd.run: - name: /usr/bin/python /usr/local/sbin/get-pip.py - unless: which pip - require: - pkg: python - file: /usr/local/sbin/get-pip.py - reload_modules: True ''' ### NOTE: The keyword arguments in **kwargs are passed directly to the ### ``cmd.run_all`` function and cannot be removed from the function ### definition, otherwise the use of unsupported arguments in a ### ``cmd.run`` state will result in a traceback. ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} if 'quiet' in kwargs: quiet = kwargs.pop('quiet') msg = ( 'The \'quiet\' argument for cmd.run has been deprecated since ' '2014.1.0 and will be removed as of the Neon release. Please set ' '\'output_loglevel\' to \'quiet\' instead.' ) salt.utils.versions.warn_until('Neon', msg) ret.setdefault('warnings', []).append(msg) else: quiet = False test_name = None if not isinstance(stateful, list): stateful = stateful is True elif isinstance(stateful, list) and 'test_name' in stateful[0]: test_name = stateful[0]['test_name'] if __opts__['test'] and test_name: name = test_name # Need the check for None here, if env is not provided then it falls back # to None and it is assumed that the environment is not being overridden. if env is not None and not isinstance(env, (list, dict)): ret['comment'] = ('Invalidly-formatted \'env\' parameter. See ' 'documentation.') return ret cmd_kwargs = copy.deepcopy(kwargs) cmd_kwargs.update({'cwd': cwd, 'root': root, 'runas': runas, 'use_vt': use_vt, 'shell': shell or __grains__['shell'], 'env': env, 'prepend_path': prepend_path, 'umask': umask, 'output_loglevel': output_loglevel, 'hide_output': hide_output, 'quiet': quiet, 'success_retcodes': success_retcodes, 'success_stdout': success_stdout, 'success_stderr': success_stderr}) cret = mod_run_check(cmd_kwargs, onlyif, unless, creates) if isinstance(cret, dict): ret.update(cret) return ret if __opts__['test'] and not test_name: ret['result'] = None ret['comment'] = 'Command "{0}" would have been executed'.format(name) return _reinterpreted_state(ret) if stateful else ret if cwd and not os.path.isdir(cwd): ret['comment'] = ( 'Desired working directory "{0}" ' 'is not available' ).format(cwd) return ret # Wow, we passed the test, run this sucker! try: run_cmd = 'cmd.run_all' if not root else 'cmd.run_chroot' cmd_all = __salt__[run_cmd]( cmd=name, timeout=timeout, python_shell=True, **cmd_kwargs ) except Exception as err: ret['comment'] = six.text_type(err) return ret ret['changes'] = cmd_all ret['result'] = not bool(cmd_all['retcode']) ret['comment'] = 'Command "{0}" run'.format(name) # Ignore timeout errors if asked (for nohups) and treat cmd as a success if ignore_timeout: trigger = 'Timed out after' if ret['changes'].get('retcode') == 1 and trigger in ret['changes'].get('stdout'): ret['changes']['retcode'] = 0 ret['result'] = True if stateful: ret = _reinterpreted_state(ret) if __opts__['test'] and cmd_all['retcode'] == 0 and ret['changes']: ret['result'] = None return ret
[ "def", "run", "(", "name", ",", "onlyif", "=", "None", ",", "unless", "=", "None", ",", "creates", "=", "None", ",", "cwd", "=", "None", ",", "root", "=", "None", ",", "runas", "=", "None", ",", "shell", "=", "None", ",", "env", "=", "None", ",", "prepend_path", "=", "None", ",", "stateful", "=", "False", ",", "umask", "=", "None", ",", "output_loglevel", "=", "'debug'", ",", "hide_output", "=", "False", ",", "timeout", "=", "None", ",", "ignore_timeout", "=", "False", ",", "use_vt", "=", "False", ",", "success_retcodes", "=", "None", ",", "success_stdout", "=", "None", ",", "success_stderr", "=", "None", ",", "*", "*", "kwargs", ")", ":", "### NOTE: The keyword arguments in **kwargs are passed directly to the", "### ``cmd.run_all`` function and cannot be removed from the function", "### definition, otherwise the use of unsupported arguments in a", "### ``cmd.run`` state will result in a traceback.", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", "}", "if", "'quiet'", "in", "kwargs", ":", "quiet", "=", "kwargs", ".", "pop", "(", "'quiet'", ")", "msg", "=", "(", "'The \\'quiet\\' argument for cmd.run has been deprecated since '", "'2014.1.0 and will be removed as of the Neon release. Please set '", "'\\'output_loglevel\\' to \\'quiet\\' instead.'", ")", "salt", ".", "utils", ".", "versions", ".", "warn_until", "(", "'Neon'", ",", "msg", ")", "ret", ".", "setdefault", "(", "'warnings'", ",", "[", "]", ")", ".", "append", "(", "msg", ")", "else", ":", "quiet", "=", "False", "test_name", "=", "None", "if", "not", "isinstance", "(", "stateful", ",", "list", ")", ":", "stateful", "=", "stateful", "is", "True", "elif", "isinstance", "(", "stateful", ",", "list", ")", "and", "'test_name'", "in", "stateful", "[", "0", "]", ":", "test_name", "=", "stateful", "[", "0", "]", "[", "'test_name'", "]", "if", "__opts__", "[", "'test'", "]", "and", "test_name", ":", "name", "=", "test_name", "# Need the check for None here, if env is not provided then it falls back", "# to None and it is assumed that the environment is not being overridden.", "if", "env", "is", "not", "None", "and", "not", "isinstance", "(", "env", ",", "(", "list", ",", "dict", ")", ")", ":", "ret", "[", "'comment'", "]", "=", "(", "'Invalidly-formatted \\'env\\' parameter. See '", "'documentation.'", ")", "return", "ret", "cmd_kwargs", "=", "copy", ".", "deepcopy", "(", "kwargs", ")", "cmd_kwargs", ".", "update", "(", "{", "'cwd'", ":", "cwd", ",", "'root'", ":", "root", ",", "'runas'", ":", "runas", ",", "'use_vt'", ":", "use_vt", ",", "'shell'", ":", "shell", "or", "__grains__", "[", "'shell'", "]", ",", "'env'", ":", "env", ",", "'prepend_path'", ":", "prepend_path", ",", "'umask'", ":", "umask", ",", "'output_loglevel'", ":", "output_loglevel", ",", "'hide_output'", ":", "hide_output", ",", "'quiet'", ":", "quiet", ",", "'success_retcodes'", ":", "success_retcodes", ",", "'success_stdout'", ":", "success_stdout", ",", "'success_stderr'", ":", "success_stderr", "}", ")", "cret", "=", "mod_run_check", "(", "cmd_kwargs", ",", "onlyif", ",", "unless", ",", "creates", ")", "if", "isinstance", "(", "cret", ",", "dict", ")", ":", "ret", ".", "update", "(", "cret", ")", "return", "ret", "if", "__opts__", "[", "'test'", "]", "and", "not", "test_name", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'Command \"{0}\" would have been executed'", ".", "format", "(", "name", ")", "return", "_reinterpreted_state", "(", "ret", ")", "if", "stateful", "else", "ret", "if", "cwd", "and", "not", "os", ".", "path", ".", "isdir", "(", "cwd", ")", ":", "ret", "[", "'comment'", "]", "=", "(", "'Desired working directory \"{0}\" '", "'is not available'", ")", ".", "format", "(", "cwd", ")", "return", "ret", "# Wow, we passed the test, run this sucker!", "try", ":", "run_cmd", "=", "'cmd.run_all'", "if", "not", "root", "else", "'cmd.run_chroot'", "cmd_all", "=", "__salt__", "[", "run_cmd", "]", "(", "cmd", "=", "name", ",", "timeout", "=", "timeout", ",", "python_shell", "=", "True", ",", "*", "*", "cmd_kwargs", ")", "except", "Exception", "as", "err", ":", "ret", "[", "'comment'", "]", "=", "six", ".", "text_type", "(", "err", ")", "return", "ret", "ret", "[", "'changes'", "]", "=", "cmd_all", "ret", "[", "'result'", "]", "=", "not", "bool", "(", "cmd_all", "[", "'retcode'", "]", ")", "ret", "[", "'comment'", "]", "=", "'Command \"{0}\" run'", ".", "format", "(", "name", ")", "# Ignore timeout errors if asked (for nohups) and treat cmd as a success", "if", "ignore_timeout", ":", "trigger", "=", "'Timed out after'", "if", "ret", "[", "'changes'", "]", ".", "get", "(", "'retcode'", ")", "==", "1", "and", "trigger", "in", "ret", "[", "'changes'", "]", ".", "get", "(", "'stdout'", ")", ":", "ret", "[", "'changes'", "]", "[", "'retcode'", "]", "=", "0", "ret", "[", "'result'", "]", "=", "True", "if", "stateful", ":", "ret", "=", "_reinterpreted_state", "(", "ret", ")", "if", "__opts__", "[", "'test'", "]", "and", "cmd_all", "[", "'retcode'", "]", "==", "0", "and", "ret", "[", "'changes'", "]", ":", "ret", "[", "'result'", "]", "=", "None", "return", "ret" ]
33.150171
0.001
[ "def run(name,\n", " onlyif=None,\n", " unless=None,\n", " creates=None,\n", " cwd=None,\n", " root=None,\n", " runas=None,\n", " shell=None,\n", " env=None,\n", " prepend_path=None,\n", " stateful=False,\n", " umask=None,\n", " output_loglevel='debug',\n", " hide_output=False,\n", " timeout=None,\n", " ignore_timeout=False,\n", " use_vt=False,\n", " success_retcodes=None,\n", " success_stdout=None,\n", " success_stderr=None,\n", " **kwargs):\n", " '''\n", " Run a command if certain circumstances are met. Use ``cmd.wait`` if you\n", " want to use the ``watch`` requisite.\n", "\n", " name\n", " The command to execute, remember that the command will execute with the\n", " path and permissions of the salt-minion.\n", "\n", " onlyif\n", " A command to run as a check, run the named command only if the command\n", " passed to the ``onlyif`` option returns a zero exit status\n", "\n", " unless\n", " A command to run as a check, only run the named command if the command\n", " passed to the ``unless`` option returns a non-zero exit status\n", "\n", " cwd\n", " The current working directory to execute the command in, defaults to\n", " /root\n", "\n", " root\n", " Path to the root of the jail to use. If this parameter is set, the command\n", " will run inside a chroot\n", "\n", " runas\n", " The user name to run the command as\n", "\n", " shell\n", " The shell to use for execution, defaults to the shell grain\n", "\n", " env\n", " A list of environment variables to be set prior to execution.\n", " Example:\n", "\n", " .. code-block:: yaml\n", "\n", " script-foo:\n", " cmd.run:\n", " - env:\n", " - BATCH: 'yes'\n", "\n", " .. warning::\n", "\n", " The above illustrates a common PyYAML pitfall, that **yes**,\n", " **no**, **on**, **off**, **true**, and **false** are all loaded as\n", " boolean ``True`` and ``False`` values, and must be enclosed in\n", " quotes to be used as strings. More info on this (and other) PyYAML\n", " idiosyncrasies can be found :ref:`here <yaml-idiosyncrasies>`.\n", "\n", " Variables as values are not evaluated. So $PATH in the following\n", " example is a literal '$PATH':\n", "\n", " .. code-block:: yaml\n", "\n", " script-bar:\n", " cmd.run:\n", " - env: \"PATH=/some/path:$PATH\"\n", "\n", " One can still use the existing $PATH by using a bit of Jinja:\n", "\n", " .. code-block:: jinja\n", "\n", " {% set current_path = salt['environ.get']('PATH', '/bin:/usr/bin') %}\n", "\n", " mycommand:\n", " cmd.run:\n", " - name: ls -l /\n", " - env:\n", " - PATH: {{ [current_path, '/my/special/bin']|join(':') }}\n", "\n", " prepend_path\n", " $PATH segment to prepend (trailing ':' not necessary) to $PATH. This is\n", " an easier alternative to the Jinja workaround.\n", "\n", " .. versionadded:: 2018.3.0\n", "\n", " stateful\n", " The command being executed is expected to return data about executing\n", " a state. For more information, see the :ref:`stateful-argument` section.\n", "\n", " umask\n", " The umask (in octal) to use when running the command.\n", "\n", " output_loglevel : debug\n", " Control the loglevel at which the output from the command is logged to\n", " the minion log.\n", "\n", " .. note::\n", " The command being run will still be logged at the ``debug``\n", " loglevel regardless, unless ``quiet`` is used for this value.\n", "\n", " hide_output : False\n", " Suppress stdout and stderr in the state's results.\n", "\n", " .. note::\n", " This is separate from ``output_loglevel``, which only handles how\n", " Salt logs to the minion log.\n", "\n", " .. versionadded:: 2018.3.0\n", "\n", " quiet\n", " This option no longer has any functionality and will be removed, please\n", " set ``output_loglevel`` to ``quiet`` to suppress logging of the\n", " command.\n", "\n", " .. deprecated:: 2014.1.0\n", "\n", " timeout\n", " If the command has not terminated after timeout seconds, send the\n", " subprocess sigterm, and if sigterm is ignored, follow up with sigkill\n", "\n", " ignore_timeout\n", " Ignore the timeout of commands, which is useful for running nohup\n", " processes.\n", "\n", " .. versionadded:: 2015.8.0\n", "\n", " creates\n", " Only run if the file specified by ``creates`` do not exist. If you\n", " specify a list of files then this state will only run if **any** of\n", " the files does not exist.\n", "\n", " .. versionadded:: 2014.7.0\n", "\n", " use_vt : False\n", " Use VT utils (saltstack) to stream the command output more\n", " interactively to the console and the logs.\n", " This is experimental.\n", "\n", " bg : False\n", " If ``True``, run command in background and do not await or deliver its\n", " results.\n", "\n", " .. versionadded:: 2016.3.6\n", "\n", " success_retcodes: This parameter will be allow a list of\n", " non-zero return codes that should be considered a success. If the\n", " return code returned from the run matches any in the provided list,\n", " the return code will be overridden with zero.\n", "\n", " .. versionadded:: 2019.2.0\n", "\n", " success_stdout: This parameter will be allow a list of\n", " strings that when found in standard out should be considered a success.\n", " If stdout returned from the run matches any in the provided list,\n", " the return code will be overridden with zero.\n", "\n", " .. versionadded:: Neon\n", "\n", " success_stderr: This parameter will be allow a list of\n", " strings that when found in standard error should be considered a success.\n", " If stderr returned from the run matches any in the provided list,\n", " the return code will be overridden with zero.\n", "\n", " .. versionadded:: Neon\n", "\n", " .. note::\n", "\n", " cmd.run supports the usage of ``reload_modules``. This functionality\n", " allows you to force Salt to reload all modules. You should only use\n", " ``reload_modules`` if your cmd.run does some sort of installation\n", " (such as ``pip``), if you do not reload the modules future items in\n", " your state which rely on the software being installed will fail.\n", "\n", " .. code-block:: yaml\n", "\n", " getpip:\n", " cmd.run:\n", " - name: /usr/bin/python /usr/local/sbin/get-pip.py\n", " - unless: which pip\n", " - require:\n", " - pkg: python\n", " - file: /usr/local/sbin/get-pip.py\n", " - reload_modules: True\n", "\n", " '''\n", " ### NOTE: The keyword arguments in **kwargs are passed directly to the\n", " ### ``cmd.run_all`` function and cannot be removed from the function\n", " ### definition, otherwise the use of unsupported arguments in a\n", " ### ``cmd.run`` state will result in a traceback.\n", "\n", " ret = {'name': name,\n", " 'changes': {},\n", " 'result': False,\n", " 'comment': ''}\n", "\n", " if 'quiet' in kwargs:\n", " quiet = kwargs.pop('quiet')\n", " msg = (\n", " 'The \\'quiet\\' argument for cmd.run has been deprecated since '\n", " '2014.1.0 and will be removed as of the Neon release. Please set '\n", " '\\'output_loglevel\\' to \\'quiet\\' instead.'\n", " )\n", " salt.utils.versions.warn_until('Neon', msg)\n", " ret.setdefault('warnings', []).append(msg)\n", " else:\n", " quiet = False\n", "\n", " test_name = None\n", " if not isinstance(stateful, list):\n", " stateful = stateful is True\n", " elif isinstance(stateful, list) and 'test_name' in stateful[0]:\n", " test_name = stateful[0]['test_name']\n", " if __opts__['test'] and test_name:\n", " name = test_name\n", "\n", " # Need the check for None here, if env is not provided then it falls back\n", " # to None and it is assumed that the environment is not being overridden.\n", " if env is not None and not isinstance(env, (list, dict)):\n", " ret['comment'] = ('Invalidly-formatted \\'env\\' parameter. See '\n", " 'documentation.')\n", " return ret\n", "\n", " cmd_kwargs = copy.deepcopy(kwargs)\n", " cmd_kwargs.update({'cwd': cwd,\n", " 'root': root,\n", " 'runas': runas,\n", " 'use_vt': use_vt,\n", " 'shell': shell or __grains__['shell'],\n", " 'env': env,\n", " 'prepend_path': prepend_path,\n", " 'umask': umask,\n", " 'output_loglevel': output_loglevel,\n", " 'hide_output': hide_output,\n", " 'quiet': quiet,\n", " 'success_retcodes': success_retcodes,\n", " 'success_stdout': success_stdout,\n", " 'success_stderr': success_stderr})\n", "\n", " cret = mod_run_check(cmd_kwargs, onlyif, unless, creates)\n", " if isinstance(cret, dict):\n", " ret.update(cret)\n", " return ret\n", "\n", " if __opts__['test'] and not test_name:\n", " ret['result'] = None\n", " ret['comment'] = 'Command \"{0}\" would have been executed'.format(name)\n", " return _reinterpreted_state(ret) if stateful else ret\n", "\n", " if cwd and not os.path.isdir(cwd):\n", " ret['comment'] = (\n", " 'Desired working directory \"{0}\" '\n", " 'is not available'\n", " ).format(cwd)\n", " return ret\n", "\n", " # Wow, we passed the test, run this sucker!\n", " try:\n", " run_cmd = 'cmd.run_all' if not root else 'cmd.run_chroot'\n", " cmd_all = __salt__[run_cmd](\n", " cmd=name, timeout=timeout, python_shell=True, **cmd_kwargs\n", " )\n", " except Exception as err:\n", " ret['comment'] = six.text_type(err)\n", " return ret\n", "\n", " ret['changes'] = cmd_all\n", " ret['result'] = not bool(cmd_all['retcode'])\n", " ret['comment'] = 'Command \"{0}\" run'.format(name)\n", "\n", " # Ignore timeout errors if asked (for nohups) and treat cmd as a success\n", " if ignore_timeout:\n", " trigger = 'Timed out after'\n", " if ret['changes'].get('retcode') == 1 and trigger in ret['changes'].get('stdout'):\n", " ret['changes']['retcode'] = 0\n", " ret['result'] = True\n", "\n", " if stateful:\n", " ret = _reinterpreted_state(ret)\n", " if __opts__['test'] and cmd_all['retcode'] == 0 and ret['changes']:\n", " ret['result'] = None\n", " return ret" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013333333333333334, 0.012658227848101266, 0.013513513513513514, 0.016666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142 ]
293
0.000639
def _getPattern(self, ipattern, done=None): """Parses sort pattern. :ipattern: A pattern to parse. :done: If :ipattern: refers to done|undone, use this to indicate proper state. :returns: A pattern suitable for Model.modify. """ if ipattern is None: return None if ipattern is True: if done is not None: return ([(None, None, done)], {}) # REMEMBER: This False is for sort reverse! return ([(0, False)], {}) def _getReverse(pm): return pm == '-' def _getIndex(k): try: return int(k) except ValueError: raise InvalidPatternError(k, "Invalid level number") def _getDone(p): v = p.split('=') if len(v) == 2: try: return (Model.indexes[v[0]], v[1], done) except KeyError: raise InvalidPatternError(v[0], 'Invalid field name') return (None, v[0], done) ipattern1 = list() ipattern2 = dict() for s in ipattern.split(','): if done is not None: v = done else: v = _getReverse(s[-1]) k = s.split(':') if len(k) == 1: if done is not None: ipattern1.append(_getDone(k[0])) continue ko = k[0][:-1] try: if len(k[0]) == 1: k = 0 else: k = Model.indexes[ko] except KeyError: k = _getIndex(k[0][:-1]) else: ipattern1.append((k, v)) continue v = (0, v) elif len(k) == 2: try: if done is not None: v = _getDone(k[1]) else: v = (Model.indexes[k[1][:-1]], v) k = _getIndex(k[0]) except KeyError: raise InvalidPatternError(k[1][:-1], 'Invalid field name') else: raise InvalidPatternError(s, 'Unrecognized token in') ipattern2.setdefault(k, []).append(v) return (ipattern1, ipattern2)
[ "def", "_getPattern", "(", "self", ",", "ipattern", ",", "done", "=", "None", ")", ":", "if", "ipattern", "is", "None", ":", "return", "None", "if", "ipattern", "is", "True", ":", "if", "done", "is", "not", "None", ":", "return", "(", "[", "(", "None", ",", "None", ",", "done", ")", "]", ",", "{", "}", ")", "# REMEMBER: This False is for sort reverse!", "return", "(", "[", "(", "0", ",", "False", ")", "]", ",", "{", "}", ")", "def", "_getReverse", "(", "pm", ")", ":", "return", "pm", "==", "'-'", "def", "_getIndex", "(", "k", ")", ":", "try", ":", "return", "int", "(", "k", ")", "except", "ValueError", ":", "raise", "InvalidPatternError", "(", "k", ",", "\"Invalid level number\"", ")", "def", "_getDone", "(", "p", ")", ":", "v", "=", "p", ".", "split", "(", "'='", ")", "if", "len", "(", "v", ")", "==", "2", ":", "try", ":", "return", "(", "Model", ".", "indexes", "[", "v", "[", "0", "]", "]", ",", "v", "[", "1", "]", ",", "done", ")", "except", "KeyError", ":", "raise", "InvalidPatternError", "(", "v", "[", "0", "]", ",", "'Invalid field name'", ")", "return", "(", "None", ",", "v", "[", "0", "]", ",", "done", ")", "ipattern1", "=", "list", "(", ")", "ipattern2", "=", "dict", "(", ")", "for", "s", "in", "ipattern", ".", "split", "(", "','", ")", ":", "if", "done", "is", "not", "None", ":", "v", "=", "done", "else", ":", "v", "=", "_getReverse", "(", "s", "[", "-", "1", "]", ")", "k", "=", "s", ".", "split", "(", "':'", ")", "if", "len", "(", "k", ")", "==", "1", ":", "if", "done", "is", "not", "None", ":", "ipattern1", ".", "append", "(", "_getDone", "(", "k", "[", "0", "]", ")", ")", "continue", "ko", "=", "k", "[", "0", "]", "[", ":", "-", "1", "]", "try", ":", "if", "len", "(", "k", "[", "0", "]", ")", "==", "1", ":", "k", "=", "0", "else", ":", "k", "=", "Model", ".", "indexes", "[", "ko", "]", "except", "KeyError", ":", "k", "=", "_getIndex", "(", "k", "[", "0", "]", "[", ":", "-", "1", "]", ")", "else", ":", "ipattern1", ".", "append", "(", "(", "k", ",", "v", ")", ")", "continue", "v", "=", "(", "0", ",", "v", ")", "elif", "len", "(", "k", ")", "==", "2", ":", "try", ":", "if", "done", "is", "not", "None", ":", "v", "=", "_getDone", "(", "k", "[", "1", "]", ")", "else", ":", "v", "=", "(", "Model", ".", "indexes", "[", "k", "[", "1", "]", "[", ":", "-", "1", "]", "]", ",", "v", ")", "k", "=", "_getIndex", "(", "k", "[", "0", "]", ")", "except", "KeyError", ":", "raise", "InvalidPatternError", "(", "k", "[", "1", "]", "[", ":", "-", "1", "]", ",", "'Invalid field name'", ")", "else", ":", "raise", "InvalidPatternError", "(", "s", ",", "'Unrecognized token in'", ")", "ipattern2", ".", "setdefault", "(", "k", ",", "[", "]", ")", ".", "append", "(", "v", ")", "return", "(", "ipattern1", ",", "ipattern2", ")" ]
32.887324
0.000832
[ "def _getPattern(self, ipattern, done=None):\n", " \"\"\"Parses sort pattern.\n", "\n", " :ipattern: A pattern to parse.\n", " :done: If :ipattern: refers to done|undone,\n", " use this to indicate proper state.\n", " :returns: A pattern suitable for Model.modify.\n", "\n", " \"\"\"\n", " if ipattern is None:\n", " return None\n", " if ipattern is True:\n", " if done is not None:\n", " return ([(None, None, done)], {})\n", " # REMEMBER: This False is for sort reverse!\n", " return ([(0, False)], {})\n", "\n", " def _getReverse(pm):\n", " return pm == '-'\n", "\n", " def _getIndex(k):\n", " try:\n", " return int(k)\n", " except ValueError:\n", " raise InvalidPatternError(k, \"Invalid level number\")\n", "\n", " def _getDone(p):\n", " v = p.split('=')\n", " if len(v) == 2:\n", " try:\n", " return (Model.indexes[v[0]], v[1], done)\n", " except KeyError:\n", " raise InvalidPatternError(v[0], 'Invalid field name')\n", " return (None, v[0], done)\n", " ipattern1 = list()\n", " ipattern2 = dict()\n", " for s in ipattern.split(','):\n", " if done is not None:\n", " v = done\n", " else:\n", " v = _getReverse(s[-1])\n", " k = s.split(':')\n", " if len(k) == 1:\n", " if done is not None:\n", " ipattern1.append(_getDone(k[0]))\n", " continue\n", " ko = k[0][:-1]\n", " try:\n", " if len(k[0]) == 1:\n", " k = 0\n", " else:\n", " k = Model.indexes[ko]\n", " except KeyError:\n", " k = _getIndex(k[0][:-1])\n", " else:\n", " ipattern1.append((k, v))\n", " continue\n", " v = (0, v)\n", " elif len(k) == 2:\n", " try:\n", " if done is not None:\n", " v = _getDone(k[1])\n", " else:\n", " v = (Model.indexes[k[1][:-1]], v)\n", " k = _getIndex(k[0])\n", " except KeyError:\n", " raise InvalidPatternError(k[1][:-1], 'Invalid field name')\n", " else:\n", " raise InvalidPatternError(s, 'Unrecognized token in')\n", " ipattern2.setdefault(k, []).append(v)\n", " return (ipattern1, ipattern2)" ]
[ 0, 0.03125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02702702702702703 ]
71
0.000821
def update_agent_db_refs(self, agent, agent_text, do_rename=True): """Update db_refs of agent using the grounding map If the grounding map is missing one of the HGNC symbol or Uniprot ID, attempts to reconstruct one from the other. Parameters ---------- agent : :py:class:`indra.statements.Agent` The agent whose db_refs will be updated agent_text : str The agent_text to find a grounding for in the grounding map dictionary. Typically this will be agent.db_refs['TEXT'] but there may be situations where a different value should be used. do_rename: Optional[bool] If True, the Agent name is updated based on the mapped grounding. If do_rename is True the priority for setting the name is FamPlex ID, HGNC symbol, then the gene name from Uniprot. Default: True Raises ------ ValueError If the the grounding map contains and HGNC symbol for agent_text but no HGNC ID can be found for it. ValueError If the grounding map contains both an HGNC symbol and a Uniprot ID, but the HGNC symbol and the gene name associated with the gene in Uniprot do not match or if there is no associated gene name in Uniprot. """ map_db_refs = deepcopy(self.gm.get(agent_text)) self.standardize_agent_db_refs(agent, map_db_refs, do_rename)
[ "def", "update_agent_db_refs", "(", "self", ",", "agent", ",", "agent_text", ",", "do_rename", "=", "True", ")", ":", "map_db_refs", "=", "deepcopy", "(", "self", ".", "gm", ".", "get", "(", "agent_text", ")", ")", "self", ".", "standardize_agent_db_refs", "(", "agent", ",", "map_db_refs", ",", "do_rename", ")" ]
44.727273
0.001326
[ "def update_agent_db_refs(self, agent, agent_text, do_rename=True):\n", " \"\"\"Update db_refs of agent using the grounding map\n", "\n", " If the grounding map is missing one of the HGNC symbol or Uniprot ID,\n", " attempts to reconstruct one from the other.\n", "\n", " Parameters\n", " ----------\n", " agent : :py:class:`indra.statements.Agent`\n", " The agent whose db_refs will be updated\n", " agent_text : str\n", " The agent_text to find a grounding for in the grounding map\n", " dictionary. Typically this will be agent.db_refs['TEXT'] but\n", " there may be situations where a different value should be used.\n", " do_rename: Optional[bool]\n", " If True, the Agent name is updated based on the mapped grounding.\n", " If do_rename is True the priority for setting the name is\n", " FamPlex ID, HGNC symbol, then the gene name\n", " from Uniprot. Default: True\n", "\n", " Raises\n", " ------\n", " ValueError\n", " If the the grounding map contains and HGNC symbol for\n", " agent_text but no HGNC ID can be found for it.\n", " ValueError\n", " If the grounding map contains both an HGNC symbol and a\n", " Uniprot ID, but the HGNC symbol and the gene name associated with\n", " the gene in Uniprot do not match or if there is no associated gene\n", " name in Uniprot.\n", " \"\"\"\n", " map_db_refs = deepcopy(self.gm.get(agent_text))\n", " self.standardize_agent_db_refs(agent, map_db_refs, do_rename)" ]
[ 0, 0.01694915254237288, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014492753623188406 ]
33
0.000953
def _filter_xpath_grouping(xpath): """ This method removes the outer parentheses for xpath grouping. The xpath converter will break otherwise. Example: "(//button[@type='submit'])[1]" becomes "//button[@type='submit'][1]" """ # First remove the first open parentheses xpath = xpath[1:] # Next remove the last closed parentheses index = xpath.rfind(')') if index == -1: raise XpathException("Invalid or unsupported Xpath: %s" % xpath) xpath = xpath[:index] + xpath[index + 1:] return xpath
[ "def", "_filter_xpath_grouping", "(", "xpath", ")", ":", "# First remove the first open parentheses", "xpath", "=", "xpath", "[", "1", ":", "]", "# Next remove the last closed parentheses", "index", "=", "xpath", ".", "rfind", "(", "')'", ")", "if", "index", "==", "-", "1", ":", "raise", "XpathException", "(", "\"Invalid or unsupported Xpath: %s\"", "%", "xpath", ")", "xpath", "=", "xpath", "[", ":", "index", "]", "+", "xpath", "[", "index", "+", "1", ":", "]", "return", "xpath" ]
31.411765
0.001818
[ "def _filter_xpath_grouping(xpath):\n", " \"\"\"\n", " This method removes the outer parentheses for xpath grouping.\n", " The xpath converter will break otherwise.\n", " Example:\n", " \"(//button[@type='submit'])[1]\" becomes \"//button[@type='submit'][1]\"\n", " \"\"\"\n", "\n", " # First remove the first open parentheses\n", " xpath = xpath[1:]\n", "\n", " # Next remove the last closed parentheses\n", " index = xpath.rfind(')')\n", " if index == -1:\n", " raise XpathException(\"Invalid or unsupported Xpath: %s\" % xpath)\n", " xpath = xpath[:index] + xpath[index + 1:]\n", " return xpath" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625 ]
17
0.003676
def iter_(obj): """A custom replacement for iter(), dispatching a few custom picklable iterators for known types. """ if six.PY2: file_types = file, # noqa if six.PY3: file_types = io.IOBase, dict_items = {}.items().__class__ dict_values = {}.values().__class__ dict_keys = {}.keys().__class__ dict_view = (dict_items, dict_values, dict_keys) if isinstance(obj, dict): return ordered_sequence_iterator(list(obj.keys())) if isinstance(obj, file_types): return file_iterator(obj) if six.PY2: if isinstance(obj, (list, tuple)): return ordered_sequence_iterator(obj) if isinstance(obj, xrange): # noqa return range_iterator(obj) if NUMPY_AVAILABLE and isinstance(obj, numpy.ndarray): return ordered_sequence_iterator(obj) if six.PY3 and isinstance(obj, dict_view): return ordered_sequence_iterator(list(obj)) return iter(obj)
[ "def", "iter_", "(", "obj", ")", ":", "if", "six", ".", "PY2", ":", "file_types", "=", "file", ",", "# noqa", "if", "six", ".", "PY3", ":", "file_types", "=", "io", ".", "IOBase", ",", "dict_items", "=", "{", "}", ".", "items", "(", ")", ".", "__class__", "dict_values", "=", "{", "}", ".", "values", "(", ")", ".", "__class__", "dict_keys", "=", "{", "}", ".", "keys", "(", ")", ".", "__class__", "dict_view", "=", "(", "dict_items", ",", "dict_values", ",", "dict_keys", ")", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "ordered_sequence_iterator", "(", "list", "(", "obj", ".", "keys", "(", ")", ")", ")", "if", "isinstance", "(", "obj", ",", "file_types", ")", ":", "return", "file_iterator", "(", "obj", ")", "if", "six", ".", "PY2", ":", "if", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "ordered_sequence_iterator", "(", "obj", ")", "if", "isinstance", "(", "obj", ",", "xrange", ")", ":", "# noqa", "return", "range_iterator", "(", "obj", ")", "if", "NUMPY_AVAILABLE", "and", "isinstance", "(", "obj", ",", "numpy", ".", "ndarray", ")", ":", "return", "ordered_sequence_iterator", "(", "obj", ")", "if", "six", ".", "PY3", "and", "isinstance", "(", "obj", ",", "dict_view", ")", ":", "return", "ordered_sequence_iterator", "(", "list", "(", "obj", ")", ")", "return", "iter", "(", "obj", ")" ]
35.925926
0.001004
[ "def iter_(obj):\n", " \"\"\"A custom replacement for iter(), dispatching a few custom picklable\n", " iterators for known types.\n", " \"\"\"\n", " if six.PY2:\n", " file_types = file, # noqa\n", " if six.PY3:\n", " file_types = io.IOBase,\n", " dict_items = {}.items().__class__\n", " dict_values = {}.values().__class__\n", " dict_keys = {}.keys().__class__\n", " dict_view = (dict_items, dict_values, dict_keys)\n", "\n", " if isinstance(obj, dict):\n", " return ordered_sequence_iterator(list(obj.keys()))\n", " if isinstance(obj, file_types):\n", " return file_iterator(obj)\n", " if six.PY2:\n", " if isinstance(obj, (list, tuple)):\n", " return ordered_sequence_iterator(obj)\n", " if isinstance(obj, xrange): # noqa\n", " return range_iterator(obj)\n", " if NUMPY_AVAILABLE and isinstance(obj, numpy.ndarray):\n", " return ordered_sequence_iterator(obj)\n", " if six.PY3 and isinstance(obj, dict_view):\n", " return ordered_sequence_iterator(list(obj))\n", " return iter(obj)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05 ]
27
0.001852
def parse_xml_node(self, node): '''Parse an xml.dom Node object representing a data connector into this object. ''' self.connector_id = node.getAttributeNS(RTS_NS, 'connectorId') self.name = node.getAttributeNS(RTS_NS, 'name') self.data_type = node.getAttributeNS(RTS_NS, 'dataType') self.interface_type = node.getAttributeNS(RTS_NS, 'interfaceType') self.data_flow_type = node.getAttributeNS(RTS_NS, 'dataflowType') if node.hasAttributeNS(RTS_NS, 'subscriptionType'): self.subscription_type = node.getAttributeNS(RTS_NS, 'subscriptionType') else: self.subscription_type = '' if node.hasAttributeNS(RTS_NS, 'pushInterval'): self.push_interval = float(node.getAttributeNS(RTS_NS, 'pushInterval')) else: self.push_interval = 0.0 self.comment = node.getAttributeNS(RTS_EXT_NS, 'comment') if node.hasAttributeNS(RTS_EXT_NS, 'visible'): visible = node.getAttributeNS(RTS_EXT_NS, 'visible') if visible == 'true' or visible == '1': self.visible = True else: self.visible = False if node.getElementsByTagNameNS(RTS_NS, 'sourceDataPort').length != 1: raise InvalidDataPortConnectorNodeError self.source_data_port = TargetPort().parse_xml_node(\ node.getElementsByTagNameNS(RTS_NS, 'sourceDataPort')[0]) if node.getElementsByTagNameNS(RTS_NS, 'targetDataPort').length != 1: raise InvalidDataPortConnectorNodeError self.target_data_port = TargetPort().parse_xml_node(\ node.getElementsByTagNameNS(RTS_NS, 'targetDataPort')[0]) for c in get_direct_child_elements_xml(node, prefix=RTS_EXT_NS, local_name='Properties'): name, value = parse_properties_xml(c) self._properties[name] = value return self
[ "def", "parse_xml_node", "(", "self", ",", "node", ")", ":", "self", ".", "connector_id", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'connectorId'", ")", "self", ".", "name", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'name'", ")", "self", ".", "data_type", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'dataType'", ")", "self", ".", "interface_type", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'interfaceType'", ")", "self", ".", "data_flow_type", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'dataflowType'", ")", "if", "node", ".", "hasAttributeNS", "(", "RTS_NS", ",", "'subscriptionType'", ")", ":", "self", ".", "subscription_type", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'subscriptionType'", ")", "else", ":", "self", ".", "subscription_type", "=", "''", "if", "node", ".", "hasAttributeNS", "(", "RTS_NS", ",", "'pushInterval'", ")", ":", "self", ".", "push_interval", "=", "float", "(", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'pushInterval'", ")", ")", "else", ":", "self", ".", "push_interval", "=", "0.0", "self", ".", "comment", "=", "node", ".", "getAttributeNS", "(", "RTS_EXT_NS", ",", "'comment'", ")", "if", "node", ".", "hasAttributeNS", "(", "RTS_EXT_NS", ",", "'visible'", ")", ":", "visible", "=", "node", ".", "getAttributeNS", "(", "RTS_EXT_NS", ",", "'visible'", ")", "if", "visible", "==", "'true'", "or", "visible", "==", "'1'", ":", "self", ".", "visible", "=", "True", "else", ":", "self", ".", "visible", "=", "False", "if", "node", ".", "getElementsByTagNameNS", "(", "RTS_NS", ",", "'sourceDataPort'", ")", ".", "length", "!=", "1", ":", "raise", "InvalidDataPortConnectorNodeError", "self", ".", "source_data_port", "=", "TargetPort", "(", ")", ".", "parse_xml_node", "(", "node", ".", "getElementsByTagNameNS", "(", "RTS_NS", ",", "'sourceDataPort'", ")", "[", "0", "]", ")", "if", "node", ".", "getElementsByTagNameNS", "(", "RTS_NS", ",", "'targetDataPort'", ")", ".", "length", "!=", "1", ":", "raise", "InvalidDataPortConnectorNodeError", "self", ".", "target_data_port", "=", "TargetPort", "(", ")", ".", "parse_xml_node", "(", "node", ".", "getElementsByTagNameNS", "(", "RTS_NS", ",", "'targetDataPort'", ")", "[", "0", "]", ")", "for", "c", "in", "get_direct_child_elements_xml", "(", "node", ",", "prefix", "=", "RTS_EXT_NS", ",", "local_name", "=", "'Properties'", ")", ":", "name", ",", "value", "=", "parse_properties_xml", "(", "c", ")", "self", ".", "_properties", "[", "name", "]", "=", "value", "return", "self" ]
50.414634
0.001898
[ "def parse_xml_node(self, node):\n", " '''Parse an xml.dom Node object representing a data connector into this\n", " object.\n", "\n", " '''\n", " self.connector_id = node.getAttributeNS(RTS_NS, 'connectorId')\n", " self.name = node.getAttributeNS(RTS_NS, 'name')\n", " self.data_type = node.getAttributeNS(RTS_NS, 'dataType')\n", " self.interface_type = node.getAttributeNS(RTS_NS, 'interfaceType')\n", " self.data_flow_type = node.getAttributeNS(RTS_NS, 'dataflowType')\n", " if node.hasAttributeNS(RTS_NS, 'subscriptionType'):\n", " self.subscription_type = node.getAttributeNS(RTS_NS,\n", " 'subscriptionType')\n", " else:\n", " self.subscription_type = ''\n", " if node.hasAttributeNS(RTS_NS, 'pushInterval'):\n", " self.push_interval = float(node.getAttributeNS(RTS_NS,\n", " 'pushInterval'))\n", " else:\n", " self.push_interval = 0.0\n", " self.comment = node.getAttributeNS(RTS_EXT_NS, 'comment')\n", " if node.hasAttributeNS(RTS_EXT_NS, 'visible'):\n", " visible = node.getAttributeNS(RTS_EXT_NS, 'visible')\n", " if visible == 'true' or visible == '1':\n", " self.visible = True\n", " else:\n", " self.visible = False\n", "\n", " if node.getElementsByTagNameNS(RTS_NS, 'sourceDataPort').length != 1:\n", " raise InvalidDataPortConnectorNodeError\n", " self.source_data_port = TargetPort().parse_xml_node(\\\n", " node.getElementsByTagNameNS(RTS_NS, 'sourceDataPort')[0])\n", " if node.getElementsByTagNameNS(RTS_NS, 'targetDataPort').length != 1:\n", " raise InvalidDataPortConnectorNodeError\n", " self.target_data_port = TargetPort().parse_xml_node(\\\n", " node.getElementsByTagNameNS(RTS_NS, 'targetDataPort')[0])\n", " for c in get_direct_child_elements_xml(node, prefix=RTS_EXT_NS,\n", " local_name='Properties'):\n", " name, value = parse_properties_xml(c)\n", " self._properties[name] = value\n", " return self" ]
[ 0, 0.0125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016129032258064516, 0, 0, 0, 0.016129032258064516, 0, 0, 0, 0, 0, 0.05263157894736842 ]
41
0.002375
def all_features(): ''' Returns dictionary of all features in the module .. note:: Some of the features (hist4, corr) are relatively expensive to compute ''' features = {'mean': mean, 'median': median, 'gmean': gmean, 'hmean': hmean, 'vec_sum': vec_sum, 'abs_sum': abs_sum, 'abs_energy': abs_energy, 'std': std, 'var': var, 'variation': variation, 'min': minimum, 'max': maximum, 'skew': skew, 'kurt': kurt, 'mean_diff': mean_diff, 'mean_abs_diff': means_abs_diff, 'mse': mse, 'mnx': mean_crossings, 'hist4': hist(), 'corr': corr2, 'mean_abs_value': mean_abs, 'zero_crossings': zero_crossing(), 'slope_sign_changes': slope_sign_changes(), 'waveform_length': waveform_length, 'emg_var': emg_var, 'root_mean_square': root_mean_square, 'willison_amplitude': willison_amplitude()} return features
[ "def", "all_features", "(", ")", ":", "features", "=", "{", "'mean'", ":", "mean", ",", "'median'", ":", "median", ",", "'gmean'", ":", "gmean", ",", "'hmean'", ":", "hmean", ",", "'vec_sum'", ":", "vec_sum", ",", "'abs_sum'", ":", "abs_sum", ",", "'abs_energy'", ":", "abs_energy", ",", "'std'", ":", "std", ",", "'var'", ":", "var", ",", "'variation'", ":", "variation", ",", "'min'", ":", "minimum", ",", "'max'", ":", "maximum", ",", "'skew'", ":", "skew", ",", "'kurt'", ":", "kurt", ",", "'mean_diff'", ":", "mean_diff", ",", "'mean_abs_diff'", ":", "means_abs_diff", ",", "'mse'", ":", "mse", ",", "'mnx'", ":", "mean_crossings", ",", "'hist4'", ":", "hist", "(", ")", ",", "'corr'", ":", "corr2", ",", "'mean_abs_value'", ":", "mean_abs", ",", "'zero_crossings'", ":", "zero_crossing", "(", ")", ",", "'slope_sign_changes'", ":", "slope_sign_changes", "(", ")", ",", "'waveform_length'", ":", "waveform_length", ",", "'emg_var'", ":", "emg_var", ",", "'root_mean_square'", ":", "root_mean_square", ",", "'willison_amplitude'", ":", "willison_amplitude", "(", ")", "}", "return", "features" ]
36.272727
0.001627
[ "def all_features():\n", " ''' Returns dictionary of all features in the module\n", "\n", " .. note:: Some of the features (hist4, corr) are relatively expensive to compute\n", " '''\n", " features = {'mean': mean,\n", " 'median': median,\n", " 'gmean': gmean,\n", " 'hmean': hmean,\n", " 'vec_sum': vec_sum,\n", " 'abs_sum': abs_sum,\n", " 'abs_energy': abs_energy,\n", " 'std': std,\n", " 'var': var,\n", " 'variation': variation,\n", " 'min': minimum,\n", " 'max': maximum,\n", " 'skew': skew,\n", " 'kurt': kurt,\n", " 'mean_diff': mean_diff,\n", " 'mean_abs_diff': means_abs_diff,\n", " 'mse': mse,\n", " 'mnx': mean_crossings,\n", " 'hist4': hist(),\n", " 'corr': corr2,\n", " 'mean_abs_value': mean_abs,\n", " 'zero_crossings': zero_crossing(),\n", " 'slope_sign_changes': slope_sign_changes(),\n", " 'waveform_length': waveform_length,\n", " 'emg_var': emg_var,\n", " 'root_mean_square': root_mean_square,\n", " 'willison_amplitude': willison_amplitude()}\n", " return features" ]
[ 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842 ]
33
0.001951
def _append_html(self, html, before_prompt=False): """ Appends HTML at the end of the console buffer. """ self._append_custom(self._insert_html, html, before_prompt)
[ "def", "_append_html", "(", "self", ",", "html", ",", "before_prompt", "=", "False", ")", ":", "self", ".", "_append_custom", "(", "self", ".", "_insert_html", ",", "html", ",", "before_prompt", ")" ]
46.5
0.010582
[ "def _append_html(self, html, before_prompt=False):\n", " \"\"\" Appends HTML at the end of the console buffer.\n", " \"\"\"\n", " self._append_custom(self._insert_html, html, before_prompt)" ]
[ 0, 0.01694915254237288, 0, 0.014925373134328358 ]
4
0.007969
def format_rangefield_nodes(field_name, field, field_id, state, lineno): """Create a section node that documents a RangeField config field. Parameters ---------- field_name : `str` Name of the configuration field (the attribute name of on the config class). field : ``lsst.pex.config.RangeField`` A configuration field. field_id : `str` Unique identifier for this field. This is used as the id and name of the section node. with a -section suffix state : ``docutils.statemachine.State`` Usually the directive's ``state`` attribute. lineno (`int`) Usually the directive's ``lineno`` attribute. Returns ------- ``docutils.nodes.section`` Section containing documentation nodes for the RangeField. """ # Field type field_type_item = nodes.definition_list_item() field_type_item.append(nodes.term(text="Field type")) field_type_item_content = nodes.definition() field_type_item_content_p = nodes.paragraph() field_type_item_content_p += make_python_xref_nodes_for_type( field.dtype, state, hide_namespace=False)[0].children[0] field_type_item_content_p += nodes.Text(' ', ' ') field_type_item_content_p += make_python_xref_nodes_for_type( type(field), state, hide_namespace=True)[0].children[0] if field.optional: field_type_item_content_p += nodes.Text(' (optional)', ' (optional)') field_type_item_content += field_type_item_content_p field_type_item += field_type_item_content # Format definition list item for the range range_node = nodes.definition_list_item() range_node += nodes.term(text='Range') range_node_def = nodes.definition() range_node_def += nodes.paragraph(text=field.rangeString) range_node += range_node_def # Definition list for key-value metadata dl = nodes.definition_list() dl += create_default_item_node(field, state) dl += field_type_item dl += range_node # Doc for this field, parsed as rst desc_node = create_description_node(field, state) # Title for configuration field title = create_title_node(field_name, field, field_id, state, lineno) return [title, dl, desc_node]
[ "def", "format_rangefield_nodes", "(", "field_name", ",", "field", ",", "field_id", ",", "state", ",", "lineno", ")", ":", "# Field type", "field_type_item", "=", "nodes", ".", "definition_list_item", "(", ")", "field_type_item", ".", "append", "(", "nodes", ".", "term", "(", "text", "=", "\"Field type\"", ")", ")", "field_type_item_content", "=", "nodes", ".", "definition", "(", ")", "field_type_item_content_p", "=", "nodes", ".", "paragraph", "(", ")", "field_type_item_content_p", "+=", "make_python_xref_nodes_for_type", "(", "field", ".", "dtype", ",", "state", ",", "hide_namespace", "=", "False", ")", "[", "0", "]", ".", "children", "[", "0", "]", "field_type_item_content_p", "+=", "nodes", ".", "Text", "(", "' '", ",", "' '", ")", "field_type_item_content_p", "+=", "make_python_xref_nodes_for_type", "(", "type", "(", "field", ")", ",", "state", ",", "hide_namespace", "=", "True", ")", "[", "0", "]", ".", "children", "[", "0", "]", "if", "field", ".", "optional", ":", "field_type_item_content_p", "+=", "nodes", ".", "Text", "(", "' (optional)'", ",", "' (optional)'", ")", "field_type_item_content", "+=", "field_type_item_content_p", "field_type_item", "+=", "field_type_item_content", "# Format definition list item for the range", "range_node", "=", "nodes", ".", "definition_list_item", "(", ")", "range_node", "+=", "nodes", ".", "term", "(", "text", "=", "'Range'", ")", "range_node_def", "=", "nodes", ".", "definition", "(", ")", "range_node_def", "+=", "nodes", ".", "paragraph", "(", "text", "=", "field", ".", "rangeString", ")", "range_node", "+=", "range_node_def", "# Definition list for key-value metadata", "dl", "=", "nodes", ".", "definition_list", "(", ")", "dl", "+=", "create_default_item_node", "(", "field", ",", "state", ")", "dl", "+=", "field_type_item", "dl", "+=", "range_node", "# Doc for this field, parsed as rst", "desc_node", "=", "create_description_node", "(", "field", ",", "state", ")", "# Title for configuration field", "title", "=", "create_title_node", "(", "field_name", ",", "field", ",", "field_id", ",", "state", ",", "lineno", ")", "return", "[", "title", ",", "dl", ",", "desc_node", "]" ]
35.758065
0.000439
[ "def format_rangefield_nodes(field_name, field, field_id, state, lineno):\n", " \"\"\"Create a section node that documents a RangeField config field.\n", "\n", " Parameters\n", " ----------\n", " field_name : `str`\n", " Name of the configuration field (the attribute name of on the config\n", " class).\n", " field : ``lsst.pex.config.RangeField``\n", " A configuration field.\n", " field_id : `str`\n", " Unique identifier for this field. This is used as the id and name of\n", " the section node. with a -section suffix\n", " state : ``docutils.statemachine.State``\n", " Usually the directive's ``state`` attribute.\n", " lineno (`int`)\n", " Usually the directive's ``lineno`` attribute.\n", "\n", " Returns\n", " -------\n", " ``docutils.nodes.section``\n", " Section containing documentation nodes for the RangeField.\n", " \"\"\"\n", " # Field type\n", " field_type_item = nodes.definition_list_item()\n", " field_type_item.append(nodes.term(text=\"Field type\"))\n", " field_type_item_content = nodes.definition()\n", " field_type_item_content_p = nodes.paragraph()\n", " field_type_item_content_p += make_python_xref_nodes_for_type(\n", " field.dtype,\n", " state,\n", " hide_namespace=False)[0].children[0]\n", " field_type_item_content_p += nodes.Text(' ', ' ')\n", " field_type_item_content_p += make_python_xref_nodes_for_type(\n", " type(field),\n", " state,\n", " hide_namespace=True)[0].children[0]\n", " if field.optional:\n", " field_type_item_content_p += nodes.Text(' (optional)', ' (optional)')\n", " field_type_item_content += field_type_item_content_p\n", " field_type_item += field_type_item_content\n", "\n", " # Format definition list item for the range\n", " range_node = nodes.definition_list_item()\n", " range_node += nodes.term(text='Range')\n", " range_node_def = nodes.definition()\n", " range_node_def += nodes.paragraph(text=field.rangeString)\n", " range_node += range_node_def\n", "\n", " # Definition list for key-value metadata\n", " dl = nodes.definition_list()\n", " dl += create_default_item_node(field, state)\n", " dl += field_type_item\n", " dl += range_node\n", "\n", " # Doc for this field, parsed as rst\n", " desc_node = create_description_node(field, state)\n", "\n", " # Title for configuration field\n", " title = create_title_node(field_name, field, field_id, state, lineno)\n", "\n", " return [title, dl, desc_node]" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.030303030303030304 ]
62
0.000489
def _wrapped_call(wrap_controller, func): """ Wrap calling to a function with a generator which needs to yield exactly once. The yield point will trigger calling the wrapped function and return its ``_Result`` to the yield point. The generator then needs to finish (raise StopIteration) in order for the wrapped call to complete. """ try: next(wrap_controller) # first yield except StopIteration: _raise_wrapfail(wrap_controller, "did not yield") call_outcome = _Result.from_call(func) try: wrap_controller.send(call_outcome) _raise_wrapfail(wrap_controller, "has second yield") except StopIteration: pass return call_outcome.get_result()
[ "def", "_wrapped_call", "(", "wrap_controller", ",", "func", ")", ":", "try", ":", "next", "(", "wrap_controller", ")", "# first yield", "except", "StopIteration", ":", "_raise_wrapfail", "(", "wrap_controller", ",", "\"did not yield\"", ")", "call_outcome", "=", "_Result", ".", "from_call", "(", "func", ")", "try", ":", "wrap_controller", ".", "send", "(", "call_outcome", ")", "_raise_wrapfail", "(", "wrap_controller", ",", "\"has second yield\"", ")", "except", "StopIteration", ":", "pass", "return", "call_outcome", ".", "get_result", "(", ")" ]
41.705882
0.001379
[ "def _wrapped_call(wrap_controller, func):\n", " \"\"\" Wrap calling to a function with a generator which needs to yield\n", " exactly once. The yield point will trigger calling the wrapped function\n", " and return its ``_Result`` to the yield point. The generator then needs\n", " to finish (raise StopIteration) in order for the wrapped call to complete.\n", " \"\"\"\n", " try:\n", " next(wrap_controller) # first yield\n", " except StopIteration:\n", " _raise_wrapfail(wrap_controller, \"did not yield\")\n", " call_outcome = _Result.from_call(func)\n", " try:\n", " wrap_controller.send(call_outcome)\n", " _raise_wrapfail(wrap_controller, \"has second yield\")\n", " except StopIteration:\n", " pass\n", " return call_outcome.get_result()" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.027777777777777776 ]
17
0.001634
def container_delete(name, remote_addr=None, cert=None, key=None, verify_cert=True): ''' Delete a container name : Name of the container to delete remote_addr : An URL to a remote Server, you also have to give cert and key if you provide remote_addr and its a TCP Address! Examples: https://myserver.lan:8443 /var/lib/mysocket.sock cert : PEM Formatted SSL Certificate. Examples: ~/.config/lxc/client.crt key : PEM Formatted SSL Key. Examples: ~/.config/lxc/client.key verify_cert : True Wherever to verify the cert, this is by default True but in the most cases you want to set it off as LXD normaly uses self-signed certificates. ''' container = container_get( name, remote_addr, cert, key, verify_cert, _raw=True ) container.delete(wait=True) return True
[ "def", "container_delete", "(", "name", ",", "remote_addr", "=", "None", ",", "cert", "=", "None", ",", "key", "=", "None", ",", "verify_cert", "=", "True", ")", ":", "container", "=", "container_get", "(", "name", ",", "remote_addr", ",", "cert", ",", "key", ",", "verify_cert", ",", "_raw", "=", "True", ")", "container", ".", "delete", "(", "wait", "=", "True", ")", "return", "True" ]
24.763158
0.001022
[ "def container_delete(name, remote_addr=None,\n", " cert=None, key=None, verify_cert=True):\n", " '''\n", " Delete a container\n", "\n", " name :\n", " Name of the container to delete\n", "\n", " remote_addr :\n", " An URL to a remote Server, you also have to give cert and key if\n", " you provide remote_addr and its a TCP Address!\n", "\n", " Examples:\n", " https://myserver.lan:8443\n", " /var/lib/mysocket.sock\n", "\n", " cert :\n", " PEM Formatted SSL Certificate.\n", "\n", " Examples:\n", " ~/.config/lxc/client.crt\n", "\n", " key :\n", " PEM Formatted SSL Key.\n", "\n", " Examples:\n", " ~/.config/lxc/client.key\n", "\n", " verify_cert : True\n", " Wherever to verify the cert, this is by default True\n", " but in the most cases you want to set it off as LXD\n", " normaly uses self-signed certificates.\n", " '''\n", " container = container_get(\n", " name, remote_addr, cert, key, verify_cert, _raw=True\n", " )\n", " container.delete(wait=True)\n", " return True" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.06666666666666667 ]
38
0.001754
def set_timeout(self, timeout): """ Set Screen Timeout Duration """ if timeout > 0: self.timeout = timeout self.server.request("screen_set %s timeout %i" % (self.ref, (self.timeout * 8)))
[ "def", "set_timeout", "(", "self", ",", "timeout", ")", ":", "if", "timeout", ">", "0", ":", "self", ".", "timeout", "=", "timeout", "self", ".", "server", ".", "request", "(", "\"screen_set %s timeout %i\"", "%", "(", "self", ".", "ref", ",", "(", "self", ".", "timeout", "*", "8", ")", ")", ")" ]
37.166667
0.013158
[ "def set_timeout(self, timeout):\n", " \"\"\" Set Screen Timeout Duration \"\"\"\n", "\n", " if timeout > 0:\n", " self.timeout = timeout\n", " self.server.request(\"screen_set %s timeout %i\" % (self.ref, (self.timeout * 8)))" ]
[ 0, 0.022727272727272728, 0, 0, 0, 0.021739130434782608 ]
6
0.007411
def is_dir(value): """ This function checks whether given path as argument exists. :param str value: Assumed directory path :rtype: str :return: If given value is valid, retuning given value. """ value = os.path.expanduser(value) value = os.path.expandvars(value) value = os.path.abspath(value) if os.path.exists(value): if os.path.isdir(value): if os.access(value, os.R_OK): return value else: err_message = ('{path}: Permission denied.' ''.format(path=value) ) raise validate.VdtValueError(err_message) else: err_message = ('{value} is file.' ''.format(value=value) ) raise validate.VdtTypeError(err_message) else: err_message = ('{path}: No such file or directory.' ''.format(path=value) ) raise validate.VdtValueError(err_message)
[ "def", "is_dir", "(", "value", ")", ":", "value", "=", "os", ".", "path", ".", "expanduser", "(", "value", ")", "value", "=", "os", ".", "path", ".", "expandvars", "(", "value", ")", "value", "=", "os", ".", "path", ".", "abspath", "(", "value", ")", "if", "os", ".", "path", ".", "exists", "(", "value", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "value", ")", ":", "if", "os", ".", "access", "(", "value", ",", "os", ".", "R_OK", ")", ":", "return", "value", "else", ":", "err_message", "=", "(", "'{path}: Permission denied.'", "''", ".", "format", "(", "path", "=", "value", ")", ")", "raise", "validate", ".", "VdtValueError", "(", "err_message", ")", "else", ":", "err_message", "=", "(", "'{value} is file.'", "''", ".", "format", "(", "value", "=", "value", ")", ")", "raise", "validate", ".", "VdtTypeError", "(", "err_message", ")", "else", ":", "err_message", "=", "(", "'{path}: No such file or directory.'", "''", ".", "format", "(", "path", "=", "value", ")", ")", "raise", "validate", ".", "VdtValueError", "(", "err_message", ")" ]
33.548387
0.000935
[ "def is_dir(value):\n", " \"\"\"\n", " This function checks whether given path as argument exists.\n", " :param str value: Assumed directory path\n", " :rtype: str\n", " :return: If given value is valid, retuning given value.\n", " \"\"\"\n", " value = os.path.expanduser(value)\n", " value = os.path.expandvars(value)\n", " value = os.path.abspath(value)\n", "\n", " if os.path.exists(value):\n", " if os.path.isdir(value):\n", " if os.access(value, os.R_OK):\n", " return value\n", "\n", " else:\n", " err_message = ('{path}: Permission denied.'\n", " ''.format(path=value)\n", " )\n", " raise validate.VdtValueError(err_message)\n", " else:\n", " err_message = ('{value} is file.'\n", " ''.format(value=value)\n", " )\n", " raise validate.VdtTypeError(err_message)\n", " else:\n", " err_message = ('{path}: No such file or directory.'\n", " ''.format(path=value)\n", " )\n", " raise validate.VdtValueError(err_message)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02040816326530612 ]
31
0.000658
def present( name, user=None, fingerprint=None, key=None, port=None, enc=None, config=None, hash_known_hosts=True, timeout=5, fingerprint_hash_type=None): ''' Verifies that the specified host is known by the specified user On many systems, specifically those running with openssh 4 or older, the ``enc`` option must be set, only openssh 5 and above can detect the key type. name The name of the remote host (e.g. "github.com") Note that only a single hostname is supported, if foo.example.com and bar.example.com have the same host you will need two separate Salt States to represent them. user The user who owns the ssh authorized keys file to modify fingerprint The fingerprint of the key which must be present in the known_hosts file (optional if key specified) key The public key which must be present in the known_hosts file (optional if fingerprint specified) port optional parameter, port which will be used to when requesting the public key from the remote host, defaults to port 22. enc Defines what type of key is being used, can be ed25519, ecdsa ssh-rsa or ssh-dss config The location of the authorized keys file relative to the user's home directory, defaults to ".ssh/known_hosts". If no user is specified, defaults to "/etc/ssh/ssh_known_hosts". If present, must be an absolute path when a user is not specified. hash_known_hosts : True Hash all hostnames and addresses in the known hosts file. timeout : int Set the timeout for connection attempts. If ``timeout`` seconds have elapsed since a connection was initiated to a host or since the last time anything was read from that host, then the connection is closed and the host in question considered unavailable. Default is 5 seconds. .. versionadded:: 2016.3.0 fingerprint_hash_type The public key fingerprint hash type that the public key fingerprint was originally hashed with. This defaults to ``sha256`` if not specified. .. versionadded:: 2016.11.4 .. versionchanged:: 2017.7.0: default changed from ``md5`` to ``sha256`` ''' ret = {'name': name, 'changes': {}, 'result': None if __opts__['test'] else True, 'comment': ''} if not user: config = config or '/etc/ssh/ssh_known_hosts' else: config = config or '.ssh/known_hosts' if not user and not os.path.isabs(config): comment = 'If not specifying a "user", specify an absolute "config".' ret['result'] = False return dict(ret, comment=comment) if __opts__['test']: if key and fingerprint: comment = 'Specify either "key" or "fingerprint", not both.' ret['result'] = False return dict(ret, comment=comment) elif key and not enc: comment = 'Required argument "enc" if using "key" argument.' ret['result'] = False return dict(ret, comment=comment) try: result = __salt__['ssh.check_known_host'](user, name, key=key, fingerprint=fingerprint, config=config, port=port, fingerprint_hash_type=fingerprint_hash_type) except CommandNotFoundError as err: ret['result'] = False ret['comment'] = 'ssh.check_known_host error: {0}'.format(err) return ret if result == 'exists': comment = 'Host {0} is already in {1}'.format(name, config) ret['result'] = True return dict(ret, comment=comment) elif result == 'add': comment = 'Key for {0} is set to be added to {1}'.format(name, config) return dict(ret, comment=comment) else: # 'update' comment = 'Key for {0} is set to be updated in {1}'.format(name, config) return dict(ret, comment=comment) result = __salt__['ssh.set_known_host']( user=user, hostname=name, fingerprint=fingerprint, key=key, port=port, enc=enc, config=config, hash_known_hosts=hash_known_hosts, timeout=timeout, fingerprint_hash_type=fingerprint_hash_type) if result['status'] == 'exists': return dict(ret, comment='{0} already exists in {1}'.format(name, config)) elif result['status'] == 'error': return dict(ret, result=False, comment=result['error']) else: # 'updated' if key: new_key = result['new'][0]['key'] return dict(ret, changes={'old': result['old'], 'new': result['new']}, comment='{0}\'s key saved to {1} (key: {2})'.format( name, config, new_key)) else: fingerprint = result['new'][0]['fingerprint'] return dict(ret, changes={'old': result['old'], 'new': result['new']}, comment='{0}\'s key saved to {1} (fingerprint: {2})'.format( name, config, fingerprint))
[ "def", "present", "(", "name", ",", "user", "=", "None", ",", "fingerprint", "=", "None", ",", "key", "=", "None", ",", "port", "=", "None", ",", "enc", "=", "None", ",", "config", "=", "None", ",", "hash_known_hosts", "=", "True", ",", "timeout", "=", "5", ",", "fingerprint_hash_type", "=", "None", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "None", "if", "__opts__", "[", "'test'", "]", "else", "True", ",", "'comment'", ":", "''", "}", "if", "not", "user", ":", "config", "=", "config", "or", "'/etc/ssh/ssh_known_hosts'", "else", ":", "config", "=", "config", "or", "'.ssh/known_hosts'", "if", "not", "user", "and", "not", "os", ".", "path", ".", "isabs", "(", "config", ")", ":", "comment", "=", "'If not specifying a \"user\", specify an absolute \"config\".'", "ret", "[", "'result'", "]", "=", "False", "return", "dict", "(", "ret", ",", "comment", "=", "comment", ")", "if", "__opts__", "[", "'test'", "]", ":", "if", "key", "and", "fingerprint", ":", "comment", "=", "'Specify either \"key\" or \"fingerprint\", not both.'", "ret", "[", "'result'", "]", "=", "False", "return", "dict", "(", "ret", ",", "comment", "=", "comment", ")", "elif", "key", "and", "not", "enc", ":", "comment", "=", "'Required argument \"enc\" if using \"key\" argument.'", "ret", "[", "'result'", "]", "=", "False", "return", "dict", "(", "ret", ",", "comment", "=", "comment", ")", "try", ":", "result", "=", "__salt__", "[", "'ssh.check_known_host'", "]", "(", "user", ",", "name", ",", "key", "=", "key", ",", "fingerprint", "=", "fingerprint", ",", "config", "=", "config", ",", "port", "=", "port", ",", "fingerprint_hash_type", "=", "fingerprint_hash_type", ")", "except", "CommandNotFoundError", "as", "err", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'ssh.check_known_host error: {0}'", ".", "format", "(", "err", ")", "return", "ret", "if", "result", "==", "'exists'", ":", "comment", "=", "'Host {0} is already in {1}'", ".", "format", "(", "name", ",", "config", ")", "ret", "[", "'result'", "]", "=", "True", "return", "dict", "(", "ret", ",", "comment", "=", "comment", ")", "elif", "result", "==", "'add'", ":", "comment", "=", "'Key for {0} is set to be added to {1}'", ".", "format", "(", "name", ",", "config", ")", "return", "dict", "(", "ret", ",", "comment", "=", "comment", ")", "else", ":", "# 'update'", "comment", "=", "'Key for {0} is set to be updated in {1}'", ".", "format", "(", "name", ",", "config", ")", "return", "dict", "(", "ret", ",", "comment", "=", "comment", ")", "result", "=", "__salt__", "[", "'ssh.set_known_host'", "]", "(", "user", "=", "user", ",", "hostname", "=", "name", ",", "fingerprint", "=", "fingerprint", ",", "key", "=", "key", ",", "port", "=", "port", ",", "enc", "=", "enc", ",", "config", "=", "config", ",", "hash_known_hosts", "=", "hash_known_hosts", ",", "timeout", "=", "timeout", ",", "fingerprint_hash_type", "=", "fingerprint_hash_type", ")", "if", "result", "[", "'status'", "]", "==", "'exists'", ":", "return", "dict", "(", "ret", ",", "comment", "=", "'{0} already exists in {1}'", ".", "format", "(", "name", ",", "config", ")", ")", "elif", "result", "[", "'status'", "]", "==", "'error'", ":", "return", "dict", "(", "ret", ",", "result", "=", "False", ",", "comment", "=", "result", "[", "'error'", "]", ")", "else", ":", "# 'updated'", "if", "key", ":", "new_key", "=", "result", "[", "'new'", "]", "[", "0", "]", "[", "'key'", "]", "return", "dict", "(", "ret", ",", "changes", "=", "{", "'old'", ":", "result", "[", "'old'", "]", ",", "'new'", ":", "result", "[", "'new'", "]", "}", ",", "comment", "=", "'{0}\\'s key saved to {1} (key: {2})'", ".", "format", "(", "name", ",", "config", ",", "new_key", ")", ")", "else", ":", "fingerprint", "=", "result", "[", "'new'", "]", "[", "0", "]", "[", "'fingerprint'", "]", "return", "dict", "(", "ret", ",", "changes", "=", "{", "'old'", ":", "result", "[", "'old'", "]", ",", "'new'", ":", "result", "[", "'new'", "]", "}", ",", "comment", "=", "'{0}\\'s key saved to {1} (fingerprint: {2})'", ".", "format", "(", "name", ",", "config", ",", "fingerprint", ")", ")" ]
37.666667
0.00176
[ "def present(\n", " name,\n", " user=None,\n", " fingerprint=None,\n", " key=None,\n", " port=None,\n", " enc=None,\n", " config=None,\n", " hash_known_hosts=True,\n", " timeout=5,\n", " fingerprint_hash_type=None):\n", " '''\n", " Verifies that the specified host is known by the specified user\n", "\n", " On many systems, specifically those running with openssh 4 or older, the\n", " ``enc`` option must be set, only openssh 5 and above can detect the key\n", " type.\n", "\n", " name\n", " The name of the remote host (e.g. \"github.com\")\n", " Note that only a single hostname is supported, if foo.example.com and\n", " bar.example.com have the same host you will need two separate Salt\n", " States to represent them.\n", "\n", " user\n", " The user who owns the ssh authorized keys file to modify\n", "\n", " fingerprint\n", " The fingerprint of the key which must be present in the known_hosts\n", " file (optional if key specified)\n", "\n", " key\n", " The public key which must be present in the known_hosts file\n", " (optional if fingerprint specified)\n", "\n", " port\n", " optional parameter, port which will be used to when requesting the\n", " public key from the remote host, defaults to port 22.\n", "\n", " enc\n", " Defines what type of key is being used, can be ed25519, ecdsa ssh-rsa\n", " or ssh-dss\n", "\n", " config\n", " The location of the authorized keys file relative to the user's home\n", " directory, defaults to \".ssh/known_hosts\". If no user is specified,\n", " defaults to \"/etc/ssh/ssh_known_hosts\". If present, must be an\n", " absolute path when a user is not specified.\n", "\n", " hash_known_hosts : True\n", " Hash all hostnames and addresses in the known hosts file.\n", "\n", " timeout : int\n", " Set the timeout for connection attempts. If ``timeout`` seconds have\n", " elapsed since a connection was initiated to a host or since the last\n", " time anything was read from that host, then the connection is closed\n", " and the host in question considered unavailable. Default is 5 seconds.\n", "\n", " .. versionadded:: 2016.3.0\n", "\n", " fingerprint_hash_type\n", " The public key fingerprint hash type that the public key fingerprint\n", " was originally hashed with. This defaults to ``sha256`` if not specified.\n", "\n", " .. versionadded:: 2016.11.4\n", " .. versionchanged:: 2017.7.0: default changed from ``md5`` to ``sha256``\n", "\n", " '''\n", " ret = {'name': name,\n", " 'changes': {},\n", " 'result': None if __opts__['test'] else True,\n", " 'comment': ''}\n", "\n", " if not user:\n", " config = config or '/etc/ssh/ssh_known_hosts'\n", " else:\n", " config = config or '.ssh/known_hosts'\n", "\n", " if not user and not os.path.isabs(config):\n", " comment = 'If not specifying a \"user\", specify an absolute \"config\".'\n", " ret['result'] = False\n", " return dict(ret, comment=comment)\n", "\n", " if __opts__['test']:\n", " if key and fingerprint:\n", " comment = 'Specify either \"key\" or \"fingerprint\", not both.'\n", " ret['result'] = False\n", " return dict(ret, comment=comment)\n", " elif key and not enc:\n", " comment = 'Required argument \"enc\" if using \"key\" argument.'\n", " ret['result'] = False\n", " return dict(ret, comment=comment)\n", "\n", " try:\n", " result = __salt__['ssh.check_known_host'](user, name,\n", " key=key,\n", " fingerprint=fingerprint,\n", " config=config,\n", " port=port,\n", " fingerprint_hash_type=fingerprint_hash_type)\n", " except CommandNotFoundError as err:\n", " ret['result'] = False\n", " ret['comment'] = 'ssh.check_known_host error: {0}'.format(err)\n", " return ret\n", "\n", " if result == 'exists':\n", " comment = 'Host {0} is already in {1}'.format(name, config)\n", " ret['result'] = True\n", " return dict(ret, comment=comment)\n", " elif result == 'add':\n", " comment = 'Key for {0} is set to be added to {1}'.format(name,\n", " config)\n", " return dict(ret, comment=comment)\n", " else: # 'update'\n", " comment = 'Key for {0} is set to be updated in {1}'.format(name,\n", " config)\n", " return dict(ret, comment=comment)\n", "\n", " result = __salt__['ssh.set_known_host'](\n", " user=user,\n", " hostname=name,\n", " fingerprint=fingerprint,\n", " key=key,\n", " port=port,\n", " enc=enc,\n", " config=config,\n", " hash_known_hosts=hash_known_hosts,\n", " timeout=timeout,\n", " fingerprint_hash_type=fingerprint_hash_type)\n", " if result['status'] == 'exists':\n", " return dict(ret,\n", " comment='{0} already exists in {1}'.format(name, config))\n", " elif result['status'] == 'error':\n", " return dict(ret, result=False, comment=result['error'])\n", " else: # 'updated'\n", " if key:\n", " new_key = result['new'][0]['key']\n", " return dict(ret,\n", " changes={'old': result['old'], 'new': result['new']},\n", " comment='{0}\\'s key saved to {1} (key: {2})'.format(\n", " name, config, new_key))\n", " else:\n", " fingerprint = result['new'][0]['fingerprint']\n", " return dict(ret,\n", " changes={'old': result['old'], 'new': result['new']},\n", " comment='{0}\\'s key saved to {1} (fingerprint: {2})'.format(\n", " name, config, fingerprint))" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010101010101010102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012987012987012988, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013513513513513514, 0.0136986301369863, 0, 0, 0, 0, 0.013513513513513514, 0.024691358024691357, 0.017857142857142856 ]
147
0.00089
def onerror(self, emitter, message, source, lineno, colno): """ WebPage Event that occurs on webpage errors """ self._log.debug("""App.onerror event occurred in webpage: \nMESSAGE:%s\nSOURCE:%s\nLINENO:%s\nCOLNO:%s\n"""%(message, source, lineno, colno))
[ "def", "onerror", "(", "self", ",", "emitter", ",", "message", ",", "source", ",", "lineno", ",", "colno", ")", ":", "self", ".", "_log", ".", "debug", "(", "\"\"\"App.onerror event occurred in webpage: \n \\nMESSAGE:%s\\nSOURCE:%s\\nLINENO:%s\\nCOLNO:%s\\n\"\"\"", "%", "(", "message", ",", "source", ",", "lineno", ",", "colno", ")", ")" ]
57.2
0.017241
[ "def onerror(self, emitter, message, source, lineno, colno):\n", " \"\"\" WebPage Event that occurs on webpage errors\n", " \"\"\"\n", " self._log.debug(\"\"\"App.onerror event occurred in webpage: \n", " \\nMESSAGE:%s\\nSOURCE:%s\\nLINENO:%s\\nCOLNO:%s\\n\"\"\"%(message, source, lineno, colno))" ]
[ 0, 0.017857142857142856, 0, 0.014925373134328358, 0.031578947368421054 ]
5
0.012872
def release_client(self, cb): """ Return a Connection object to the pool :param Connection cb: the client to release """ cb.stop_using() self._q.put(cb, True)
[ "def", "release_client", "(", "self", ",", "cb", ")", ":", "cb", ".", "stop_using", "(", ")", "self", ".", "_q", ".", "put", "(", "cb", ",", "True", ")" ]
28.571429
0.009709
[ "def release_client(self, cb):\n", " \"\"\"\n", " Return a Connection object to the pool\n", " :param Connection cb: the client to release\n", " \"\"\"\n", " cb.stop_using()\n", " self._q.put(cb, True)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0.034482758620689655 ]
7
0.016831
def angularjs(parser, token): """ Conditionally switch between AngularJS and Django variable expansion for ``{{`` and ``}}`` keeping Django's expansion for ``{%`` and ``%}`` Usage:: {% angularjs 1 %} or simply {% angularjs %} {% process variables through the AngularJS template engine %} {% endangularjs %} {% angularjs 0 %} {% process variables through the Django template engine %} {% endangularjs %} Instead of 0 and 1, it is possible to use a context variable. """ bits = token.contents.split() if len(bits) < 2: bits.append('1') values = [parser.compile_filter(bit) for bit in bits[1:]] django_nodelist = parser.parse(('endangularjs',)) angular_nodelist = NodeList() for node in django_nodelist: # convert all occurrences of VariableNode into a TextNode using the # AngularJS double curly bracket notation if isinstance(node, VariableNode): # convert Django's array notation into JS array notation tokens = node.filter_expression.token.split('.') token = tokens[0] for part in tokens[1:]: if part.isdigit(): token += '[%s]' % part else: token += '.%s' % part node = TextNode('{{ %s }}' % token) angular_nodelist.append(node) parser.delete_first_token() return AngularJsNode(django_nodelist, angular_nodelist, values[0])
[ "def", "angularjs", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "contents", ".", "split", "(", ")", "if", "len", "(", "bits", ")", "<", "2", ":", "bits", ".", "append", "(", "'1'", ")", "values", "=", "[", "parser", ".", "compile_filter", "(", "bit", ")", "for", "bit", "in", "bits", "[", "1", ":", "]", "]", "django_nodelist", "=", "parser", ".", "parse", "(", "(", "'endangularjs'", ",", ")", ")", "angular_nodelist", "=", "NodeList", "(", ")", "for", "node", "in", "django_nodelist", ":", "# convert all occurrences of VariableNode into a TextNode using the", "# AngularJS double curly bracket notation", "if", "isinstance", "(", "node", ",", "VariableNode", ")", ":", "# convert Django's array notation into JS array notation", "tokens", "=", "node", ".", "filter_expression", ".", "token", ".", "split", "(", "'.'", ")", "token", "=", "tokens", "[", "0", "]", "for", "part", "in", "tokens", "[", "1", ":", "]", ":", "if", "part", ".", "isdigit", "(", ")", ":", "token", "+=", "'[%s]'", "%", "part", "else", ":", "token", "+=", "'.%s'", "%", "part", "node", "=", "TextNode", "(", "'{{ %s }}'", "%", "token", ")", "angular_nodelist", ".", "append", "(", "node", ")", "parser", ".", "delete_first_token", "(", ")", "return", "AngularJsNode", "(", "django_nodelist", ",", "angular_nodelist", ",", "values", "[", "0", "]", ")" ]
37.897436
0.001319
[ "def angularjs(parser, token):\n", " \"\"\"\n", " Conditionally switch between AngularJS and Django variable expansion for ``{{`` and ``}}``\n", " keeping Django's expansion for ``{%`` and ``%}``\n", "\n", " Usage::\n", "\n", " {% angularjs 1 %} or simply {% angularjs %}\n", " {% process variables through the AngularJS template engine %}\n", " {% endangularjs %}\n", "\n", " {% angularjs 0 %}\n", " {% process variables through the Django template engine %}\n", " {% endangularjs %}\n", "\n", " Instead of 0 and 1, it is possible to use a context variable.\n", " \"\"\"\n", " bits = token.contents.split()\n", " if len(bits) < 2:\n", " bits.append('1')\n", " values = [parser.compile_filter(bit) for bit in bits[1:]]\n", " django_nodelist = parser.parse(('endangularjs',))\n", " angular_nodelist = NodeList()\n", " for node in django_nodelist:\n", " # convert all occurrences of VariableNode into a TextNode using the\n", " # AngularJS double curly bracket notation\n", " if isinstance(node, VariableNode):\n", " # convert Django's array notation into JS array notation\n", " tokens = node.filter_expression.token.split('.')\n", " token = tokens[0]\n", " for part in tokens[1:]:\n", " if part.isdigit():\n", " token += '[%s]' % part\n", " else:\n", " token += '.%s' % part\n", " node = TextNode('{{ %s }}' % token)\n", " angular_nodelist.append(node)\n", " parser.delete_first_token()\n", " return AngularJsNode(django_nodelist, angular_nodelist, values[0])" ]
[ 0, 0, 0.010526315789473684, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014285714285714285 ]
39
0.000636
def reduce_dict(input_dict, average=True): """ Args: input_dict (dict): all the values will be reduced average (bool): whether to do average or sum Reduce the values in the dictionary from all processes so that process with rank 0 has the averaged results. Returns a dict with the same fields as input_dict, after reduction. """ world_size = get_world_size() if world_size < 2: return input_dict with torch.no_grad(): names = [] values = [] # sort the keys so that they are consistent across processes for k in sorted(input_dict.keys()): names.append(k) values.append(input_dict[k]) values = torch.stack(values, dim=0) dist.reduce(values, dst=0) if dist.get_rank() == 0 and average: # only main process gets accumulated, so only divide by # world_size in this case values /= world_size reduced_dict = {k: v for k, v in zip(names, values)} return reduced_dict
[ "def", "reduce_dict", "(", "input_dict", ",", "average", "=", "True", ")", ":", "world_size", "=", "get_world_size", "(", ")", "if", "world_size", "<", "2", ":", "return", "input_dict", "with", "torch", ".", "no_grad", "(", ")", ":", "names", "=", "[", "]", "values", "=", "[", "]", "# sort the keys so that they are consistent across processes", "for", "k", "in", "sorted", "(", "input_dict", ".", "keys", "(", ")", ")", ":", "names", ".", "append", "(", "k", ")", "values", ".", "append", "(", "input_dict", "[", "k", "]", ")", "values", "=", "torch", ".", "stack", "(", "values", ",", "dim", "=", "0", ")", "dist", ".", "reduce", "(", "values", ",", "dst", "=", "0", ")", "if", "dist", ".", "get_rank", "(", ")", "==", "0", "and", "average", ":", "# only main process gets accumulated, so only divide by", "# world_size in this case", "values", "/=", "world_size", "reduced_dict", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "zip", "(", "names", ",", "values", ")", "}", "return", "reduced_dict" ]
37.777778
0.001912
[ "def reduce_dict(input_dict, average=True):\n", " \"\"\"\n", " Args:\n", " input_dict (dict): all the values will be reduced\n", " average (bool): whether to do average or sum\n", " Reduce the values in the dictionary from all processes so that process with rank\n", " 0 has the averaged results. Returns a dict with the same fields as\n", " input_dict, after reduction.\n", " \"\"\"\n", " world_size = get_world_size()\n", " if world_size < 2:\n", " return input_dict\n", " with torch.no_grad():\n", " names = []\n", " values = []\n", " # sort the keys so that they are consistent across processes\n", " for k in sorted(input_dict.keys()):\n", " names.append(k)\n", " values.append(input_dict[k])\n", " values = torch.stack(values, dim=0)\n", " dist.reduce(values, dst=0)\n", " if dist.get_rank() == 0 and average:\n", " # only main process gets accumulated, so only divide by\n", " # world_size in this case\n", " values /= world_size\n", " reduced_dict = {k: v for k, v in zip(names, values)}\n", " return reduced_dict" ]
[ 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216 ]
27
0.002046
def best_sell_3(self): """三日均價由上往下 """ return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
[ "def", "best_sell_3", "(", "self", ")", ":", "return", "self", ".", "data", ".", "continuous", "(", "self", ".", "data", ".", "moving_average", "(", "self", ".", "data", ".", "price", ",", "3", ")", ")", "==", "-", "1" ]
34.75
0.021127
[ "def best_sell_3(self):\n", " \"\"\"三日均價由上往下\n", " \"\"\"\n", " return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1" ]
[ 0, 0.05, 0, 0.022988505747126436 ]
4
0.018247
def noise(params, amplitude=1, offset=0): ''' Generate a noise signal :param params: buffer parameters, controls length of signal created :param amplitude: wave amplitude (array or value) :param offset: offset of wave mean from zero (array or value) :return: array of resulting signal ''' amplitude = create_buffer(params, amplitude) offset = create_buffer(params, offset) output = offset + amplitude * (np.random.random(params.length)*2 - 1) return output
[ "def", "noise", "(", "params", ",", "amplitude", "=", "1", ",", "offset", "=", "0", ")", ":", "amplitude", "=", "create_buffer", "(", "params", ",", "amplitude", ")", "offset", "=", "create_buffer", "(", "params", ",", "offset", ")", "output", "=", "offset", "+", "amplitude", "*", "(", "np", ".", "random", ".", "random", "(", "params", ".", "length", ")", "*", "2", "-", "1", ")", "return", "output" ]
40.75
0.002
[ "def noise(params, amplitude=1, offset=0):\n", " '''\n", " Generate a noise signal\n", " :param params: buffer parameters, controls length of signal created\n", " :param amplitude: wave amplitude (array or value)\n", " :param offset: offset of wave mean from zero (array or value)\n", " :return: array of resulting signal\n", " '''\n", " amplitude = create_buffer(params, amplitude)\n", " offset = create_buffer(params, offset)\n", " output = offset + amplitude * (np.random.random(params.length)*2 - 1)\n", " return output" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.058823529411764705 ]
12
0.004902
def line_segment_intersection_2D(p12arg, p34arg): ''' line_segment_intersection((a, b), (c, d)) yields the intersection point between the line passing through points a and b and the line segment that passes from point c to point d. If there is no intersection point, then (numpy.nan, numpy.nan) is returned. ''' (p1,p2) = p12arg (p3,p4) = p34arg pi = np.asarray(line_intersection_2D(p12arg, p34arg)) p3 = np.asarray(p3) u34 = p4 - p3 cfn = lambda px,iis: (px if iis is None or len(px.shape) == 1 or px.shape[1] == len(iis) else px[:,iis]) dfn = lambda a,b: a[0]*b[0] + a[1]*b[1] sfn = lambda a,b: ((a-b) if len(a.shape) == len(b.shape) else (np.transpose([a])-b) if len(a.shape) < len(b.shape) else (a - np.transpose([b]))) fn = lambda px,iis: (1 - ((dfn(cfn(u34,iis), sfn( px, cfn(p3,iis))) > 0) * (dfn(cfn(u34,iis), sfn(cfn(p4,iis), px)) > 0))) if len(pi.shape) == 1: if not np.isfinite(pi[0]): return (np.nan, np.nan) bad = fn(pi, None) return (np.nan, np.nan) if bad else pi else: nonpar = np.where(np.isfinite(pi[0]))[0] bad = fn(cfn(pi, nonpar), nonpar) (xi,yi) = pi bad = nonpar[np.where(bad)[0]] xi[bad] = np.nan yi[bad] = np.nan return (xi,yi)
[ "def", "line_segment_intersection_2D", "(", "p12arg", ",", "p34arg", ")", ":", "(", "p1", ",", "p2", ")", "=", "p12arg", "(", "p3", ",", "p4", ")", "=", "p34arg", "pi", "=", "np", ".", "asarray", "(", "line_intersection_2D", "(", "p12arg", ",", "p34arg", ")", ")", "p3", "=", "np", ".", "asarray", "(", "p3", ")", "u34", "=", "p4", "-", "p3", "cfn", "=", "lambda", "px", ",", "iis", ":", "(", "px", "if", "iis", "is", "None", "or", "len", "(", "px", ".", "shape", ")", "==", "1", "or", "px", ".", "shape", "[", "1", "]", "==", "len", "(", "iis", ")", "else", "px", "[", ":", ",", "iis", "]", ")", "dfn", "=", "lambda", "a", ",", "b", ":", "a", "[", "0", "]", "*", "b", "[", "0", "]", "+", "a", "[", "1", "]", "*", "b", "[", "1", "]", "sfn", "=", "lambda", "a", ",", "b", ":", "(", "(", "a", "-", "b", ")", "if", "len", "(", "a", ".", "shape", ")", "==", "len", "(", "b", ".", "shape", ")", "else", "(", "np", ".", "transpose", "(", "[", "a", "]", ")", "-", "b", ")", "if", "len", "(", "a", ".", "shape", ")", "<", "len", "(", "b", ".", "shape", ")", "else", "(", "a", "-", "np", ".", "transpose", "(", "[", "b", "]", ")", ")", ")", "fn", "=", "lambda", "px", ",", "iis", ":", "(", "1", "-", "(", "(", "dfn", "(", "cfn", "(", "u34", ",", "iis", ")", ",", "sfn", "(", "px", ",", "cfn", "(", "p3", ",", "iis", ")", ")", ")", ">", "0", ")", "*", "(", "dfn", "(", "cfn", "(", "u34", ",", "iis", ")", ",", "sfn", "(", "cfn", "(", "p4", ",", "iis", ")", ",", "px", ")", ")", ">", "0", ")", ")", ")", "if", "len", "(", "pi", ".", "shape", ")", "==", "1", ":", "if", "not", "np", ".", "isfinite", "(", "pi", "[", "0", "]", ")", ":", "return", "(", "np", ".", "nan", ",", "np", ".", "nan", ")", "bad", "=", "fn", "(", "pi", ",", "None", ")", "return", "(", "np", ".", "nan", ",", "np", ".", "nan", ")", "if", "bad", "else", "pi", "else", ":", "nonpar", "=", "np", ".", "where", "(", "np", ".", "isfinite", "(", "pi", "[", "0", "]", ")", ")", "[", "0", "]", "bad", "=", "fn", "(", "cfn", "(", "pi", ",", "nonpar", ")", ",", "nonpar", ")", "(", "xi", ",", "yi", ")", "=", "pi", "bad", "=", "nonpar", "[", "np", ".", "where", "(", "bad", ")", "[", "0", "]", "]", "xi", "[", "bad", "]", "=", "np", ".", "nan", "yi", "[", "bad", "]", "=", "np", ".", "nan", "return", "(", "xi", ",", "yi", ")" ]
45.870968
0.020661
[ "def line_segment_intersection_2D(p12arg, p34arg):\n", " '''\n", " line_segment_intersection((a, b), (c, d)) yields the intersection point between the line\n", " passing through points a and b and the line segment that passes from point c to point d. If\n", " there is no intersection point, then (numpy.nan, numpy.nan) is returned.\n", " '''\n", " (p1,p2) = p12arg\n", " (p3,p4) = p34arg\n", " pi = np.asarray(line_intersection_2D(p12arg, p34arg))\n", " p3 = np.asarray(p3)\n", " u34 = p4 - p3\n", " cfn = lambda px,iis: (px if iis is None or len(px.shape) == 1 or px.shape[1] == len(iis) else\n", " px[:,iis])\n", " dfn = lambda a,b: a[0]*b[0] + a[1]*b[1]\n", " sfn = lambda a,b: ((a-b) if len(a.shape) == len(b.shape) else\n", " (np.transpose([a])-b) if len(a.shape) < len(b.shape) else\n", " (a - np.transpose([b])))\n", " fn = lambda px,iis: (1 - ((dfn(cfn(u34,iis), sfn( px, cfn(p3,iis))) > 0) *\n", " (dfn(cfn(u34,iis), sfn(cfn(p4,iis), px)) > 0)))\n", " if len(pi.shape) == 1:\n", " if not np.isfinite(pi[0]): return (np.nan, np.nan)\n", " bad = fn(pi, None)\n", " return (np.nan, np.nan) if bad else pi\n", " else:\n", " nonpar = np.where(np.isfinite(pi[0]))[0]\n", " bad = fn(cfn(pi, nonpar), nonpar)\n", " (xi,yi) = pi\n", " bad = nonpar[np.where(bad)[0]]\n", " xi[bad] = np.nan\n", " yi[bad] = np.nan\n", " return (xi,yi)" ]
[ 0, 0, 0.010752688172043012, 0.010416666666666666, 0, 0, 0.047619047619047616, 0.047619047619047616, 0, 0, 0, 0.030612244897959183, 0.02702702702702703, 0.041666666666666664, 0.046511627906976744, 0.023255813953488372, 0, 0.07865168539325842, 0.033707865168539325, 0, 0.01694915254237288, 0, 0, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0.09090909090909091 ]
31
0.017849
def demux2(data, chunkfiles, cutters, longbar, matchdict, ipyclient): """ Submit chunks to be sorted by the barmatch() function then calls putstats(). """ ## parallel stuff, limit to 1/4 of available cores for RAM limits. start = time.time() printstr = ' sorting reads | {} | s1 |' lbview = ipyclient.load_balanced_view(targets=ipyclient.ids[::4]) ## store statcounters and async results in dicts perfile = {} filesort = {} total = 0 done = 0 ## chunkfiles is a dict with {handle: chunkslist, ...}. The func barmatch ## writes results to samplename files with PID number, and also writes a ## pickle for chunk specific results with fidx suffix, which it returns. for handle, rawtuplist in chunkfiles.items(): ## get args for job for fidx, rawtuple in enumerate(rawtuplist): #handle = os.path.splitext(os.path.basename(rawtuple[0]))[0] args = (data, rawtuple, cutters, longbar, matchdict, fidx) ## submit the job async = lbview.apply(barmatch, *args) filesort[total] = (handle, async) total += 1 ## get ready to receive stats: 'total', 'cutfound', 'matched' perfile[handle] = np.zeros(3, dtype=np.int) ## stats for each sample fdbars = {} fsamplehits = Counter() fbarhits = Counter() fmisses = Counter() ## a tuple to hold my dictionaries statdicts = perfile, fsamplehits, fbarhits, fmisses, fdbars ## wait for jobs to finish while 1: fin = [i for i, j in filesort.items() if j[1].ready()] #fin = [i for i in jobs if i[1].ready()] elapsed = datetime.timedelta(seconds=int(time.time()-start)) progressbar(total, done, printstr.format(elapsed), spacer=data._spacer) time.sleep(0.1) ## should we break? if total == done: print("") break ## cleanup for key in fin: tup = filesort[key] if tup[1].successful(): pfile = tup[1].result() handle = tup[0] if pfile: ## check if this needs to return data putstats(pfile, handle, statdicts) ## purge to conserve memory del filesort[key] done += 1 return statdicts
[ "def", "demux2", "(", "data", ",", "chunkfiles", ",", "cutters", ",", "longbar", ",", "matchdict", ",", "ipyclient", ")", ":", "## parallel stuff, limit to 1/4 of available cores for RAM limits.", "start", "=", "time", ".", "time", "(", ")", "printstr", "=", "' sorting reads | {} | s1 |'", "lbview", "=", "ipyclient", ".", "load_balanced_view", "(", "targets", "=", "ipyclient", ".", "ids", "[", ":", ":", "4", "]", ")", "## store statcounters and async results in dicts", "perfile", "=", "{", "}", "filesort", "=", "{", "}", "total", "=", "0", "done", "=", "0", "## chunkfiles is a dict with {handle: chunkslist, ...}. The func barmatch", "## writes results to samplename files with PID number, and also writes a ", "## pickle for chunk specific results with fidx suffix, which it returns.", "for", "handle", ",", "rawtuplist", "in", "chunkfiles", ".", "items", "(", ")", ":", "## get args for job", "for", "fidx", ",", "rawtuple", "in", "enumerate", "(", "rawtuplist", ")", ":", "#handle = os.path.splitext(os.path.basename(rawtuple[0]))[0]", "args", "=", "(", "data", ",", "rawtuple", ",", "cutters", ",", "longbar", ",", "matchdict", ",", "fidx", ")", "## submit the job", "async", "=", "lbview", ".", "apply", "(", "barmatch", ",", "*", "args", ")", "filesort", "[", "total", "]", "=", "(", "handle", ",", "async", ")", "total", "+=", "1", "## get ready to receive stats: 'total', 'cutfound', 'matched'", "perfile", "[", "handle", "]", "=", "np", ".", "zeros", "(", "3", ",", "dtype", "=", "np", ".", "int", ")", "## stats for each sample", "fdbars", "=", "{", "}", "fsamplehits", "=", "Counter", "(", ")", "fbarhits", "=", "Counter", "(", ")", "fmisses", "=", "Counter", "(", ")", "## a tuple to hold my dictionaries", "statdicts", "=", "perfile", ",", "fsamplehits", ",", "fbarhits", ",", "fmisses", ",", "fdbars", "## wait for jobs to finish", "while", "1", ":", "fin", "=", "[", "i", "for", "i", ",", "j", "in", "filesort", ".", "items", "(", ")", "if", "j", "[", "1", "]", ".", "ready", "(", ")", "]", "#fin = [i for i in jobs if i[1].ready()]", "elapsed", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "int", "(", "time", ".", "time", "(", ")", "-", "start", ")", ")", "progressbar", "(", "total", ",", "done", ",", "printstr", ".", "format", "(", "elapsed", ")", ",", "spacer", "=", "data", ".", "_spacer", ")", "time", ".", "sleep", "(", "0.1", ")", "## should we break?", "if", "total", "==", "done", ":", "print", "(", "\"\"", ")", "break", "## cleanup", "for", "key", "in", "fin", ":", "tup", "=", "filesort", "[", "key", "]", "if", "tup", "[", "1", "]", ".", "successful", "(", ")", ":", "pfile", "=", "tup", "[", "1", "]", ".", "result", "(", ")", "handle", "=", "tup", "[", "0", "]", "if", "pfile", ":", "## check if this needs to return data", "putstats", "(", "pfile", ",", "handle", ",", "statdicts", ")", "## purge to conserve memory", "del", "filesort", "[", "key", "]", "done", "+=", "1", "return", "statdicts" ]
33.927536
0.009963
[ "def demux2(data, chunkfiles, cutters, longbar, matchdict, ipyclient):\n", " \"\"\" \n", " Submit chunks to be sorted by the barmatch() function then \n", " calls putstats().\n", " \"\"\"\n", "\n", " ## parallel stuff, limit to 1/4 of available cores for RAM limits.\n", " start = time.time()\n", " printstr = ' sorting reads | {} | s1 |'\n", " lbview = ipyclient.load_balanced_view(targets=ipyclient.ids[::4])\n", "\n", " ## store statcounters and async results in dicts\n", " perfile = {}\n", " filesort = {}\n", " total = 0\n", " done = 0 \n", "\n", " ## chunkfiles is a dict with {handle: chunkslist, ...}. The func barmatch\n", " ## writes results to samplename files with PID number, and also writes a \n", " ## pickle for chunk specific results with fidx suffix, which it returns.\n", " for handle, rawtuplist in chunkfiles.items():\n", " ## get args for job\n", " for fidx, rawtuple in enumerate(rawtuplist):\n", " #handle = os.path.splitext(os.path.basename(rawtuple[0]))[0]\n", " args = (data, rawtuple, cutters, longbar, matchdict, fidx)\n", "\n", " ## submit the job\n", " async = lbview.apply(barmatch, *args)\n", " filesort[total] = (handle, async)\n", " total += 1\n", "\n", " ## get ready to receive stats: 'total', 'cutfound', 'matched'\n", " perfile[handle] = np.zeros(3, dtype=np.int)\n", "\n", " ## stats for each sample\n", " fdbars = {}\n", " fsamplehits = Counter()\n", " fbarhits = Counter()\n", " fmisses = Counter()\n", " ## a tuple to hold my dictionaries\n", " statdicts = perfile, fsamplehits, fbarhits, fmisses, fdbars\n", "\n", " ## wait for jobs to finish\n", " while 1:\n", " fin = [i for i, j in filesort.items() if j[1].ready()]\n", " #fin = [i for i in jobs if i[1].ready()]\n", " elapsed = datetime.timedelta(seconds=int(time.time()-start))\n", " progressbar(total, done, printstr.format(elapsed), spacer=data._spacer)\n", " time.sleep(0.1)\n", "\n", " ## should we break?\n", " if total == done:\n", " print(\"\")\n", " break\n", "\n", " ## cleanup\n", " for key in fin:\n", " tup = filesort[key]\n", " if tup[1].successful():\n", " pfile = tup[1].result()\n", " handle = tup[0]\n", " if pfile:\n", " ## check if this needs to return data\n", " putstats(pfile, handle, statdicts)\n", " ## purge to conserve memory\n", " del filesort[key]\n", " done += 1\n", "\n", " return statdicts" ]
[ 0, 0.1111111111111111, 0.015625, 0, 0, 0, 0.014084507042253521, 0, 0, 0, 0, 0.018867924528301886, 0, 0, 0, 0.07142857142857142, 0, 0.01282051282051282, 0.02564102564102564, 0.012987012987012988, 0, 0.03571428571428571, 0, 0.0136986301369863, 0, 0, 0.03333333333333333, 0.02, 0.021739130434782608, 0, 0, 0.013513513513513514, 0, 0, 0.034482758620689655, 0, 0, 0, 0, 0.02564102564102564, 0, 0, 0.03225806451612903, 0, 0, 0.02040816326530612, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0, 0.05263157894736842, 0, 0, 0, 0, 0, 0, 0.017241379310344827, 0, 0.020833333333333332, 0, 0, 0, 0.05 ]
69
0.010287
def parse_formula(fml_file): """ Parse and return MaxSAT formula. """ if re.search('\.wcnf(\.(gz|bz2|lzma|xz))?$', fml_file): fml = WCNF(from_file=fml_file) else: # expecting '*.cnf' fml = CNF(from_file=fml_file).weighted() return fml
[ "def", "parse_formula", "(", "fml_file", ")", ":", "if", "re", ".", "search", "(", "'\\.wcnf(\\.(gz|bz2|lzma|xz))?$'", ",", "fml_file", ")", ":", "fml", "=", "WCNF", "(", "from_file", "=", "fml_file", ")", "else", ":", "# expecting '*.cnf'", "fml", "=", "CNF", "(", "from_file", "=", "fml_file", ")", ".", "weighted", "(", ")", "return", "fml" ]
24.636364
0.010676
[ "def parse_formula(fml_file):\n", " \"\"\"\n", " Parse and return MaxSAT formula.\n", " \"\"\"\n", "\n", " if re.search('\\.wcnf(\\.(gz|bz2|lzma|xz))?$', fml_file):\n", " fml = WCNF(from_file=fml_file)\n", " else: # expecting '*.cnf'\n", " fml = CNF(from_file=fml_file).weighted()\n", "\n", " return fml" ]
[ 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0.07142857142857142 ]
11
0.009524
def __expire_files(self): """Because files are always unclean""" self.__files = OrderedDict( item for item in self.__files.items() if not item[1].expired )
[ "def", "__expire_files", "(", "self", ")", ":", "self", ".", "__files", "=", "OrderedDict", "(", "item", "for", "item", "in", "self", ".", "__files", ".", "items", "(", ")", "if", "not", "item", "[", "1", "]", ".", "expired", ")" ]
31.166667
0.010417
[ "def __expire_files(self):\n", " \"\"\"Because files are always unclean\"\"\"\n", "\n", " self.__files = OrderedDict(\n", " item for item in self.__files.items() if not item[1].expired\n", " )" ]
[ 0, 0.02127659574468085, 0, 0, 0, 0.1111111111111111 ]
6
0.022065
def _load_permissions(self): """Load permissions associated to actions.""" result = _P(needs=set(), excludes=set()) if not self.allow_by_default: result.needs.update(self.explicit_needs) for explicit_need in self.explicit_needs: if explicit_need.method == 'action': action = current_access.get_action_cache( self._cache_key(explicit_need) ) if action is None: action = _P(needs=set(), excludes=set()) actionsusers = ActionUsers.query_by_action( explicit_need ).all() actionsroles = ActionRoles.query_by_action( explicit_need ).join( ActionRoles.role ).all() actionssystem = ActionSystemRoles.query_by_action( explicit_need ).all() for db_action in chain( actionsusers, actionsroles, actionssystem): if db_action.exclude: action.excludes.add(db_action.need) else: action.needs.add(db_action.need) current_access.set_action_cache( self._cache_key(explicit_need), action ) # in-place update of results result.update(action) elif self.allow_by_default: result.needs.add(explicit_need) self._permissions = result
[ "def", "_load_permissions", "(", "self", ")", ":", "result", "=", "_P", "(", "needs", "=", "set", "(", ")", ",", "excludes", "=", "set", "(", ")", ")", "if", "not", "self", ".", "allow_by_default", ":", "result", ".", "needs", ".", "update", "(", "self", ".", "explicit_needs", ")", "for", "explicit_need", "in", "self", ".", "explicit_needs", ":", "if", "explicit_need", ".", "method", "==", "'action'", ":", "action", "=", "current_access", ".", "get_action_cache", "(", "self", ".", "_cache_key", "(", "explicit_need", ")", ")", "if", "action", "is", "None", ":", "action", "=", "_P", "(", "needs", "=", "set", "(", ")", ",", "excludes", "=", "set", "(", ")", ")", "actionsusers", "=", "ActionUsers", ".", "query_by_action", "(", "explicit_need", ")", ".", "all", "(", ")", "actionsroles", "=", "ActionRoles", ".", "query_by_action", "(", "explicit_need", ")", ".", "join", "(", "ActionRoles", ".", "role", ")", ".", "all", "(", ")", "actionssystem", "=", "ActionSystemRoles", ".", "query_by_action", "(", "explicit_need", ")", ".", "all", "(", ")", "for", "db_action", "in", "chain", "(", "actionsusers", ",", "actionsroles", ",", "actionssystem", ")", ":", "if", "db_action", ".", "exclude", ":", "action", ".", "excludes", ".", "add", "(", "db_action", ".", "need", ")", "else", ":", "action", ".", "needs", ".", "add", "(", "db_action", ".", "need", ")", "current_access", ".", "set_action_cache", "(", "self", ".", "_cache_key", "(", "explicit_need", ")", ",", "action", ")", "# in-place update of results", "result", ".", "update", "(", "action", ")", "elif", "self", ".", "allow_by_default", ":", "result", ".", "needs", ".", "add", "(", "explicit_need", ")", "self", ".", "_permissions", "=", "result" ]
37.681818
0.001176
[ "def _load_permissions(self):\n", " \"\"\"Load permissions associated to actions.\"\"\"\n", " result = _P(needs=set(), excludes=set())\n", " if not self.allow_by_default:\n", " result.needs.update(self.explicit_needs)\n", "\n", " for explicit_need in self.explicit_needs:\n", " if explicit_need.method == 'action':\n", " action = current_access.get_action_cache(\n", " self._cache_key(explicit_need)\n", " )\n", " if action is None:\n", " action = _P(needs=set(), excludes=set())\n", "\n", " actionsusers = ActionUsers.query_by_action(\n", " explicit_need\n", " ).all()\n", "\n", " actionsroles = ActionRoles.query_by_action(\n", " explicit_need\n", " ).join(\n", " ActionRoles.role\n", " ).all()\n", "\n", " actionssystem = ActionSystemRoles.query_by_action(\n", " explicit_need\n", " ).all()\n", "\n", " for db_action in chain(\n", " actionsusers, actionsroles, actionssystem):\n", " if db_action.exclude:\n", " action.excludes.add(db_action.need)\n", " else:\n", " action.needs.add(db_action.need)\n", "\n", " current_access.set_action_cache(\n", " self._cache_key(explicit_need),\n", " action\n", " )\n", " # in-place update of results\n", " result.update(action)\n", " elif self.allow_by_default:\n", " result.needs.add(explicit_need)\n", " self._permissions = result" ]
[ 0, 0.018518518518518517, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.029411764705882353 ]
44
0.001089
def images(self, name=None, os=None, version=None, public=None, state=None, owner=None, type=None): """ :: GET /:login/images :param name: match on the listed name :type name: :py:class:`basestring` :param os: match on the selected os :type os: :py:class:`basestring` :param version: match on the selected version :type version: :py:class:`basestring` :param public: match on the visibility :type public: :py:class:`basestring` ("public"/"private") :param state: Filter on image state. By default only active images are shown. Use "all" to list all images. :type state: :py:class:`basestring` :param owner: match on the owner UUID :type owner: :py:class:`basestring` :param machinetype: match on the selected type (e.g., "smartmachine") :type machinetype: :py:class:`basestring` :Returns: available machine images in this datacenter :rtype: :py:class:`list` of :py:class:`dict`\s """ params = {} if name: params['name'] = name if os: params['os'] = os if version: params['version'] = version if public: params['public'] = public if state: params['state'] = state if owner: params['owner'] = owner if machinetype: params['type'] = machinetype j, _ = self.request('GET', '/images', params=params) return j
[ "def", "images", "(", "self", ",", "name", "=", "None", ",", "os", "=", "None", ",", "version", "=", "None", ",", "public", "=", "None", ",", "state", "=", "None", ",", "owner", "=", "None", ",", "type", "=", "None", ")", ":", "params", "=", "{", "}", "if", "name", ":", "params", "[", "'name'", "]", "=", "name", "if", "os", ":", "params", "[", "'os'", "]", "=", "os", "if", "version", ":", "params", "[", "'version'", "]", "=", "version", "if", "public", ":", "params", "[", "'public'", "]", "=", "public", "if", "state", ":", "params", "[", "'state'", "]", "=", "state", "if", "owner", ":", "params", "[", "'owner'", "]", "=", "owner", "if", "machinetype", ":", "params", "[", "'type'", "]", "=", "machinetype", "j", ",", "_", "=", "self", ".", "request", "(", "'GET'", ",", "'/images'", ",", "params", "=", "params", ")", "return", "j" ]
31.96
0.010322
[ "def images(self, name=None, os=None, version=None, \n", " public=None, state=None, owner=None, type=None):\n", " \"\"\"\n", " ::\n", " \n", " GET /:login/images\n", " \n", " :param name: match on the listed name\n", " :type name: :py:class:`basestring`\n", " \n", " :param os: match on the selected os\n", " :type os: :py:class:`basestring`\n", " \n", " :param version: match on the selected version\n", " :type version: :py:class:`basestring`\n", " \n", " :param public: match on the visibility\n", " :type public: :py:class:`basestring` (\"public\"/\"private\")\n", " \n", " :param state: Filter on image state. By default only active images are shown. Use \"all\" to list all images.\n", " :type state: :py:class:`basestring`\n", " \n", " :param owner: match on the owner UUID\n", " :type owner: :py:class:`basestring`\n", " \n", " :param machinetype: match on the selected type (e.g., \"smartmachine\")\n", " :type machinetype: :py:class:`basestring`\n", " \n", " :Returns: available machine images in this datacenter\n", " :rtype: :py:class:`list` of :py:class:`dict`\\s\n", " \"\"\"\n", " \n", " params = {}\n", " if name:\n", " params['name'] = name\n", " if os:\n", " params['os'] = os\n", " if version:\n", " params['version'] = version\n", " if public:\n", " params['public'] = public\n", " if state:\n", " params['state'] = state\n", " if owner:\n", " params['owner'] = owner\n", " if machinetype:\n", " params['type'] = machinetype\n", " j, _ = self.request('GET', '/images', params=params)\n", " \n", " return j" ]
[ 0.019230769230769232, 0.015384615384615385, 0.08333333333333333, 0, 0.1111111111111111, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0.008620689655172414, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0, 0.01818181818181818, 0, 0.1111111111111111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1111111111111111, 0.0625 ]
50
0.028589
def convert_exception(from_exception, to_exception, *to_args, **to_kw): """ Decorator: Catch exception ``from_exception`` and instead raise ``to_exception(*to_args, **to_kw)``. Useful when modules you're using in a method throw their own errors that you want to convert to your own exceptions that you handle higher in the stack. Example: :: class FooError(Exception): pass class BarError(Exception): def __init__(self, message): self.message = message @convert_exception(FooError, BarError, message='bar') def throw_foo(): raise FooError('foo') try: throw_foo() except BarError as e: assert e.message == 'bar' """ def wrapper(fn): def fn_new(*args, **kw): try: return fn(*args, **kw) except from_exception: new_exception = to_exception(*to_args, **to_kw) traceback = sys.exc_info()[2] if PY3: value = new_exception else: value = None reraise(new_exception, value, traceback) fn_new.__doc__ = fn.__doc__ return fn_new return wrapper
[ "def", "convert_exception", "(", "from_exception", ",", "to_exception", ",", "*", "to_args", ",", "*", "*", "to_kw", ")", ":", "def", "wrapper", "(", "fn", ")", ":", "def", "fn_new", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "try", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kw", ")", "except", "from_exception", ":", "new_exception", "=", "to_exception", "(", "*", "to_args", ",", "*", "*", "to_kw", ")", "traceback", "=", "sys", ".", "exc_info", "(", ")", "[", "2", "]", "if", "PY3", ":", "value", "=", "new_exception", "else", ":", "value", "=", "None", "reraise", "(", "new_exception", ",", "value", ",", "traceback", ")", "fn_new", ".", "__doc__", "=", "fn", ".", "__doc__", "return", "fn_new", "return", "wrapper" ]
28.813953
0.002342
[ "def convert_exception(from_exception, to_exception, *to_args, **to_kw):\n", " \"\"\"\n", " Decorator: Catch exception ``from_exception`` and instead raise ``to_exception(*to_args, **to_kw)``.\n", "\n", " Useful when modules you're using in a method throw their own errors that you want to\n", " convert to your own exceptions that you handle higher in the stack.\n", "\n", " Example: ::\n", "\n", " class FooError(Exception):\n", " pass\n", "\n", " class BarError(Exception):\n", " def __init__(self, message):\n", " self.message = message\n", "\n", " @convert_exception(FooError, BarError, message='bar')\n", " def throw_foo():\n", " raise FooError('foo')\n", "\n", " try:\n", " throw_foo()\n", " except BarError as e:\n", " assert e.message == 'bar'\n", " \"\"\"\n", " def wrapper(fn):\n", "\n", " def fn_new(*args, **kw):\n", " try:\n", " return fn(*args, **kw)\n", " except from_exception:\n", " new_exception = to_exception(*to_args, **to_kw)\n", " traceback = sys.exc_info()[2]\n", " if PY3:\n", " value = new_exception\n", " else:\n", " value = None\n", " reraise(new_exception, value, traceback)\n", "\n", " fn_new.__doc__ = fn.__doc__\n", " return fn_new\n", "\n", " return wrapper" ]
[ 0, 0, 0.009523809523809525, 0, 0.011235955056179775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05555555555555555 ]
43
0.001775
def fix_multi_T1w_source_name(in_files): """ Make up a generic source name when there are multiple T1s >>> fix_multi_T1w_source_name([ ... '/path/to/sub-045_ses-test_T1w.nii.gz', ... '/path/to/sub-045_ses-retest_T1w.nii.gz']) '/path/to/sub-045_T1w.nii.gz' """ import os from nipype.utils.filemanip import filename_to_list base, in_file = os.path.split(filename_to_list(in_files)[0]) subject_label = in_file.split("_", 1)[0].split("-")[1] return os.path.join(base, "sub-%s_T1w.nii.gz" % subject_label)
[ "def", "fix_multi_T1w_source_name", "(", "in_files", ")", ":", "import", "os", "from", "nipype", ".", "utils", ".", "filemanip", "import", "filename_to_list", "base", ",", "in_file", "=", "os", ".", "path", ".", "split", "(", "filename_to_list", "(", "in_files", ")", "[", "0", "]", ")", "subject_label", "=", "in_file", ".", "split", "(", "\"_\"", ",", "1", ")", "[", "0", "]", ".", "split", "(", "\"-\"", ")", "[", "1", "]", "return", "os", ".", "path", ".", "join", "(", "base", ",", "\"sub-%s_T1w.nii.gz\"", "%", "subject_label", ")" ]
36.266667
0.001792
[ "def fix_multi_T1w_source_name(in_files):\n", " \"\"\"\n", " Make up a generic source name when there are multiple T1s\n", "\n", " >>> fix_multi_T1w_source_name([\n", " ... '/path/to/sub-045_ses-test_T1w.nii.gz',\n", " ... '/path/to/sub-045_ses-retest_T1w.nii.gz'])\n", " '/path/to/sub-045_T1w.nii.gz'\n", "\n", " \"\"\"\n", " import os\n", " from nipype.utils.filemanip import filename_to_list\n", " base, in_file = os.path.split(filename_to_list(in_files)[0])\n", " subject_label = in_file.split(\"_\", 1)[0].split(\"-\")[1]\n", " return os.path.join(base, \"sub-%s_T1w.nii.gz\" % subject_label)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015151515151515152 ]
15
0.00101
def _repr_tty_(self) -> str: """Return a summary of this sample sheet in a TTY compatible codec.""" header_description = ['Sample_ID', 'Description'] header_samples = [ 'Sample_ID', 'Sample_Name', 'Library_ID', 'index', 'index2', ] header = SingleTable([], 'Header') setting = SingleTable([], 'Settings') sample_main = SingleTable([header_samples], 'Identifiers') sample_desc = SingleTable([header_description], 'Descriptions') # All key:value pairs found in the [Header] section. max_header_width = max(MIN_WIDTH, sample_desc.column_max_width(-1)) for key in self.Header.keys(): if 'Description' in key: value = '\n'.join( wrap(getattr(self.Header, key), max_header_width) ) else: value = getattr(self.Header, key) header.table_data.append([key, value]) # All key:value pairs found in the [Settings] and [Reads] sections. for key in self.Settings.keys(): setting.table_data.append((key, getattr(self.Settings, key) or '')) setting.table_data.append(('Reads', ', '.join(map(str, self.Reads)))) # Descriptions are wrapped to the allowable space remaining. description_width = max(MIN_WIDTH, sample_desc.column_max_width(-1)) for sample in self.samples: # Add all key:value pairs for this sample sample_main.table_data.append( [getattr(sample, title) or '' for title in header_samples] ) # Wrap and add the sample descrption sample_desc.table_data.append( ( sample.Sample_ID, '\n'.join( wrap(sample.Description or '', description_width) ), ) ) # These tables do not have horizontal headers so remove the frame. header.inner_heading_row_border = False setting.inner_heading_row_border = False table = '\n'.join( [header.table, setting.table, sample_main.table, sample_desc.table] ) return table
[ "def", "_repr_tty_", "(", "self", ")", "->", "str", ":", "header_description", "=", "[", "'Sample_ID'", ",", "'Description'", "]", "header_samples", "=", "[", "'Sample_ID'", ",", "'Sample_Name'", ",", "'Library_ID'", ",", "'index'", ",", "'index2'", ",", "]", "header", "=", "SingleTable", "(", "[", "]", ",", "'Header'", ")", "setting", "=", "SingleTable", "(", "[", "]", ",", "'Settings'", ")", "sample_main", "=", "SingleTable", "(", "[", "header_samples", "]", ",", "'Identifiers'", ")", "sample_desc", "=", "SingleTable", "(", "[", "header_description", "]", ",", "'Descriptions'", ")", "# All key:value pairs found in the [Header] section.", "max_header_width", "=", "max", "(", "MIN_WIDTH", ",", "sample_desc", ".", "column_max_width", "(", "-", "1", ")", ")", "for", "key", "in", "self", ".", "Header", ".", "keys", "(", ")", ":", "if", "'Description'", "in", "key", ":", "value", "=", "'\\n'", ".", "join", "(", "wrap", "(", "getattr", "(", "self", ".", "Header", ",", "key", ")", ",", "max_header_width", ")", ")", "else", ":", "value", "=", "getattr", "(", "self", ".", "Header", ",", "key", ")", "header", ".", "table_data", ".", "append", "(", "[", "key", ",", "value", "]", ")", "# All key:value pairs found in the [Settings] and [Reads] sections.", "for", "key", "in", "self", ".", "Settings", ".", "keys", "(", ")", ":", "setting", ".", "table_data", ".", "append", "(", "(", "key", ",", "getattr", "(", "self", ".", "Settings", ",", "key", ")", "or", "''", ")", ")", "setting", ".", "table_data", ".", "append", "(", "(", "'Reads'", ",", "', '", ".", "join", "(", "map", "(", "str", ",", "self", ".", "Reads", ")", ")", ")", ")", "# Descriptions are wrapped to the allowable space remaining.", "description_width", "=", "max", "(", "MIN_WIDTH", ",", "sample_desc", ".", "column_max_width", "(", "-", "1", ")", ")", "for", "sample", "in", "self", ".", "samples", ":", "# Add all key:value pairs for this sample", "sample_main", ".", "table_data", ".", "append", "(", "[", "getattr", "(", "sample", ",", "title", ")", "or", "''", "for", "title", "in", "header_samples", "]", ")", "# Wrap and add the sample descrption", "sample_desc", ".", "table_data", ".", "append", "(", "(", "sample", ".", "Sample_ID", ",", "'\\n'", ".", "join", "(", "wrap", "(", "sample", ".", "Description", "or", "''", ",", "description_width", ")", ")", ",", ")", ")", "# These tables do not have horizontal headers so remove the frame.", "header", ".", "inner_heading_row_border", "=", "False", "setting", ".", "inner_heading_row_border", "=", "False", "table", "=", "'\\n'", ".", "join", "(", "[", "header", ".", "table", ",", "setting", ".", "table", ",", "sample_main", ".", "table", ",", "sample_desc", ".", "table", "]", ")", "return", "table" ]
38.224138
0.00088
[ "def _repr_tty_(self) -> str:\n", " \"\"\"Return a summary of this sample sheet in a TTY compatible codec.\"\"\"\n", " header_description = ['Sample_ID', 'Description']\n", " header_samples = [\n", " 'Sample_ID',\n", " 'Sample_Name',\n", " 'Library_ID',\n", " 'index',\n", " 'index2',\n", " ]\n", "\n", " header = SingleTable([], 'Header')\n", " setting = SingleTable([], 'Settings')\n", " sample_main = SingleTable([header_samples], 'Identifiers')\n", " sample_desc = SingleTable([header_description], 'Descriptions')\n", "\n", " # All key:value pairs found in the [Header] section.\n", " max_header_width = max(MIN_WIDTH, sample_desc.column_max_width(-1))\n", " for key in self.Header.keys():\n", " if 'Description' in key:\n", " value = '\\n'.join(\n", " wrap(getattr(self.Header, key), max_header_width)\n", " )\n", " else:\n", " value = getattr(self.Header, key)\n", " header.table_data.append([key, value])\n", "\n", " # All key:value pairs found in the [Settings] and [Reads] sections.\n", " for key in self.Settings.keys():\n", " setting.table_data.append((key, getattr(self.Settings, key) or ''))\n", " setting.table_data.append(('Reads', ', '.join(map(str, self.Reads))))\n", "\n", " # Descriptions are wrapped to the allowable space remaining.\n", " description_width = max(MIN_WIDTH, sample_desc.column_max_width(-1))\n", " for sample in self.samples:\n", " # Add all key:value pairs for this sample\n", " sample_main.table_data.append(\n", " [getattr(sample, title) or '' for title in header_samples]\n", " )\n", " # Wrap and add the sample descrption\n", " sample_desc.table_data.append(\n", " (\n", " sample.Sample_ID,\n", " '\\n'.join(\n", " wrap(sample.Description or '', description_width)\n", " ),\n", " )\n", " )\n", "\n", " # These tables do not have horizontal headers so remove the frame.\n", " header.inner_heading_row_border = False\n", " setting.inner_heading_row_border = False\n", "\n", " table = '\\n'.join(\n", " [header.table, setting.table, sample_main.table, sample_desc.table]\n", " )\n", "\n", " return table" ]
[ 0, 0.012658227848101266, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05 ]
58
0.00108
def repos(self): """View or enabled or disabled repositories """ def_cnt, cus_cnt = 0, 0 print("") self.msg.template(78) print("{0}{1}{2}{3}{4}{5}{6}".format( "| Repo id", " " * 2, "Repo URL", " " * 44, "Default", " " * 3, "Status")) self.msg.template(78) for repo_id, repo_URL in sorted(self.all_repos.iteritems()): status, COLOR = "disabled", self.meta.color["RED"] default = "yes" if len(repo_URL) > 49: repo_URL = repo_URL[:48] + "~" if repo_id in self.meta.repositories: def_cnt += 1 status, COLOR = "enabled", self.meta.color["GREEN"] if repo_id not in self.meta.default_repositories: cus_cnt += 1 default = "no" print(" {0}{1}{2}{3}{4}{5}{6}{7:>8}{8}".format( repo_id, " " * (9 - len(repo_id)), repo_URL, " " * (52 - len(repo_URL)), default, " " * (8 - len(default)), COLOR, status, self.meta.color["ENDC"])) print("\nRepositories summary") print("=" * 79) print("{0}{1}/{2} enabled default repositories and {3} custom.".format( self.meta.color["GREY"], def_cnt, len(self.all_repos), cus_cnt)) print("Edit the file '/etc/slpkg/repositories.conf' for enable " "and disable default\nrepositories or run 'slpkg " "repo-enable' command.\n{0}".format(self.meta.color["ENDC"])) raise SystemExit()
[ "def", "repos", "(", "self", ")", ":", "def_cnt", ",", "cus_cnt", "=", "0", ",", "0", "print", "(", "\"\"", ")", "self", ".", "msg", ".", "template", "(", "78", ")", "print", "(", "\"{0}{1}{2}{3}{4}{5}{6}\"", ".", "format", "(", "\"| Repo id\"", ",", "\" \"", "*", "2", ",", "\"Repo URL\"", ",", "\" \"", "*", "44", ",", "\"Default\"", ",", "\" \"", "*", "3", ",", "\"Status\"", ")", ")", "self", ".", "msg", ".", "template", "(", "78", ")", "for", "repo_id", ",", "repo_URL", "in", "sorted", "(", "self", ".", "all_repos", ".", "iteritems", "(", ")", ")", ":", "status", ",", "COLOR", "=", "\"disabled\"", ",", "self", ".", "meta", ".", "color", "[", "\"RED\"", "]", "default", "=", "\"yes\"", "if", "len", "(", "repo_URL", ")", ">", "49", ":", "repo_URL", "=", "repo_URL", "[", ":", "48", "]", "+", "\"~\"", "if", "repo_id", "in", "self", ".", "meta", ".", "repositories", ":", "def_cnt", "+=", "1", "status", ",", "COLOR", "=", "\"enabled\"", ",", "self", ".", "meta", ".", "color", "[", "\"GREEN\"", "]", "if", "repo_id", "not", "in", "self", ".", "meta", ".", "default_repositories", ":", "cus_cnt", "+=", "1", "default", "=", "\"no\"", "print", "(", "\" {0}{1}{2}{3}{4}{5}{6}{7:>8}{8}\"", ".", "format", "(", "repo_id", ",", "\" \"", "*", "(", "9", "-", "len", "(", "repo_id", ")", ")", ",", "repo_URL", ",", "\" \"", "*", "(", "52", "-", "len", "(", "repo_URL", ")", ")", ",", "default", ",", "\" \"", "*", "(", "8", "-", "len", "(", "default", ")", ")", ",", "COLOR", ",", "status", ",", "self", ".", "meta", ".", "color", "[", "\"ENDC\"", "]", ")", ")", "print", "(", "\"\\nRepositories summary\"", ")", "print", "(", "\"=\"", "*", "79", ")", "print", "(", "\"{0}{1}/{2} enabled default repositories and {3} custom.\"", ".", "format", "(", "self", ".", "meta", ".", "color", "[", "\"GREY\"", "]", ",", "def_cnt", ",", "len", "(", "self", ".", "all_repos", ")", ",", "cus_cnt", ")", ")", "print", "(", "\"Edit the file '/etc/slpkg/repositories.conf' for enable \"", "\"and disable default\\nrepositories or run 'slpkg \"", "\"repo-enable' command.\\n{0}\"", ".", "format", "(", "self", ".", "meta", ".", "color", "[", "\"ENDC\"", "]", ")", ")", "raise", "SystemExit", "(", ")" ]
42.459459
0.001245
[ "def repos(self):\n", " \"\"\"View or enabled or disabled repositories\n", " \"\"\"\n", " def_cnt, cus_cnt = 0, 0\n", " print(\"\")\n", " self.msg.template(78)\n", " print(\"{0}{1}{2}{3}{4}{5}{6}\".format(\n", " \"| Repo id\", \" \" * 2,\n", " \"Repo URL\", \" \" * 44,\n", " \"Default\", \" \" * 3,\n", " \"Status\"))\n", " self.msg.template(78)\n", " for repo_id, repo_URL in sorted(self.all_repos.iteritems()):\n", " status, COLOR = \"disabled\", self.meta.color[\"RED\"]\n", " default = \"yes\"\n", " if len(repo_URL) > 49:\n", " repo_URL = repo_URL[:48] + \"~\"\n", " if repo_id in self.meta.repositories:\n", " def_cnt += 1\n", " status, COLOR = \"enabled\", self.meta.color[\"GREEN\"]\n", " if repo_id not in self.meta.default_repositories:\n", " cus_cnt += 1\n", " default = \"no\"\n", " print(\" {0}{1}{2}{3}{4}{5}{6}{7:>8}{8}\".format(\n", " repo_id, \" \" * (9 - len(repo_id)),\n", " repo_URL, \" \" * (52 - len(repo_URL)),\n", " default, \" \" * (8 - len(default)),\n", " COLOR, status, self.meta.color[\"ENDC\"]))\n", "\n", " print(\"\\nRepositories summary\")\n", " print(\"=\" * 79)\n", " print(\"{0}{1}/{2} enabled default repositories and {3} custom.\".format(\n", " self.meta.color[\"GREY\"], def_cnt, len(self.all_repos), cus_cnt))\n", " print(\"Edit the file '/etc/slpkg/repositories.conf' for enable \"\n", " \"and disable default\\nrepositories or run 'slpkg \"\n", " \"repo-enable' command.\\n{0}\".format(self.meta.color[\"ENDC\"]))\n", " raise SystemExit()" ]
[ 0, 0.019230769230769232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464 ]
37
0.001559
def setbit(self, key, offset, bit): """Sets or clears the bit at offset in the string value stored at key. The bit is either set or cleared depending on value, which can be either 0 or 1. When key does not exist, a new string value is created. The string is grown to make sure it can hold a bit at offset. The offset argument is required to be greater than or equal to 0, and smaller than 2 :sup:`32` (this limits bitmaps to 512MB). When the string at key is grown, added bits are set to 0. .. warning:: When setting the last possible bit (offset equal to 2 :sup:`32` -1) and the string value stored at key does not yet hold a string value, or holds a small string value, Redis needs to allocate all intermediate memory which can block the server for some time. On a 2010 MacBook Pro, setting bit number 2 :sup:`32` -1 (512MB allocation) takes ~300ms, setting bit number 2 :sup:`30` -1 (128MB allocation) takes ~80ms, setting bit number 2 :sup:`28` -1 (32MB allocation) takes ~30ms and setting bit number 2 :sup:`26` -1 (8MB allocation) takes ~8ms. Note that once this first allocation is done, subsequent calls to :meth:`~tredis.RedisClient.setbit` for the same key will not have the allocation overhead. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int offset: The bit offset to fetch the bit from :param int bit: The value (``0`` or ``1``) to set for the bit :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ if 0 < bit > 1: raise ValueError('bit must be 1 or 0, not {}'.format(bit)) return self._execute([b'SETBIT', key, ascii(offset), ascii(bit)])
[ "def", "setbit", "(", "self", ",", "key", ",", "offset", ",", "bit", ")", ":", "if", "0", "<", "bit", ">", "1", ":", "raise", "ValueError", "(", "'bit must be 1 or 0, not {}'", ".", "format", "(", "bit", ")", ")", "return", "self", ".", "_execute", "(", "[", "b'SETBIT'", ",", "key", ",", "ascii", "(", "offset", ")", ",", "ascii", "(", "bit", ")", "]", ")" ]
51.72973
0.001026
[ "def setbit(self, key, offset, bit):\n", " \"\"\"Sets or clears the bit at offset in the string value stored at key.\n", "\n", " The bit is either set or cleared depending on value, which can be\n", " either 0 or 1. When key does not exist, a new string value is created.\n", " The string is grown to make sure it can hold a bit at offset. The\n", " offset argument is required to be greater than or equal to 0, and\n", " smaller than 2 :sup:`32` (this limits bitmaps to 512MB). When the\n", " string at key is grown, added bits are set to 0.\n", "\n", " .. warning:: When setting the last possible bit (offset equal to\n", " 2 :sup:`32` -1) and the string value stored at key does not yet hold\n", " a string value, or holds a small string value, Redis needs to\n", " allocate all intermediate memory which can block the server for some\n", " time. On a 2010 MacBook Pro, setting bit number 2 :sup:`32` -1\n", " (512MB allocation) takes ~300ms, setting bit number 2 :sup:`30` -1\n", " (128MB allocation) takes ~80ms, setting bit number 2 :sup:`28` -1\n", " (32MB allocation) takes ~30ms and setting bit number 2 :sup:`26` -1\n", " (8MB allocation) takes ~8ms. Note that once this first allocation is\n", " done, subsequent calls to :meth:`~tredis.RedisClient.setbit` for the\n", " same key will not have the allocation overhead.\n", "\n", " .. versionadded:: 0.2.0\n", "\n", " .. note:: **Time complexity**: ``O(1)``\n", "\n", " :param key: The key to get the bit from\n", " :type key: :class:`str`, :class:`bytes`\n", " :param int offset: The bit offset to fetch the bit from\n", " :param int bit: The value (``0`` or ``1``) to set for the bit\n", " :rtype: int\n", " :raises: :exc:`~tredis.exceptions.RedisError`\n", "\n", " \"\"\"\n", " if 0 < bit > 1:\n", " raise ValueError('bit must be 1 or 0, not {}'.format(bit))\n", " return self._execute([b'SETBIT', key, ascii(offset), ascii(bit)])" ]
[ 0, 0.012658227848101266, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0136986301369863 ]
37
0.000712
def complete_watch(self, text, *_): """ Autocomplete for watch """ return [t + " " for t in self.engine.cached_descriptions if t.startswith(text)]
[ "def", "complete_watch", "(", "self", ",", "text", ",", "*", "_", ")", ":", "return", "[", "t", "+", "\" \"", "for", "t", "in", "self", ".", "engine", ".", "cached_descriptions", "if", "t", ".", "startswith", "(", "text", ")", "]" ]
53.333333
0.018519
[ "def complete_watch(self, text, *_):\n", " \"\"\" Autocomplete for watch \"\"\"\n", " return [t + \" \" for t in self.engine.cached_descriptions if t.startswith(text)]" ]
[ 0, 0.02564102564102564, 0.022988505747126436 ]
3
0.01621
def is_prime(n, mr_rounds=25): """Test whether n is probably prime See <https://en.wikipedia.org/wiki/Primality_test#Probabilistic_tests> Arguments: n (int): the number to be tested mr_rounds (int, optional): number of Miller-Rabin iterations to run; defaults to 25 iterations, which is what the GMP library uses Returns: bool: when this function returns False, `n` is composite (not prime); when it returns True, `n` is prime with overwhelming probability """ # as an optimization we quickly detect small primes using the list above if n <= first_primes[-1]: return n in first_primes # for small dividors (relatively frequent), euclidean division is best for p in first_primes: if n % p == 0: return False # the actual generic test; give a false prime with probability 2⁻⁵⁰ return miller_rabin(n, mr_rounds)
[ "def", "is_prime", "(", "n", ",", "mr_rounds", "=", "25", ")", ":", "# as an optimization we quickly detect small primes using the list above", "if", "n", "<=", "first_primes", "[", "-", "1", "]", ":", "return", "n", "in", "first_primes", "# for small dividors (relatively frequent), euclidean division is best", "for", "p", "in", "first_primes", ":", "if", "n", "%", "p", "==", "0", ":", "return", "False", "# the actual generic test; give a false prime with probability 2⁻⁵⁰", "return", "miller_rabin", "(", "n", ",", "mr_rounds", ")" ]
39.347826
0.001079
[ "def is_prime(n, mr_rounds=25):\n", " \"\"\"Test whether n is probably prime\n", "\n", " See <https://en.wikipedia.org/wiki/Primality_test#Probabilistic_tests>\n", "\n", " Arguments:\n", " n (int): the number to be tested\n", " mr_rounds (int, optional): number of Miller-Rabin iterations to run;\n", " defaults to 25 iterations, which is what the GMP library uses\n", "\n", " Returns:\n", " bool: when this function returns False, `n` is composite (not prime);\n", " when it returns True, `n` is prime with overwhelming probability\n", " \"\"\"\n", " # as an optimization we quickly detect small primes using the list above\n", " if n <= first_primes[-1]:\n", " return n in first_primes\n", " # for small dividors (relatively frequent), euclidean division is best\n", " for p in first_primes:\n", " if n % p == 0:\n", " return False\n", " # the actual generic test; give a false prime with probability 2⁻⁵⁰\n", " return miller_rabin(n, mr_rounds)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02702702702702703 ]
23
0.001175
def update_pipe_channel(self, uid, channel_name, label): # pylint: disable=unused-argument ''' Update this consumer to listen on channel_name for the js widget associated with uid ''' pipe_group_name = _form_pipe_channel_name(channel_name) if self.channel_layer: current = self.channel_maps.get(uid, None) if current != pipe_group_name: if current: async_to_sync(self.channel_layer.group_discard)(current, self.channel_name) self.channel_maps[uid] = pipe_group_name async_to_sync(self.channel_layer.group_add)(pipe_group_name, self.channel_name)
[ "def", "update_pipe_channel", "(", "self", ",", "uid", ",", "channel_name", ",", "label", ")", ":", "# pylint: disable=unused-argument", "pipe_group_name", "=", "_form_pipe_channel_name", "(", "channel_name", ")", "if", "self", ".", "channel_layer", ":", "current", "=", "self", ".", "channel_maps", ".", "get", "(", "uid", ",", "None", ")", "if", "current", "!=", "pipe_group_name", ":", "if", "current", ":", "async_to_sync", "(", "self", ".", "channel_layer", ".", "group_discard", ")", "(", "current", ",", "self", ".", "channel_name", ")", "self", ".", "channel_maps", "[", "uid", "]", "=", "pipe_group_name", "async_to_sync", "(", "self", ".", "channel_layer", ".", "group_add", ")", "(", "pipe_group_name", ",", "self", ".", "channel_name", ")" ]
47.571429
0.010309
[ "def update_pipe_channel(self, uid, channel_name, label): # pylint: disable=unused-argument\n", " '''\n", " Update this consumer to listen on channel_name for the js widget associated with uid\n", " '''\n", " pipe_group_name = _form_pipe_channel_name(channel_name)\n", "\n", " if self.channel_layer:\n", " current = self.channel_maps.get(uid, None)\n", " if current != pipe_group_name:\n", " if current:\n", " async_to_sync(self.channel_layer.group_discard)(current, self.channel_name)\n", "\n", " self.channel_maps[uid] = pipe_group_name\n", " async_to_sync(self.channel_layer.group_add)(pipe_group_name, self.channel_name)" ]
[ 0.02197802197802198, 0.08333333333333333, 0.010752688172043012, 0, 0, 0, 0, 0, 0, 0, 0.010416666666666666, 0, 0, 0.021052631578947368 ]
14
0.010538
def unpurge(*packages): ''' Change package selection for each package specified to 'install' CLI Example: .. code-block:: bash salt '*' lowpkg.unpurge curl ''' if not packages: return {} old = __salt__['pkg.list_pkgs'](purge_desired=True) ret = {} __salt__['cmd.run']( ['dpkg', '--set-selections'], stdin=r'\n'.join(['{0} install'.format(x) for x in packages]), python_shell=False, output_loglevel='trace' ) __context__.pop('pkg.list_pkgs', None) new = __salt__['pkg.list_pkgs'](purge_desired=True) return salt.utils.data.compare_dicts(old, new)
[ "def", "unpurge", "(", "*", "packages", ")", ":", "if", "not", "packages", ":", "return", "{", "}", "old", "=", "__salt__", "[", "'pkg.list_pkgs'", "]", "(", "purge_desired", "=", "True", ")", "ret", "=", "{", "}", "__salt__", "[", "'cmd.run'", "]", "(", "[", "'dpkg'", ",", "'--set-selections'", "]", ",", "stdin", "=", "r'\\n'", ".", "join", "(", "[", "'{0} install'", ".", "format", "(", "x", ")", "for", "x", "in", "packages", "]", ")", ",", "python_shell", "=", "False", ",", "output_loglevel", "=", "'trace'", ")", "__context__", ".", "pop", "(", "'pkg.list_pkgs'", ",", "None", ")", "new", "=", "__salt__", "[", "'pkg.list_pkgs'", "]", "(", "purge_desired", "=", "True", ")", "return", "salt", ".", "utils", ".", "data", ".", "compare_dicts", "(", "old", ",", "new", ")" ]
27.217391
0.001543
[ "def unpurge(*packages):\n", " '''\n", " Change package selection for each package specified to 'install'\n", "\n", " CLI Example:\n", "\n", " .. code-block:: bash\n", "\n", " salt '*' lowpkg.unpurge curl\n", " '''\n", " if not packages:\n", " return {}\n", " old = __salt__['pkg.list_pkgs'](purge_desired=True)\n", " ret = {}\n", " __salt__['cmd.run'](\n", " ['dpkg', '--set-selections'],\n", " stdin=r'\\n'.join(['{0} install'.format(x) for x in packages]),\n", " python_shell=False,\n", " output_loglevel='trace'\n", " )\n", " __context__.pop('pkg.list_pkgs', None)\n", " new = __salt__['pkg.list_pkgs'](purge_desired=True)\n", " return salt.utils.data.compare_dicts(old, new)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02 ]
23
0.00087
def setPrivates(self, fieldDict) : """will set self._id, self._rev and self._key field.""" for priv in self.privates : if priv in fieldDict : setattr(self, priv, fieldDict[priv]) else : setattr(self, priv, None) if self._id is not None : self.URL = "%s/%s" % (self.documentsURL, self._id)
[ "def", "setPrivates", "(", "self", ",", "fieldDict", ")", ":", "for", "priv", "in", "self", ".", "privates", ":", "if", "priv", "in", "fieldDict", ":", "setattr", "(", "self", ",", "priv", ",", "fieldDict", "[", "priv", "]", ")", "else", ":", "setattr", "(", "self", ",", "priv", ",", "None", ")", "if", "self", ".", "_id", "is", "not", "None", ":", "self", ".", "URL", "=", "\"%s/%s\"", "%", "(", "self", ".", "documentsURL", ",", "self", ".", "_id", ")" ]
35.272727
0.022613
[ "def setPrivates(self, fieldDict) :\n", " \"\"\"will set self._id, self._rev and self._key field.\"\"\"\n", " \n", " for priv in self.privates :\n", " if priv in fieldDict :\n", " setattr(self, priv, fieldDict[priv])\n", " else :\n", " setattr(self, priv, None)\n", " \n", " if self._id is not None :\n", " self.URL = \"%s/%s\" % (self.documentsURL, self._id)" ]
[ 0.02857142857142857, 0.015625, 0.1111111111111111, 0.027777777777777776, 0.02857142857142857, 0, 0.05263157894736842, 0, 0.1111111111111111, 0.029411764705882353, 0.016129032258064516 ]
11
0.038267
def _write_csv(filepath, data, kwargs): """See documentation of mpu.io.write.""" kwargs_open = {'newline': ''} mode = 'w' if sys.version_info < (3, 0): kwargs_open.pop('newline', None) mode = 'wb' with open(filepath, mode, **kwargs_open) as fp: if 'delimiter' not in kwargs: kwargs['delimiter'] = ',' if 'quotechar' not in kwargs: kwargs['quotechar'] = '"' with open(filepath, 'w') as fp: writer = csv.writer(fp, **kwargs) writer.writerows(data) return data
[ "def", "_write_csv", "(", "filepath", ",", "data", ",", "kwargs", ")", ":", "kwargs_open", "=", "{", "'newline'", ":", "''", "}", "mode", "=", "'w'", "if", "sys", ".", "version_info", "<", "(", "3", ",", "0", ")", ":", "kwargs_open", ".", "pop", "(", "'newline'", ",", "None", ")", "mode", "=", "'wb'", "with", "open", "(", "filepath", ",", "mode", ",", "*", "*", "kwargs_open", ")", "as", "fp", ":", "if", "'delimiter'", "not", "in", "kwargs", ":", "kwargs", "[", "'delimiter'", "]", "=", "','", "if", "'quotechar'", "not", "in", "kwargs", ":", "kwargs", "[", "'quotechar'", "]", "=", "'\"'", "with", "open", "(", "filepath", ",", "'w'", ")", "as", "fp", ":", "writer", "=", "csv", ".", "writer", "(", "fp", ",", "*", "*", "kwargs", ")", "writer", ".", "writerows", "(", "data", ")", "return", "data" ]
34.625
0.001757
[ "def _write_csv(filepath, data, kwargs):\n", " \"\"\"See documentation of mpu.io.write.\"\"\"\n", " kwargs_open = {'newline': ''}\n", " mode = 'w'\n", " if sys.version_info < (3, 0):\n", " kwargs_open.pop('newline', None)\n", " mode = 'wb'\n", " with open(filepath, mode, **kwargs_open) as fp:\n", " if 'delimiter' not in kwargs:\n", " kwargs['delimiter'] = ','\n", " if 'quotechar' not in kwargs:\n", " kwargs['quotechar'] = '\"'\n", " with open(filepath, 'w') as fp:\n", " writer = csv.writer(fp, **kwargs)\n", " writer.writerows(data)\n", " return data" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.06666666666666667 ]
16
0.004167
def sorted_nicely(l): """ Sort the given iterable in the way that humans expect. http://blog.codinghorror.com/sorting-for-humans-natural-sort-order/ """ convert = lambda text: int(text) if text.isdigit() else text alphanum_key = lambda key: [ convert(c) for c in re.split('([0-9]+)', key) ] return sorted(l, key = alphanum_key)
[ "def", "sorted_nicely", "(", "l", ")", ":", "convert", "=", "lambda", "text", ":", "int", "(", "text", ")", "if", "text", ".", "isdigit", "(", ")", "else", "text", "alphanum_key", "=", "lambda", "key", ":", "[", "convert", "(", "c", ")", "for", "c", "in", "re", ".", "split", "(", "'([0-9]+)'", ",", "key", ")", "]", "return", "sorted", "(", "l", ",", "key", "=", "alphanum_key", ")" ]
43.5
0.025352
[ "def sorted_nicely(l):\n", " \"\"\"\n", " Sort the given iterable in the way that humans expect.\n", " http://blog.codinghorror.com/sorting-for-humans-natural-sort-order/\n", " \"\"\"\n", " convert = lambda text: int(text) if text.isdigit() else text\n", " alphanum_key = lambda key: [ convert(c) for c in re.split('([0-9]+)', key) ]\n", " return sorted(l, key = alphanum_key)" ]
[ 0.045454545454545456, 0, 0, 0, 0, 0.015384615384615385, 0.04938271604938271, 0.075 ]
8
0.023153
def insert_string_at_line(input_file: str, string_to_be_inserted: str, put_at_line_number: int, output_file: str, append: bool = True, newline_character: str = '\n'): r"""Write a string at the specified line. :parameter input_file: the file that needs to be read. :parameter string_to_be_inserted: the string that needs to be added. :parameter put_at_line_number: the line number on which to append the string. :parameter output_file: the file that needs to be written with the new content. :parameter append: decides whether to append or prepend the string at the selected line. Defaults to ``True``. :parameter newline_character: set the character used to fill the file in case line_number is greater than the number of lines of input_file. Defaults to ``\n``. :type input_file: str :type string_to_be_inserted: str :type line_number: int :type output_file: str :type append: bool :type newline_character: str :returns: None :raises: LineOutOfFileBoundsError or a built-in exception. .. note:: Line numbers start from ``1``. """ assert put_at_line_number >= 1 with open(input_file, 'r') as f: lines = f.readlines() line_counter = 1 i = 0 loop = True extra_lines_done = False line_number_after_eof = len(lines) + 1 with atomic_write(output_file, overwrite=True) as f: while loop: if put_at_line_number > len( lines) and line_counter == line_number_after_eof: # There are extra lines to write. line = str() else: line = lines[i] # It is ok if the position of line to be written is greater # than the last line number of the input file. We just need to add # the appropriate number of new line characters which will fill # the non existing lines of the output file. if put_at_line_number > len( lines) and line_counter == line_number_after_eof: for additional_newlines in range( 0, put_at_line_number - len(lines) - 1): # Skip the newline in the line where we need to insert # the new string. f.write(newline_character) line_counter += 1 i += 1 extra_lines_done = True if line_counter == put_at_line_number: # A very simple append operation: if the original line ends # with a '\n' character, the string will be added on the next # line... if append: line = line + string_to_be_inserted # ...otherwise the string is prepended. else: line = string_to_be_inserted + line f.write(line) line_counter += 1 i += 1 # Quit the loop if there is nothing more to write. if i >= len(lines): loop = False # Continue looping if there are still extra lines to write. if put_at_line_number > len(lines) and not extra_lines_done: loop = True
[ "def", "insert_string_at_line", "(", "input_file", ":", "str", ",", "string_to_be_inserted", ":", "str", ",", "put_at_line_number", ":", "int", ",", "output_file", ":", "str", ",", "append", ":", "bool", "=", "True", ",", "newline_character", ":", "str", "=", "'\\n'", ")", ":", "assert", "put_at_line_number", ">=", "1", "with", "open", "(", "input_file", ",", "'r'", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "line_counter", "=", "1", "i", "=", "0", "loop", "=", "True", "extra_lines_done", "=", "False", "line_number_after_eof", "=", "len", "(", "lines", ")", "+", "1", "with", "atomic_write", "(", "output_file", ",", "overwrite", "=", "True", ")", "as", "f", ":", "while", "loop", ":", "if", "put_at_line_number", ">", "len", "(", "lines", ")", "and", "line_counter", "==", "line_number_after_eof", ":", "# There are extra lines to write.", "line", "=", "str", "(", ")", "else", ":", "line", "=", "lines", "[", "i", "]", "# It is ok if the position of line to be written is greater", "# than the last line number of the input file. We just need to add", "# the appropriate number of new line characters which will fill", "# the non existing lines of the output file.", "if", "put_at_line_number", ">", "len", "(", "lines", ")", "and", "line_counter", "==", "line_number_after_eof", ":", "for", "additional_newlines", "in", "range", "(", "0", ",", "put_at_line_number", "-", "len", "(", "lines", ")", "-", "1", ")", ":", "# Skip the newline in the line where we need to insert", "# the new string.", "f", ".", "write", "(", "newline_character", ")", "line_counter", "+=", "1", "i", "+=", "1", "extra_lines_done", "=", "True", "if", "line_counter", "==", "put_at_line_number", ":", "# A very simple append operation: if the original line ends", "# with a '\\n' character, the string will be added on the next", "# line...", "if", "append", ":", "line", "=", "line", "+", "string_to_be_inserted", "# ...otherwise the string is prepended.", "else", ":", "line", "=", "string_to_be_inserted", "+", "line", "f", ".", "write", "(", "line", ")", "line_counter", "+=", "1", "i", "+=", "1", "# Quit the loop if there is nothing more to write.", "if", "i", ">=", "len", "(", "lines", ")", ":", "loop", "=", "False", "# Continue looping if there are still extra lines to write.", "if", "put_at_line_number", ">", "len", "(", "lines", ")", "and", "not", "extra_lines_done", ":", "loop", "=", "True" ]
40.719512
0.000292
[ "def insert_string_at_line(input_file: str,\n", " string_to_be_inserted: str,\n", " put_at_line_number: int,\n", " output_file: str,\n", " append: bool = True,\n", " newline_character: str = '\\n'):\n", " r\"\"\"Write a string at the specified line.\n", "\n", " :parameter input_file: the file that needs to be read.\n", " :parameter string_to_be_inserted: the string that needs to be added.\n", " :parameter put_at_line_number: the line number on which to append the\n", " string.\n", " :parameter output_file: the file that needs to be written with the new\n", " content.\n", " :parameter append: decides whether to append or prepend the string at the\n", " selected line. Defaults to ``True``.\n", " :parameter newline_character: set the character used to fill the file\n", " in case line_number is greater than the number of lines of\n", " input_file. Defaults to ``\\n``.\n", " :type input_file: str\n", " :type string_to_be_inserted: str\n", " :type line_number: int\n", " :type output_file: str\n", " :type append: bool\n", " :type newline_character: str\n", " :returns: None\n", " :raises: LineOutOfFileBoundsError or a built-in exception.\n", "\n", " .. note::\n", " Line numbers start from ``1``.\n", " \"\"\"\n", " assert put_at_line_number >= 1\n", "\n", " with open(input_file, 'r') as f:\n", " lines = f.readlines()\n", "\n", " line_counter = 1\n", " i = 0\n", " loop = True\n", " extra_lines_done = False\n", " line_number_after_eof = len(lines) + 1\n", " with atomic_write(output_file, overwrite=True) as f:\n", " while loop:\n", " if put_at_line_number > len(\n", " lines) and line_counter == line_number_after_eof:\n", " # There are extra lines to write.\n", " line = str()\n", " else:\n", " line = lines[i]\n", " # It is ok if the position of line to be written is greater\n", " # than the last line number of the input file. We just need to add\n", " # the appropriate number of new line characters which will fill\n", " # the non existing lines of the output file.\n", " if put_at_line_number > len(\n", " lines) and line_counter == line_number_after_eof:\n", " for additional_newlines in range(\n", " 0, put_at_line_number - len(lines) - 1):\n", " # Skip the newline in the line where we need to insert\n", " # the new string.\n", " f.write(newline_character)\n", " line_counter += 1\n", " i += 1\n", " extra_lines_done = True\n", "\n", " if line_counter == put_at_line_number:\n", " # A very simple append operation: if the original line ends\n", " # with a '\\n' character, the string will be added on the next\n", " # line...\n", " if append:\n", " line = line + string_to_be_inserted\n", " # ...otherwise the string is prepended.\n", " else:\n", " line = string_to_be_inserted + line\n", " f.write(line)\n", " line_counter += 1\n", " i += 1\n", " # Quit the loop if there is nothing more to write.\n", " if i >= len(lines):\n", " loop = False\n", " # Continue looping if there are still extra lines to write.\n", " if put_at_line_number > len(lines) and not extra_lines_done:\n", " loop = True" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035 ]
82
0.000452
def add_actions(target, actions, insert_before=None): """Add actions to a QMenu or a QToolBar.""" previous_action = None target_actions = list(target.actions()) if target_actions: previous_action = target_actions[-1] if previous_action.isSeparator(): previous_action = None for action in actions: if (action is None) and (previous_action is not None): if insert_before is None: target.addSeparator() else: target.insertSeparator(insert_before) elif isinstance(action, QMenu): if insert_before is None: target.addMenu(action) else: target.insertMenu(insert_before, action) elif isinstance(action, QAction): if isinstance(action, SpyderAction): if isinstance(target, QMenu) or not isinstance(target, QToolBar): try: action = action.no_icon_action except RuntimeError: continue if insert_before is None: # This is needed in order to ignore adding an action whose # wrapped C/C++ object has been deleted. See issue 5074 try: target.addAction(action) except RuntimeError: continue else: target.insertAction(insert_before, action) previous_action = action
[ "def", "add_actions", "(", "target", ",", "actions", ",", "insert_before", "=", "None", ")", ":", "previous_action", "=", "None", "target_actions", "=", "list", "(", "target", ".", "actions", "(", ")", ")", "if", "target_actions", ":", "previous_action", "=", "target_actions", "[", "-", "1", "]", "if", "previous_action", ".", "isSeparator", "(", ")", ":", "previous_action", "=", "None", "for", "action", "in", "actions", ":", "if", "(", "action", "is", "None", ")", "and", "(", "previous_action", "is", "not", "None", ")", ":", "if", "insert_before", "is", "None", ":", "target", ".", "addSeparator", "(", ")", "else", ":", "target", ".", "insertSeparator", "(", "insert_before", ")", "elif", "isinstance", "(", "action", ",", "QMenu", ")", ":", "if", "insert_before", "is", "None", ":", "target", ".", "addMenu", "(", "action", ")", "else", ":", "target", ".", "insertMenu", "(", "insert_before", ",", "action", ")", "elif", "isinstance", "(", "action", ",", "QAction", ")", ":", "if", "isinstance", "(", "action", ",", "SpyderAction", ")", ":", "if", "isinstance", "(", "target", ",", "QMenu", ")", "or", "not", "isinstance", "(", "target", ",", "QToolBar", ")", ":", "try", ":", "action", "=", "action", ".", "no_icon_action", "except", "RuntimeError", ":", "continue", "if", "insert_before", "is", "None", ":", "# This is needed in order to ignore adding an action whose\r", "# wrapped C/C++ object has been deleted. See issue 5074\r", "try", ":", "target", ".", "addAction", "(", "action", ")", "except", "RuntimeError", ":", "continue", "else", ":", "target", ".", "insertAction", "(", "insert_before", ",", "action", ")", "previous_action", "=", "action" ]
41.694444
0.001302
[ "def add_actions(target, actions, insert_before=None):\r\n", " \"\"\"Add actions to a QMenu or a QToolBar.\"\"\"\r\n", " previous_action = None\r\n", " target_actions = list(target.actions())\r\n", " if target_actions:\r\n", " previous_action = target_actions[-1]\r\n", " if previous_action.isSeparator():\r\n", " previous_action = None\r\n", " for action in actions:\r\n", " if (action is None) and (previous_action is not None):\r\n", " if insert_before is None:\r\n", " target.addSeparator()\r\n", " else:\r\n", " target.insertSeparator(insert_before)\r\n", " elif isinstance(action, QMenu):\r\n", " if insert_before is None:\r\n", " target.addMenu(action)\r\n", " else:\r\n", " target.insertMenu(insert_before, action)\r\n", " elif isinstance(action, QAction):\r\n", " if isinstance(action, SpyderAction):\r\n", " if isinstance(target, QMenu) or not isinstance(target, QToolBar):\r\n", " try:\r\n", " action = action.no_icon_action\r\n", " except RuntimeError:\r\n", " continue\r\n", " if insert_before is None:\r\n", " # This is needed in order to ignore adding an action whose\r\n", " # wrapped C/C++ object has been deleted. See issue 5074\r\n", " try:\r\n", " target.addAction(action)\r\n", " except RuntimeError:\r\n", " continue\r\n", " else:\r\n", " target.insertAction(insert_before, action)\r\n", " previous_action = action" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03125 ]
36
0.001203
def send(self, request, ordered=False): """ This method enqueues the given request to be sent. Its send state will be saved until a response arrives, and a ``Future`` that will be resolved when the response arrives will be returned: .. code-block:: python async def method(): # Sending (enqueued for the send loop) future = sender.send(request) # Receiving (waits for the receive loop to read the result) result = await future Designed like this because Telegram may send the response at any point, and it can send other items while one waits for it. Once the response for this future arrives, it is set with the received result, quite similar to how a ``receive()`` call would otherwise work. Since the receiving part is "built in" the future, it's impossible to await receive a result that was never sent. """ if not self._user_connected: raise ConnectionError('Cannot send requests while disconnected') if not utils.is_list_like(request): state = RequestState(request, self._loop) self._send_queue.append(state) return state.future else: states = [] futures = [] state = None for req in request: state = RequestState(req, self._loop, after=ordered and state) states.append(state) futures.append(state.future) self._send_queue.extend(states) return futures
[ "def", "send", "(", "self", ",", "request", ",", "ordered", "=", "False", ")", ":", "if", "not", "self", ".", "_user_connected", ":", "raise", "ConnectionError", "(", "'Cannot send requests while disconnected'", ")", "if", "not", "utils", ".", "is_list_like", "(", "request", ")", ":", "state", "=", "RequestState", "(", "request", ",", "self", ".", "_loop", ")", "self", ".", "_send_queue", ".", "append", "(", "state", ")", "return", "state", ".", "future", "else", ":", "states", "=", "[", "]", "futures", "=", "[", "]", "state", "=", "None", "for", "req", "in", "request", ":", "state", "=", "RequestState", "(", "req", ",", "self", ".", "_loop", ",", "after", "=", "ordered", "and", "state", ")", "states", ".", "append", "(", "state", ")", "futures", ".", "append", "(", "state", ".", "future", ")", "self", ".", "_send_queue", ".", "extend", "(", "states", ")", "return", "futures" ]
38.926829
0.001222
[ "def send(self, request, ordered=False):\n", " \"\"\"\n", " This method enqueues the given request to be sent. Its send\n", " state will be saved until a response arrives, and a ``Future``\n", " that will be resolved when the response arrives will be returned:\n", "\n", " .. code-block:: python\n", "\n", " async def method():\n", " # Sending (enqueued for the send loop)\n", " future = sender.send(request)\n", " # Receiving (waits for the receive loop to read the result)\n", " result = await future\n", "\n", " Designed like this because Telegram may send the response at\n", " any point, and it can send other items while one waits for it.\n", " Once the response for this future arrives, it is set with the\n", " received result, quite similar to how a ``receive()`` call\n", " would otherwise work.\n", "\n", " Since the receiving part is \"built in\" the future, it's\n", " impossible to await receive a result that was never sent.\n", " \"\"\"\n", " if not self._user_connected:\n", " raise ConnectionError('Cannot send requests while disconnected')\n", "\n", " if not utils.is_list_like(request):\n", " state = RequestState(request, self._loop)\n", " self._send_queue.append(state)\n", " return state.future\n", " else:\n", " states = []\n", " futures = []\n", " state = None\n", " for req in request:\n", " state = RequestState(req, self._loop, after=ordered and state)\n", " states.append(state)\n", " futures.append(state.future)\n", "\n", " self._send_queue.extend(states)\n", " return futures" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464 ]
41
0.002971
def main(args=sys.argv): """ main entry point for the manifest CLI """ if len(args) < 2: return usage("Command expected") command = args[1] rest = args[2:] if "create".startswith(command): return cli_create(rest) elif "query".startswith(command): return cli_query(rest) elif "verify".startswith(command): return cli_verify(rest) else: return usage("Unknown command: %s" % command)
[ "def", "main", "(", "args", "=", "sys", ".", "argv", ")", ":", "if", "len", "(", "args", ")", "<", "2", ":", "return", "usage", "(", "\"Command expected\"", ")", "command", "=", "args", "[", "1", "]", "rest", "=", "args", "[", "2", ":", "]", "if", "\"create\"", ".", "startswith", "(", "command", ")", ":", "return", "cli_create", "(", "rest", ")", "elif", "\"query\"", ".", "startswith", "(", "command", ")", ":", "return", "cli_query", "(", "rest", ")", "elif", "\"verify\"", ".", "startswith", "(", "command", ")", ":", "return", "cli_verify", "(", "rest", ")", "else", ":", "return", "usage", "(", "\"Unknown command: %s\"", "%", "command", ")" ]
23.421053
0.00216
[ "def main(args=sys.argv):\n", " \"\"\"\n", " main entry point for the manifest CLI\n", " \"\"\"\n", "\n", " if len(args) < 2:\n", " return usage(\"Command expected\")\n", "\n", " command = args[1]\n", " rest = args[2:]\n", "\n", " if \"create\".startswith(command):\n", " return cli_create(rest)\n", " elif \"query\".startswith(command):\n", " return cli_query(rest)\n", " elif \"verify\".startswith(command):\n", " return cli_verify(rest)\n", " else:\n", " return usage(\"Unknown command: %s\" % command)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.018867924528301886 ]
19
0.000993
def matches(self, verb, params): """ Test if the method matches the provided set of arguments :param verb: HTTP verb. Uppercase :type verb: str :param params: Existing route parameters :type params: set :returns: Whether this view matches :rtype: bool """ return (self.ifset is None or self.ifset <= params) and \ (self.ifnset is None or self.ifnset.isdisjoint(params)) and \ (self.methods is None or verb in self.methods)
[ "def", "matches", "(", "self", ",", "verb", ",", "params", ")", ":", "return", "(", "self", ".", "ifset", "is", "None", "or", "self", ".", "ifset", "<=", "params", ")", "and", "(", "self", ".", "ifnset", "is", "None", "or", "self", ".", "ifnset", ".", "isdisjoint", "(", "params", ")", ")", "and", "(", "self", ".", "methods", "is", "None", "or", "verb", "in", "self", ".", "methods", ")" ]
40.384615
0.009311
[ "def matches(self, verb, params):\n", " \"\"\" Test if the method matches the provided set of arguments\n", "\n", " :param verb: HTTP verb. Uppercase\n", " :type verb: str\n", " :param params: Existing route parameters\n", " :type params: set\n", " :returns: Whether this view matches\n", " :rtype: bool\n", " \"\"\"\n", " return (self.ifset is None or self.ifset <= params) and \\\n", " (self.ifnset is None or self.ifnset.isdisjoint(params)) and \\\n", " (self.methods is None or verb in self.methods)" ]
[ 0, 0.014492753623188406, 0, 0, 0, 0, 0, 0, 0, 0, 0.025974025974025976, 0.01282051282051282, 0.01639344262295082 ]
13
0.00536
def set_dimensional_calibrations(self, dimensional_calibrations: typing.List[CalibrationModule.Calibration]) -> None: """Set the dimensional calibrations. :param dimensional_calibrations: A list of calibrations, must match the dimensions of the data. .. versionadded:: 1.0 Scriptable: Yes """ self.__data_item.set_dimensional_calibrations(dimensional_calibrations)
[ "def", "set_dimensional_calibrations", "(", "self", ",", "dimensional_calibrations", ":", "typing", ".", "List", "[", "CalibrationModule", ".", "Calibration", "]", ")", "->", "None", ":", "self", ".", "__data_item", ".", "set_dimensional_calibrations", "(", "dimensional_calibrations", ")" ]
40.6
0.009639
[ "def set_dimensional_calibrations(self, dimensional_calibrations: typing.List[CalibrationModule.Calibration]) -> None:\n", " \"\"\"Set the dimensional calibrations.\n", "\n", " :param dimensional_calibrations: A list of calibrations, must match the dimensions of the data.\n", "\n", " .. versionadded:: 1.0\n", "\n", " Scriptable: Yes\n", " \"\"\"\n", " self.__data_item.set_dimensional_calibrations(dimensional_calibrations)" ]
[ 0.00847457627118644, 0.022222222222222223, 0, 0.009615384615384616, 0, 0, 0, 0, 0, 0.012658227848101266 ]
10
0.005297
def add(self, name, proc_cls, **kwargs): """ Add a function implementation fo the library. :param name: The name of the function as a string :param proc_cls: The implementation of the function as a SimProcedure _class_, not instance :param kwargs: Any additional parameters to the procedure class constructor may be passed as kwargs """ self.procedures[name] = proc_cls(display_name=name, **kwargs)
[ "def", "add", "(", "self", ",", "name", ",", "proc_cls", ",", "*", "*", "kwargs", ")", ":", "self", ".", "procedures", "[", "name", "]", "=", "proc_cls", "(", "display_name", "=", "name", ",", "*", "*", "kwargs", ")" ]
51.333333
0.008511
[ "def add(self, name, proc_cls, **kwargs):\n", " \"\"\"\n", " Add a function implementation fo the library.\n", "\n", " :param name: The name of the function as a string\n", " :param proc_cls: The implementation of the function as a SimProcedure _class_, not instance\n", " :param kwargs: Any additional parameters to the procedure class constructor may be passed as kwargs\n", " \"\"\"\n", " self.procedures[name] = proc_cls(display_name=name, **kwargs)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0.009708737864077669, 0.008849557522123894, 0, 0.014492753623188406 ]
9
0.012932
def integrations(since, to, write, force): """ Generates a markdown file containing the list of integrations shipped in a given Agent release. Agent version numbers are derived inspecting tags on `integrations-core` so running this tool might provide unexpected results if the repo is not up to date with the Agent release process. If neither `--since` or `--to` are passed (the most common use case), the tool will generate the list for every Agent since version 6.3.0 (before that point we don't have enough information to build the log). """ agent_tags = get_agent_tags(since, to) # get the list of integrations shipped with the agent from the requirements file req_file_name = os.path.basename(get_agent_release_requirements()) integrations_contents = StringIO() for tag in agent_tags: integrations_contents.write('## Datadog Agent version {}\n\n'.format(tag)) # Requirements for current tag file_contents = git_show_file(req_file_name, tag) for name, ver in iteritems(parse_agent_req_file(file_contents)): integrations_contents.write('* {}: {}\n'.format(name, ver)) integrations_contents.write('\n') # save the changelog on disk if --write was passed if write: dest = get_agent_integrations_file() # don't overwrite an existing file if os.path.exists(dest) and not force: msg = "Output file {} already exists, run the command again with --force to overwrite" abort(msg.format(dest)) write_file(dest, integrations_contents.getvalue()) else: echo_info(integrations_contents.getvalue())
[ "def", "integrations", "(", "since", ",", "to", ",", "write", ",", "force", ")", ":", "agent_tags", "=", "get_agent_tags", "(", "since", ",", "to", ")", "# get the list of integrations shipped with the agent from the requirements file", "req_file_name", "=", "os", ".", "path", ".", "basename", "(", "get_agent_release_requirements", "(", ")", ")", "integrations_contents", "=", "StringIO", "(", ")", "for", "tag", "in", "agent_tags", ":", "integrations_contents", ".", "write", "(", "'## Datadog Agent version {}\\n\\n'", ".", "format", "(", "tag", ")", ")", "# Requirements for current tag", "file_contents", "=", "git_show_file", "(", "req_file_name", ",", "tag", ")", "for", "name", ",", "ver", "in", "iteritems", "(", "parse_agent_req_file", "(", "file_contents", ")", ")", ":", "integrations_contents", ".", "write", "(", "'* {}: {}\\n'", ".", "format", "(", "name", ",", "ver", ")", ")", "integrations_contents", ".", "write", "(", "'\\n'", ")", "# save the changelog on disk if --write was passed", "if", "write", ":", "dest", "=", "get_agent_integrations_file", "(", ")", "# don't overwrite an existing file", "if", "os", ".", "path", ".", "exists", "(", "dest", ")", "and", "not", "force", ":", "msg", "=", "\"Output file {} already exists, run the command again with --force to overwrite\"", "abort", "(", "msg", ".", "format", "(", "dest", ")", ")", "write_file", "(", "dest", ",", "integrations_contents", ".", "getvalue", "(", ")", ")", "else", ":", "echo_info", "(", "integrations_contents", ".", "getvalue", "(", ")", ")" ]
46.914286
0.002387
[ "def integrations(since, to, write, force):\n", " \"\"\"\n", " Generates a markdown file containing the list of integrations shipped in a\n", " given Agent release. Agent version numbers are derived inspecting tags on\n", " `integrations-core` so running this tool might provide unexpected results\n", " if the repo is not up to date with the Agent release process.\n", "\n", " If neither `--since` or `--to` are passed (the most common use case), the\n", " tool will generate the list for every Agent since version 6.3.0\n", " (before that point we don't have enough information to build the log).\n", " \"\"\"\n", " agent_tags = get_agent_tags(since, to)\n", " # get the list of integrations shipped with the agent from the requirements file\n", " req_file_name = os.path.basename(get_agent_release_requirements())\n", "\n", " integrations_contents = StringIO()\n", " for tag in agent_tags:\n", " integrations_contents.write('## Datadog Agent version {}\\n\\n'.format(tag))\n", " # Requirements for current tag\n", " file_contents = git_show_file(req_file_name, tag)\n", " for name, ver in iteritems(parse_agent_req_file(file_contents)):\n", " integrations_contents.write('* {}: {}\\n'.format(name, ver))\n", " integrations_contents.write('\\n')\n", "\n", " # save the changelog on disk if --write was passed\n", " if write:\n", " dest = get_agent_integrations_file()\n", " # don't overwrite an existing file\n", " if os.path.exists(dest) and not force:\n", " msg = \"Output file {} already exists, run the command again with --force to overwrite\"\n", " abort(msg.format(dest))\n", "\n", " write_file(dest, integrations_contents.getvalue())\n", " else:\n", " echo_info(integrations_contents.getvalue())" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010101010101010102, 0, 0, 0, 0, 0.0196078431372549 ]
35
0.001529
def pass_from_pipe(cls): """Return password from pipe if not on TTY, else False. """ is_pipe = not sys.stdin.isatty() return is_pipe and cls.strip_last_newline(sys.stdin.read())
[ "def", "pass_from_pipe", "(", "cls", ")", ":", "is_pipe", "=", "not", "sys", ".", "stdin", ".", "isatty", "(", ")", "return", "is_pipe", "and", "cls", ".", "strip_last_newline", "(", "sys", ".", "stdin", ".", "read", "(", ")", ")" ]
41
0.009569
[ "def pass_from_pipe(cls):\n", " \"\"\"Return password from pipe if not on TTY, else False.\n", " \"\"\"\n", " is_pipe = not sys.stdin.isatty()\n", " return is_pipe and cls.strip_last_newline(sys.stdin.read())" ]
[ 0, 0.015625, 0, 0, 0.014925373134328358 ]
5
0.00611
def _arrange_fields(self, channels, sampfrom=0, expanded=False): """ Arrange/edit object fields to reflect user channel and/or signal range input. Parameters ---------- channels : list List of channel numbers specified. sampfrom : int, optional Starting sample number read. expanded : bool, optional Whether the record was read in expanded mode. """ # Rearrange signal specification fields for field in _header.SIGNAL_SPECS.index: item = getattr(self, field) setattr(self, field, [item[c] for c in channels]) # Expanded signals - multiple samples per frame. if expanded: # Checksum and init_value to be updated if present # unless the whole signal length was input if self.sig_len != int(len(self.e_d_signal[0]) / self.samps_per_frame[0]): self.checksum = self.calc_checksum(expanded) self.init_value = [s[0] for s in self.e_d_signal] self.n_sig = len(channels) self.sig_len = int(len(self.e_d_signal[0]) / self.samps_per_frame[0]) # MxN numpy array d_signal else: # Checksum and init_value to be updated if present # unless the whole signal length was input if self.sig_len != self.d_signal.shape[0]: if self.checksum is not None: self.checksum = self.calc_checksum() if self.init_value is not None: ival = list(self.d_signal[0, :]) self.init_value = [int(i) for i in ival] # Update record specification parameters # Important that these get updated after^^ self.n_sig = len(channels) self.sig_len = self.d_signal.shape[0] # Adjust date and time if necessary self._adjust_datetime(sampfrom=sampfrom)
[ "def", "_arrange_fields", "(", "self", ",", "channels", ",", "sampfrom", "=", "0", ",", "expanded", "=", "False", ")", ":", "# Rearrange signal specification fields", "for", "field", "in", "_header", ".", "SIGNAL_SPECS", ".", "index", ":", "item", "=", "getattr", "(", "self", ",", "field", ")", "setattr", "(", "self", ",", "field", ",", "[", "item", "[", "c", "]", "for", "c", "in", "channels", "]", ")", "# Expanded signals - multiple samples per frame.", "if", "expanded", ":", "# Checksum and init_value to be updated if present", "# unless the whole signal length was input", "if", "self", ".", "sig_len", "!=", "int", "(", "len", "(", "self", ".", "e_d_signal", "[", "0", "]", ")", "/", "self", ".", "samps_per_frame", "[", "0", "]", ")", ":", "self", ".", "checksum", "=", "self", ".", "calc_checksum", "(", "expanded", ")", "self", ".", "init_value", "=", "[", "s", "[", "0", "]", "for", "s", "in", "self", ".", "e_d_signal", "]", "self", ".", "n_sig", "=", "len", "(", "channels", ")", "self", ".", "sig_len", "=", "int", "(", "len", "(", "self", ".", "e_d_signal", "[", "0", "]", ")", "/", "self", ".", "samps_per_frame", "[", "0", "]", ")", "# MxN numpy array d_signal", "else", ":", "# Checksum and init_value to be updated if present", "# unless the whole signal length was input", "if", "self", ".", "sig_len", "!=", "self", ".", "d_signal", ".", "shape", "[", "0", "]", ":", "if", "self", ".", "checksum", "is", "not", "None", ":", "self", ".", "checksum", "=", "self", ".", "calc_checksum", "(", ")", "if", "self", ".", "init_value", "is", "not", "None", ":", "ival", "=", "list", "(", "self", ".", "d_signal", "[", "0", ",", ":", "]", ")", "self", ".", "init_value", "=", "[", "int", "(", "i", ")", "for", "i", "in", "ival", "]", "# Update record specification parameters", "# Important that these get updated after^^", "self", ".", "n_sig", "=", "len", "(", "channels", ")", "self", ".", "sig_len", "=", "self", ".", "d_signal", ".", "shape", "[", "0", "]", "# Adjust date and time if necessary", "self", ".", "_adjust_datetime", "(", "sampfrom", "=", "sampfrom", ")" ]
37.705882
0.002027
[ "def _arrange_fields(self, channels, sampfrom=0, expanded=False):\n", " \"\"\"\n", " Arrange/edit object fields to reflect user channel and/or signal\n", " range input.\n", "\n", " Parameters\n", " ----------\n", " channels : list\n", " List of channel numbers specified.\n", " sampfrom : int, optional\n", " Starting sample number read.\n", " expanded : bool, optional\n", " Whether the record was read in expanded mode.\n", "\n", " \"\"\"\n", "\n", " # Rearrange signal specification fields\n", " for field in _header.SIGNAL_SPECS.index:\n", " item = getattr(self, field)\n", " setattr(self, field, [item[c] for c in channels])\n", "\n", " # Expanded signals - multiple samples per frame.\n", " if expanded:\n", " # Checksum and init_value to be updated if present\n", " # unless the whole signal length was input\n", " if self.sig_len != int(len(self.e_d_signal[0]) / self.samps_per_frame[0]):\n", " self.checksum = self.calc_checksum(expanded)\n", " self.init_value = [s[0] for s in self.e_d_signal]\n", "\n", " self.n_sig = len(channels)\n", " self.sig_len = int(len(self.e_d_signal[0]) / self.samps_per_frame[0])\n", "\n", " # MxN numpy array d_signal\n", " else:\n", " # Checksum and init_value to be updated if present\n", " # unless the whole signal length was input\n", " if self.sig_len != self.d_signal.shape[0]:\n", "\n", " if self.checksum is not None:\n", " self.checksum = self.calc_checksum()\n", " if self.init_value is not None:\n", " ival = list(self.d_signal[0, :])\n", " self.init_value = [int(i) for i in ival]\n", "\n", " # Update record specification parameters\n", " # Important that these get updated after^^\n", " self.n_sig = len(channels)\n", " self.sig_len = self.d_signal.shape[0]\n", "\n", " # Adjust date and time if necessary\n", " self._adjust_datetime(sampfrom=sampfrom)" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.020833333333333332 ]
51
0.002507
def get_motd(self, server=None): """ Gets the server's MOTD. Optional arguments: * server=None - Server to get the MOTD of. """ with self.lock: if not server: self.send('MOTD') else: self.send('MOTD %s' % server) motd = [] while self.readable(): msg = self._recv(expected_replies=('375', '372', '376', '422')) if msg[0] == '375': pass elif msg[0] == '372': motd.append(msg[2].replace(':', '', 1)) elif msg[0] == '376': break elif msg[0] == '422': break self.motd = tuple(motd) return self.motd
[ "def", "get_motd", "(", "self", ",", "server", "=", "None", ")", ":", "with", "self", ".", "lock", ":", "if", "not", "server", ":", "self", ".", "send", "(", "'MOTD'", ")", "else", ":", "self", ".", "send", "(", "'MOTD %s'", "%", "server", ")", "motd", "=", "[", "]", "while", "self", ".", "readable", "(", ")", ":", "msg", "=", "self", ".", "_recv", "(", "expected_replies", "=", "(", "'375'", ",", "'372'", ",", "'376'", ",", "'422'", ")", ")", "if", "msg", "[", "0", "]", "==", "'375'", ":", "pass", "elif", "msg", "[", "0", "]", "==", "'372'", ":", "motd", ".", "append", "(", "msg", "[", "2", "]", ".", "replace", "(", "':'", ",", "''", ",", "1", ")", ")", "elif", "msg", "[", "0", "]", "==", "'376'", ":", "break", "elif", "msg", "[", "0", "]", "==", "'422'", ":", "break", "self", ".", "motd", "=", "tuple", "(", "motd", ")", "return", "self", ".", "motd" ]
31.28
0.002481
[ "def get_motd(self, server=None):\n", " \"\"\"\n", " Gets the server's MOTD.\n", " Optional arguments:\n", " * server=None - Server to get the MOTD of.\n", " \"\"\"\n", " with self.lock:\n", " if not server:\n", " self.send('MOTD')\n", " else:\n", " self.send('MOTD %s' % server)\n", "\n", " motd = []\n", " while self.readable():\n", " msg = self._recv(expected_replies=('375', '372', '376', '422'))\n", " if msg[0] == '375':\n", " pass\n", " elif msg[0] == '372':\n", " motd.append(msg[2].replace(':', '', 1))\n", " elif msg[0] == '376':\n", " break\n", " elif msg[0] == '422':\n", " break\n", " self.motd = tuple(motd)\n", " return self.motd" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571 ]
25
0.004762
def _exclude_ipv4_networks(self, networks, networks_to_exclude): """ Exclude the list of networks from another list of networks and return a flat list of new networks. :param networks: List of IPv4 networks to exclude from :param networks_to_exclude: List of IPv4 networks to exclude :returns: Flat list of IPv4 networks """ for network_to_exclude in networks_to_exclude: def _exclude_ipv4_network(network): """ Exclude a single network from another single network and return a list of networks. Network to exclude comes from the outer scope. :param network: Network to exclude from :returns: Flat list of IPv4 networks after exclusion. If exclude fails because networks do not overlap, a single element list with the orignal network is returned. If it overlaps, even partially, the network is excluded. """ try: return list(network.address_exclude(network_to_exclude)) except ValueError: # If networks overlap partially, `address_exclude` # will fail, but the network still must not be used # in generation. if network.overlaps(network_to_exclude): return [] else: return [network] networks = list(map(_exclude_ipv4_network, networks)) # flatten list of lists networks = [ item for nested in networks for item in nested ] return networks
[ "def", "_exclude_ipv4_networks", "(", "self", ",", "networks", ",", "networks_to_exclude", ")", ":", "for", "network_to_exclude", "in", "networks_to_exclude", ":", "def", "_exclude_ipv4_network", "(", "network", ")", ":", "\"\"\"\n Exclude a single network from another single network\n and return a list of networks. Network to exclude\n comes from the outer scope.\n\n :param network: Network to exclude from\n :returns: Flat list of IPv4 networks after exclusion.\n If exclude fails because networks do not\n overlap, a single element list with the\n orignal network is returned. If it overlaps,\n even partially, the network is excluded.\n \"\"\"", "try", ":", "return", "list", "(", "network", ".", "address_exclude", "(", "network_to_exclude", ")", ")", "except", "ValueError", ":", "# If networks overlap partially, `address_exclude`", "# will fail, but the network still must not be used", "# in generation.", "if", "network", ".", "overlaps", "(", "network_to_exclude", ")", ":", "return", "[", "]", "else", ":", "return", "[", "network", "]", "networks", "=", "list", "(", "map", "(", "_exclude_ipv4_network", ",", "networks", ")", ")", "# flatten list of lists", "networks", "=", "[", "item", "for", "nested", "in", "networks", "for", "item", "in", "nested", "]", "return", "networks" ]
42.047619
0.001107
[ "def _exclude_ipv4_networks(self, networks, networks_to_exclude):\n", " \"\"\"\n", " Exclude the list of networks from another list of networks\n", " and return a flat list of new networks.\n", "\n", " :param networks: List of IPv4 networks to exclude from\n", " :param networks_to_exclude: List of IPv4 networks to exclude\n", " :returns: Flat list of IPv4 networks\n", " \"\"\"\n", " for network_to_exclude in networks_to_exclude:\n", " def _exclude_ipv4_network(network):\n", " \"\"\"\n", " Exclude a single network from another single network\n", " and return a list of networks. Network to exclude\n", " comes from the outer scope.\n", "\n", " :param network: Network to exclude from\n", " :returns: Flat list of IPv4 networks after exclusion.\n", " If exclude fails because networks do not\n", " overlap, a single element list with the\n", " orignal network is returned. If it overlaps,\n", " even partially, the network is excluded.\n", " \"\"\"\n", " try:\n", " return list(network.address_exclude(network_to_exclude))\n", " except ValueError:\n", " # If networks overlap partially, `address_exclude`\n", " # will fail, but the network still must not be used\n", " # in generation.\n", " if network.overlaps(network_to_exclude):\n", " return []\n", " else:\n", " return [network]\n", "\n", " networks = list(map(_exclude_ipv4_network, networks))\n", "\n", " # flatten list of lists\n", " networks = [\n", " item for nested in networks for item in nested\n", " ]\n", "\n", " return networks" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216 ]
42
0.003019
def inquire_property(name, doc=None): """Creates a property based on an inquire result This method creates a property that calls the :python:`_inquire` method, and return the value of the requested information. Args: name (str): the name of the 'inquire' result information Returns: property: the created property """ def inquire_property(self): if not self._started: msg = ("Cannot read {0} from a security context whose " "establishment has not yet been started.") raise AttributeError(msg) return getattr(self._inquire(**{name: True}), name) return property(inquire_property, doc=doc)
[ "def", "inquire_property", "(", "name", ",", "doc", "=", "None", ")", ":", "def", "inquire_property", "(", "self", ")", ":", "if", "not", "self", ".", "_started", ":", "msg", "=", "(", "\"Cannot read {0} from a security context whose \"", "\"establishment has not yet been started.\"", ")", "raise", "AttributeError", "(", "msg", ")", "return", "getattr", "(", "self", ".", "_inquire", "(", "*", "*", "{", "name", ":", "True", "}", ")", ",", "name", ")", "return", "property", "(", "inquire_property", ",", "doc", "=", "doc", ")" ]
29.652174
0.00142
[ "def inquire_property(name, doc=None):\n", " \"\"\"Creates a property based on an inquire result\n", "\n", " This method creates a property that calls the\n", " :python:`_inquire` method, and return the value of the\n", " requested information.\n", "\n", " Args:\n", " name (str): the name of the 'inquire' result information\n", "\n", " Returns:\n", " property: the created property\n", " \"\"\"\n", "\n", " def inquire_property(self):\n", " if not self._started:\n", " msg = (\"Cannot read {0} from a security context whose \"\n", " \"establishment has not yet been started.\")\n", " raise AttributeError(msg)\n", "\n", " return getattr(self._inquire(**{name: True}), name)\n", "\n", " return property(inquire_property, doc=doc)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608 ]
23
0.000945
def _process_table_cells(self, table): """ Compile all the table cells. Returns a list of rows. The rows may have different lengths because of column spans. """ rows = [] for i, tr in enumerate(table.find_all('tr')): row = [] for c in tr.contents: cell_type = getattr(c, 'name', None) if cell_type not in ('td', 'th'): continue rowspan = int(c.attrs.get('rowspan', 1)) colspan = int(c.attrs.get('colspan', 1)) contents = self._process_children(c).strip() if cell_type == 'th' and i > 0: contents = self._inline('**', contents) row.append(Cell(cell_type, rowspan, colspan, contents)) rows.append(row) return rows
[ "def", "_process_table_cells", "(", "self", ",", "table", ")", ":", "rows", "=", "[", "]", "for", "i", ",", "tr", "in", "enumerate", "(", "table", ".", "find_all", "(", "'tr'", ")", ")", ":", "row", "=", "[", "]", "for", "c", "in", "tr", ".", "contents", ":", "cell_type", "=", "getattr", "(", "c", ",", "'name'", ",", "None", ")", "if", "cell_type", "not", "in", "(", "'td'", ",", "'th'", ")", ":", "continue", "rowspan", "=", "int", "(", "c", ".", "attrs", ".", "get", "(", "'rowspan'", ",", "1", ")", ")", "colspan", "=", "int", "(", "c", ".", "attrs", ".", "get", "(", "'colspan'", ",", "1", ")", ")", "contents", "=", "self", ".", "_process_children", "(", "c", ")", ".", "strip", "(", ")", "if", "cell_type", "==", "'th'", "and", "i", ">", "0", ":", "contents", "=", "self", ".", "_inline", "(", "'**'", ",", "contents", ")", "row", ".", "append", "(", "Cell", "(", "cell_type", ",", "rowspan", ",", "colspan", ",", "contents", ")", ")", "rows", ".", "append", "(", "row", ")", "return", "rows" ]
26.967742
0.002309
[ "def _process_table_cells(self, table):\n", " \"\"\" Compile all the table cells.\n", "\n", " Returns a list of rows. The rows may have different lengths because of\n", " column spans.\n", "\n", " \"\"\"\n", "\n", " rows = []\n", "\n", " for i, tr in enumerate(table.find_all('tr')):\n", " row = []\n", "\n", " for c in tr.contents:\n", " cell_type = getattr(c, 'name', None)\n", "\n", " if cell_type not in ('td', 'th'):\n", " continue\n", "\n", " rowspan = int(c.attrs.get('rowspan', 1))\n", " colspan = int(c.attrs.get('colspan', 1))\n", " contents = self._process_children(c).strip()\n", "\n", " if cell_type == 'th' and i > 0:\n", " contents = self._inline('**', contents)\n", "\n", " row.append(Cell(cell_type, rowspan, colspan, contents))\n", "\n", " rows.append(row)\n", "\n", " return rows" ]
[ 0, 0.024390243902439025, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842 ]
31
0.002485
def computeNodeValues(cls, graph): """ Compute the value (size) of each node by summing the associated links. """ for node in graph['nodes']: source_val = np.sum([l['value'] for l in node['sourceLinks']]) target_val = np.sum([l['value'] for l in node['targetLinks']]) node['value'] = max([source_val, target_val])
[ "def", "computeNodeValues", "(", "cls", ",", "graph", ")", ":", "for", "node", "in", "graph", "[", "'nodes'", "]", ":", "source_val", "=", "np", ".", "sum", "(", "[", "l", "[", "'value'", "]", "for", "l", "in", "node", "[", "'sourceLinks'", "]", "]", ")", "target_val", "=", "np", ".", "sum", "(", "[", "l", "[", "'value'", "]", "for", "l", "in", "node", "[", "'targetLinks'", "]", "]", ")", "node", "[", "'value'", "]", "=", "max", "(", "[", "source_val", ",", "target_val", "]", ")" ]
46.75
0.010499
[ "def computeNodeValues(cls, graph):\n", " \"\"\"\n", " Compute the value (size) of each node by summing the associated links.\n", " \"\"\"\n", " for node in graph['nodes']:\n", " source_val = np.sum([l['value'] for l in node['sourceLinks']])\n", " target_val = np.sum([l['value'] for l in node['targetLinks']])\n", " node['value'] = max([source_val, target_val])" ]
[ 0, 0.08333333333333333, 0, 0, 0, 0.013333333333333334, 0.013333333333333334, 0.017543859649122806 ]
8
0.015943
def token_new(): """Create new token.""" form = TokenForm(request.form) form.scopes.choices = current_oauth2server.scope_choices() if form.validate_on_submit(): t = Token.create_personal( form.data['name'], current_user.get_id(), scopes=form.scopes.data ) db.session.commit() session['show_personal_access_token'] = True return redirect(url_for(".token_view", token_id=t.id)) if len(current_oauth2server.scope_choices()) == 0: del(form.scopes) return render_template( "invenio_oauth2server/settings/token_new.html", form=form, )
[ "def", "token_new", "(", ")", ":", "form", "=", "TokenForm", "(", "request", ".", "form", ")", "form", ".", "scopes", ".", "choices", "=", "current_oauth2server", ".", "scope_choices", "(", ")", "if", "form", ".", "validate_on_submit", "(", ")", ":", "t", "=", "Token", ".", "create_personal", "(", "form", ".", "data", "[", "'name'", "]", ",", "current_user", ".", "get_id", "(", ")", ",", "scopes", "=", "form", ".", "scopes", ".", "data", ")", "db", ".", "session", ".", "commit", "(", ")", "session", "[", "'show_personal_access_token'", "]", "=", "True", "return", "redirect", "(", "url_for", "(", "\".token_view\"", ",", "token_id", "=", "t", ".", "id", ")", ")", "if", "len", "(", "current_oauth2server", ".", "scope_choices", "(", ")", ")", "==", "0", ":", "del", "(", "form", ".", "scopes", ")", "return", "render_template", "(", "\"invenio_oauth2server/settings/token_new.html\"", ",", "form", "=", "form", ",", ")" ]
30.8
0.001575
[ "def token_new():\n", " \"\"\"Create new token.\"\"\"\n", " form = TokenForm(request.form)\n", " form.scopes.choices = current_oauth2server.scope_choices()\n", "\n", " if form.validate_on_submit():\n", " t = Token.create_personal(\n", " form.data['name'], current_user.get_id(), scopes=form.scopes.data\n", " )\n", " db.session.commit()\n", " session['show_personal_access_token'] = True\n", " return redirect(url_for(\".token_view\", token_id=t.id))\n", "\n", " if len(current_oauth2server.scope_choices()) == 0:\n", " del(form.scopes)\n", "\n", " return render_template(\n", " \"invenio_oauth2server/settings/token_new.html\",\n", " form=form,\n", " )" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2 ]
20
0.01
def post(self, request, *args, **kwargs): """The only circumstances when we POST is to submit the main form, both updating translations (if any changed) and advancing to the next page of messages. There is no notion of validation of this content; as implemented, unknown fields are ignored and a generic failure message is shown. Submitted changes are saved out to the specified .po file on the filesystem if that file is writable, otherwise the cached version of the file is updated (so it can be downloaded). Then the user is redirected to the next page of messages (if there is one; otherwise they're redirected back to the current page). """ # The message text inputs are captured as hashes of their initial # contents, preceded by "m_". Messages with plurals end with their # variation number. single_text_input_regex = re.compile(r'^m_([0-9a-f]+)$') plural_text_input_regex = re.compile(r'^m_([0-9a-f]+)_([0-9]+)$') file_change = False for field_name, new_msgstr in request.POST.items(): md5hash = None if plural_text_input_regex.match(field_name): md5hash, plural_id = plural_text_input_regex.match(field_name).groups() md5hash = str(md5hash) # polib parses .po files into unicode strings, but # doesn't bother to convert plural indexes to int, # so we need unicode here. plural_id = six.text_type(plural_id) # Above no longer true as of Polib 1.0.4 if plural_id and plural_id.isdigit(): plural_id = int(plural_id) elif single_text_input_regex.match(field_name): md5hash = str(single_text_input_regex.match(field_name).groups()[0]) plural_id = None if md5hash is not None: # Empty string should be processed! entry = self.po_file.find(md5hash, 'md5hash') # If someone did a makemessage, some entries might # have been removed, so we need to check. if entry: old_msgstr = entry.msgstr if plural_id is not None: # 0 is ok! entry.msgstr_plural[plural_id] = self.fix_nls( entry.msgid_plural, new_msgstr ) else: entry.msgstr = self.fix_nls(entry.msgid, new_msgstr) is_fuzzy = bool(self.request.POST.get('f_%s' % md5hash, False)) old_fuzzy = 'fuzzy' in entry.flags if old_fuzzy and not is_fuzzy: entry.flags.remove('fuzzy') elif not old_fuzzy and is_fuzzy: entry.flags.append('fuzzy') file_change = True if old_msgstr != new_msgstr or old_fuzzy != is_fuzzy: entry_changed.send(sender=entry, user=request.user, old_msgstr=old_msgstr, old_fuzzy=old_fuzzy, pofile=self.po_file_path, language_code=self.language_id, ) else: messages.error( self.request, _("Some items in your last translation block couldn't " "be saved: this usually happens when the catalog file " "changes on disk after you last loaded it."), ) if file_change and self.po_file_is_writable: try: self.po_file.metadata['Last-Translator'] = unicodedata.normalize( 'NFKD', u"%s %s <%s>" % ( getattr(self.request.user, 'first_name', 'Anonymous'), getattr(self.request.user, 'last_name', 'User'), getattr(self.request.user, 'email', 'anonymous@user.tld') ) ).encode('ascii', 'ignore') self.po_file.metadata['X-Translated-Using'] = u"django-rosetta %s" % ( get_rosetta_version()) self.po_file.metadata['PO-Revision-Date'] = timestamp_with_timezone() except UnicodeDecodeError: pass try: self.po_file.save() po_filepath, ext = os.path.splitext(self.po_file_path) if rosetta_settings.AUTO_COMPILE: self.po_file.save_as_mofile(po_filepath + '.mo') post_save.send(sender=None, language_code=self.language_id, request=self.request ) # Try auto-reloading via the WSGI daemon mode reload mechanism should_try_wsgi_reload = ( rosetta_settings.WSGI_AUTO_RELOAD and 'mod_wsgi.process_group' in self.request.environ and self.request.environ.get('mod_wsgi.process_group', None) and 'SCRIPT_FILENAME' in self.request.environ and int(self.request.environ.get('mod_wsgi.script_reloading', 0)) ) if should_try_wsgi_reload: try: os.utime(self.request.environ.get('SCRIPT_FILENAME'), None) except OSError: pass # Try auto-reloading via uwsgi daemon reload mechanism if rosetta_settings.UWSGI_AUTO_RELOAD: try: import uwsgi uwsgi.reload() # pretty easy right? except: pass # we may not be running under uwsgi :P # XXX: It would be nice to add a success message here! except Exception as e: messages.error(self.request, e) if file_change and not self.po_file_is_writable: storage = get_storage(self.request) storage.set(self.po_file_cache_key, self.po_file) # Reconstitute url to redirect to. Start with determining whether the # page number can be incremented. paginator = Paginator(self.get_entries(), rosetta_settings.MESSAGES_PER_PAGE) try: page = int(self._request_request('page', 1)) except ValueError: page = 1 # fall back to page 1 else: if not (0 < page <= paginator.num_pages): page = 1 if page < paginator.num_pages: page += 1 query_string_args = { 'msg_filter': self.msg_filter, 'query': self.query, 'ref_lang': self.ref_lang, 'page': page, } # Winnow down the query string args to non-blank ones query_string_args = {k: v for k, v in query_string_args.items() if v} return HttpResponseRedirect("{url}?{qs}".format( url=reverse('rosetta-form', kwargs=self.kwargs), qs=urlencode_safe(query_string_args) ))
[ "def", "post", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# The message text inputs are captured as hashes of their initial", "# contents, preceded by \"m_\". Messages with plurals end with their", "# variation number.", "single_text_input_regex", "=", "re", ".", "compile", "(", "r'^m_([0-9a-f]+)$'", ")", "plural_text_input_regex", "=", "re", ".", "compile", "(", "r'^m_([0-9a-f]+)_([0-9]+)$'", ")", "file_change", "=", "False", "for", "field_name", ",", "new_msgstr", "in", "request", ".", "POST", ".", "items", "(", ")", ":", "md5hash", "=", "None", "if", "plural_text_input_regex", ".", "match", "(", "field_name", ")", ":", "md5hash", ",", "plural_id", "=", "plural_text_input_regex", ".", "match", "(", "field_name", ")", ".", "groups", "(", ")", "md5hash", "=", "str", "(", "md5hash", ")", "# polib parses .po files into unicode strings, but", "# doesn't bother to convert plural indexes to int,", "# so we need unicode here.", "plural_id", "=", "six", ".", "text_type", "(", "plural_id", ")", "# Above no longer true as of Polib 1.0.4", "if", "plural_id", "and", "plural_id", ".", "isdigit", "(", ")", ":", "plural_id", "=", "int", "(", "plural_id", ")", "elif", "single_text_input_regex", ".", "match", "(", "field_name", ")", ":", "md5hash", "=", "str", "(", "single_text_input_regex", ".", "match", "(", "field_name", ")", ".", "groups", "(", ")", "[", "0", "]", ")", "plural_id", "=", "None", "if", "md5hash", "is", "not", "None", ":", "# Empty string should be processed!", "entry", "=", "self", ".", "po_file", ".", "find", "(", "md5hash", ",", "'md5hash'", ")", "# If someone did a makemessage, some entries might", "# have been removed, so we need to check.", "if", "entry", ":", "old_msgstr", "=", "entry", ".", "msgstr", "if", "plural_id", "is", "not", "None", ":", "# 0 is ok!", "entry", ".", "msgstr_plural", "[", "plural_id", "]", "=", "self", ".", "fix_nls", "(", "entry", ".", "msgid_plural", ",", "new_msgstr", ")", "else", ":", "entry", ".", "msgstr", "=", "self", ".", "fix_nls", "(", "entry", ".", "msgid", ",", "new_msgstr", ")", "is_fuzzy", "=", "bool", "(", "self", ".", "request", ".", "POST", ".", "get", "(", "'f_%s'", "%", "md5hash", ",", "False", ")", ")", "old_fuzzy", "=", "'fuzzy'", "in", "entry", ".", "flags", "if", "old_fuzzy", "and", "not", "is_fuzzy", ":", "entry", ".", "flags", ".", "remove", "(", "'fuzzy'", ")", "elif", "not", "old_fuzzy", "and", "is_fuzzy", ":", "entry", ".", "flags", ".", "append", "(", "'fuzzy'", ")", "file_change", "=", "True", "if", "old_msgstr", "!=", "new_msgstr", "or", "old_fuzzy", "!=", "is_fuzzy", ":", "entry_changed", ".", "send", "(", "sender", "=", "entry", ",", "user", "=", "request", ".", "user", ",", "old_msgstr", "=", "old_msgstr", ",", "old_fuzzy", "=", "old_fuzzy", ",", "pofile", "=", "self", ".", "po_file_path", ",", "language_code", "=", "self", ".", "language_id", ",", ")", "else", ":", "messages", ".", "error", "(", "self", ".", "request", ",", "_", "(", "\"Some items in your last translation block couldn't \"", "\"be saved: this usually happens when the catalog file \"", "\"changes on disk after you last loaded it.\"", ")", ",", ")", "if", "file_change", "and", "self", ".", "po_file_is_writable", ":", "try", ":", "self", ".", "po_file", ".", "metadata", "[", "'Last-Translator'", "]", "=", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "u\"%s %s <%s>\"", "%", "(", "getattr", "(", "self", ".", "request", ".", "user", ",", "'first_name'", ",", "'Anonymous'", ")", ",", "getattr", "(", "self", ".", "request", ".", "user", ",", "'last_name'", ",", "'User'", ")", ",", "getattr", "(", "self", ".", "request", ".", "user", ",", "'email'", ",", "'anonymous@user.tld'", ")", ")", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", "self", ".", "po_file", ".", "metadata", "[", "'X-Translated-Using'", "]", "=", "u\"django-rosetta %s\"", "%", "(", "get_rosetta_version", "(", ")", ")", "self", ".", "po_file", ".", "metadata", "[", "'PO-Revision-Date'", "]", "=", "timestamp_with_timezone", "(", ")", "except", "UnicodeDecodeError", ":", "pass", "try", ":", "self", ".", "po_file", ".", "save", "(", ")", "po_filepath", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "self", ".", "po_file_path", ")", "if", "rosetta_settings", ".", "AUTO_COMPILE", ":", "self", ".", "po_file", ".", "save_as_mofile", "(", "po_filepath", "+", "'.mo'", ")", "post_save", ".", "send", "(", "sender", "=", "None", ",", "language_code", "=", "self", ".", "language_id", ",", "request", "=", "self", ".", "request", ")", "# Try auto-reloading via the WSGI daemon mode reload mechanism", "should_try_wsgi_reload", "=", "(", "rosetta_settings", ".", "WSGI_AUTO_RELOAD", "and", "'mod_wsgi.process_group'", "in", "self", ".", "request", ".", "environ", "and", "self", ".", "request", ".", "environ", ".", "get", "(", "'mod_wsgi.process_group'", ",", "None", ")", "and", "'SCRIPT_FILENAME'", "in", "self", ".", "request", ".", "environ", "and", "int", "(", "self", ".", "request", ".", "environ", ".", "get", "(", "'mod_wsgi.script_reloading'", ",", "0", ")", ")", ")", "if", "should_try_wsgi_reload", ":", "try", ":", "os", ".", "utime", "(", "self", ".", "request", ".", "environ", ".", "get", "(", "'SCRIPT_FILENAME'", ")", ",", "None", ")", "except", "OSError", ":", "pass", "# Try auto-reloading via uwsgi daemon reload mechanism", "if", "rosetta_settings", ".", "UWSGI_AUTO_RELOAD", ":", "try", ":", "import", "uwsgi", "uwsgi", ".", "reload", "(", ")", "# pretty easy right?", "except", ":", "pass", "# we may not be running under uwsgi :P", "# XXX: It would be nice to add a success message here!", "except", "Exception", "as", "e", ":", "messages", ".", "error", "(", "self", ".", "request", ",", "e", ")", "if", "file_change", "and", "not", "self", ".", "po_file_is_writable", ":", "storage", "=", "get_storage", "(", "self", ".", "request", ")", "storage", ".", "set", "(", "self", ".", "po_file_cache_key", ",", "self", ".", "po_file", ")", "# Reconstitute url to redirect to. Start with determining whether the", "# page number can be incremented.", "paginator", "=", "Paginator", "(", "self", ".", "get_entries", "(", ")", ",", "rosetta_settings", ".", "MESSAGES_PER_PAGE", ")", "try", ":", "page", "=", "int", "(", "self", ".", "_request_request", "(", "'page'", ",", "1", ")", ")", "except", "ValueError", ":", "page", "=", "1", "# fall back to page 1", "else", ":", "if", "not", "(", "0", "<", "page", "<=", "paginator", ".", "num_pages", ")", ":", "page", "=", "1", "if", "page", "<", "paginator", ".", "num_pages", ":", "page", "+=", "1", "query_string_args", "=", "{", "'msg_filter'", ":", "self", ".", "msg_filter", ",", "'query'", ":", "self", ".", "query", ",", "'ref_lang'", ":", "self", ".", "ref_lang", ",", "'page'", ":", "page", ",", "}", "# Winnow down the query string args to non-blank ones", "query_string_args", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "query_string_args", ".", "items", "(", ")", "if", "v", "}", "return", "HttpResponseRedirect", "(", "\"{url}?{qs}\"", ".", "format", "(", "url", "=", "reverse", "(", "'rosetta-form'", ",", "kwargs", "=", "self", ".", "kwargs", ")", ",", "qs", "=", "urlencode_safe", "(", "query_string_args", ")", ")", ")" ]
46.696774
0.002435
[ "def post(self, request, *args, **kwargs):\n", " \"\"\"The only circumstances when we POST is to submit the main form, both\n", " updating translations (if any changed) and advancing to the next page of\n", " messages.\n", "\n", " There is no notion of validation of this content; as implemented, unknown\n", " fields are ignored and a generic failure message is shown.\n", "\n", " Submitted changes are saved out to the specified .po file on the\n", " filesystem if that file is writable, otherwise the cached version of the\n", " file is updated (so it can be downloaded). Then the user is redirected\n", " to the next page of messages (if there is one; otherwise they're\n", " redirected back to the current page).\n", " \"\"\"\n", " # The message text inputs are captured as hashes of their initial\n", " # contents, preceded by \"m_\". Messages with plurals end with their\n", " # variation number.\n", " single_text_input_regex = re.compile(r'^m_([0-9a-f]+)$')\n", " plural_text_input_regex = re.compile(r'^m_([0-9a-f]+)_([0-9]+)$')\n", " file_change = False\n", " for field_name, new_msgstr in request.POST.items():\n", " md5hash = None\n", "\n", " if plural_text_input_regex.match(field_name):\n", " md5hash, plural_id = plural_text_input_regex.match(field_name).groups()\n", " md5hash = str(md5hash)\n", " # polib parses .po files into unicode strings, but\n", " # doesn't bother to convert plural indexes to int,\n", " # so we need unicode here.\n", " plural_id = six.text_type(plural_id)\n", "\n", " # Above no longer true as of Polib 1.0.4\n", " if plural_id and plural_id.isdigit():\n", " plural_id = int(plural_id)\n", "\n", " elif single_text_input_regex.match(field_name):\n", " md5hash = str(single_text_input_regex.match(field_name).groups()[0])\n", " plural_id = None\n", "\n", " if md5hash is not None: # Empty string should be processed!\n", " entry = self.po_file.find(md5hash, 'md5hash')\n", " # If someone did a makemessage, some entries might\n", " # have been removed, so we need to check.\n", " if entry:\n", " old_msgstr = entry.msgstr\n", " if plural_id is not None: # 0 is ok!\n", " entry.msgstr_plural[plural_id] = self.fix_nls(\n", " entry.msgid_plural, new_msgstr\n", " )\n", " else:\n", " entry.msgstr = self.fix_nls(entry.msgid, new_msgstr)\n", "\n", " is_fuzzy = bool(self.request.POST.get('f_%s' % md5hash, False))\n", " old_fuzzy = 'fuzzy' in entry.flags\n", "\n", " if old_fuzzy and not is_fuzzy:\n", " entry.flags.remove('fuzzy')\n", " elif not old_fuzzy and is_fuzzy:\n", " entry.flags.append('fuzzy')\n", "\n", " file_change = True\n", "\n", " if old_msgstr != new_msgstr or old_fuzzy != is_fuzzy:\n", " entry_changed.send(sender=entry,\n", " user=request.user,\n", " old_msgstr=old_msgstr,\n", " old_fuzzy=old_fuzzy,\n", " pofile=self.po_file_path,\n", " language_code=self.language_id,\n", " )\n", " else:\n", " messages.error(\n", " self.request,\n", " _(\"Some items in your last translation block couldn't \"\n", " \"be saved: this usually happens when the catalog file \"\n", " \"changes on disk after you last loaded it.\"),\n", " )\n", "\n", " if file_change and self.po_file_is_writable:\n", " try:\n", " self.po_file.metadata['Last-Translator'] = unicodedata.normalize(\n", " 'NFKD', u\"%s %s <%s>\" % (\n", " getattr(self.request.user, 'first_name', 'Anonymous'),\n", " getattr(self.request.user, 'last_name', 'User'),\n", " getattr(self.request.user, 'email', 'anonymous@user.tld')\n", " )\n", " ).encode('ascii', 'ignore')\n", " self.po_file.metadata['X-Translated-Using'] = u\"django-rosetta %s\" % (\n", " get_rosetta_version())\n", " self.po_file.metadata['PO-Revision-Date'] = timestamp_with_timezone()\n", " except UnicodeDecodeError:\n", " pass\n", "\n", " try:\n", " self.po_file.save()\n", " po_filepath, ext = os.path.splitext(self.po_file_path)\n", "\n", " if rosetta_settings.AUTO_COMPILE:\n", " self.po_file.save_as_mofile(po_filepath + '.mo')\n", "\n", " post_save.send(sender=None, language_code=self.language_id,\n", " request=self.request\n", " )\n", " # Try auto-reloading via the WSGI daemon mode reload mechanism\n", " should_try_wsgi_reload = (\n", " rosetta_settings.WSGI_AUTO_RELOAD and\n", " 'mod_wsgi.process_group' in self.request.environ and\n", " self.request.environ.get('mod_wsgi.process_group', None) and\n", " 'SCRIPT_FILENAME' in self.request.environ and\n", " int(self.request.environ.get('mod_wsgi.script_reloading', 0))\n", " )\n", " if should_try_wsgi_reload:\n", " try:\n", " os.utime(self.request.environ.get('SCRIPT_FILENAME'), None)\n", " except OSError:\n", " pass\n", " # Try auto-reloading via uwsgi daemon reload mechanism\n", " if rosetta_settings.UWSGI_AUTO_RELOAD:\n", " try:\n", " import uwsgi\n", " uwsgi.reload() # pretty easy right?\n", " except:\n", " pass # we may not be running under uwsgi :P\n", " # XXX: It would be nice to add a success message here!\n", " except Exception as e:\n", " messages.error(self.request, e)\n", "\n", " if file_change and not self.po_file_is_writable:\n", " storage = get_storage(self.request)\n", " storage.set(self.po_file_cache_key, self.po_file)\n", "\n", " # Reconstitute url to redirect to. Start with determining whether the\n", " # page number can be incremented.\n", " paginator = Paginator(self.get_entries(), rosetta_settings.MESSAGES_PER_PAGE)\n", " try:\n", " page = int(self._request_request('page', 1))\n", " except ValueError:\n", " page = 1 # fall back to page 1\n", " else:\n", " if not (0 < page <= paginator.num_pages):\n", " page = 1\n", " if page < paginator.num_pages:\n", " page += 1\n", " query_string_args = {\n", " 'msg_filter': self.msg_filter,\n", " 'query': self.query,\n", " 'ref_lang': self.ref_lang,\n", " 'page': page,\n", " }\n", " # Winnow down the query string args to non-blank ones\n", " query_string_args = {k: v for k, v in query_string_args.items() if v}\n", " return HttpResponseRedirect(\"{url}?{qs}\".format(\n", " url=reverse('rosetta-form', kwargs=self.kwargs),\n", " qs=urlencode_safe(query_string_args)\n", " ))" ]
[ 0, 0.0125, 0.012345679012345678, 0, 0, 0.012195121951219513, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011363636363636364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0.012195121951219513, 0, 0, 0.011494252873563218, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0.012195121951219513, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1 ]
155
0.002116
def list_rows(self,rowList=None,table=None,verbose=None): """ Returns the list of primary keys for each of the rows in the specified table. :param rowList (string, optional): Specifies a list of rows. The pattern CO LUMN:VALUE sets this parameter to any rows that contain the specifie d column value; if the COLUMN prefix is not used, the NAME column is matched by default. A list of COLUMN:VALUE pairs of the format COLU MN1:VALUE1,COLUMN2:VALUE2,... can be used to match multiple values. This parameter can also be set to all to include all rows. :param table (string, optional): Specifies a table by table name. If the pr efix SUID: is used, the table corresponding the SUID will be returne d. """ PARAMS=set_param(['rowList','table'],[rowList,table]) response=api(url=self.__url+"/list rows", PARAMS=PARAMS, method="POST", verbose=verbose) return response
[ "def", "list_rows", "(", "self", ",", "rowList", "=", "None", ",", "table", "=", "None", ",", "verbose", "=", "None", ")", ":", "PARAMS", "=", "set_param", "(", "[", "'rowList'", ",", "'table'", "]", ",", "[", "rowList", ",", "table", "]", ")", "response", "=", "api", "(", "url", "=", "self", ".", "__url", "+", "\"/list rows\"", ",", "PARAMS", "=", "PARAMS", ",", "method", "=", "\"POST\"", ",", "verbose", "=", "verbose", ")", "return", "response" ]
58.588235
0.017787
[ "def list_rows(self,rowList=None,table=None,verbose=None):\n", " \"\"\"\n", " Returns the list of primary keys for each of the rows in the specified table.\n", "\n", " :param rowList (string, optional): Specifies a list of rows. The pattern CO\n", " LUMN:VALUE sets this parameter to any rows that contain the specifie\n", " d column value; if the COLUMN prefix is not used, the NAME column is\n", " matched by default. A list of COLUMN:VALUE pairs of the format COLU\n", " MN1:VALUE1,COLUMN2:VALUE2,... can be used to match multiple values.\n", " This parameter can also be set to all to include all rows.\n", " :param table (string, optional): Specifies a table by table name. If the pr\n", " efix SUID: is used, the table corresponding the SUID will be returne\n", " d.\n", " \"\"\"\n", " PARAMS=set_param(['rowList','table'],[rowList,table])\n", " response=api(url=self.__url+\"/list rows\", PARAMS=PARAMS, method=\"POST\", verbose=verbose)\n", " return response" ]
[ 0.05172413793103448, 0.08333333333333333, 0.011627906976744186, 0, 0.011904761904761904, 0.012345679012345678, 0.012345679012345678, 0.011904761904761904, 0, 0, 0.011904761904761904, 0.012345679012345678, 0, 0, 0.06451612903225806, 0.020618556701030927, 0.043478260869565216 ]
17
0.020474