lines sequencelengths [1, 444] | raw_lines sequencelengths [1, 444] | label sequencelengths [1, 444] | type sequencelengths [1, 444]
---|---|---|---|
[
"def FUNC_13(self, VAR_18):...\n",
"\"\"\"docstring\"\"\"\n",
"for VAR_65 in VAR_18.sub_items:\n",
"self._remove_recursive(VAR_65)\n",
"self.fs_db.execute(\"DELETE FROM file_system WHERE uuid = '%s';\" % VAR_18.uuid)\n",
"return\n"
] | [
"def _remove_recursive(self, item):...\n",
"\"\"\"docstring\"\"\"\n",
"for i_sub in item.sub_items:\n",
"self._remove_recursive(i_sub)\n",
"self.fs_db.execute(\"DELETE FROM file_system WHERE uuid = '%s';\" % item.uuid)\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_5(VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_9.status in ['new', 'active']:\n",
"return 'inactive'\n",
"if VAR_9.status == 'inactive':\n",
"VAR_22 = {'scope': VAR_9, 'status': ['active', 'inactive']}\n",
"return VAR_9.status\n",
"if org_admin_logic.getForFields(VAR_22, unique=True):\n",
"return 'active'\n",
"return 'new'\n"
] | [
"def _orgStatusRetriever(entity):...\n",
"\"\"\"docstring\"\"\"\n",
"if entity.status in ['new', 'active']:\n",
"return 'inactive'\n",
"if entity.status == 'inactive':\n",
"fields = {'scope': entity, 'status': ['active', 'inactive']}\n",
"return entity.status\n",
"if org_admin_logic.getForFields(fields, unique=True):\n",
"return 'active'\n",
"return 'new'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"from app.database.connection import get_db, get_cursor\n",
"from types import *\n",
"def FUNC_0(VAR_0, VAR_1, VAR_2, VAR_3):...\n",
"VAR_9 = 'string'.format(FUNC_4(VAR_2, VAR_3))\n",
"VAR_10 = {'limit': VAR_1, 'offset': int(VAR_0 * VAR_1)}\n",
"VAR_11 = get_cursor()\n",
"VAR_12 = VAR_11.execute(VAR_9, VAR_10)\n",
"VAR_7 = FUNC_5(VAR_12)\n",
"return {'rows': VAR_7, 'count': FUNC_1()}\n"
] | [
"from app.database.connection import get_db, get_cursor\n",
"from types import *\n",
"def get_books_by_page(page, pagesize, sort_col, sort_dir):...\n",
"stmt = (\n \"\"\"\n select b.*,\n case when length(a.FirstName) > 0\n then (a.LastName || \", \" || a.FirstName)\n else a.LastName\n end as Author,\n s.name as Series from books as b\n left outer join collaborations as c on c.book_id=b.id\n left outer join authors as a on a.id=c.author_id\n left join series as s on s.id=b.series_id\n order by {0}\n limit :limit offset :offset\n \"\"\"\n .format(get_sort_clause(sort_col, sort_dir)))\n",
"inputs = {'limit': pagesize, 'offset': int(page * pagesize)}\n",
"csr = get_cursor()\n",
"rst = csr.execute(stmt, inputs)\n",
"rows = rows2list(rst)\n",
"return {'rows': rows, 'count': get_all_books_count()}\n"
] | [
0,
0,
0,
4,
4,
4,
4,
0,
4
] | [
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_4, VAR_5, VAR_6, VAR_7, VAR_8=None, VAR_9=None):...\n",
"VAR_16 = FUNC_11(collect_config.collect_config(VAR_4, VAR_9), VAR_8)\n",
"VAR_31 = FUNC_4(FUNC_9(VAR_5), VAR_16)\n",
"if not VAR_7:\n",
"for VAR_13, VAR_14 in VAR_31.items():\n",
"FUNC_3(os.path.join(VAR_6, FUNC_10('/', VAR_13)), VAR_14)\n"
] | [
"def install_config(config_path, template_root, output_path, validate,...\n",
"config = strip_hash(collect_config.collect_config(config_path,\n fallback_metadata), subhash)\n",
"tree = build_tree(template_paths(template_root), config)\n",
"if not validate:\n",
"for path, contents in tree.items():\n",
"write_file(os.path.join(output_path, strip_prefix('/', path)), contents)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"For",
"Expr'"
] |
[
"def FUNC_21(VAR_12, VAR_13):...\n",
"for VAR_7 in VAR_2.providers:\n",
"if VAR_7.id == VAR_12 and VAR_7.url == VAR_13:\n",
"self.fail('Provider: %s not found' % VAR_12)\n",
"return\n"
] | [
"def _assert_provider(pid, purl):...\n",
"for provider in cluster_api.providers:\n",
"if provider.id == pid and provider.url == purl:\n",
"self.fail('Provider: %s not found' % pid)\n",
"return\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Expr'",
"Return'"
] |
[
"from util import hook, user, database\n",
"import time\n",
"VAR_0 = False\n",
"def FUNC_0(VAR_1):...\n",
"VAR_1.execute('string')\n",
"VAR_1.commit()\n",
"VAR_0 = True\n",
"def FUNC_1(VAR_2, VAR_3, VAR_4, VAR_5, VAR_1, VAR_6, VAR_7):...\n",
"if ' ' in VAR_2:\n",
"VAR_6('Invalid nick')\n",
"VAR_14 = database.get(VAR_1, 'channels', 'votekick', 'chan', VAR_4)\n",
"VAR_14 = 10\n",
"VAR_15 = database.get(VAR_1, 'channels', 'voteban', 'chan', VAR_4)\n",
"VAR_15 = 10\n",
"if len(VAR_2) is 0:\n",
"return\n",
"if VAR_3 is 'kick':\n",
"VAR_10 = False\n",
"VAR_6('Votes required to kick: {}'.format(VAR_14))\n",
"if VAR_3 is 'ban':\n",
"if not VAR_0:\n",
"return\n",
"VAR_6('Votes required to ban: {}'.format(VAR_15))\n",
"FUNC_0(VAR_1)\n",
"VAR_4 = VAR_4.lower()\n",
"VAR_2 = VAR_2.lower()\n",
"VAR_11 = user.format_hostmask(VAR_5)\n",
"VAR_12 = VAR_1.execute(\n \"SELECT voters FROM votes where chan='{}' and action='{}' and target like '{}'\"\n .format(VAR_4, VAR_3, VAR_2)).fetchone()\n",
"if VAR_7.nick.lower() in VAR_2:\n",
"return 'I dont think so Tim.'\n",
"if VAR_12:\n",
"VAR_12 = VAR_12[0]\n",
"VAR_12 = VAR_11\n",
"if VAR_11 in VAR_12:\n",
"VAR_13 = len(VAR_12.split(' '))\n",
"VAR_6('You have already voted.')\n",
"VAR_12 = '{} {}'.format(VAR_12, VAR_11).strip()\n",
"if 'kick' in VAR_3:\n",
"return\n",
"VAR_6('Thank you for your vote!')\n",
"VAR_16 = int(VAR_14)\n",
"if 'ban' in VAR_3:\n",
"if VAR_13 >= VAR_16:\n",
"VAR_16 = int(VAR_15)\n",
"if VAR_10:\n",
"VAR_10 = True\n",
"if VAR_13 >= VAR_16:\n",
"VAR_1.execute(\n \"DELETE FROM votes where chan='{}' and action='{}' and target like '{}'\"\n .format(VAR_4, VAR_3, VAR_2))\n",
"VAR_1.execute(\n 'insert or replace into votes(chan, action, target, voters, time) values(?,?,?,?,?)'\n , (VAR_4, VAR_3, VAR_2, VAR_12, time.time()))\n",
"VAR_7.send('KICK {} {} :{}'.format(VAR_4, VAR_2,\n 'You have been voted off the island.'))\n",
"VAR_10 = True\n",
"VAR_1.commit()\n",
"VAR_7.send('MODE {} +b {}'.format(VAR_4, user.get_hostmask(VAR_2, VAR_1)))\n",
"return 'Votes to {} {}: {}/{}'.format(VAR_3, VAR_2, VAR_13, VAR_16)\n",
"VAR_7.send('KICK {} {} :'.format(VAR_4, VAR_2,\n 'You have been voted off the island.'))\n"
] | [
"from util import hook, user, database\n",
"import time\n",
"db_ready = False\n",
"def db_init(db):...\n",
"db.execute(\n 'CREATE TABLE if not exists votes(chan, action, target, voters, time, primary key(chan, action, target));'\n )\n",
"db.commit()\n",
"db_ready = True\n",
"def process_vote(target, action, chan, mask, db, notice, conn):...\n",
"if ' ' in target:\n",
"notice('Invalid nick')\n",
"votes2kick = database.get(db, 'channels', 'votekick', 'chan', chan)\n",
"votes2kick = 10\n",
"votes2ban = database.get(db, 'channels', 'voteban', 'chan', chan)\n",
"votes2ban = 10\n",
"if len(target) is 0:\n",
"return\n",
"if action is 'kick':\n",
"votefinished = False\n",
"notice('Votes required to kick: {}'.format(votes2kick))\n",
"if action is 'ban':\n",
"if not db_ready:\n",
"return\n",
"notice('Votes required to ban: {}'.format(votes2ban))\n",
"db_init(db)\n",
"chan = chan.lower()\n",
"target = target.lower()\n",
"voter = user.format_hostmask(mask)\n",
"voters = db.execute(\n \"SELECT voters FROM votes where chan='{}' and action='{}' and target like '{}'\"\n .format(chan, action, target)).fetchone()\n",
"if conn.nick.lower() in target:\n",
"return 'I dont think so Tim.'\n",
"if voters:\n",
"voters = voters[0]\n",
"voters = voter\n",
"if voter in voters:\n",
"votecount = len(voters.split(' '))\n",
"notice('You have already voted.')\n",
"voters = '{} {}'.format(voters, voter).strip()\n",
"if 'kick' in action:\n",
"return\n",
"notice('Thank you for your vote!')\n",
"votemax = int(votes2kick)\n",
"if 'ban' in action:\n",
"if votecount >= votemax:\n",
"votemax = int(votes2ban)\n",
"if votefinished:\n",
"votefinished = True\n",
"if votecount >= votemax:\n",
"db.execute(\n \"DELETE FROM votes where chan='{}' and action='{}' and target like '{}'\"\n .format(chan, action, target))\n",
"db.execute(\n 'insert or replace into votes(chan, action, target, voters, time) values(?,?,?,?,?)'\n , (chan, action, target, voters, time.time()))\n",
"conn.send('KICK {} {} :{}'.format(chan, target,\n 'You have been voted off the island.'))\n",
"votefinished = True\n",
"db.commit()\n",
"conn.send('MODE {} +b {}'.format(chan, user.get_hostmask(target, db)))\n",
"return 'Votes to {} {}: {}/{}'.format(action, target, votecount, votemax)\n",
"conn.send('KICK {} {} :'.format(chan, target,\n 'You have been voted off the island.'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'"
] |
[
"def FUNC_0(VAR_0=None):...\n",
"VAR_9 = frappe._dict(template='templates/includes/blog/blog.html', get_list\n =get_blog_list, hide_filters=True, children=get_children(), title=_('Blog')\n )\n",
"VAR_10 = (frappe.local.form_dict.blog_category or frappe.local.form_dict.\n category)\n",
"if VAR_10:\n",
"VAR_15 = FUNC_3(VAR_10)\n",
"if frappe.local.form_dict.blogger:\n",
"VAR_9.sub_title = _('Posts filed under {0}').format(VAR_15)\n",
"VAR_16 = frappe.db.get_value('Blogger', {'name': frappe.local.form_dict.\n blogger}, 'full_name')\n",
"if frappe.local.form_dict.txt:\n",
"VAR_9.title = VAR_15\n",
"VAR_9.sub_title = _('Posts by {0}').format(VAR_16)\n",
"VAR_9.sub_title = _('Filtered by \"{0}\"').format(frappe.local.form_dict.txt)\n",
"if VAR_9.sub_title:\n",
"VAR_9.title = VAR_16\n",
"VAR_9.parents = [{'name': _('Home'), 'route': '/'}, {'name': 'Blog',\n 'route': '/blog'}]\n",
"VAR_9.parents = [{'name': _('Home'), 'route': '/'}]\n",
"VAR_9.update(frappe.get_doc('Blog Settings', 'Blog Settings').as_dict(\n no_default_fields=True))\n",
"return VAR_9\n"
] | [
"def get_list_context(context=None):...\n",
"list_context = frappe._dict(template='templates/includes/blog/blog.html',\n get_list=get_blog_list, hide_filters=True, children=get_children(),\n title=_('Blog'))\n",
"category = (frappe.local.form_dict.blog_category or frappe.local.form_dict.\n category)\n",
"if category:\n",
"category_title = get_blog_category(category)\n",
"if frappe.local.form_dict.blogger:\n",
"list_context.sub_title = _('Posts filed under {0}').format(category_title)\n",
"blogger = frappe.db.get_value('Blogger', {'name': frappe.local.form_dict.\n blogger}, 'full_name')\n",
"if frappe.local.form_dict.txt:\n",
"list_context.title = category_title\n",
"list_context.sub_title = _('Posts by {0}').format(blogger)\n",
"list_context.sub_title = _('Filtered by \"{0}\"').format(frappe.local.\n form_dict.txt)\n",
"if list_context.sub_title:\n",
"list_context.title = blogger\n",
"list_context.parents = [{'name': _('Home'), 'route': '/'}, {'name': 'Blog',\n 'route': '/blog'}]\n",
"list_context.parents = [{'name': _('Home'), 'route': '/'}]\n",
"list_context.update(frappe.get_doc('Blog Settings', 'Blog Settings').\n as_dict(no_default_fields=True))\n",
"return list_context\n"
] | [
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'AGGRI', VAR_7='1')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='AGRI', VAR_10=[{'name': '----AGRI'}, {'name':\n 'AGGRI'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'AGGRI', id='1')\n",
"verify_results(client, jwt, query='AGRI', expected=[{'name': '----AGRI'}, {\n 'name': 'AGGRI'}])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(self, VAR_6, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = None\n",
"VAR_10 = self.common._safe_hostname(VAR_9['host'])\n",
"VAR_25 = self.common.get_cpg(VAR_6, allowSnap=True)\n",
"VAR_12 = self.common.get_domain(VAR_25)\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n",
"VAR_13 = self.common.get_persona_type(VAR_6)\n",
"return VAR_20\n",
"if not VAR_20['FCPaths']:\n",
"VAR_10 = self._create_3par_fibrechan_host(VAR_10, VAR_9['wwpns'], VAR_12,\n VAR_13)\n",
"self._modify_3par_fibrechan_host(VAR_10, VAR_9['wwpns'])\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n"
] | [
"def _create_host(self, volume, connector):...\n",
"\"\"\"docstring\"\"\"\n",
"host = None\n",
"hostname = self.common._safe_hostname(connector['host'])\n",
"cpg = self.common.get_cpg(volume, allowSnap=True)\n",
"domain = self.common.get_domain(cpg)\n",
"host = self.common._get_3par_host(hostname)\n",
"persona_id = self.common.get_persona_type(volume)\n",
"return host\n",
"if not host['FCPaths']:\n",
"hostname = self._create_3par_fibrechan_host(hostname, connector['wwpns'],\n domain, persona_id)\n",
"self._modify_3par_fibrechan_host(hostname, connector['wwpns'])\n",
"host = self.common._get_3par_host(hostname)\n",
"host = self.common._get_3par_host(hostname)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_14(self, VAR_16):...\n",
"VAR_39 = StreamingHttpResponse(self._make_flat_wins_csv_stream(self.\n _make_flat_wins_csv()), content_type=mimetypes.types_map['.csv'])\n",
"VAR_39['Content-Disposition'] = f'attachent; filename={VAR_16}'\n",
"return VAR_39\n"
] | [
"def streaming_response(self, filename):...\n",
"resp = StreamingHttpResponse(self._make_flat_wins_csv_stream(self.\n _make_flat_wins_csv()), content_type=mimetypes.types_map['.csv'])\n",
"resp['Content-Disposition'] = f'attachent; filename={filename}'\n",
"return resp\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_14(self, VAR_13, VAR_14):...\n",
"self.cursor.execute('update lists set listhash = \"%s\" where listid = \"%s\"' %\n (VAR_14, VAR_13))\n",
"self.connection.commit()\n"
] | [
"def update_shoppinglist(self, slistid, slisthash):...\n",
"self.cursor.execute('update lists set listhash = \"%s\" where listid = \"%s\"' %\n (slisthash, slistid))\n",
"self.connection.commit()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_1):...\n",
"super(CLASS_0, self).__init__(VAR_1)\n",
"VAR_5 = list(self.shape.objects(VAR_0))\n",
"if len(VAR_5) > 1:\n",
"self.class_rule = VAR_5[0]\n"
] | [
"def __init__(self, shape):...\n",
"super(ClassConstraintComponent, self).__init__(shape)\n",
"class_rules = list(self.shape.objects(SH_class))\n",
"if len(class_rules) > 1:\n",
"self.class_rule = class_rules[0]\n"
] | [
0,
0,
0,
1,
1
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_15(VAR_16):...\n",
"return self\n"
] | [
"def get(user_id):...\n",
"return self\n"
] | [
0,
4
] | [
"FunctionDef'",
"Return'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_0._capabilities\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return cls._capabilities\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_47(self, VAR_14):...\n",
"return list(self.list_properties[VAR_14].local_columns)[0]\n"
] | [
"def get_relation_fk(self, col_name):...\n",
"return list(self.list_properties[col_name].local_columns)[0]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self):...\n",
"self.xsrf_token = None\n",
"self.session = requests.Session()\n"
] | [
"def __init__(self):...\n",
"self.xsrf_token = None\n",
"self.session = requests.Session()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_4(self):...\n",
"self.uut.executable = 'more'\n",
"self.uut.use_stdin = True\n",
"self.uut.use_stderr = False\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"VAR_1 = self.uut.lint(file=lines)\n",
"self.assertTrue(('abcd\\n', 'efgh\\n') == VAR_1 or ('abcd\\n', 'efgh\\n', '\\n') ==\n VAR_1)\n"
] | [
"def test_stdin_input(self):...\n",
"self.uut.executable = 'more'\n",
"self.uut.use_stdin = True\n",
"self.uut.use_stderr = False\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"out = self.uut.lint(file=lines)\n",
"self.assertTrue(('abcd\\n', 'efgh\\n') == out or ('abcd\\n', 'efgh\\n', '\\n') ==\n out)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@validate(VUser(), VAR_0=VEditMeetup('id'))...\n",
"return BoringPage(pagename='Edit Meetup', VAR_27=EditMeetup(meetup, title=\n meetup.title, description=meetup.description, location=meetup.location,\n latitude=meetup.latitude, longitude=meetup.longitude, timestamp=int(\n meetup.timestamp * 1000), tzoffset=meetup.tzoffset)).render()\n"
] | [
"@validate(VUser(), meetup=VEditMeetup('id'))...\n",
"return BoringPage(pagename='Edit Meetup', content=EditMeetup(meetup, title=\n meetup.title, description=meetup.description, location=meetup.location,\n latitude=meetup.latitude, longitude=meetup.longitude, timestamp=int(\n meetup.timestamp * 1000), tzoffset=meetup.tzoffset)).render()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_7, VAR_4=None, VAR_12=None):...\n",
"GenericRequest.__init__(self, VAR_1, VAR_4)\n",
"self.url = '%stasks/%s/submit' % (self.base_url, VAR_7[1])\n",
"self.task = VAR_7\n",
"self.submissions_path = VAR_12\n",
"self.data = {}\n"
] | [
"def __init__(self, browser, task, base_url=None, submissions_path=None):...\n",
"GenericRequest.__init__(self, browser, base_url)\n",
"self.url = '%stasks/%s/submit' % (self.base_url, task[1])\n",
"self.task = task\n",
"self.submissions_path = submissions_path\n",
"self.data = {}\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = subprocess.Popen('nmcli -t -f NAME,TYPE conn | grep 802-11-wireless',\n shell=True, stdout=subprocess.PIPE).communicate()[0]\n",
"VAR_9 = VAR_8.split('\\n')\n",
"VAR_10 = []\n",
"for row in VAR_9:\n",
"VAR_19 = row.split(':')\n",
"return VAR_10\n",
"print(VAR_19)\n",
"VAR_10.append(VAR_19[0])\n"
] | [
"def get_allconfiguredwifi():...\n",
"\"\"\"docstring\"\"\"\n",
"ps = subprocess.Popen('nmcli -t -f NAME,TYPE conn | grep 802-11-wireless',\n shell=True, stdout=subprocess.PIPE).communicate()[0]\n",
"wifirows = ps.split('\\n')\n",
"wifi = []\n",
"for row in wifirows:\n",
"name = row.split(':')\n",
"return wifi\n",
"print(name)\n",
"wifi.append(name[0])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'",
"Expr'"
] |
[
"def FUNC_19(self, VAR_6, VAR_14):...\n",
"self.common.extend_volume(VAR_6, VAR_14)\n"
] | [
"def extend_volume(self, volume, new_size):...\n",
"self.common.extend_volume(volume, new_size)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@VAR_1.route('/restaurants/<ingredient>/')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_0.info('GET query_restaurants_by_ingredient query')\n",
"VAR_19, VAR_20 = request.args.get('loclat'), request.args.get('loclng')\n",
"VAR_21 = request.args.get('price_category')\n",
"VAR_22 = request.args.get('online_delivery')\n",
"VAR_23 = request.args.get('min_review')\n",
"VAR_24 = sql_queries.restaurants_by_ingredient % VAR_10\n",
"if VAR_19 != None and VAR_20 != None:\n",
"VAR_31 = [float(VAR_19) - VAR_7, float(VAR_19) + VAR_7]\n",
"VAR_31 = None\n",
"VAR_32 = [float(VAR_20) - VAR_7, float(VAR_20) + VAR_7]\n",
"VAR_32 = None\n",
"VAR_25 = VAR_3.restaurant_query_builder(VAR_24, VAR_31, VAR_32, VAR_21,\n VAR_23, VAR_22)\n",
"VAR_26 = VAR_3.order_by_and_limit_query(VAR_25, 'agg_review DESC', 20)\n",
"VAR_18 = VAR_3.run_sql_query(VAR_26)\n",
"if VAR_18 == -1:\n",
"return None\n",
"return VAR_18\n"
] | [
"@app.route('/restaurants/<ingredient>/')...\n",
"\"\"\"docstring\"\"\"\n",
"logger.info('GET query_restaurants_by_ingredient query')\n",
"loclat, loclng = request.args.get('loclat'), request.args.get('loclng')\n",
"price_category = request.args.get('price_category')\n",
"online_delivery = request.args.get('online_delivery')\n",
"min_review = request.args.get('min_review')\n",
"base_query = sql_queries.restaurants_by_ingredient % ingredient\n",
"if loclat != None and loclng != None:\n",
"lat_range = [float(loclat) - geodist, float(loclat) + geodist]\n",
"lat_range = None\n",
"lng_range = [float(loclng) - geodist, float(loclng) + geodist]\n",
"lng_range = None\n",
"filtered_query = database.restaurant_query_builder(base_query, lat_range,\n lng_range, price_category, min_review, online_delivery)\n",
"limited_query = database.order_by_and_limit_query(filtered_query,\n 'agg_review DESC', 20)\n",
"query_res = database.run_sql_query(limited_query)\n",
"if query_res == -1:\n",
"return None\n",
"return query_res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_2(self, VAR_7):...\n",
"self.error_window = Toplevel()\n",
"self.error_window.title = 'ERROR - PLEASE READ'\n",
"VAR_15 = Message(self.error_window, text=error_message, width=500)\n",
"VAR_15.grid(row=0, column=0, columnspan=3)\n",
"VAR_16 = Button(self.error_window, text='Ok', command=lambda : self.\n error_window.destroy())\n",
"VAR_16.grid(row=1, column=1)\n"
] | [
"def errorNotification(self, error_message):...\n",
"self.error_window = Toplevel()\n",
"self.error_window.title = 'ERROR - PLEASE READ'\n",
"error_message_text = Message(self.error_window, text=error_message, width=500)\n",
"error_message_text.grid(row=0, column=0, columnspan=3)\n",
"close_button_error = Button(self.error_window, text='Ok', command=lambda :\n self.error_window.destroy())\n",
"close_button_error.grid(row=1, column=1)\n"
] | [
0,
0,
0,
5,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_5(self):...\n",
"VAR_5 = [{'application': self.TEST_APP, 'stack': self.TEST_STACK,\n 'credentials': self.bindings['GCE_CREDENTIALS'], 'loadBalancers': [self\n .__lb_name], 'targetSize': 1, 'capacity': {'min': 1, 'max': 1,\n 'desired': 1}, 'zone': self.TEST_ZONE, 'network': 'default',\n 'instanceMetadata': {'load-balancer-names': self.__lb_name},\n 'availabilityZones': {self.TEST_REGION: [self.TEST_ZONE]},\n 'cloudProvider': 'gce', 'source': {'account': self.bindings[\n 'GCE_CREDENTIALS'], 'region': self.TEST_REGION, 'zone': self.TEST_ZONE,\n 'serverGroupName': self.__server_group_name, 'asgName': self.\n __server_group_name}, 'instanceType': 'f1-micro', 'image': self.\n bindings['TEST_GCE_IMAGE_NAME'], 'initialNumReplicas': 1,\n 'loadBalancers': [self.__lb_name], 'type': 'cloneServerGroup',\n 'account': self.bindings['GCE_CREDENTIALS'], 'user': 'integration-tests'}]\n",
"VAR_6 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_6.new_clause_builder('Server Group Cloned', retryable_for_secs=90\n ).list_resources('managed-instance-groups').contains_path_value(\n 'baseInstanceName', self.__cloned_server_group_name)\n",
"VAR_7 = self.agent.make_json_payload_from_kwargs(VAR_5=job, description=\n 'Server Group Test - clone server group', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'clone_server_group', data=payload, path=self.__path), contract=builder\n .build())\n"
] | [
"def clone_server_group(self):...\n",
"job = [{'application': self.TEST_APP, 'stack': self.TEST_STACK,\n 'credentials': self.bindings['GCE_CREDENTIALS'], 'loadBalancers': [self\n .__lb_name], 'targetSize': 1, 'capacity': {'min': 1, 'max': 1,\n 'desired': 1}, 'zone': self.TEST_ZONE, 'network': 'default',\n 'instanceMetadata': {'load-balancer-names': self.__lb_name},\n 'availabilityZones': {self.TEST_REGION: [self.TEST_ZONE]},\n 'cloudProvider': 'gce', 'source': {'account': self.bindings[\n 'GCE_CREDENTIALS'], 'region': self.TEST_REGION, 'zone': self.TEST_ZONE,\n 'serverGroupName': self.__server_group_name, 'asgName': self.\n __server_group_name}, 'instanceType': 'f1-micro', 'image': self.\n bindings['TEST_GCE_IMAGE_NAME'], 'initialNumReplicas': 1,\n 'loadBalancers': [self.__lb_name], 'type': 'cloneServerGroup',\n 'account': self.bindings['GCE_CREDENTIALS'], 'user': 'integration-tests'}]\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Server Group Cloned', retryable_for_secs=90\n ).list_resources('managed-instance-groups').contains_path_value(\n 'baseInstanceName', self.__cloned_server_group_name)\n",
"payload = self.agent.make_json_payload_from_kwargs(job=job, description=\n 'Server Group Test - clone server group', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'clone_server_group', data=payload, path=self.__path), contract=builder\n .build())\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"for c in VAR_2._classes:\n",
"c.create_api(VAR_2._name, VAR_3, VAR_4)\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"for c in cls._classes:\n",
"c.create_api(cls._name, flask, base_url)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"For",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"self.name = None\n",
"self.password = None\n",
"self.email = None\n",
"self.count = None\n"
] | [
"def clean(self):...\n",
"self.name = None\n",
"self.password = None\n",
"self.email = None\n",
"self.count = None\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(VAR_0, VAR_3):...\n",
"VAR_4 = VAR_0.cursor()\n",
"VAR_5 = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = 'DELETE FROM Presets WHERE id = ' + VAR_3\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"VAR_4.execute(VAR_5)\n"
] | [
"def remove_preset(conn, key):...\n",
"cursor = conn.cursor()\n",
"quer = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"cursor.execute(quer)\n",
"quer = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"cursor.execute(quer)\n",
"quer = 'DELETE FROM Presets WHERE id = ' + key\n",
"cursor.execute(quer)\n",
"quer = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"cursor.execute(quer)\n",
"quer = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"cursor.execute(quer)\n"
] | [
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_11(self, VAR_12):...\n",
"VAR_5 = {}\n",
"VAR_5['serverName'] = VAR_12['host']\n",
"VAR_16 = self._cliq_run_xml('getServerInfo', VAR_5, False)\n",
"VAR_33 = VAR_16.find('response')\n",
"VAR_34 = VAR_33.attrib.get('result')\n",
"if VAR_34 != '0':\n",
"VAR_5 = {}\n",
"VAR_5['serverName'] = VAR_12['host']\n",
"VAR_5['initiator'] = VAR_12['initiator']\n",
"self._cliq_run_xml('createServer', VAR_5)\n"
] | [
"def _create_server(self, connector):...\n",
"cliq_args = {}\n",
"cliq_args['serverName'] = connector['host']\n",
"out = self._cliq_run_xml('getServerInfo', cliq_args, False)\n",
"response = out.find('response')\n",
"result = response.attrib.get('result')\n",
"if result != '0':\n",
"cliq_args = {}\n",
"cliq_args['serverName'] = connector['host']\n",
"cliq_args['initiator'] = connector['initiator']\n",
"self._cliq_run_xml('createServer', cliq_args)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@VAR_0.route('/articles/write/', methods=['GET', 'POST'])...\n",
"VAR_9 = ''\n",
"if request.method == 'POST':\n",
"VAR_12 = request.form['form_display_name']\n",
"VAR_9 += VAR_4['articles_write']\n",
"VAR_19 = request.form['form_notice_level']\n",
"return render_template('index.html', OFORM_APPNAME=LocalSettings.\n OFORM_APPNAME, OFORM_CONTENT=BODY_CONTENT)\n",
"VAR_15 = request.form['form_body_content']\n",
"if request.form['submit'] == 'publish':\n",
"VAR_18 = 1\n",
"if request.form['submit'] == 'preview':\n",
"VAR_13 = datetime.today()\n",
"VAR_18 = 0\n",
"VAR_2.execute('string'.format(VAR_12, VAR_19, VAR_13, VAR_18, VAR_15))\n"
] | [
"@app.route('/articles/write/', methods=['GET', 'POST'])...\n",
"BODY_CONTENT = ''\n",
"if request.method == 'POST':\n",
"form_display_name = request.form['form_display_name']\n",
"BODY_CONTENT += CONVERSTATIONS_DICT['articles_write']\n",
"form_notice_level = request.form['form_notice_level']\n",
"return render_template('index.html', OFORM_APPNAME=LocalSettings.\n OFORM_APPNAME, OFORM_CONTENT=BODY_CONTENT)\n",
"form_body_content = request.form['form_body_content']\n",
"if request.form['submit'] == 'publish':\n",
"form_enabled = 1\n",
"if request.form['submit'] == 'preview':\n",
"form_publish_date = datetime.today()\n",
"form_enabled = 0\n",
"curs.execute(\n 'insert into FORM_DATA_TB (form_display_name, form_notice_level, form_publish_date, form_enabled, form_body_content) values(\"{}\", \"{}\", \"{}\", {}, \"{}\")'\n .format(form_display_name, form_notice_level, form_publish_date,\n form_enabled, form_body_content))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Assign'",
"Return'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@property...\n",
"return self._session\n"
] | [
"@property...\n",
"return self._session\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"from __future__ import unicode_literals\n",
"from django.db import models, migrations\n",
"VAR_0 = [('userprofile', '0002_auto_20150427_1717'), ('course',\n '0020_auto_20160615_1239')]\n",
"VAR_1 = [migrations.CreateModel(name='Enrollment', fields=[('id', models.\n AutoField(auto_created=True, serialize=False, primary_key=True,\n verbose_name='ID')), ('timestamp', models.DateTimeField(auto_now_add=\n True)), ('personal_code', models.CharField(max_length=10, blank=True,\n default='')), ('course_instance', models.ForeignKey(to=\n 'course.CourseInstance')), ('user_profile', models.ForeignKey(to=\n 'userprofile.UserProfile'))], options={}, bases=(models.Model,)),\n migrations.AddField(model_name='courseinstance', name='students2',\n field=models.ManyToManyField(to='userprofile.UserProfile', through=\n 'course.Enrollment', related_name='enrolled', blank=True),\n preserve_default=True)]\n"
] | [
"from __future__ import unicode_literals\n",
"from django.db import models, migrations\n",
"dependencies = [('userprofile', '0002_auto_20150427_1717'), ('course',\n '0020_auto_20160615_1239')]\n",
"operations = [migrations.CreateModel(name='Enrollment', fields=[('id',\n models.AutoField(auto_created=True, serialize=False, primary_key=True,\n verbose_name='ID')), ('timestamp', models.DateTimeField(auto_now_add=\n True)), ('personal_code', models.CharField(max_length=10, blank=True,\n default='')), ('course_instance', models.ForeignKey(to=\n 'course.CourseInstance')), ('user_profile', models.ForeignKey(to=\n 'userprofile.UserProfile'))], options={}, bases=(models.Model,)),\n migrations.AddField(model_name='courseinstance', name='students2',\n field=models.ManyToManyField(to='userprofile.UserProfile', through=\n 'course.Enrollment', related_name='enrolled', blank=True),\n preserve_default=True)]\n"
] | [
0,
0,
0,
4
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'"
] |
[
"@property...\n",
"VAR_20 = self._get_access_form()\n",
"if VAR_20.is_valid():\n",
"VAR_20.save()\n",
"return False\n",
"return True\n"
] | [
"@property...\n",
"form = self._get_access_form()\n",
"if form.is_valid():\n",
"form.save()\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_16(self):...\n",
"VAR_6 = None\n",
"VAR_6 = tempfile.NamedTemporaryFile(delete=False)\n",
"if VAR_6:\n",
"VAR_6.close()\n",
"os.remove(VAR_6.name)\n",
"os.chmod(VAR_6.name, stat.S_IREAD)\n",
"self._mox.StubOutWithMock(url_helper, 'UrlOpen')\n",
"url_helper.UrlOpen(mox.IgnoreArg(), method='GET').AndReturn('data')\n",
"self._mox.ReplayAll()\n",
"self.assertFalse(url_helper.DownloadFile(VAR_6.name, 'http://www.fakeurl.com'))\n",
"self._mox.VerifyAll()\n"
] | [
"def testDownloadFileSavingErrors(self):...\n",
"file_readonly = None\n",
"file_readonly = tempfile.NamedTemporaryFile(delete=False)\n",
"if file_readonly:\n",
"file_readonly.close()\n",
"os.remove(file_readonly.name)\n",
"os.chmod(file_readonly.name, stat.S_IREAD)\n",
"self._mox.StubOutWithMock(url_helper, 'UrlOpen')\n",
"url_helper.UrlOpen(mox.IgnoreArg(), method='GET').AndReturn('data')\n",
"self._mox.ReplayAll()\n",
"self.assertFalse(url_helper.DownloadFile(file_readonly.name,\n 'http://www.fakeurl.com'))\n",
"self._mox.VerifyAll()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.before_request...\n",
"if request.method == 'POST':\n",
"VAR_13 = VAR_14.pop('_csrf_token', None)\n",
"if not VAR_13 or VAR_13 != request.form.get('_csrf_token'):\n",
"abort(403)\n",
"VAR_14['_csrf_token'] = FUNC_2()\n"
] | [
"@app.before_request...\n",
"if request.method == 'POST':\n",
"token = session.pop('_csrf_token', None)\n",
"if not token or token != request.form.get('_csrf_token'):\n",
"abort(403)\n",
"session['_csrf_token'] = generate_csrf_token()\n"
] | [
5,
0,
5,
5,
0,
0
] | [
"For",
"Condition",
"Assign'",
"For",
"Expr'",
"Assign'"
] |
[
"def __init__(self):...\n",
"self.initialised = False\n",
"self.board = None\n"
] | [
"def __init__(self):...\n",
"self.initialised = False\n",
"self.board = None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(VAR_5, VAR_6):...\n",
"return None\n"
] | [
"def fix_syntax_err(code, err):...\n",
"return None\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@classmethod...\n",
"VAR_7 = tournament.argument_parser()\n",
"VAR_6.parser = VAR_7\n"
] | [
"@classmethod...\n",
"parser = tournament.argument_parser()\n",
"cls.parser = parser\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = 'hello'\n",
"self.uut.use_stdin = False\n",
"self.uut.use_stderr = True\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"VAR_1 = self.uut.lint('unused_filename')\n",
"self.assertEqual((), VAR_1)\n",
"self.uut.use_stderr = False\n",
"VAR_1 = self.uut.lint('unused_filename')\n",
"self.assertEqual(('hello\\n',), VAR_1)\n",
"def FUNC_9(VAR_2):...\n",
"assert VAR_2 == 'hello'\n",
"VAR_3 = self.uut.warn\n",
"self.uut.warn = FUNC_9\n",
"self.uut._print_errors(['hello', '\\n'])\n",
"self.uut.warn = VAR_3\n"
] | [
"def test_stderr_output(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = 'hello'\n",
"self.uut.use_stdin = False\n",
"self.uut.use_stderr = True\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"out = self.uut.lint('unused_filename')\n",
"self.assertEqual((), out)\n",
"self.uut.use_stderr = False\n",
"out = self.uut.lint('unused_filename')\n",
"self.assertEqual(('hello\\n',), out)\n",
"def assert_warn(line):...\n",
"assert line == 'hello'\n",
"old_warn = self.uut.warn\n",
"self.uut.warn = assert_warn\n",
"self.uut._print_errors(['hello', '\\n'])\n",
"self.uut.warn = old_warn\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assert'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_7(self, VAR_8):...\n",
"VAR_9 = 'iSCSI target name is '\n",
"VAR_29 = self._get_prefixed_value(VAR_8, VAR_9)[:-1]\n",
"VAR_30 = '%s:%s,1 %s 0' % (self._group_ip, '3260', VAR_29)\n",
"VAR_31 = {}\n",
"VAR_31['provider_location'] = VAR_30\n",
"if self.configuration.eqlx_use_chap:\n",
"VAR_31['provider_auth'] = 'CHAP %s %s' % (self.configuration.\n eqlx_chap_login, self.configuration.eqlx_chap_password)\n",
"return VAR_31\n"
] | [
"def _get_volume_data(self, lines):...\n",
"prefix = 'iSCSI target name is '\n",
"target_name = self._get_prefixed_value(lines, prefix)[:-1]\n",
"lun_id = '%s:%s,1 %s 0' % (self._group_ip, '3260', target_name)\n",
"model_update = {}\n",
"model_update['provider_location'] = lun_id\n",
"if self.configuration.eqlx_use_chap:\n",
"model_update['provider_auth'] = 'CHAP %s %s' % (self.configuration.\n eqlx_chap_login, self.configuration.eqlx_chap_password)\n",
"return model_update\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"self.assertTrue(os.path.exists(fp.name),\n 'Temporary file should exist within the context.')\n",
"self.assertTrue(os.path.exists(fp.name) == False,\n 'Temporary file should not exist outside of the context.')\n"
] | [
"def test_temporary_file_no_args(self):...\n",
"self.assertTrue(os.path.exists(fp.name),\n 'Temporary file should exist within the context.')\n",
"self.assertTrue(os.path.exists(fp.name) == False,\n 'Temporary file should not exist outside of the context.')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self, VAR_5, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 not in ('strings', 'files'):\n",
"VAR_0.error(\"Bad parameter '%s' for submit_type\", VAR_5)\n",
"VAR_13 = Folders.create_temp()\n",
"return False\n",
"VAR_14 = {'data': [], 'errors': []}\n",
"if VAR_5 == 'strings':\n",
"for VAR_4 in VAR_6:\n",
"if VAR_5 == 'files':\n",
"self._handle_string(VAR_14, VAR_13, VAR_4)\n",
"for entry in VAR_6:\n",
"return Database().add_submit(VAR_13, VAR_5, VAR_14)\n",
"VAR_22 = Storage.get_filename_from_path(entry['name'])\n",
"VAR_18 = Files.create(VAR_13, VAR_22, entry['data'])\n",
"VAR_14['data'].append({'type': 'file', 'data': VAR_18})\n"
] | [
"def pre(self, submit_type, data):...\n",
"\"\"\"docstring\"\"\"\n",
"if submit_type not in ('strings', 'files'):\n",
"log.error(\"Bad parameter '%s' for submit_type\", submit_type)\n",
"path_tmp = Folders.create_temp()\n",
"return False\n",
"submit_data = {'data': [], 'errors': []}\n",
"if submit_type == 'strings':\n",
"for line in data:\n",
"if submit_type == 'files':\n",
"self._handle_string(submit_data, path_tmp, line)\n",
"for entry in data:\n",
"return Database().add_submit(path_tmp, submit_type, submit_data)\n",
"filename = Storage.get_filename_from_path(entry['name'])\n",
"filepath = Files.create(path_tmp, filename, entry['data'])\n",
"submit_data['data'].append({'type': 'file', 'data': filepath})\n"
] | [
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"For",
"Condition",
"Expr'",
"For",
"Return'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_21(self, VAR_9=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self._kill_process_type(VAR_13.PROCESS_TYPE_RAYLET_MONITOR, VAR_9=check_alive)\n"
] | [
"def kill_raylet_monitor(self, check_alive=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self._kill_process_type(ray_constants.PROCESS_TYPE_RAYLET_MONITOR,\n check_alive=check_alive)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"@VAR_0.route('/')...\n",
"if not session.get('logged_in'):\n",
"return redirect(url_for('users.login'))\n",
"VAR_1 = session['logged_id']\n",
"VAR_2 = 'SELECT * FROM message where user_id = %d ORDER BY c_time DESC' % VAR_1\n",
"cursor.execute(VAR_2)\n",
"VAR_3 = cursor.fetchall()\n",
"VAR_4 = list(VAR_3)\n",
"for VAR_8, VAR_5 in enumerate(VAR_4):\n",
"VAR_5 = list(VAR_5)\n",
"return render_template('show_entries.html', entries=messages)\n",
"VAR_1 = VAR_5[1]\n",
"VAR_2 = 'SELECT nickname FROM users where user_id = %d' % VAR_1\n",
"cursor.execute(VAR_2)\n",
"VAR_6 = cursor.fetchone()\n",
"VAR_5.append(VAR_6[0])\n",
"VAR_2 = 'SELECT * FROM like_msg where msg_id = %d AND user_id = %d' % (VAR_5\n [0], VAR_1)\n",
"cursor.execute(VAR_2)\n",
"VAR_7 = cursor.fetchone()\n",
"if VAR_7 is not None:\n",
"VAR_9 = 1\n",
"VAR_9 = 0\n",
"VAR_5.append(VAR_9)\n",
"VAR_4[VAR_8] = VAR_5\n"
] | [
"@app.route('/')...\n",
"if not session.get('logged_in'):\n",
"return redirect(url_for('users.login'))\n",
"user_id = session['logged_id']\n",
"sql = 'SELECT * FROM message where user_id = %d ORDER BY c_time DESC' % user_id\n",
"cursor.execute(sql)\n",
"m = cursor.fetchall()\n",
"messages = list(m)\n",
"for i, message in enumerate(messages):\n",
"message = list(message)\n",
"return render_template('show_entries.html', entries=messages)\n",
"user_id = message[1]\n",
"sql = 'SELECT nickname FROM users where user_id = %d' % user_id\n",
"cursor.execute(sql)\n",
"u = cursor.fetchone()\n",
"message.append(u[0])\n",
"sql = 'SELECT * FROM like_msg where msg_id = %d AND user_id = %d' % (message\n [0], user_id)\n",
"cursor.execute(sql)\n",
"like = cursor.fetchone()\n",
"if like is not None:\n",
"like_flag = 1\n",
"like_flag = 0\n",
"message.append(like_flag)\n",
"messages[i] = message\n"
] | [
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_39(VAR_101):...\n",
"VAR_101.output = VAR_69, VAR_70\n",
"return VAR_101\n"
] | [
"def decorate(ruleinfo):...\n",
"ruleinfo.output = paths, kwpaths\n",
"return ruleinfo\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = self.stdout_interceptor.flush_all()\n",
"return\n",
"if len(VAR_4) > 0:\n",
"self.body.append(render_texts.preformatted_text(VAR_4))\n",
"return VAR_4\n",
"self._last_update_time = time.time()\n"
] | [
"def flush_stdout(self):...\n",
"\"\"\"docstring\"\"\"\n",
"contents = self.stdout_interceptor.flush_all()\n",
"return\n",
"if len(contents) > 0:\n",
"self.body.append(render_texts.preformatted_text(contents))\n",
"return contents\n",
"self._last_update_time = time.time()\n"
] | [
0,
0,
6,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Return'",
"Assign'"
] |
[
"def FUNC_4(self, VAR_9, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def remove_export(self, context, volume):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_13(self, VAR_13):...\n",
"if VAR_13:\n",
"self._update_backend_status()\n",
"return self.device_stats\n"
] | [
"def get_volume_stats(self, refresh):...\n",
"if refresh:\n",
"self._update_backend_status()\n",
"return self.device_stats\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Return'"
] |
[
"@then(parsers.parse(...\n",
"VAR_6 = kube_utils.get_pods(VAR_0, VAR_4, VAR_2)\n",
"assert len(VAR_6\n ) == 1, 'Expected one (and only one) pod with label {l}, found {f}'.format(\n l=label, f=len(candidates))\n",
"VAR_7 = VAR_6[0]\n",
"VAR_8 = ' '.join(['kubectl', '--kubeconfig=/etc/kubernetes/admin.conf',\n 'exec', '--namespace {0}'.format(VAR_2), VAR_7['metadata']['name'], VAR_3])\n",
"VAR_0.check_output(VAR_8)\n"
] | [
"@then(parsers.parse(...\n",
"candidates = kube_utils.get_pods(host, label, namespace)\n",
"assert len(candidates\n ) == 1, 'Expected one (and only one) pod with label {l}, found {f}'.format(\n l=label, f=len(candidates))\n",
"pod = candidates[0]\n",
"cmd = ' '.join(['kubectl', '--kubeconfig=/etc/kubernetes/admin.conf',\n 'exec', '--namespace {0}'.format(namespace), pod['metadata']['name'],\n command])\n",
"host.check_output(cmd)\n"
] | [
0,
0,
0,
0,
2,
2
] | [
"Condition",
"Assign'",
"Assert'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(VAR_1, VAR_2, VAR_3='', VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return FUNC_4(VAR_1, VAR_2['oauth_token'], VAR_6=resp['oauth_token_secret'],\n VAR_4=extra_data, VAR_3=token_type)\n"
] | [
"def oauth1_token_setter(remote, resp, token_type='', extra_data=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return token_setter(remote, resp['oauth_token'], secret=resp[\n 'oauth_token_secret'], extra_data=extra_data, token_type=token_type)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"VAR_3 = {'name': 'fakesnap', 'volume_name': 'fakevolume_name'}\n",
"VAR_1 = {'name': self.volume_name}\n",
"self.driver._eql_execute('volume', 'select', VAR_3['volume_name'],\n 'snapshot', 'select', VAR_3['name'], 'clone', VAR_1['name']).AndReturn([\n 'iSCSI target name is %s.' % self.fake_iqn])\n",
"self.mox.ReplayAll()\n",
"VAR_2 = self.driver.create_volume_from_snapshot(VAR_1, VAR_3)\n",
"self.assertEqual(VAR_2, self._model_update)\n"
] | [
"def test_create_volume_from_snapshot(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"snapshot = {'name': 'fakesnap', 'volume_name': 'fakevolume_name'}\n",
"volume = {'name': self.volume_name}\n",
"self.driver._eql_execute('volume', 'select', snapshot['volume_name'],\n 'snapshot', 'select', snapshot['name'], 'clone', volume['name']).AndReturn(\n ['iSCSI target name is %s.' % self.fake_iqn])\n",
"self.mox.ReplayAll()\n",
"model_update = self.driver.create_volume_from_snapshot(volume, snapshot)\n",
"self.assertEqual(model_update, self._model_update)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_7():...\n",
"populate_test_database()\n",
"FUNC_1('first playlist')\n",
"FUNC_0(1, 'the title of the video', 'the url of the video', 1)\n",
"FUNC_0(1, 'the title of the video', 'the url of the video', 2)\n",
"VAR_11 = VAR_3.delete('/playlists/1')\n",
"assert VAR_11.json['status'] == 'OK'\n",
"VAR_12 = VAR_3.get('/playlists/1')\n",
"assert VAR_12.json['status'] == 'OK'\n",
"assert VAR_12.json['data'] == None\n",
"VAR_13 = VAR_3.get('/videos/1')\n",
"assert VAR_13.json['status'] == 'OK'\n",
"assert VAR_13.json['data'] == []\n"
] | [
"def test_should_delete_a_playlist_and_remove_all_its_videos():...\n",
"populate_test_database()\n",
"create_playlist('first playlist')\n",
"create_video(1, 'the title of the video', 'the url of the video', 1)\n",
"create_video(1, 'the title of the video', 'the url of the video', 2)\n",
"response = test_app.delete('/playlists/1')\n",
"assert response.json['status'] == 'OK'\n",
"response2 = test_app.get('/playlists/1')\n",
"assert response2.json['status'] == 'OK'\n",
"assert response2.json['data'] == None\n",
"response3 = test_app.get('/videos/1')\n",
"assert response3.json['status'] == 'OK'\n",
"assert response3.json['data'] == []\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"from flask import request\n",
"from api import *\n",
"import faf.db as db\n",
"VAR_0 = 'string'\n",
"@app.route('/achievements')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = request.args.get('language', 'en')\n",
"VAR_6 = request.args.get('region', 'US')\n",
"VAR_11 = db.connection.cursor(db.pymysql.cursors.DictCursor)\n",
"VAR_11.execute(VAR_0 + ' ORDER BY `order` ASC', {'language': VAR_5,\n 'region': VAR_6})\n",
"return flask.jsonify(items=cursor.fetchall())\n"
] | [
"from flask import request\n",
"from api import *\n",
"import faf.db as db\n",
"SELECT_ACHIEVEMENTS_QUERY = \"\"\"SELECT\n ach.id,\n ach.type,\n ach.total_steps,\n ach.revealed_icon_url,\n ach.unlocked_icon_url,\n ach.initial_state,\n ach.experience_points,\n COALESCE(name_langReg.value, name_lang.value, name_def.value) as name,\n COALESCE(desc_langReg.value, desc_lang.value, desc_def.value) as description\n FROM achievement_definitions ach\n LEFT OUTER JOIN messages name_langReg\n ON ach.name_key = name_langReg.key\n AND name_langReg.language = %(language)s\n AND name_langReg.region = %(region)s\n LEFT OUTER JOIN messages name_lang\n ON ach.name_key = name_lang.key\n AND name_lang.language = %(language)s\n LEFT OUTER JOIN messages name_def\n ON ach.name_key = name_def.key\n AND name_def.language = 'en'\n AND name_def.region = 'US'\n LEFT OUTER JOIN messages desc_langReg\n ON ach.description_key = desc_langReg.key\n AND desc_langReg.language = %(language)s\n AND desc_langReg.region = %(region)s\n LEFT OUTER JOIN messages desc_lang\n ON ach.description_key = desc_lang.key\n AND desc_lang.language = %(language)s\n LEFT OUTER JOIN messages desc_def\n ON ach.description_key = desc_def.key\n AND desc_def.language = 'en'\n AND desc_def.region = 'US'\"\"\"\n",
"@app.route('/achievements')...\n",
"\"\"\"docstring\"\"\"\n",
"language = request.args.get('language', 'en')\n",
"region = request.args.get('region', 'US')\n",
"cursor = db.connection.cursor(db.pymysql.cursors.DictCursor)\n",
"cursor.execute(SELECT_ACHIEVEMENTS_QUERY + ' ORDER BY `order` ASC', {\n 'language': language, 'region': region})\n",
"return flask.jsonify(items=cursor.fetchall())\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_26(self):...\n",
"VAR_3 = '/afterlogin/'\n",
"VAR_8 = self.add_outstanding_query(VAR_3)\n",
"VAR_31 = self.app.dispatch_request()\n",
"self.assertEqual(VAR_31.status, '302 FOUND')\n",
"self.assertIn('testing-relay-state', VAR_31.location)\n"
] | [
"def test_logout_service_startingSP_already_logout(self):...\n",
"came_from = '/afterlogin/'\n",
"session_id = self.add_outstanding_query(came_from)\n",
"response = self.app.dispatch_request()\n",
"self.assertEqual(response.status, '302 FOUND')\n",
"self.assertIn('testing-relay-state', response.location)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1[0] = VAR_1[1]\n"
] | [
"def p_elem(t):...\n",
"\"\"\"docstring\"\"\"\n",
"t[0] = t[1]\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'"
] |
[
"def __init__(self, VAR_4, VAR_5, VAR_6, VAR_7, VAR_8='id'):...\n",
"self.__parent__ = VAR_4\n",
"self.factory = VAR_7\n",
"self.object_name = VAR_6\n",
"self.__name__ = VAR_5\n",
"self.id_key = VAR_8\n"
] | [
"def __init__(self, parent, traversal_name, object_name, factory, id_key='id'):...\n",
"self.__parent__ = parent\n",
"self.factory = factory\n",
"self.object_name = object_name\n",
"self.__name__ = traversal_name\n",
"self.id_key = id_key\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_9(self):...\n",
"self.db.close()\n"
] | [
"def close(self):...\n",
"self.db.close()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_0: DirectoryEntry, VAR_1: Dict) ->ScanResult:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6, VAR_23 = FUNC_3(VAR_0.landing_page_url)\n",
"return ScanResult(VAR_0=securedrop, live=pshtt_results['Live'],\n http_status_200_ok=False)\n",
"return ScanResult(landing_page_url=securedrop.landing_page_url, live=\n pshtt_results['Live'], http_status_200_ok=validate_200_ok(\n no_redirects_page), forces_https=bool(pshtt_results[\n 'Strictly Forces HTTPS']), hsts=pshtt_results['HSTS'], hsts_max_age=\n validate_hsts_max_age(pshtt_results['HSTS Max Age']),\n hsts_entire_domain=validate_hsts_entire_domain(pshtt_results[\n 'HSTS Entire Domain']), hsts_preloaded=pshtt_results['HSTS Preloaded'],\n subdomain=validate_subdomain(securedrop.landing_page_url), no_cookies=\n validate_no_cookies(page), safe_onion_address=\n validate_onion_address_not_in_href(soup), no_cdn=validate_not_using_cdn\n (page), http_no_redirect=validate_no_redirects(no_redirects_page),\n expected_encoding=validate_encoding(page), no_analytics=\n validate_not_using_analytics(page), no_server_info=\n validate_server_software(page), no_server_version=\n validate_server_version(page), csp_origin_only=validate_csp(page),\n mime_sniffing_blocked=validate_no_sniff(page), noopen_download=\n validate_download_options(page), xss_protection=validate_xss_protection\n (page), clickjacking_protection=validate_clickjacking_protection(page),\n good_cross_domain_policy=validate_cross_domain_policy(page),\n http_1_0_caching_disabled=validate_pragma(page), expires_set=\n validate_expires(page), cache_control_set=validate_cache_control_set(\n page), cache_control_revalidate_set=validate_cache_must_revalidate(page\n ), cache_control_nocache_set=validate_nocache(page),\n cache_control_notransform_set=validate_notransform(page),\n cache_control_nostore_set=validate_nostore(page),\n cache_control_private_set=validate_private(page),\n referrer_policy_set_to_no_referrer=validate_no_referrer_policy(page))\n",
"VAR_24, VAR_25 = FUNC_3(VAR_0.landing_page_url, VAR_5=False)\n"
] | [
"def pshtt_data_to_result(securedrop: DirectoryEntry, pshtt_results: Dict...\n",
"\"\"\"docstring\"\"\"\n",
"page, soup = request_and_scrape_page(securedrop.landing_page_url)\n",
"return ScanResult(securedrop=securedrop, live=pshtt_results['Live'],\n http_status_200_ok=False)\n",
"return ScanResult(landing_page_url=securedrop.landing_page_url, live=\n pshtt_results['Live'], http_status_200_ok=validate_200_ok(\n no_redirects_page), forces_https=bool(pshtt_results[\n 'Strictly Forces HTTPS']), hsts=pshtt_results['HSTS'], hsts_max_age=\n validate_hsts_max_age(pshtt_results['HSTS Max Age']),\n hsts_entire_domain=validate_hsts_entire_domain(pshtt_results[\n 'HSTS Entire Domain']), hsts_preloaded=pshtt_results['HSTS Preloaded'],\n subdomain=validate_subdomain(securedrop.landing_page_url), no_cookies=\n validate_no_cookies(page), safe_onion_address=\n validate_onion_address_not_in_href(soup), no_cdn=validate_not_using_cdn\n (page), http_no_redirect=validate_no_redirects(no_redirects_page),\n expected_encoding=validate_encoding(page), no_analytics=\n validate_not_using_analytics(page), no_server_info=\n validate_server_software(page), no_server_version=\n validate_server_version(page), csp_origin_only=validate_csp(page),\n mime_sniffing_blocked=validate_no_sniff(page), noopen_download=\n validate_download_options(page), xss_protection=validate_xss_protection\n (page), clickjacking_protection=validate_clickjacking_protection(page),\n good_cross_domain_policy=validate_cross_domain_policy(page),\n http_1_0_caching_disabled=validate_pragma(page), expires_set=\n validate_expires(page), cache_control_set=validate_cache_control_set(\n page), cache_control_revalidate_set=validate_cache_must_revalidate(page\n ), cache_control_nocache_set=validate_nocache(page),\n cache_control_notransform_set=validate_notransform(page),\n cache_control_nostore_set=validate_nostore(page),\n cache_control_private_set=validate_private(page),\n referrer_policy_set_to_no_referrer=validate_no_referrer_policy(page))\n",
"no_redirects_page, _ = request_and_scrape_page(securedrop.landing_page_url,\n allow_redirects=False)\n"
] | [
0,
0,
6,
6,
6,
6
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Return'",
"Assign'"
] |
[
"def __init__(self):...\n",
"self.order = list()\n"
] | [
"def __init__(self):...\n",
"self.order = list()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_8(self, VAR_9):...\n",
"if isinstance(VAR_9, str):\n",
"VAR_25 = json.loads(VAR_9)\n",
"return VAR_9\n",
"VAR_9 = [self.base_field.to_python(val) for val in VAR_25]\n"
] | [
"def to_python(self, value):...\n",
"if isinstance(value, str):\n",
"vals = json.loads(value)\n",
"return value\n",
"value = [self.base_field.to_python(val) for val in vals]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'",
"Assign'"
] |
[
"def __call__(self, VAR_2, VAR_3, VAR_4=None):...\n",
"if not VAR_3.strip():\n",
"return\n",
"VAR_90, VAR_47, VAR_91 = self._parse_instruction(VAR_3)\n",
"self.reply(VAR_2,\n 'could not parse your request: {}. please use format: poly1 mod poly2 in GF(p)[x]'\n .format(VAR_55))\n",
"VAR_73, VAR_104 = divmod(VAR_90, VAR_91)\n",
"self.reply(VAR_2, 'division by zero')\n",
"self.reply(VAR_2, '{a} // {b} = {d}; remainder: {r}'.format(a=p1, b=p2,\n VAR_73=d, VAR_104=r))\n",
"return\n",
"return\n"
] | [
"def __call__(self, msg, arguments, errorSink=None):...\n",
"if not arguments.strip():\n",
"return\n",
"p1, _, p2 = self._parse_instruction(arguments)\n",
"self.reply(msg,\n 'could not parse your request: {}. please use format: poly1 mod poly2 in GF(p)[x]'\n .format(err))\n",
"d, r = divmod(p1, p2)\n",
"self.reply(msg, 'division by zero')\n",
"self.reply(msg, '{a} // {b} = {d}; remainder: {r}'.format(a=p1, b=p2, d=d, r=r)\n )\n",
"return\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_6(self, VAR_41):...\n",
"VAR_79 = CLASS_24.run(self, VAR_41)\n",
"if VAR_79:\n",
"if not VAR_79._loaded:\n",
"return self.error(errors.NOT_AUTHOR)\n",
"VAR_79._load()\n",
"if VAR_101.user_is_loggedin and VAR_79.author_id == VAR_101.user._id:\n",
"return VAR_79\n"
] | [
"def run(self, fullname):...\n",
"thing = VByName.run(self, fullname)\n",
"if thing:\n",
"if not thing._loaded:\n",
"return self.error(errors.NOT_AUTHOR)\n",
"thing._load()\n",
"if c.user_is_loggedin and thing.author_id == c.user._id:\n",
"return thing\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Condition",
"Return'"
] |
[
"@staticmethod...\n",
"return '/'.join(VAR_3) or '/'\n"
] | [
"@staticmethod...\n",
"return '/'.join(path) or '/'\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"import logging\n",
"import json\n",
"import os\n",
"from os.path import exists, join, isdir\n",
"from shutil import rmtree\n",
"from subprocess import call, PIPE, Popen, CalledProcessError, run\n",
"from urllib.parse import urlparse\n",
"import shutil\n",
"import xml.etree.ElementTree as ElementTree\n",
"from quark.utils import DirectoryContext as cd, fork, log_check_output\n",
"from quark.utils import freeze_file, dependency_file, mkdir, load_conf\n",
"VAR_0 = logging.getLogger(__name__)\n",
"def FUNC_0(VAR_1, VAR_2=True):...\n",
"if exists(join(VAR_1, '.svn')):\n",
"VAR_24 = CLASS_3\n",
"if exists(join(VAR_1, '.git')):\n",
"return VAR_24.url_from_directory(VAR_1, VAR_2)\n",
"VAR_24 = CLASS_2\n"
] | [
"import logging\n",
"import json\n",
"import os\n",
"from os.path import exists, join, isdir\n",
"from shutil import rmtree\n",
"from subprocess import call, PIPE, Popen, CalledProcessError, run\n",
"from urllib.parse import urlparse\n",
"import shutil\n",
"import xml.etree.ElementTree as ElementTree\n",
"from quark.utils import DirectoryContext as cd, fork, log_check_output\n",
"from quark.utils import freeze_file, dependency_file, mkdir, load_conf\n",
"logger = logging.getLogger(__name__)\n",
"def url_from_directory(directory, include_commit=True):...\n",
"if exists(join(directory, '.svn')):\n",
"cls = SvnSubproject\n",
"if exists(join(directory, '.git')):\n",
"return cls.url_from_directory(directory, include_commit)\n",
"cls = GitSubproject\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'"
] |
[
"def FUNC_20(self, VAR_28, **VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"self.abort(VAR_28, json=kwargs, VAR_29={'Content-Type': self.CONTENT_TYPE_FULL}\n )\n"
] | [
"def abort_with_error(self, http_code, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"self.abort(http_code, json=kwargs, headers={'Content-Type': self.\n CONTENT_TYPE_FULL})\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_1(VAR_4):...\n",
"return '.' in VAR_4 and VAR_4.rsplit('.', 1)[1] in VAR_1.config[\n 'ALLOWED_EXTENSIONS']\n"
] | [
"def allowed_file(filename):...\n",
"return '.' in filename and filename.rsplit('.', 1)[1] in app.config[\n 'ALLOWED_EXTENSIONS']\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_5, VAR_6):...\n",
"self._zinc_factory = VAR_5\n",
"self._products = VAR_6\n"
] | [
"def __init__(self, zinc_factory, products):...\n",
"self._zinc_factory = zinc_factory\n",
"self._products = products\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'PLANCK', VAR_7='1')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='PLANK', VAR_10=[{'name': '----PLANK'}, {'name':\n 'PLANCK'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'PLANCK', id='1')\n",
"verify_results(client, jwt, query='PLANK', expected=[{'name': '----PLANK'},\n {'name': 'PLANCK'}])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"return 'load statement for task %s (%s)' % (self.task_id, self.url)\n"
] | [
"def describe(self):...\n",
"return 'load statement for task %s (%s)' % (self.task_id, self.url)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6(self, VAR_3):...\n",
"if VAR_3.name not in self.host_names:\n",
"self.hosts.append(VAR_3)\n",
"self._hosts.add(VAR_3.name)\n",
"VAR_3.add_group(self)\n",
"self.clear_hosts_cache()\n"
] | [
"def add_host(self, host):...\n",
"if host.name not in self.host_names:\n",
"self.hosts.append(host)\n",
"self._hosts.add(host.name)\n",
"host.add_group(self)\n",
"self.clear_hosts_cache()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@app.route('/<issue>/articles/new', methods=['GET'])...\n",
""
] | [
"@app.route('/<issue>/articles/new', methods=['GET'])...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_29(self):...\n",
"VAR_49 = re.split(self.dynamic_fill, self.file)[0]\n",
"VAR_50 = os.path.dirname(VAR_49)\n",
"if len(VAR_50) > 0 and not os.path.exists(VAR_50):\n",
"os.makedirs(VAR_50)\n",
"if e.errno != 17:\n"
] | [
"def prepare(self):...\n",
"path_until_wildcard = re.split(self.dynamic_fill, self.file)[0]\n",
"dir = os.path.dirname(path_until_wildcard)\n",
"if len(dir) > 0 and not os.path.exists(dir):\n",
"os.makedirs(dir)\n",
"if e.errno != 17:\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition"
] |
[
"def FUNC_11(self):...\n",
"return u'Array({})'.format(self.inner_type.for_schema())\n"
] | [
"def for_schema(self):...\n",
"return u'Array({})'.format(self.inner_type.for_schema())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_19(self):...\n",
"VAR_16 = self.mox.CreateMock(paramiko.SSHClient)\n",
"VAR_12 = self.mox.CreateMock(paramiko.Channel)\n",
"VAR_17 = self.mox.CreateMock(paramiko.Transport)\n",
"self.mox.StubOutWithMock(self.driver, '_get_output')\n",
"self.mox.StubOutWithMock(VAR_12, 'invoke_shell')\n",
"VAR_15 = ['NoError: test run']\n",
"VAR_16.get_transport().AndReturn(VAR_17)\n",
"VAR_17.open_session().AndReturn(VAR_12)\n",
"VAR_12.invoke_shell()\n",
"self.driver._get_output(VAR_12).AndReturn(VAR_15)\n",
"VAR_18 = 'this is dummy command'\n",
"VAR_12.send('stty columns 255' + '\\r')\n",
"self.driver._get_output(VAR_12).AndReturn(VAR_15)\n",
"VAR_12.send(VAR_18 + '\\r')\n",
"self.driver._get_output(VAR_12).AndReturn(VAR_15)\n",
"VAR_12.close()\n",
"self.mox.ReplayAll()\n",
"self.assertEqual(self.driver._ssh_execute(VAR_16, VAR_18), VAR_15)\n"
] | [
"def test_ssh_execute(self):...\n",
"ssh = self.mox.CreateMock(paramiko.SSHClient)\n",
"chan = self.mox.CreateMock(paramiko.Channel)\n",
"transport = self.mox.CreateMock(paramiko.Transport)\n",
"self.mox.StubOutWithMock(self.driver, '_get_output')\n",
"self.mox.StubOutWithMock(chan, 'invoke_shell')\n",
"expected_output = ['NoError: test run']\n",
"ssh.get_transport().AndReturn(transport)\n",
"transport.open_session().AndReturn(chan)\n",
"chan.invoke_shell()\n",
"self.driver._get_output(chan).AndReturn(expected_output)\n",
"cmd = 'this is dummy command'\n",
"chan.send('stty columns 255' + '\\r')\n",
"self.driver._get_output(chan).AndReturn(expected_output)\n",
"chan.send(cmd + '\\r')\n",
"self.driver._get_output(chan).AndReturn(expected_output)\n",
"chan.close()\n",
"self.mox.ReplayAll()\n",
"self.assertEqual(self.driver._ssh_execute(ssh, cmd), expected_output)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(self, VAR_5):...\n",
"return VAR_5.has_sources(extension=tuple(self._native_source_extensions))\n"
] | [
"def pydist_has_native_sources(self, target):...\n",
"return target.has_sources(extension=tuple(self._native_source_extensions))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"from bs4 import BeautifulSoup\n",
"from bs4.element import NavigableString, Tag\n",
"import requests\n",
"import time\n",
"def __init__(self, VAR_0):...\n",
"self.MAX_P_CHECKS = 5\n",
"self.MAX_CRAWLS = 1\n",
"self.MAX_PATH_LENGTH = 50\n",
"self.TARGET = 'Philosophy'\n",
"self.DOMAIN = 'https://en.wikipedia.org'\n",
"self.start_wiki = 'Special:Random' if not VAR_0 else VAR_0\n",
"self.path_lengths = []\n",
"self.wiki_to_target_length = {}\n",
"self.completed_path = 0\n",
"self.invalid_path = 0\n",
"def FUNC_0(self, VAR_1, VAR_2):...\n",
"if VAR_2:\n",
"VAR_15 = self.DOMAIN + '/wiki/' + VAR_1\n",
"VAR_15 = self.DOMAIN + VAR_1\n",
"return VAR_15\n"
] | [
"from bs4 import BeautifulSoup\n",
"from bs4.element import NavigableString, Tag\n",
"import requests\n",
"import time\n",
"def __init__(self, wiki):...\n",
"self.MAX_P_CHECKS = 5\n",
"self.MAX_CRAWLS = 1\n",
"self.MAX_PATH_LENGTH = 50\n",
"self.TARGET = 'Philosophy'\n",
"self.DOMAIN = 'https://en.wikipedia.org'\n",
"self.start_wiki = 'Special:Random' if not wiki else wiki\n",
"self.path_lengths = []\n",
"self.wiki_to_target_length = {}\n",
"self.completed_path = 0\n",
"self.invalid_path = 0\n",
"def build_url(self, wiki_topic, add_wiki_text):...\n",
"if add_wiki_text:\n",
"url = self.DOMAIN + '/wiki/' + wiki_topic\n",
"url = self.DOMAIN + wiki_topic\n",
"return url\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"@classmethod...\n",
"return '-Werror',\n"
] | [
"@classmethod...\n",
"return '-Werror',\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"from django.http import Http404\n",
"from django.views.generic.simple import direct_to_template\n",
"from oi.projects.models import Project, OINeedsPrjPerms\n",
"from oi.helpers import OI_READ\n",
"def FUNC_0(VAR_0, VAR_1):...\n",
"VAR_2 = Project.objects.get(VAR_1=id)\n",
"if not VAR_2.has_perm(VAR_0.user, OI_READ):\n",
"return direct_to_template(VAR_0, template='funding/feature.html',\n extra_context={'object': task.master, 'task': task})\n"
] | [
"from django.http import Http404\n",
"from django.views.generic.simple import direct_to_template\n",
"from oi.projects.models import Project, OINeedsPrjPerms\n",
"from oi.helpers import OI_READ\n",
"def get_feature(request, id):...\n",
"task = Project.objects.get(id=id)\n",
"if not task.has_perm(request.user, OI_READ):\n",
"return direct_to_template(request, template='funding/feature.html',\n extra_context={'object': task.master, 'task': task})\n"
] | [
0,
0,
6,
6,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_1(self, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"return datetime.utcnow() > VAR_5 + self.expires\n"
] | [
"def has_expired(self, timestamp):...\n",
"\"\"\"docstring\"\"\"\n",
"return datetime.utcnow() > timestamp + self.expires\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@route('POST', '/-test/fail', VAR_1=website)...\n",
"VAR_16 = get_http_argument('message')\n",
"VAR_0.error(VAR_16)\n",
"get_executing_test().error = VAR_16\n"
] | [
"@route('POST', '/-test/fail', website=website)...\n",
"message = get_http_argument('message')\n",
"LOGGER.error(message)\n",
"get_executing_test().error = message\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_3(self):...\n",
"VAR_2 = self.connect()\n",
"VAR_3 = 'DELETE FROM crimes;'\n",
"VAR_2.close()\n",
"cursor.execute(VAR_3)\n",
"VAR_2.commit()\n"
] | [
"def clear_all(self):...\n",
"connection = self.connect()\n",
"query = 'DELETE FROM crimes;'\n",
"connection.close()\n",
"cursor.execute(query)\n",
"connection.commit()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@api.public...\n",
"self.response.write(json.dumps({'peer_id': api.get_peer_identity().to_bytes\n (), 'cur_id': api.get_current_identity().to_bytes()}))\n"
] | [
"@api.public...\n",
"self.response.write(json.dumps({'peer_id': api.get_peer_identity().to_bytes\n (), 'cur_id': api.get_current_identity().to_bytes()}))\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_29(self, VAR_10=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_10 is None:\n",
"VAR_10 = flask.request\n",
"VAR_33 = urlparse.urlparse(VAR_10.url)\n",
"if VAR_10.scheme == 'https':\n",
"VAR_44 = 'on'\n",
"if app.debug and app.config['SAML_FAKE_HTTPS']:\n",
"return {'https': VAR_44, 'http_host': VAR_10.host, 'server_port': VAR_33.\n port, 'script_name': VAR_10.path, 'get_data': VAR_10.args.copy(),\n 'post_data': VAR_10.form.copy()}\n",
"VAR_44 = 'on'\n",
"VAR_44 = 'off'\n"
] | [
"def _saml_req_dict_from_request(self, flask_request=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if flask_request is None:\n",
"flask_request = flask.request\n",
"url_data = urlparse.urlparse(flask_request.url)\n",
"if flask_request.scheme == 'https':\n",
"https = 'on'\n",
"if app.debug and app.config['SAML_FAKE_HTTPS']:\n",
"return {'https': https, 'http_host': flask_request.host, 'server_port':\n url_data.port, 'script_name': flask_request.path, 'get_data':\n flask_request.args.copy(), 'post_data': flask_request.form.copy()}\n",
"https = 'on'\n",
"https = 'off'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"@wraps(VAR_6)...\n",
"if 'logged_in' in VAR_10:\n",
"return VAR_6(*VAR_8, **kwargs)\n",
"flash('Unauthorized, Please login', 'danger')\n",
"return redirect(url_for('login'))\n"
] | [
"@wraps(f)...\n",
"if 'logged_in' in session:\n",
"return f(*args, **kwargs)\n",
"flash('Unauthorized, Please login', 'danger')\n",
"return redirect(url_for('login'))\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Expr'",
"Return'"
] |
[
"def FUNC_19(self):...\n",
"VAR_22 = os.path.join(tempdir, 'foo')\n",
"os.symlink(not_zip.name, VAR_22)\n",
"self.assertEqual(os.path.realpath(VAR_22), os.path.realpath(not_zip.name))\n",
"next(open_zip(VAR_22).gen)\n"
] | [
"def test_open_zip_returns_realpath_on_badzipfile(self):...\n",
"file_symlink = os.path.join(tempdir, 'foo')\n",
"os.symlink(not_zip.name, file_symlink)\n",
"self.assertEqual(os.path.realpath(file_symlink), os.path.realpath(not_zip.name)\n )\n",
"next(open_zip(file_symlink).gen)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.route('/search', methods=['GET', 'POST'])...\n",
"if request.method == 'GET':\n",
"return render_template('search-form.html')\n",
"VAR_2 = request.form['keywords']\n",
"VAR_3 = list(request.form.keys())\n",
"VAR_3.remove('keywords')\n",
"VAR_4 = generic_search(VAR_2, VAR_3, VAR_0, g)\n",
"return jsonify(VAR_4)\n"
] | [
"@app.route('/search', methods=['GET', 'POST'])...\n",
"if request.method == 'GET':\n",
"return render_template('search-form.html')\n",
"keywords = request.form['keywords']\n",
"tables = list(request.form.keys())\n",
"tables.remove('keywords')\n",
"data = generic_search(keywords, tables, app, g)\n",
"return jsonify(data)\n"
] | [
0,
0,
0,
0,
0,
0,
4,
4
] | [
"Condition",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"from flask import Flask, render_template, request, current_app, g\n",
"from indic_transliteration import sanscript\n",
"from indic_transliteration.sanscript import SchemeMap, SCHEMES, transliterate\n",
"import random\n",
"import sqlite3 as sql\n",
"import re\n",
"VAR_0 = Flask(__name__, static_url_path='', static_folder='static')\n",
"@VAR_0.route('/')...\n",
"VAR_1 = ['स्वर्गवर्गः', 'व्योमवर्गः', 'दिग्वर्गः', 'कालवर्गः', 'धीवर्गः',\n 'शब्दादिवर्गः', 'नाट्यवर्गः', 'पातालभोगिवर्गः', 'नरकवर्गः', 'वारिवर्गः',\n 'भूमिवर्गः', 'पुरवर्गः', 'शैलवर्गः', 'वनौषधिवर्गः', 'सिंहादिवर्गः',\n 'मनुष्यवर्गः', 'ब्रह्मवर्गः', 'क्षत्रियवर्गः', 'वैश्यवर्गः',\n 'शूद्रवर्गः', 'विशेष्यनिघ्नवर्गः', 'सङ्कीर्णवर्गः', 'विशेष्यनिघ्नवर्गः',\n 'सङ्कीर्णवर्गः', 'नानार्थवर्गः', 'अव्ययवर्गः']\n",
"return render_template('index.html', VAR_1=all_vargas)\n"
] | [
"from flask import Flask, render_template, request, current_app, g\n",
"from indic_transliteration import sanscript\n",
"from indic_transliteration.sanscript import SchemeMap, SCHEMES, transliterate\n",
"import random\n",
"import sqlite3 as sql\n",
"import re\n",
"app = Flask(__name__, static_url_path='', static_folder='static')\n",
"@app.route('/')...\n",
"all_vargas = ['स्वर्गवर्गः', 'व्योमवर्गः', 'दिग्वर्गः', 'कालवर्गः',\n 'धीवर्गः', 'शब्दादिवर्गः', 'नाट्यवर्गः', 'पातालभोगिवर्गः', 'नरकवर्गः',\n 'वारिवर्गः', 'भूमिवर्गः', 'पुरवर्गः', 'शैलवर्गः', 'वनौषधिवर्गः',\n 'सिंहादिवर्गः', 'मनुष्यवर्गः', 'ब्रह्मवर्गः', 'क्षत्रियवर्गः',\n 'वैश्यवर्गः', 'शूद्रवर्गः', 'विशेष्यनिघ्नवर्गः', 'सङ्कीर्णवर्गः',\n 'विशेष्यनिघ्नवर्गः', 'सङ्कीर्णवर्गः', 'नानार्थवर्गः', 'अव्ययवर्गः']\n",
"return render_template('index.html', all_vargas=all_vargas)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_9(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1.value = beamr.interpreters.ImageEnv(VAR_1.value)\n",
"return VAR_1\n"
] | [
"def t_IMGENV(t):...\n",
"\"\"\"docstring\"\"\"\n",
"t.value = beamr.interpreters.ImageEnv(t.value)\n",
"return t\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import pandas as pd\n",
"VAR_0 = pd.read_csv('Social_Network_Ads.csv')\n",
"VAR_1 = VAR_0.iloc[:, ([2, 3])].values\n",
"VAR_2 = VAR_0.iloc[:, (4)].values\n",
"from sklearn.model_selection import train_test_split\n",
"VAR_3, VAR_4, VAR_5, VAR_6 = train_test_split(VAR_1, VAR_2, test_size=0.25,\n random_state=0)\n",
"from sklearn.preprocessing import StandardScaler\n",
"VAR_7 = StandardScaler()\n",
"VAR_3 = VAR_7.fit_transform(VAR_3)\n",
"VAR_4 = VAR_7.transform(VAR_4)\n",
"VAR_8 = classifier.predict(VAR_4)\n",
"from sklearn.metrics import confusion_matrix\n",
"VAR_9 = confusion_matrix(VAR_6, VAR_8)\n",
"from matplotlib.colors import ListedColormap\n",
"VAR_10, VAR_11 = VAR_3, VAR_5\n",
"VAR_12, VAR_13 = np.meshgrid(np.arange(start=X_set[:, (0)].min() - 1, stop=\n X_set[:, (0)].max() + 1, step=0.01), np.arange(start=X_set[:, (1)].min(\n ) - 1, stop=X_set[:, (1)].max() + 1, step=0.01))\n",
"plt.contourf(VAR_12, VAR_13, classifier.predict(np.array([VAR_12.ravel(),\n VAR_13.ravel()]).T).reshape(VAR_12.shape), alpha=0.75, cmap=\n ListedColormap(('red', 'green')))\n",
"plt.xlim(VAR_12.min(), VAR_12.max())\n",
"plt.ylim(VAR_13.min(), VAR_13.max())\n",
"for i, j in enumerate(np.unique(VAR_11)):\n",
"plt.scatter(VAR_10[VAR_11 == j, 0], VAR_10[VAR_11 == j, 1], c=\n ListedColormap(('red', 'green'))(i), label=j)\n",
"plt.title('Classifier (Training set)')\n",
"plt.xlabel('Age')\n",
"plt.ylabel('Estimated Salary')\n",
"plt.legend()\n",
"plt.show()\n",
"from matplotlib.colors import ListedColormap\n",
"VAR_10, VAR_11 = VAR_4, VAR_6\n",
"VAR_12, VAR_13 = np.meshgrid(np.arange(start=X_set[:, (0)].min() - 1, stop=\n X_set[:, (0)].max() + 1, step=0.01), np.arange(start=X_set[:, (1)].min(\n ) - 1, stop=X_set[:, (1)].max() + 1, step=0.01))\n",
"plt.contourf(VAR_12, VAR_13, classifier.predict(np.array([VAR_12.ravel(),\n VAR_13.ravel()]).T).reshape(VAR_12.shape), alpha=0.75, cmap=\n ListedColormap(('red', 'green')))\n",
"plt.xlim(VAR_12.min(), VAR_12.max())\n",
"plt.ylim(VAR_13.min(), VAR_13.max())\n",
"for i, j in enumerate(np.unique(VAR_11)):\n",
"plt.scatter(VAR_10[VAR_11 == j, 0], VAR_10[VAR_11 == j, 1], c=\n ListedColormap(('red', 'green'))(i), label=j)\n",
"plt.title('Classifier (Test set)')\n",
"plt.xlabel('Age')\n",
"plt.ylabel('Estimated Salary')\n",
"plt.legend()\n",
"plt.show()\n"
] | [
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import pandas as pd\n",
"dataset = pd.read_csv('Social_Network_Ads.csv')\n",
"X = dataset.iloc[:, ([2, 3])].values\n",
"y = dataset.iloc[:, (4)].values\n",
"from sklearn.model_selection import train_test_split\n",
"X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25,\n random_state=0)\n",
"from sklearn.preprocessing import StandardScaler\n",
"sc = StandardScaler()\n",
"X_train = sc.fit_transform(X_train)\n",
"X_test = sc.transform(X_test)\n",
"y_pred = classifier.predict(X_test)\n",
"from sklearn.metrics import confusion_matrix\n",
"cm = confusion_matrix(y_test, y_pred)\n",
"from matplotlib.colors import ListedColormap\n",
"X_set, y_set = X_train, y_train\n",
"X1, X2 = np.meshgrid(np.arange(start=X_set[:, (0)].min() - 1, stop=X_set[:,\n (0)].max() + 1, step=0.01), np.arange(start=X_set[:, (1)].min() - 1,\n stop=X_set[:, (1)].max() + 1, step=0.01))\n",
"plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).\n T).reshape(X1.shape), alpha=0.75, cmap=ListedColormap(('red', 'green')))\n",
"plt.xlim(X1.min(), X1.max())\n",
"plt.ylim(X2.min(), X2.max())\n",
"for i, j in enumerate(np.unique(y_set)):\n",
"plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], c=ListedColormap((\n 'red', 'green'))(i), label=j)\n",
"plt.title('Classifier (Training set)')\n",
"plt.xlabel('Age')\n",
"plt.ylabel('Estimated Salary')\n",
"plt.legend()\n",
"plt.show()\n",
"from matplotlib.colors import ListedColormap\n",
"X_set, y_set = X_test, y_test\n",
"X1, X2 = np.meshgrid(np.arange(start=X_set[:, (0)].min() - 1, stop=X_set[:,\n (0)].max() + 1, step=0.01), np.arange(start=X_set[:, (1)].min() - 1,\n stop=X_set[:, (1)].max() + 1, step=0.01))\n",
"plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).\n T).reshape(X1.shape), alpha=0.75, cmap=ListedColormap(('red', 'green')))\n",
"plt.xlim(X1.min(), X1.max())\n",
"plt.ylim(X2.min(), X2.max())\n",
"for i, j in enumerate(np.unique(y_set)):\n",
"plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], c=ListedColormap((\n 'red', 'green'))(i), label=j)\n",
"plt.title('Classifier (Test set)')\n",
"plt.xlabel('Age')\n",
"plt.ylabel('Estimated Salary')\n",
"plt.legend()\n",
"plt.show()\n"
] | [
0,
0,
0,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"ImportFrom'",
"Assign'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"ImportFrom'",
"Assign'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"\"\"\"string\"\"\"\n",
"import sys\n",
"import citest.gcp_testing as gcp\n",
"import citest.json_contract as jc\n",
"import citest.service_testing as st\n",
"import spinnaker_testing as sk\n",
"import spinnaker_testing.gate as gate\n",
"\"\"\"string\"\"\"\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(VAR_1)\n"
] | [
"\"\"\"\nSmoke test to see if Spinnaker can interoperate with Google Cloud Platform.\n\nSee testable_service/integration_test.py and spinnaker_testing/spinnaker.py\nfor more details.\n\nThe smoke test will use ssh to peek at the spinnaker configuration\nto determine the managed project it should verify, and to determine\nthe spinnaker account name to use when sending it commands.\n\nSample Usage:\n Assuming you have created $PASSPHRASE_FILE (which you should chmod 400)\n and $CITEST_ROOT points to the root directory of this repository\n (which is . if you execute this from the root)\n\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/google_smoke_test.py --gce_ssh_passphrase_file=$PASSPHRASE_FILE --gce_project=$PROJECT --gce_zone=$ZONE --gce_instance=$INSTANCE\nor\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/google_smoke_test.py --native_hostname=host-running-smoke-test\n --managed_gce_project=$PROJECT --test_gce_zone=$ZONE\n\"\"\"\n",
"import sys\n",
"import citest.gcp_testing as gcp\n",
"import citest.json_contract as jc\n",
"import citest.service_testing as st\n",
"import spinnaker_testing as sk\n",
"import spinnaker_testing.gate as gate\n",
"\"\"\"Defines the scenario for the smoke test.\n\n This scenario defines the different test operations.\n We're going to:\n Create a Spinnaker Application\n Create a Load Balancer\n Create a Server Group\n Delete each of the above (in reverse order)\n \"\"\"\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(bindings)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Expr'",
"Condition",
"Docstring",
"Return'"
] |
[
"@validate(VUser(), VCreateMeetup(), VAR_4=ValueOrBlank('title'), VAR_8=...\n",
"return BoringPage(pagename='New Meetup', VAR_27=NewMeetup(*a, **kw)).render()\n"
] | [
"@validate(VUser(), VCreateMeetup(), title=ValueOrBlank('title'),...\n",
"return BoringPage(pagename='New Meetup', content=NewMeetup(*a, **kw)).render()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"@functools.wraps(VAR_0)...\n",
"if not self.request.headers.get('X-XSRF-Token-Request'):\n",
"return VAR_0(self, *VAR_5, **kwargs)\n"
] | [
"def require_xsrf_token_request(f):...\n",
"\"\"\"docstring\"\"\"\n",
"@functools.wraps(f)...\n",
"if not self.request.headers.get('X-XSRF-Token-Request'):\n",
"return f(self, *args, **kwargs)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_25(VAR_3, VAR_6):...\n",
"VAR_7 = FUNC_0()\n",
"for VAR_41 in range(int(VAR_6 * 10)):\n",
"return VAR_7\n",
"VAR_7.connect(VAR_3)\n",
"if VAR_2.VERBOSE:\n",
"print('+', end='')\n",
"time.sleep(0.1)\n",
"sys.stdout.flush()\n"
] | [
"def connect(addr, timeout):...\n",
"sock = create_client()\n",
"for _ in range(int(timeout * 10)):\n",
"return sock\n",
"sock.connect(addr)\n",
"if cls.VERBOSE:\n",
"print('+', end='')\n",
"time.sleep(0.1)\n",
"sys.stdout.flush()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Return'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self, VAR_1):...\n",
"return self.properties\n"
] | [
"def _fake_get_iscsi_properties(self, volume):...\n",
"return self.properties\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(VAR_17, VAR_18):...\n",
"VAR_17 = urllib.parse.quote_plus(VAR_17)\n",
"VAR_13 = '{}/sendMessage?text={}&chat_id={}&parse_mode=Markdown'.format(VAR_1,\n VAR_17, VAR_18)\n",
"logging.info('send_message: Sending %s to chat %d', VAR_17, VAR_18)\n",
"requests.get(VAR_13)\n"
] | [
"def send_message(text, chat_id):...\n",
"text = urllib.parse.quote_plus(text)\n",
"url = '{}/sendMessage?text={}&chat_id={}&parse_mode=Markdown'.format(base_url,\n text, chat_id)\n",
"logging.info('send_message: Sending %s to chat %d', text, chat_id)\n",
"requests.get(url)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"self.compilation_ok('string')\n",
"self.representation_is('string')\n"
] | [
"def test_multiline_string(self):...\n",
"self.compilation_ok(\n \"\"\"\nrequire \"reject\";\n\nif allof (false, address :is [\"From\", \"Sender\"] [\"blka@bla.com\"]) {\n reject text:\nnoreply\n============================\nYour email has been canceled\n============================\n.\n;\n stop;\n} else {\n reject text:\n================================\nYour email has been canceled too\n================================\n.\n;\n}\n\"\"\"\n )\n",
"self.representation_is(\n \"\"\"\nrequire (type: control)\n \"reject\"\nif (type: control)\n allof (type: test)\n false (type: test)\n address (type: test)\n :is\n [\"From\",\"Sender\"]\n [\"blka@bla.com\"]\n reject (type: action)\n text:\nnoreply\n============================\nYour email has been canceled\n============================\n.\n stop (type: control)\nelse (type: control)\n reject (type: action)\n text:\n================================\nYour email has been canceled too\n================================\n.\n\"\"\"\n )\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_29(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'LOW': 1}, 'CONFIDENCE': {'HIGH': 1}}\n",
"self.check_example('os_system.py', VAR_2)\n"
] | [
"def test_os_system(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'LOW': 1}, 'CONFIDENCE': {'HIGH': 1}}\n",
"self.check_example('os_system.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"return reverse('extraction_script_edit', kwargs={'pk': self.pk})\n"
] | [
"def get_absolute_url(self):...\n",
"return reverse('extraction_script_edit', kwargs={'pk': self.pk})\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = jc.Contract()\n",
"return st.OperationContract(self.agent.make_create_app_operation(VAR_1=self\n .bindings, application=self.TEST_APP), VAR_5=contract)\n"
] | [
"def create_app(self):...\n",
"\"\"\"docstring\"\"\"\n",
"contract = jc.Contract()\n",
"return st.OperationContract(self.agent.make_create_app_operation(bindings=\n self.bindings, application=self.TEST_APP), contract=contract)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_0(*VAR_0, **VAR_1):...\n",
"if not app.config['USE_AUTH']:\n",
"VAR_13 = CLASS_1\n",
"VAR_14 = app.config['USER_AUTH_MODULE'].lower()\n",
"logging.info('Initializing {} user authenticator'.format(VAR_13.auth_type))\n",
"if VAR_14 == 'google':\n",
"return VAR_13(*VAR_0, **kwargs)\n",
"VAR_13 = CLASS_2\n",
"if VAR_14 == 'saml':\n",
"VAR_13 = CLASS_3\n",
"if VAR_14 == 'null':\n",
"VAR_13 = CLASS_1\n"
] | [
"def init_user_auth_class(*args, **kwargs):...\n",
"if not app.config['USE_AUTH']:\n",
"module = NullUserAuthenticator\n",
"module_name = app.config['USER_AUTH_MODULE'].lower()\n",
"logging.info('Initializing {} user authenticator'.format(module.auth_type))\n",
"if module_name == 'google':\n",
"return module(*args, **kwargs)\n",
"module = GoogleOauthAuthenticator\n",
"if module_name == 'saml':\n",
"module = SamlAuthenticator\n",
"if module_name == 'null':\n",
"module = NullUserAuthenticator\n"
] | [
0,
0,
0,
0,
5,
0,
5,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"from osv import osv\n",
"from tools.translate import _\n",
"VAR_0 = 'pos.close.statement'\n",
"VAR_1 = 'Close Statements'\n",
"def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n",
"VAR_7 = []\n",
"VAR_8 = self.pool.get('ir.model.data')\n",
"VAR_9 = self.pool.get('account.bank.statement')\n",
"VAR_10 = self.pool.get('account.journal')\n",
"VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n",
"VAR_11 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_11)))\n",
"VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"for journal in VAR_10.browse(VAR_2, VAR_3, VAR_12):\n",
"VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n",
"VAR_13 = self.pool.get('ir.model.data')\n",
"if not VAR_4:\n",
"VAR_14 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n",
"VAR_7.append(VAR_4[0])\n",
"VAR_15 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if VAR_14:\n",
"VAR_9.button_confirm_cash(VAR_2, VAR_3, VAR_4, VAR_5)\n",
"VAR_14 = VAR_13.browse(VAR_2, VAR_3, VAR_14, VAR_5=context).res_id\n",
"if VAR_15:\n",
"VAR_15 = VAR_13.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(VAR_7) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(VAR_14, 'tree'), (\n VAR_15, 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
"from osv import osv\n",
"from tools.translate import _\n",
"_name = 'pos.close.statement'\n",
"_description = 'Close Statements'\n",
"def close_statement(self, cr, uid, ids, context):...\n",
"\"\"\"docstring\"\"\"\n",
"company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n",
"list_statement = []\n",
"mod_obj = self.pool.get('ir.model.data')\n",
"statement_obj = self.pool.get('account.bank.statement')\n",
"journal_obj = self.pool.get('account.journal')\n",
"cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n",
"j_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n",
"journal_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"for journal in journal_obj.browse(cr, uid, journal_ids):\n",
"ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n",
"data_obj = self.pool.get('ir.model.data')\n",
"if not ids:\n",
"id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n",
"list_statement.append(ids[0])\n",
"id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if id2:\n",
"statement_obj.button_confirm_cash(cr, uid, ids, context)\n",
"id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n",
"if id3:\n",
"id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(list_statement) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(id2, 'tree'), (id3,\n 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |