lines: listlengths 1-444
raw_lines: listlengths 1-444
label: listlengths 1-444
type: listlengths 1-444
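Each record below is four parallel JSON arrays of equal length: lines holds the obfuscated statements, raw_lines the original statements, label a per-statement integer tag, and type a per-statement node kind (FunctionDef', Assign', and so on). A minimal sketch for re-aligning one record, assuming the four arrays arrive as JSON text in that order (zip_record is a hypothetical helper, not part of the dataset tooling):

import json

def zip_record(lines_json, raw_lines_json, label_json, type_json):
    # Parse the four parallel arrays of one record.
    fields = [json.loads(s) for s in
              (lines_json, raw_lines_json, label_json, type_json)]
    # Per the schema above, all four lists of a record share one length.
    if len({len(f) for f in fields}) != 1:
        raise ValueError('parallel arrays differ in length')
    # One tuple per statement: (obfuscated, original, label, node type).
    return list(zip(*fields))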
[ "def FUNC_3(self, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = VAR_5[:]\n", "for request in VAR_5:\n", "self.assertEqual(tuple, request.__class__)\n", "self.assertEqual([], self._requests)\n", "self.assertIn(len(request), (3, 4))\n", "self._requests = VAR_5\n" ]
[ "def expected_requests(self, requests):...\n", "\"\"\"docstring\"\"\"\n", "requests = requests[:]\n", "for request in requests:\n", "self.assertEqual(tuple, request.__class__)\n", "self.assertEqual([], self._requests)\n", "self.assertIn(len(request), (3, 4))\n", "self._requests = requests\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_19(VAR_8, **VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "return VAR_8.format(**{wildcard: '{{{},{}}}'.format(wildcard, '|'.join(\n values)) for wildcard, values in wildcards.items()})\n" ]
[ "def limit(pattern, **wildcards):...\n", "\"\"\"docstring\"\"\"\n", "return pattern.format(**{wildcard: '{{{},{}}}'.format(wildcard, '|'.join(\n values)) for wildcard, values in wildcards.items()})\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_21(VAR_12, VAR_14, VAR_15):...\n", "VAR_31 = getattr(VAR_1, VAR_15, None)\n", "if VAR_31 is not None:\n", "VAR_12 += \"{}='{}' \".format(VAR_14, VAR_31)\n", "return VAR_12\n" ]
[ "def _addCredential(credentials, env_key, data_key):...\n", "value = getattr(data, data_key, None)\n", "if value is not None:\n", "credentials += \"{}='{}' \".format(env_key, value)\n", "return credentials\n" ]
[ 0, 0, 0, 2, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "AugAssign'", "Return'" ]
[ "def FUNC_0(VAR_3, VAR_4=False):...\n", "VAR_5 = sqlite3.connect(VAR_0)\n", "VAR_6 = VAR_5.cursor()\n", "VAR_6.execute(VAR_3)\n", "if VAR_4:\n", "VAR_17 = VAR_6.fetchall()\n", "VAR_17 = None\n", "VAR_5.close()\n", "VAR_5.commit()\n", "return VAR_17\n" ]
[ "def db_execute(sql, expect_return_values=False):...\n", "db_connection = sqlite3.connect(DB_FILE)\n", "db_cursor = db_connection.cursor()\n", "db_cursor.execute(sql)\n", "if expect_return_values:\n", "return_values = db_cursor.fetchall()\n", "return_values = None\n", "db_connection.close()\n", "db_connection.commit()\n", "return return_values\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_6(VAR_12, VAR_2, VAR_5):...\n", "VAR_26 = VAR_12.request.body\n", "VAR_2 = FUNC_1(VAR_2)\n", "def FUNC_8(VAR_4):...\n", "if not os.path.exists('D:' + VAR_4):\n", "return VAR_4\n", "VAR_28 = 1\n", "while VAR_28 < 101:\n", "VAR_32 = re.sub('\\\\.(.*?)$', ' (%d).\\\\1' % VAR_28, VAR_4)\n", "return ''\n", "if not os.path.exists('D:' + VAR_32):\n", "return VAR_32\n", "VAR_28 = VAR_28 + 1\n" ]
[ "def save_file_async(alter_ego, target_path, file_name):...\n", "upload_data = alter_ego.request.body\n", "target_path = decode_hexed_b64_to_str(target_path)\n", "def get_non_duplicate_path(file_path):...\n", "if not os.path.exists('D:' + file_path):\n", "return file_path\n", "duplicate = 1\n", "while duplicate < 101:\n", "new_path = re.sub('\\\\.(.*?)$', ' (%d).\\\\1' % duplicate, file_path)\n", "return ''\n", "if not os.path.exists('D:' + new_path):\n", "return new_path\n", "duplicate = duplicate + 1\n" ]
[ 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4 ]
[ "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_9(VAR_13, VAR_14, VAR_15):...\n", "\"\"\"docstring\"\"\"\n", "VAR_25 = 'string' % VAR_15\n", "VAR_14.execute(VAR_25)\n", "for record in VAR_14:\n", "VAR_39 = record[0]\n", "return VAR_39\n" ]
[ "def get_old_sourcebyinstitution_number(conn, sqlite, sourcebyinstitution):...\n", "\"\"\"docstring\"\"\"\n", "query = (\n \"\"\"\n SELECT\n titles\n FROM\n history\n WHERE\n sourcebyinstitution = \"%s\"\n ORDER BY\n titles DESC\n LIMIT 1\n \"\"\"\n % sourcebyinstitution)\n", "sqlite.execute(query)\n", "for record in sqlite:\n", "old_sourcebyinstitution_number = record[0]\n", "return old_sourcebyinstitution_number\n" ]
[ 0, 0, 4, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "For", "Assign'", "Return'" ]
[ "def __init__(self, VAR_3, VAR_4):...\n", "self.email = VAR_3\n", "self.password = VAR_4\n" ]
[ "def __init__(self, email, password):...\n", "self.email = email\n", "self.password = password\n" ]
[ 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def __init__(self, *VAR_19, **VAR_46):...\n", "super(CLASS_1, self).__init__(*VAR_19, **kwargs)\n", "self._tags = VAR_46.get('tags', None)\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "super(MetalTemplate, self).__init__(*args, **kwargs)\n", "self._tags = kwargs.get('tags', None)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_7(self, VAR_6, VAR_7=None, VAR_8='icon', VAR_9=None):...\n", "if VAR_7 is None:\n", "return self.icon\n", "return self.aq_parent.display_icon(VAR_6, VAR_7, VAR_8, VAR_9)\n" ]
[ "def display_icon(self, REQUEST, meta_type=None, key='icon', zpt=None):...\n", "if meta_type is None:\n", "return self.icon\n", "return self.aq_parent.display_icon(REQUEST, meta_type, key, zpt)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_2(VAR_4):...\n", "VAR_5 = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "VAR_6 = re.findall(VAR_5, VAR_2.written[0])\n", "self.assertEquals(len(VAR_6), 1)\n" ]
[ "def assert_response(_):...\n", "expected = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "matches = re.findall(expected, request.written[0])\n", "self.assertEquals(len(matches), 1)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def __init__(self):...\n", "" ]
[ "def __init__(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self):...\n", "VAR_3 = XsrfTool()\n", "VAR_4 = users.get_current_user()\n", "self.render('admin_delete_record.html', id=self.env.domain + '/person.',\n xsrf_token=xsrf_tool.generate_token(user.user_id(), 'admin_delete_record'))\n" ]
[ "def get(self):...\n", "xsrf_tool = XsrfTool()\n", "user = users.get_current_user()\n", "self.render('admin_delete_record.html', id=self.env.domain + '/person.',\n xsrf_token=xsrf_tool.generate_token(user.user_id(), 'admin_delete_record'))\n" ]
[ 0, 5, 5, 5 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_8(VAR_25, VAR_20, VAR_21, VAR_5=False, VAR_22=None, VAR_23=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_52 = [\n \"\"\"<collection><record>\n<controlfield tag=\"001\">%s</controlfield>\"\"\" %\n VAR_25]\n", "VAR_20 = VAR_20.items()\n", "VAR_21 = VAR_21.items()\n", "VAR_52.append(FUNC_9(VAR_20, VAR_21, VAR_22, VAR_23))\n", "VAR_52.append('</record></collection>')\n", "return '\\n'.join(VAR_52)\n" ]
[ "def build_marc(recid, single_keywords, composite_keywords, spires=False,...\n", "\"\"\"docstring\"\"\"\n", "output = [\n \"\"\"<collection><record>\n<controlfield tag=\"001\">%s</controlfield>\"\"\" %\n recid]\n", "single_keywords = single_keywords.items()\n", "composite_keywords = composite_keywords.items()\n", "output.append(_output_marc(single_keywords, composite_keywords,\n author_keywords, acronyms))\n", "output.append('</record></collection>')\n", "return '\\n'.join(output)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_23(self):...\n", "VAR_22 = model.Identity.from_bytes('user:peer@a.com')\n", "@classmethod...\n", "return [lambda _request: VAR_22]\n" ]
[ "def test_delegation_token(self):...\n", "peer_ident = model.Identity.from_bytes('user:peer@a.com')\n", "@classmethod...\n", "return [lambda _request: peer_ident]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_32(VAR_6):...\n", "VAR_22 = VAR_6.find_all('a')\n", "for link in VAR_22:\n", "return True\n", "if '.onion' in link.attrs['href']:\n", "return False\n" ]
[ "def validate_onion_address_not_in_href(page):...\n", "links_on_landing_page = page.find_all('a')\n", "for link in links_on_landing_page:\n", "return True\n", "if '.onion' in link.attrs['href']:\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Return'", "Condition", "Return'" ]
[ "def __init__(self, VAR_65, VAR_7, *VAR_15, **VAR_16):...\n", "self.cache_prefix = VAR_65\n", "CLASS_0.__init__(self, VAR_7, *VAR_15, **kw)\n" ]
[ "def __init__(self, cache_prefix, param, *a, **kw):...\n", "self.cache_prefix = cache_prefix\n", "Validator.__init__(self, param, *a, **kw)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "@staticmethod...\n", "VAR_18 = 0\n", "if VAR_13:\n", "VAR_23 = (\"(jdk_entries.title LIKE '%\" + VAR_13 + \"%' OR \" +\n \"jdk_entries.body LIKE '%\" + VAR_13 + \"%') \")\n", "VAR_23 = ''\n", "VAR_18 += 1\n", "if VAR_14:\n", "VAR_24 = \"jdk_entries.date_last_modified >= '\" + VAR_14 + \"' \"\n", "VAR_24 = ''\n", "VAR_18 += 1\n", "if VAR_15:\n", "VAR_25 = \"jdk_entries.date_last_modified <= '\" + VAR_15 + \"' \"\n", "VAR_25 = ''\n", "VAR_18 += 1\n", "VAR_3 = ('SELECT jdk_entries.id, jdk_entries.title ' + 'FROM jdk_entries ' +\n 'WHERE ' + VAR_23 + ('AND ' if VAR_23 and (VAR_14 or VAR_15) else '') +\n VAR_24 + ('AND ' if VAR_14 and VAR_15 else '') + VAR_25 + 'LIMIT 30;')\n", "return FUNC_0(VAR_3, True)\n" ]
[ "@staticmethod...\n", "total_parameters = 0\n", "if keyword:\n", "keyword_string = (\"(jdk_entries.title LIKE '%\" + keyword + \"%' OR \" +\n \"jdk_entries.body LIKE '%\" + keyword + \"%') \")\n", "keyword_string = ''\n", "total_parameters += 1\n", "if from_date:\n", "from_date_string = \"jdk_entries.date_last_modified >= '\" + from_date + \"' \"\n", "from_date_string = ''\n", "total_parameters += 1\n", "if to_date:\n", "to_date_string = \"jdk_entries.date_last_modified <= '\" + to_date + \"' \"\n", "to_date_string = ''\n", "total_parameters += 1\n", "sql = ('SELECT jdk_entries.id, jdk_entries.title ' + 'FROM jdk_entries ' +\n 'WHERE ' + keyword_string + ('AND ' if keyword_string and (from_date or\n to_date) else '') + from_date_string + ('AND ' if from_date and to_date\n else '') + to_date_string + 'LIMIT 30;')\n", "return db_execute(sql, True)\n" ]
[ 0, 0, 0, 4, 0, 0, 0, 4, 0, 0, 0, 4, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Assign'", "AugAssign'", "Condition", "Assign'", "Assign'", "AugAssign'", "Condition", "Assign'", "Assign'", "AugAssign'", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_15):...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = iter(VAR_15)\n", "VAR_30 = next(VAR_29)\n", "for x in VAR_29:\n", "yield VAR_30, False\n", "yield VAR_30, True\n", "VAR_30 = x\n" ]
[ "def laggy_iter(self, iterable):...\n", "\"\"\"docstring\"\"\"\n", "it = iter(iterable)\n", "prev = next(it)\n", "for x in it:\n", "yield prev, False\n", "yield prev, True\n", "prev = x\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_2(VAR_7, VAR_8=None, VAR_9=None, VAR_10=None):...\n", "if VAR_8 is None:\n", "return reverse(VAR_7, VAR_9=args)\n", "VAR_42 = VAR_8._meta.app_label\n", "VAR_43 = '.'.join([VAR_42, 'urls'])\n", "VAR_44 = reverse(VAR_7, VAR_43, kwargs=kw)\n", "return ''.join(['/', VAR_42, VAR_44])\n" ]
[ "def get_url(urlname, group=None, args=None, kw=None):...\n", "if group is None:\n", "return reverse(urlname, args=args)\n", "app = group._meta.app_label\n", "urlconf = '.'.join([app, 'urls'])\n", "url = reverse(urlname, urlconf, kwargs=kw)\n", "return ''.join(['/', app, url])\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_18(self, VAR_38):...\n", "\"\"\"docstring\"\"\"\n", "return self._parse_conditions(self.AND_CONDITION, VAR_38)\n" ]
[ "def _parse_and(self, data):...\n", "\"\"\"docstring\"\"\"\n", "return self._parse_conditions(self.AND_CONDITION, data)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_1(self):...\n", "from engineauth.strategies.google import GoogleStrategy\n", "VAR_1 = VAR_0._load_strategy('google')\n", "self.assertEqual(VAR_1, GoogleStrategy)\n", "self.assertRaises(Exception, VAR_0._load_strategy, 'enron')\n", "from engineauth.strategies.appengine_openid import AppEngineOpenIDStrategy\n", "VAR_1 = VAR_0._load_strategy('appengine_openid')\n", "self.assertEqual(VAR_1, AppEngineOpenIDStrategy)\n" ]
[ "def test_load_strategy(self):...\n", "from engineauth.strategies.google import GoogleStrategy\n", "strategy_class = app._load_strategy('google')\n", "self.assertEqual(strategy_class, GoogleStrategy)\n", "self.assertRaises(Exception, app._load_strategy, 'enron')\n", "from engineauth.strategies.appengine_openid import AppEngineOpenIDStrategy\n", "strategy_class = app._load_strategy('appengine_openid')\n", "self.assertEqual(strategy_class, AppEngineOpenIDStrategy)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "ImportFrom'", "Assign'", "Expr'" ]
[ "VAR_0 = 'string'\n", "import sys, os, re\n", "import unittest\n", "from decimal import Decimal\n", "from datetime import datetime, date, time\n", "from os.path import join, getsize, dirname, abspath\n", "from testutils import *\n", "VAR_1 = '0123456789-abcdefghijklmnopqrstuvwxyz-'\n", "def FUNC_0(VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_2 <= VAR_23(VAR_1):\n", "return VAR_1[:VAR_2]\n", "VAR_3 = (VAR_2 + VAR_23(VAR_1) - 1) // VAR_23(VAR_1)\n", "VAR_4 = VAR_1 * VAR_3\n", "return VAR_4[:VAR_2]\n" ]
[ "usage = \"\"\"usage: %prog [options] connection_string\n\nUnit tests for SQLite using the ODBC driver from http://www.ch-werner.de/sqliteodbc\n\nTo use, pass a connection string as the parameter. The tests will create and\ndrop tables t1 and t2 as necessary. On Windows, use the 32-bit driver with\n32-bit Python and the 64-bit driver with 64-bit Python (regardless of your\noperating system bitness).\n\nThese run using the version from the 'build' directory, not the version\ninstalled into the Python directories. You must run python setup.py build\nbefore running the tests.\n\nYou can also put the connection string into a setup.cfg file in the root of the project\n(the same one setup.py would use) like so:\n\n [sqlitetests]\n connection-string=Driver=SQLite3 ODBC Driver;Database=sqlite.db\n\"\"\"\n", "import sys, os, re\n", "import unittest\n", "from decimal import Decimal\n", "from datetime import datetime, date, time\n", "from os.path import join, getsize, dirname, abspath\n", "from testutils import *\n", "_TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-'\n", "def _generate_test_string(length):...\n", "\"\"\"docstring\"\"\"\n", "if length <= len(_TESTSTR):\n", "return _TESTSTR[:length]\n", "c = (length + len(_TESTSTR) - 1) // len(_TESTSTR)\n", "v = _TESTSTR * c\n", "return v[:length]\n" ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Assign'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_12(self):...\n", "VAR_3 = self.get_group()\n", "return VAR_3.id if VAR_3 else 0\n" ]
[ "def get_group_id(self):...\n", "group = self.get_group()\n", "return group.id if group else 0\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "@api_view(['GET'])...\n", "return FUNC_7(VAR_0, 'regional_team')\n" ]
[ "@api_view(['GET'])...\n", "return _measure_by_org(request, 'regional_team')\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_17(self):...\n", "VAR_26 = []\n", "self.mock(bot_main, 'post_error_task', lambda *VAR_24: VAR_26.append(VAR_24))\n", "def FUNC_24(VAR_25, VAR_23, *VAR_24):...\n", "if VAR_23 == 'on_after_task':\n", "VAR_50, VAR_51, VAR_5, VAR_52 = VAR_24\n", "self.mock(bot_main, 'call_hook', FUNC_24)\n", "self.assertEqual(False, VAR_50)\n", "VAR_22 = self._mock_popen(VAR_1=1)\n", "self.assertEqual(True, VAR_51)\n", "VAR_19 = {'command': ['echo', 'hi'], 'dimensions': {'pool': 'default'},\n 'grace_period': 30, 'hard_timeout': 60, 'io_timeout': 60, 'task_id': '24'}\n", "self.assertEqual({'pool': 'default'}, VAR_5)\n", "bot_main.run_manifest(self.bot, VAR_19, time.time())\n", "self.assertEqual(VAR_22, VAR_52)\n", "VAR_6 = [(self.bot, 'Execution failed: internal error (1).', '24')]\n", "self.assertEqual(VAR_6, VAR_26)\n" ]
[ "def test_run_manifest_internal_failure(self):...\n", "posted = []\n", "self.mock(bot_main, 'post_error_task', lambda *args: posted.append(args))\n", "def call_hook(_botobj, name, *args):...\n", "if name == 'on_after_task':\n", "failure, internal_failure, dimensions, summary = args\n", "self.mock(bot_main, 'call_hook', call_hook)\n", "self.assertEqual(False, failure)\n", "result = self._mock_popen(returncode=1)\n", "self.assertEqual(True, internal_failure)\n", "manifest = {'command': ['echo', 'hi'], 'dimensions': {'pool': 'default'},\n 'grace_period': 30, 'hard_timeout': 60, 'io_timeout': 60, 'task_id': '24'}\n", "self.assertEqual({'pool': 'default'}, dimensions)\n", "bot_main.run_manifest(self.bot, manifest, time.time())\n", "self.assertEqual(result, summary)\n", "expected = [(self.bot, 'Execution failed: internal error (1).', '24')]\n", "self.assertEqual(expected, posted)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_19(self):...\n", "" ]
[ "def test_x_terminate_instances(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._node_ip_address\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._node_ip_address\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_4(self, VAR_3, **VAR_6):...\n", "logging.warn('url_open(%s, %s)', VAR_3[:500], str(VAR_6)[:500])\n", "if not self._requests:\n", "return None\n", "VAR_6.pop('stream', None)\n", "for i, n in enumerate(self._requests):\n", "if n[0] == VAR_3:\n", "self.fail('Unknown request %s' % VAR_3)\n", "VAR_9 = self._requests.pop(i)\n", "if len(VAR_9) != 4:\n", "self.fail('Expected normal request, got json data; %s' % VAR_3)\n", "VAR_10, VAR_11, VAR_12, VAR_4 = VAR_9\n", "if callable(VAR_11):\n", "VAR_11(VAR_6)\n", "self.assertEqual(VAR_11, VAR_6)\n", "if VAR_12 is not None:\n", "return FUNC_0(VAR_12, VAR_3, VAR_4)\n", "return None\n" ]
[ "def _url_open(self, url, **kwargs):...\n", "logging.warn('url_open(%s, %s)', url[:500], str(kwargs)[:500])\n", "if not self._requests:\n", "return None\n", "kwargs.pop('stream', None)\n", "for i, n in enumerate(self._requests):\n", "if n[0] == url:\n", "self.fail('Unknown request %s' % url)\n", "data = self._requests.pop(i)\n", "if len(data) != 4:\n", "self.fail('Expected normal request, got json data; %s' % url)\n", "_, expected_kwargs, result, headers = data\n", "if callable(expected_kwargs):\n", "expected_kwargs(kwargs)\n", "self.assertEqual(expected_kwargs, kwargs)\n", "if result is not None:\n", "return make_fake_response(result, url, headers)\n", "return None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Condition", "Return'", "Expr'", "For", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Condition", "Return'", "Return'" ]
[ "def __init__(self, *VAR_0, VAR_1=None, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(VAR_0) == 2:\n", "VAR_10, VAR_41 = VAR_0\n", "if len(VAR_0) == 1:\n", "self.name = VAR_10\n", "VAR_17 = VAR_0[0]\n", "self.workflow = VAR_41\n", "self.name = VAR_17.name\n", "self.docstring = None\n", "self.workflow = VAR_17.workflow\n", "self.message = None\n", "self.docstring = VAR_17.docstring\n", "self._input = InputFiles()\n", "self.message = VAR_17.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(VAR_17._input)\n", "self._params = Params()\n", "self._output = OutputFiles(VAR_17._output)\n", "self.dependencies = dict()\n", "self._params = Params(VAR_17._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(VAR_17.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(VAR_17.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(VAR_17.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(VAR_17.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(VAR_17.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(VAR_17.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(VAR_17.subworkflow_input)\n", "self.priority = 0\n", "self.resources = VAR_17.resources\n", "self.version = None\n", "self.priority = VAR_17.priority\n", "self._log = Log()\n", "self.version = VAR_17.version\n", "self._benchmark = None\n", "self._log = VAR_17._log\n", "self.wildcard_names = set()\n", "self._benchmark = VAR_17._benchmark\n", "self.lineno = VAR_1\n", "self.wildcard_names = set(VAR_17.wildcard_names)\n", "self.snakefile = VAR_2\n", "self.lineno = VAR_17.lineno\n", "self.run_func = None\n", "self.snakefile = VAR_17.snakefile\n", "self.shellcmd = None\n", "self.run_func = VAR_17.run_func\n", "self.norun = False\n", "self.shellcmd = VAR_17.shellcmd\n", "self.norun = VAR_17.norun\n" ]
[ "def __init__(self, *args, lineno=None, snakefile=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(args) == 2:\n", "name, workflow = args\n", "if len(args) == 1:\n", "self.name = name\n", "other = args[0]\n", "self.workflow = workflow\n", "self.name = other.name\n", "self.docstring = None\n", "self.workflow = other.workflow\n", "self.message = None\n", "self.docstring = other.docstring\n", "self._input = InputFiles()\n", "self.message = other.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(other._input)\n", "self._params = Params()\n", "self._output = OutputFiles(other._output)\n", "self.dependencies = dict()\n", "self._params = Params(other._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(other.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(other.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(other.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(other.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(other.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(other.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(other.subworkflow_input)\n", "self.priority = 0\n", "self.resources = other.resources\n", "self.version = None\n", "self.priority = other.priority\n", "self._log = Log()\n", "self.version = other.version\n", "self._benchmark = None\n", "self._log = other._log\n", "self.wildcard_names = set()\n", "self._benchmark = other._benchmark\n", "self.lineno = lineno\n", "self.wildcard_names = set(other.wildcard_names)\n", "self.snakefile = snakefile\n", "self.lineno = other.lineno\n", "self.run_func = None\n", "self.snakefile = other.snakefile\n", "self.shellcmd = None\n", "self.run_func = other.run_func\n", "self.norun = False\n", "self.shellcmd = other.shellcmd\n", "self.norun = other.norun\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Docstring", "Assign'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = self.extract_boolean(VAR_1, 'get_extras')\n", "VAR_32 = self.extract_boolean(VAR_1, 'get_access_list')\n", "VAR_31 = api.nova.flavor_get(VAR_1, VAR_5, VAR_28=get_extras)\n", "VAR_10 = VAR_31.to_dict()\n", "if 'swap' in VAR_10 and VAR_10['swap'] == '':\n", "VAR_10['swap'] = 0\n", "if VAR_28:\n", "VAR_10['extras'] = VAR_31.extras\n", "if VAR_32 and not VAR_31.is_public:\n", "VAR_44 = [item.tenant_id for item in api.nova.flavor_access_list(VAR_1, VAR_5)]\n", "return VAR_10\n", "VAR_10['access-list'] = VAR_44\n" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "get_extras = self.extract_boolean(request, 'get_extras')\n", "get_access_list = self.extract_boolean(request, 'get_access_list')\n", "flavor = api.nova.flavor_get(request, flavor_id, get_extras=get_extras)\n", "result = flavor.to_dict()\n", "if 'swap' in result and result['swap'] == '':\n", "result['swap'] = 0\n", "if get_extras:\n", "result['extras'] = flavor.extras\n", "if get_access_list and not flavor.is_public:\n", "access_list = [item.tenant_id for item in api.nova.flavor_access_list(\n request, flavor_id)]\n", "return result\n", "result['access-list'] = access_list\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_8, VAR_9 = 'eviluser', 'password'\n", "VAR_10, VAR_11 = 'Evil', 'User'\n", "VAR_12 = 'evil@deis.io'\n", "VAR_13 = {'username': VAR_8, 'password': VAR_9, 'first_name': VAR_10,\n 'last_name': VAR_11, 'email': VAR_12}\n", "VAR_4 = '/api/auth/register'\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_13), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_7 = {'sha': 'df1e628f2244b73f9cdf944f880a2b3470a122f4', 'fingerprint':\n '88:25:ed:67:56:91:3d:c6:1b:7f:42:c6:9b:41:24:99', 'receive_user':\n 'eviluser', 'receive_repo': '{app_id}'.format(**locals()),\n 'ssh_connection': '10.0.1.10 50337 172.17.0.143 22',\n 'ssh_original_command': \"git-receive-pack '{app_id}.git'\".format(**\n locals())}\n", "VAR_4 = '/api/hooks/push'.format(**locals())\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json', HTTP_X_DEIS_BUILDER_AUTH=settings.BUILDER_KEY)\n", "self.assertEqual(VAR_5.status_code, 403)\n" ]
[ "def test_push_abuse(self):...\n", "\"\"\"docstring\"\"\"\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "username, password = 'eviluser', 'password'\n", "first_name, last_name = 'Evil', 'User'\n", "email = 'evil@deis.io'\n", "submit = {'username': username, 'password': password, 'first_name':\n first_name, 'last_name': last_name, 'email': email}\n", "url = '/api/auth/register'\n", "response = self.client.post(url, json.dumps(submit), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "body = {'sha': 'df1e628f2244b73f9cdf944f880a2b3470a122f4', 'fingerprint':\n '88:25:ed:67:56:91:3d:c6:1b:7f:42:c6:9b:41:24:99', 'receive_user':\n 'eviluser', 'receive_repo': '{app_id}'.format(**locals()),\n 'ssh_connection': '10.0.1.10 50337 172.17.0.143 22',\n 'ssh_original_command': \"git-receive-pack '{app_id}.git'\".format(**\n locals())}\n", "url = '/api/hooks/push'.format(**locals())\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json', HTTP_X_DEIS_BUILDER_AUTH=settings.BUILDER_KEY)\n", "self.assertEqual(response.status_code, 403)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "@staticmethod...\n", "return FUNC_1(CLASS_0._TalkToHandlerAsync('', VAR_7, 'GET', VAR_10))\n" ]
[ "@staticmethod...\n", "return JsonFromFuture(BaseRequest._TalkToHandlerAsync('', handler, 'GET',\n timeout))\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def __new__(VAR_7, VAR_8, VAR_10, VAR_11):...\n", "VAR_25 = super(CLASS_1, VAR_7).__new__\n", "if VAR_8 == 'NewBase' and VAR_11 == {}:\n", "return VAR_25(VAR_7, VAR_8, VAR_10, VAR_11)\n", "VAR_26 = [b for b in VAR_10 if isinstance(b, CLASS_1) and not (b.__name__ ==\n 'NewBase' and b.__mro__ == (b, object))]\n", "if not VAR_26:\n", "return VAR_25(VAR_7, VAR_8, VAR_10, VAR_11)\n", "VAR_27 = VAR_11.pop('__module__')\n", "VAR_28 = VAR_25(VAR_7, VAR_8, VAR_10, {'__module__': VAR_27})\n", "VAR_29 = VAR_11.pop('Meta', None)\n", "VAR_30 = getattr(VAR_29, 'abstract', False)\n", "if not VAR_29:\n", "VAR_6 = getattr(VAR_28, 'Meta', None)\n", "VAR_6 = VAR_29\n", "VAR_31 = getattr(VAR_28, '_meta', None)\n", "VAR_28.add_to_class('_meta', CLASS_0(VAR_6))\n", "if VAR_28._meta.name_space is NOT_PROVIDED and VAR_31:\n", "if not VAR_28._meta.name_space or VAR_28._meta.name_space is NOT_PROVIDED:\n", "if VAR_28._meta.name_space is NOT_PROVIDED:\n", "VAR_28._meta.name_space = VAR_31.name_space\n", "VAR_28._meta.name_space = VAR_27\n", "VAR_32 = registration.get_resource(VAR_28._meta.resource_name)\n", "if VAR_32 is not None:\n", "return VAR_32\n", "for obj_name, obj in VAR_11.items():\n", "VAR_28.add_to_class(obj_name, obj)\n", "VAR_28._meta.fields = sorted(VAR_28._meta.fields, key=hash)\n", "VAR_33 = set([VAR_46.attname for VAR_46 in VAR_28._meta.fields])\n", "VAR_34 = set(VAR_33)\n", "for base in VAR_26:\n", "if not hasattr(base, '_meta'):\n", "if VAR_28._meta.key_field is not None:\n", "for VAR_9 in base._meta.all_fields:\n", "if VAR_28._meta.key_field not in VAR_34:\n", "if VAR_30:\n", "if VAR_9.attname in VAR_33:\n", "for VAR_9 in base._meta.fields:\n", "return VAR_28\n", "registration.register_resources(VAR_28)\n", "if VAR_9.attname not in VAR_34:\n", "for VAR_9 in base._meta.virtual_fields:\n", "return registration.get_resource(VAR_28._meta.resource_name)\n", "VAR_34.add(VAR_9.attname)\n", "VAR_28.add_to_class(VAR_9.attname, copy.deepcopy(VAR_9))\n", "VAR_28._meta.parents += base._meta.parents\n", "VAR_28.add_to_class(VAR_9.attname, copy.deepcopy(VAR_9))\n", "VAR_28._meta.parents.append(base)\n" ]
[ "def __new__(cls, name, bases, attrs):...\n", "super_new = super(ResourceBase, cls).__new__\n", "if name == 'NewBase' and attrs == {}:\n", "return super_new(cls, name, bases, attrs)\n", "parents = [b for b in bases if isinstance(b, ResourceBase) and not (b.\n __name__ == 'NewBase' and b.__mro__ == (b, object))]\n", "if not parents:\n", "return super_new(cls, name, bases, attrs)\n", "module = attrs.pop('__module__')\n", "new_class = super_new(cls, name, bases, {'__module__': module})\n", "attr_meta = attrs.pop('Meta', None)\n", "abstract = getattr(attr_meta, 'abstract', False)\n", "if not attr_meta:\n", "meta = getattr(new_class, 'Meta', None)\n", "meta = attr_meta\n", "base_meta = getattr(new_class, '_meta', None)\n", "new_class.add_to_class('_meta', ResourceOptions(meta))\n", "if new_class._meta.name_space is NOT_PROVIDED and base_meta:\n", "if not new_class._meta.name_space or new_class._meta.name_space is NOT_PROVIDED:\n", "if new_class._meta.name_space is NOT_PROVIDED:\n", "new_class._meta.name_space = base_meta.name_space\n", "new_class._meta.name_space = module\n", "r = registration.get_resource(new_class._meta.resource_name)\n", "if r is not None:\n", "return r\n", "for obj_name, obj in attrs.items():\n", "new_class.add_to_class(obj_name, obj)\n", "new_class._meta.fields = sorted(new_class._meta.fields, key=hash)\n", "local_field_attnames = set([f.attname for f in new_class._meta.fields])\n", "field_attnames = set(local_field_attnames)\n", "for base in parents:\n", "if not hasattr(base, '_meta'):\n", "if new_class._meta.key_field is not None:\n", "for field in base._meta.all_fields:\n", "if new_class._meta.key_field not in field_attnames:\n", "if abstract:\n", "if field.attname in local_field_attnames:\n", "for field in base._meta.fields:\n", "return new_class\n", "registration.register_resources(new_class)\n", "if field.attname not in field_attnames:\n", "for field in base._meta.virtual_fields:\n", "return registration.get_resource(new_class._meta.resource_name)\n", "field_attnames.add(field.attname)\n", "new_class.add_to_class(field.attname, copy.deepcopy(field))\n", "new_class._meta.parents += base._meta.parents\n", "new_class.add_to_class(field.attname, copy.deepcopy(field))\n", "new_class._meta.parents.append(base)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "For", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Condition", "For", "Condition", "Condition", "Condition", "For", "Return'", "Expr'", "Condition", "For", "Return'", "Expr'", "Expr'", "AugAssign'", "Expr'", "Expr'" ]
[ "def FUNC_6(self):...\n", "if len(self.data) == 0:\n", "return None\n", "return self.data.pop(len(self.data) - 1)\n" ]
[ "def pop(self):...\n", "if len(self.data) == 0:\n", "return None\n", "return self.data.pop(len(self.data) - 1)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_0():...\n", "VAR_7.pack_forget()\n", "VAR_5.pack(expand=True, fill='both')\n", "VAR_12 = 'meal_planner.db'\n", "VAR_20 = conn.cursor()\n", "VAR_21 = VAR_20.execute('SELECT * FROM recipe')\n", "for result in [VAR_21]:\n", "for row in result.fetchall():\n", "conn.close()\n", "VAR_24 = row[0]\n", "for i in range(len(VAR_2)):\n", "VAR_2.append(VAR_24)\n", "VAR_13 = Label(VAR_5, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000', text=\n recipeNames[i])\n", "VAR_13.pack()\n", "VAR_13.bind('<Button-1>', lambda event, x=VAR_2[i]: [FUNC_1(x), VAR_5.\n pack_forget()])\n" ]
[ "def view_recipes():...\n", "frame.pack_forget()\n", "viewRecipeFrame.pack(expand=True, fill='both')\n", "database_file = 'meal_planner.db'\n", "cursor = conn.cursor()\n", "selection = cursor.execute('SELECT * FROM recipe')\n", "for result in [selection]:\n", "for row in result.fetchall():\n", "conn.close()\n", "name = row[0]\n", "for i in range(len(recipeNames)):\n", "recipeNames.append(name)\n", "label = Label(viewRecipeFrame, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000',\n text=recipeNames[i])\n", "label.pack()\n", "label.bind('<Button-1>', lambda event, x=recipeNames[i]: [callback(x),\n viewRecipeFrame.pack_forget()])\n" ]
[ 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "For", "Expr'", "Assign'", "For", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_8(self, VAR_11):...\n", "VAR_60 = SRPSession(VAR_11.username, VAR_1.uuid4(), VAR_1.uuid4(), VAR_1.\n uuid4())\n", "return defer.succeed(LeapSession(self._leap_provider, VAR_60, None, None,\n None, None))\n" ]
[ "def requestAvatarId(self, credentials):...\n", "leap_auth = SRPSession(credentials.username, uuid.uuid4(), uuid.uuid4(),\n uuid.uuid4())\n", "return defer.succeed(LeapSession(self._leap_provider, leap_auth, None, None,\n None, None))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_5 = [{'targetSize': 2, 'capacity': {'min': 2, 'max': 2, 'desired': 2},\n 'replicaPoolName': self.__server_group_name, 'numReplicas': 2, 'region':\n self.TEST_REGION, 'zone': self.TEST_ZONE, 'asgName': self.\n __server_group_name, 'type': 'resizeServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'cloudProvider': 'gce', 'user': 'integration-tests'}]\n", "VAR_6 = gcp.GceContractBuilder(self.gce_observer)\n", "VAR_6.new_clause_builder('Server Group Resized', retryable_for_secs=90\n ).inspect_resource('instance-groups', self.__server_group_name, [\n '--zone', self.TEST_ZONE]).contains_path_eq('size', 2)\n", "VAR_7 = self.agent.make_json_payload_from_kwargs(VAR_5=job, description=\n 'Server Group Test - resize to 2 instances', application=self.TEST_APP)\n", "return st.OperationContract(self.new_post_operation(title=\n 'resize_instances', data=payload, path=self.__path), contract=builder.\n build())\n" ]
[ "def resize_server_group(self):...\n", "job = [{'targetSize': 2, 'capacity': {'min': 2, 'max': 2, 'desired': 2},\n 'replicaPoolName': self.__server_group_name, 'numReplicas': 2, 'region':\n self.TEST_REGION, 'zone': self.TEST_ZONE, 'asgName': self.\n __server_group_name, 'type': 'resizeServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'cloudProvider': 'gce', 'user': 'integration-tests'}]\n", "builder = gcp.GceContractBuilder(self.gce_observer)\n", "builder.new_clause_builder('Server Group Resized', retryable_for_secs=90\n ).inspect_resource('instance-groups', self.__server_group_name, [\n '--zone', self.TEST_ZONE]).contains_path_eq('size', 2)\n", "payload = self.agent.make_json_payload_from_kwargs(job=job, description=\n 'Server Group Test - resize to 2 instances', application=self.TEST_APP)\n", "return st.OperationContract(self.new_post_operation(title=\n 'resize_instances', data=payload, path=self.__path), contract=builder.\n build())\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "VAR_27 = VAR_1.GET.get('is_public')\n", "VAR_27 = VAR_27 and VAR_27.lower() == 'true'\n", "VAR_28 = VAR_1.GET.get('get_extras')\n", "VAR_28 = bool(VAR_28 and VAR_28.lower() == 'true')\n", "VAR_29 = api.nova.flavor_list(VAR_1, VAR_27=is_public, VAR_28=get_extras)\n", "VAR_10 = {'items': []}\n", "for VAR_31 in VAR_29:\n", "VAR_43 = VAR_31.to_dict()\n", "return VAR_10\n", "if VAR_28:\n", "VAR_43['extras'] = VAR_31.extras\n", "VAR_10['items'].append(VAR_43)\n" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "is_public = request.GET.get('is_public')\n", "is_public = is_public and is_public.lower() == 'true'\n", "get_extras = request.GET.get('get_extras')\n", "get_extras = bool(get_extras and get_extras.lower() == 'true')\n", "flavors = api.nova.flavor_list(request, is_public=is_public, get_extras=\n get_extras)\n", "result = {'items': []}\n", "for flavor in flavors:\n", "d = flavor.to_dict()\n", "return result\n", "if get_extras:\n", "d['extras'] = flavor.extras\n", "result['items'].append(d)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_15(self):...\n", "if not FUNC_3(self):\n", "return None\n", "VAR_12.add(self)\n" ]
[ "def open(self):...\n", "if not login_get_current_user(self):\n", "return None\n", "runScriptWebSocketConnections.add(self)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_0(self):...\n", "VAR_0 = {'granularity': 86400}\n", "assert column_expr('tags[foo]', VAR_0.copy()\n ) == \"(tags.value[indexOf(tags.key, 'foo')] AS `tags[foo]`)\"\n", "assert column_expr('tags[server_name]', VAR_0.copy()\n ) == '(server_name AS `tags[server_name]`)'\n", "assert column_expr('tags[app.device]', VAR_0.copy()\n ) == '(app_device AS `tags[app.device]`)'\n", "assert column_expr('tags_key', VAR_0.copy()\n ) == '(arrayJoin(tags.key) AS tags_key)'\n", "VAR_1 = {'groupby': ['tags_key', 'tags_value']}\n", "assert column_expr('tags_key', VAR_1\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n", "assert column_expr('time', VAR_0.copy()) == '(toDate(timestamp) AS time)'\n", "assert column_expr('col', VAR_0.copy(), aggregate='sum') == '(sum(col) AS col)'\n", "assert column_expr(None, VAR_0.copy(), alias='sum', aggregate='sum') == 'sum'\n", "assert column_expr('col', VAR_0.copy(), alias='summation', aggregate='sum'\n ) == '(sum(col) AS summation)'\n", "assert column_expr('', VAR_0.copy(), alias='count', aggregate='count()'\n ) == '(count() AS count)'\n", "assert column_expr('', VAR_0.copy(), alias='aggregate', aggregate='count()'\n ) == '(count() AS aggregate)'\n", "assert column_expr('sentry:release', VAR_0.copy()) == '`sentry:release`'\n", "assert column_expr('-timestamp', VAR_0.copy()) == '-timestamp'\n", "assert column_expr('-sentry:release', VAR_0.copy()) == '-`sentry:release`'\n", "assert column_expr(\"'hello world'\", VAR_0.copy()) == \"'hello world'\"\n", "assert column_expr(tuplify(['concat', ['a', \"':'\", 'b']]), VAR_0.copy()\n ) == \"concat(a, ':', b)\"\n", "VAR_2 = VAR_0.copy()\n", "assert column_expr('issue', VAR_2) == '(group_id AS issue)'\n" ]
[ "def test_column_expr(self):...\n", "body = {'granularity': 86400}\n", "assert column_expr('tags[foo]', body.copy()\n ) == \"(tags.value[indexOf(tags.key, 'foo')] AS `tags[foo]`)\"\n", "assert column_expr('tags[server_name]', body.copy()\n ) == '(server_name AS `tags[server_name]`)'\n", "assert column_expr('tags[app.device]', body.copy()\n ) == '(app_device AS `tags[app.device]`)'\n", "assert column_expr('tags_key', body.copy()\n ) == '(arrayJoin(tags.key) AS tags_key)'\n", "tag_group_body = {'groupby': ['tags_key', 'tags_value']}\n", "assert column_expr('tags_key', tag_group_body\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n", "assert column_expr('time', body.copy()) == '(toDate(timestamp) AS time)'\n", "assert column_expr('col', body.copy(), aggregate='sum') == '(sum(col) AS col)'\n", "assert column_expr(None, body.copy(), alias='sum', aggregate='sum') == 'sum'\n", "assert column_expr('col', body.copy(), alias='summation', aggregate='sum'\n ) == '(sum(col) AS summation)'\n", "assert column_expr('', body.copy(), alias='count', aggregate='count()'\n ) == '(count() AS count)'\n", "assert column_expr('', body.copy(), alias='aggregate', aggregate='count()'\n ) == '(count() AS aggregate)'\n", "assert column_expr('sentry:release', body.copy()) == '`sentry:release`'\n", "assert column_expr('-timestamp', body.copy()) == '-timestamp'\n", "assert column_expr('-sentry:release', body.copy()) == '-`sentry:release`'\n", "assert column_expr(\"'hello world'\", body.copy()) == \"'hello world'\"\n", "assert column_expr(tuplify(['concat', ['a', \"':'\", 'b']]), body.copy()\n ) == \"concat(a, ':', b)\"\n", "group_id_body = body.copy()\n", "assert column_expr('issue', group_id_body) == '(group_id AS issue)'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assign'", "Assert'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return kato.new_agent(VAR_2)\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return kato.new_agent(bindings)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_0(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = []\n", "if CLASS_0.cve_cwe_map is not None:\n", "VAR_8 = [item[1] for item in CLASS_0.cve_cwe_map if self.get_val('cve.id') ==\n item[0]]\n", "return VAR_8\n" ]
[ "def associate_cwes(self):...\n", "\"\"\"docstring\"\"\"\n", "cwe_map = []\n", "if CVE.cve_cwe_map is not None:\n", "cwe_map = [item[1] for item in CVE.cve_cwe_map if self.get_val('cve.id') ==\n item[0]]\n", "return cwe_map\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_11():...\n", "populate_test_database()\n", "FUNC_1('first playlist')\n", "FUNC_0(1, 'title', 'thumbnail', 1)\n", "FUNC_0(1, 'title2', 'thumbnail2', 2)\n", "FUNC_0(1, 'title3', 'thumbnail3', 3)\n", "FUNC_0(1, 'title4', 'thumbnail4', 4)\n", "VAR_11 = VAR_3.put('/videos/4/1/2')\n", "assert VAR_11.json['status'] == 'OK'\n", "VAR_12 = VAR_3.get('/videos/1')\n", "assert VAR_12.json['status'] == 'OK'\n", "assert VAR_12.json['data'] == [dict(id=1, VAR_5='title', VAR_6='thumbnail',\n VAR_7=1), dict(id=4, VAR_5='title4', VAR_6='thumbnail4', VAR_7=2), dict\n (id=2, VAR_5='title2', VAR_6='thumbnail2', VAR_7=3), dict(id=3, VAR_5=\n 'title3', VAR_6='thumbnail3', VAR_7=4)]\n" ]
[ "def test_should_update_a_video_position():...\n", "populate_test_database()\n", "create_playlist('first playlist')\n", "create_video(1, 'title', 'thumbnail', 1)\n", "create_video(1, 'title2', 'thumbnail2', 2)\n", "create_video(1, 'title3', 'thumbnail3', 3)\n", "create_video(1, 'title4', 'thumbnail4', 4)\n", "response = test_app.put('/videos/4/1/2')\n", "assert response.json['status'] == 'OK'\n", "response2 = test_app.get('/videos/1')\n", "assert response2.json['status'] == 'OK'\n", "assert response2.json['data'] == [dict(id=1, title='title', thumbnail=\n 'thumbnail', position=1), dict(id=4, title='title4', thumbnail=\n 'thumbnail4', position=2), dict(id=2, title='title2', thumbnail=\n 'thumbnail2', position=3), dict(id=3, title='title3', thumbnail=\n 'thumbnail3', position=4)]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assert'", "Assign'", "Assert'", "Assert'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "VAR_17 = [VAR_28.mtime for VAR_28 in self.expanded_output if VAR_28.exists]\n", "if self.benchmark and self.benchmark.exists:\n", "VAR_17.append(self.benchmark.mtime)\n", "if VAR_17:\n", "return min(VAR_17)\n", "return None\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "existing = [f.mtime for f in self.expanded_output if f.exists]\n", "if self.benchmark and self.benchmark.exists:\n", "existing.append(self.benchmark.mtime)\n", "if existing:\n", "return min(existing)\n", "return None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Return'" ]
[ "def FUNC_4(self, VAR_5) ->dict:...\n", "VAR_5 = super().clean(VAR_5)\n", "if self.one_required and (not VAR_5 or not any(v for v in VAR_5)):\n", "if self.require_all_fields and not all(v for v in VAR_5):\n", "return VAR_5\n" ]
[ "def clean(self, value) ->dict:...\n", "value = super().clean(value)\n", "if self.one_required and (not value or not any(v for v in value)):\n", "if self.require_all_fields and not all(v for v in value):\n", "return value\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "For", "Return'" ]
[ "def FUNC_0(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "return self.get_submission_id() is not None\n" ]
[ "def test_success(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "return self.get_submission_id() is not None\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self):...\n", "c.get(url_for('oauthclient.login', remote_app='test'))\n", "self.mock_response(VAR_5='test')\n", "self.mock_response(VAR_5='test_invalid')\n", "VAR_1 = c.get(url_for('oauthclient.authorized', remote_app='test', code='test')\n )\n", "assert VAR_1.data == 'TEST'\n", "assert self.handled_remote.name == 'test'\n", "assert not self.handled_args\n", "assert not self.handled_kwargs\n", "assert self.handled_resp['access_token'] == 'test_access_token'\n", "VAR_1 = self.assertRaises(TypeError, c.get, url_for(\n 'oauthclient.authorized', remote_app='test_invalid', code='test'))\n" ]
[ "def test_authorized(self):...\n", "c.get(url_for('oauthclient.login', remote_app='test'))\n", "self.mock_response(app='test')\n", "self.mock_response(app='test_invalid')\n", "resp = c.get(url_for('oauthclient.authorized', remote_app='test', code='test'))\n", "assert resp.data == 'TEST'\n", "assert self.handled_remote.name == 'test'\n", "assert not self.handled_args\n", "assert not self.handled_kwargs\n", "assert self.handled_resp['access_token'] == 'test_access_token'\n", "resp = self.assertRaises(TypeError, c.get, url_for('oauthclient.authorized',\n remote_app='test_invalid', code='test'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assign'" ]
[ "def FUNC_1(VAR_0, VAR_1=None):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_0:\n", "VAR_0 = uuid.uuid4().hex\n", "return VAR_0\n", "if type(VAR_1) in [set, dict]:\n", "while VAR_0 in VAR_1:\n", "VAR_0 = uuid.uuid4().hex\n" ]
[ "def get_new_uuid(uuid_, uuid_list=None):...\n", "\"\"\"docstring\"\"\"\n", "if not uuid_:\n", "uuid_ = uuid.uuid4().hex\n", "return uuid_\n", "if type(uuid_list) in [set, dict]:\n", "while uuid_ in uuid_list:\n", "uuid_ = uuid.uuid4().hex\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Return'", "Condition", "Condition", "Assign'" ]
[ "def FUNC_6(self):...\n", "VAR_91 = Subreddit._by_fullname(VAR_102.post.keys())\n", "return VAR_91.values()\n" ]
[ "def run(self):...\n", "subreddits = Subreddit._by_fullname(request.post.keys())\n", "return subreddits.values()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(self, VAR_5=None, VAR_6=None, VAR_7=None):...\n", "\"\"\"docstring\"\"\"\n", "if self.request.method == 'GET':\n", "if VAR_5:\n", "if VAR_6:\n", "for key, validator in VAR_5.items():\n", "for key, validator in VAR_6.items():\n", "if VAR_7:\n", "if key in self.request.GET:\n", "if key in self.request.POST:\n", "for key, validator in VAR_7.items():\n", "setattr(self.params, key, validator(self.request.GET[key]))\n", "setattr(self.params, key, validator(self.request.POST[key]))\n", "if key in self.request.FILES:\n", "setattr(self.params, key, validator(self.request.FILES[key]))\n" ]
[ "def read_params(self, get_params=None, post_params=None, file_params=None):...\n", "\"\"\"docstring\"\"\"\n", "if self.request.method == 'GET':\n", "if get_params:\n", "if post_params:\n", "for key, validator in get_params.items():\n", "for key, validator in post_params.items():\n", "if file_params:\n", "if key in self.request.GET:\n", "if key in self.request.POST:\n", "for key, validator in file_params.items():\n", "setattr(self.params, key, validator(self.request.GET[key]))\n", "setattr(self.params, key, validator(self.request.POST[key]))\n", "if key in self.request.FILES:\n", "setattr(self.params, key, validator(self.request.FILES[key]))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Condition", "For", "For", "Condition", "Condition", "Condition", "For", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_16(self):...\n", "return True\n" ]
[ "def do_existing_paths(self):...\n", "return True\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_7, VAR_20=True, *VAR_15, **VAR_16):...\n", "CLASS_5.__init__(self, VAR_7, *VAR_15, VAR_20=redirect, **kw)\n" ]
[ "def __init__(self, param, redirect=True, *a, **kw):...\n", "VLink.__init__(self, param, *a, redirect=redirect, **kw)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(VAR_0, VAR_1=None, VAR_2=False):...\n", "VAR_3 = None\n", "if conf.eString and conf.eString in VAR_0:\n", "VAR_4 = VAR_0.index(conf.eString)\n", "if conf.eRegexp:\n", "VAR_5 = len(conf.eString)\n", "VAR_3 = re.findall(conf.eRegexp, VAR_0, re.I | re.M)\n", "if conf.string:\n", "VAR_6 = VAR_0[:VAR_4]\n", "if VAR_3:\n", "if conf.string in VAR_0:\n", "if conf.regexp:\n", "VAR_6 += VAR_0[VAR_4 + VAR_5:]\n", "for regExpResult in VAR_3:\n", "return True\n", "return False\n", "if re.search(conf.regexp, VAR_0, re.I | re.M):\n", "conf.seqMatcher.set_seq2(VAR_0)\n", "VAR_0 = VAR_6\n", "VAR_4 = VAR_0.index(regExpResult)\n", "return True\n", "return False\n", "if VAR_2:\n", "VAR_5 = len(regExpResult)\n", "return round(conf.seqMatcher.ratio(), 5)\n", "if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n", "VAR_7 = VAR_0[:VAR_4]\n", "return True\n", "return False\n", "VAR_7 += VAR_0[VAR_4 + VAR_5:]\n", "VAR_0 = VAR_7\n" ]
[ "def comparison(page, headers=None, getSeqMatcher=False):...\n", "regExpResults = None\n", "if conf.eString and conf.eString in page:\n", "index = page.index(conf.eString)\n", "if conf.eRegexp:\n", "length = len(conf.eString)\n", "regExpResults = re.findall(conf.eRegexp, page, re.I | re.M)\n", "if conf.string:\n", "pageWithoutString = page[:index]\n", "if regExpResults:\n", "if conf.string in page:\n", "if conf.regexp:\n", "pageWithoutString += page[index + length:]\n", "for regExpResult in regExpResults:\n", "return True\n", "return False\n", "if re.search(conf.regexp, page, re.I | re.M):\n", "conf.seqMatcher.set_seq2(page)\n", "page = pageWithoutString\n", "index = page.index(regExpResult)\n", "return True\n", "return False\n", "if getSeqMatcher:\n", "length = len(regExpResult)\n", "return round(conf.seqMatcher.ratio(), 5)\n", "if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n", "pageWithoutRegExp = page[:index]\n", "return True\n", "return False\n", "pageWithoutRegExp += page[index + length:]\n", "page = pageWithoutRegExp\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "AugAssign'", "For", "Return'", "Return'", "Condition", "Expr'", "Assign'", "Assign'", "Return'", "Return'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Return'", "Return'", "AugAssign'", "Assign'" ]
[ "def __str__(self):...\n", "return ' '.join(map(str, self))\n" ]
[ "def __str__(self):...\n", "return ' '.join(map(str, self))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@defer.inlineCallbacks...\n", "VAR_9 = yield VAR_2.client.mail_store.all_mails()\n", "for mail in VAR_9:\n", "yield VAR_2.client.mail_store.delete_mail(mail.ident)\n" ]
[ "@defer.inlineCallbacks...\n", "mails = yield context.client.mail_store.all_mails()\n", "for mail in mails:\n", "yield context.client.mail_store.delete_mail(mail.ident)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "For", "Expr'" ]
[ "def FUNC_1(VAR_0, VAR_1):...\n", "return render(VAR_0, 'posts/posts_detail.html', {'post_id': VAR_1})\n" ]
[ "def post_details(request, post_id):...\n", "return render(request, 'posts/posts_detail.html', {'post_id': post_id})\n" ]
[ 0, 6 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_3(self, VAR_3, *VAR_7, **VAR_8):...\n", "self.log(VAR_3.rstrip())\n", "self.reply(b'<pre>' + VAR_3.encode('utf-8'), *list(VAR_7), **kwargs)\n" ]
[ "def loud_reply(self, body, *args, **kwargs):...\n", "self.log(body.rstrip())\n", "self.reply(b'<pre>' + body.encode('utf-8'), *list(args), **kwargs)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "import psycopg2\n", "def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4):...\n", "self.connection = psycopg2.connect(dbname=VAR_2, host=VAR_0, port=VAR_1,\n user=VAR_3, password=VAR_4)\n", "\"\"\"string\"\"\"\n", "def FUNC_0(self, VAR_5, VAR_6, VAR_7):...\n", "VAR_9 = ''\n", "VAR_10 = ''\n", "for key in VAR_7:\n", "VAR_9 = VAR_9 + ', %s'\n", "VAR_11 = []\n", "VAR_10 = VAR_10 + ', %s'\n", "VAR_12 = []\n", "VAR_11.append(VAR_5)\n", "for key in VAR_7:\n", "VAR_11.append(key)\n", "VAR_11 = VAR_11 + VAR_12\n", "VAR_12.append(VAR_7[key])\n", "VAR_13 = self.connection.cursor()\n", "VAR_13.execute('INSERT INTO %s (TIMESTAMP' + VAR_9 + ') VALUES (%s,' +\n VAR_10 + ')', VAR_11)\n", "VAR_13.close()\n", "\"\"\"string\"\"\"\n", "print('posted successfully!')\n", "print('Insert Error: %s'.format(e))\n", "def FUNC_1(self, VAR_8):...\n", "self.connection.commit()\n", "if e == missing_column:\n", "VAR_14 = type(VAR_8)\n", "print('Attempting to alter table!')\n", "if e == missing_table:\n", "if VAR_14 is str:\n", "VAR_15 = ''\n", "self.createTable(VAR_5, VAR_7)\n", "print('Failed to create table??: %s'.format(e))\n", "self.insertData(VAR_5, VAR_6, VAR_7)\n", "print('Unexpected error when reinserted!')\n", "return 'TEXT'\n", "if VAR_14 is bool:\n", "VAR_16 = []\n", "print('Created table successfully - reinserting')\n", "print('posted successfully!')\n", "return 'BOOLEAN'\n", "if VAR_14 is int:\n", "VAR_14 = self.__getType(VAR_7[VAR_15])\n", "print('Got a type error %s'.format(e))\n", "VAR_13 = self.connection.cursor()\n", "print('Failed to alter table with error e'.format(e))\n", "print('Table alteration succeeded - attempting to insert again')\n", "return 'DOUBLE PRECISION'\n", "if VAR_14 is float:\n", "VAR_16.append(VAR_5)\n", "print('Error with field %s'.format(VAR_15))\n", "VAR_13.execute('ALTER TABLE %s ADD COLUMN %s %s', VAR_16)\n", "self.insertData(VAR_5, VAR_6, VAR_7)\n", "print('Unexpected error when reinserted!')\n", "return 'DOUBLE PRECISION'\n", "if VAR_14 is list:\n", "VAR_16.append(columnkName)\n", "print('Table alteration failed')\n", "self.connection.commit()\n", "print('posted successfully!')\n", "VAR_17 = type(VAR_8[0])\n", "\"\"\"string\"\"\"\n", "VAR_16.append(VAR_14)\n", "if VAR_17 is str:\n", "def FUNC_2(self, VAR_5, VAR_7):...\n", "return 'TEXT[]'\n", "if VAR_17 is bool:\n", "VAR_9 = ''\n", "return 'BOOLEAN[]'\n", "if VAR_17 is int:\n", "for key in VAR_7:\n", "return 'DOUBLE PRECISION[]'\n", "if VAR_17 is float:\n", "VAR_9 = VAR_9 + ', %s %s'\n", "VAR_11 = []\n", "return 'DOUBLE PRECISION[]'\n", "VAR_11.append(VAR_5)\n", "for key in VAR_7:\n", "VAR_13 = self.connection.cursor()\n", "VAR_14 = self.__getType(VAR_7[key])\n", "print('Error with object %s at key %s with value %s'.format(VAR_7, key,\n VAR_7[key]))\n", "VAR_13.execute('CREATE TABLE %s (TIMESTAMP TIMESTAMPTZ NOT NULL' + VAR_9 +\n ')', VAR_11)\n", "print('CREATE TABLE Error: %s'.format(e))\n", "self.connection.commit()\n", "VAR_11.append(key)\n", "print('Caught error %s'.format(e))\n", "\"\"\"\n Checks if a table exists\n takes:\n tableName - name of the table\n \"\"\"\n", "VAR_11.append(VAR_14)\n", "def FUNC_3(self, VAR_5):...\n", "VAR_13 = self.connection.cursor()\n", "VAR_13.execute(\n 'SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = %s)'\n , VAR_5)\n", "return False\n", "return True\n" ]
[ "import psycopg2\n", "def __init__(self, host, port, database, username, password):...\n", "self.connection = psycopg2.connect(dbname=database, host=host, port=port,\n user=username, password=password)\n", "\"\"\"\n Inserts data into an existing table. On failure due to not enough columns\n will automatically add columns to the table as necessary.\n Takes:\n tableName - string of the table being inserted into\n timeStamp - the timeStamp of the insertion\n tableObj - a dict of key/value pairs to insert\n \"\"\"\n", "def insertData(self, tableName, timeStamp, tableObj):...\n", "cols = ''\n", "vals = ''\n", "for key in tableObj:\n", "cols = cols + ', %s'\n", "nameList = []\n", "vals = vals + ', %s'\n", "valList = []\n", "nameList.append(tableName)\n", "for key in tableObj:\n", "nameList.append(key)\n", "nameList = nameList + valList\n", "valList.append(tableObj[key])\n", "cursor = self.connection.cursor()\n", "cursor.execute('INSERT INTO %s (TIMESTAMP' + cols + ') VALUES (%s,' + vals +\n ')', nameList)\n", "cursor.close()\n", "\"\"\"\n A private function that maps a type in python to a type in postgres\n Supports Strings, bools, numbers and arrays\n\n Raises a typerror on failure\n \"\"\"\n", "print('posted successfully!')\n", "print('Insert Error: %s'.format(e))\n", "def __getType(self, value):...\n", "self.connection.commit()\n", "if e == missing_column:\n", "t = type(value)\n", "print('Attempting to alter table!')\n", "if e == missing_table:\n", "if t is str:\n", "columnName = ''\n", "self.createTable(tableName, tableObj)\n", "print('Failed to create table??: %s'.format(e))\n", "self.insertData(tableName, timeStamp, tableObj)\n", "print('Unexpected error when reinserted!')\n", "return 'TEXT'\n", "if t is bool:\n", "params = []\n", "print('Created table successfully - reinserting')\n", "print('posted successfully!')\n", "return 'BOOLEAN'\n", "if t is int:\n", "t = self.__getType(tableObj[columnName])\n", "print('Got a type error %s'.format(e))\n", "cursor = self.connection.cursor()\n", "print('Failed to alter table with error e'.format(e))\n", "print('Table alteration succeeded - attempting to insert again')\n", "return 'DOUBLE PRECISION'\n", "if t is float:\n", "params.append(tableName)\n", "print('Error with field %s'.format(columnName))\n", "cursor.execute('ALTER TABLE %s ADD COLUMN %s %s', params)\n", "self.insertData(tableName, timeStamp, tableObj)\n", "print('Unexpected error when reinserted!')\n", "return 'DOUBLE PRECISION'\n", "if t is list:\n", "params.append(columnkName)\n", "print('Table alteration failed')\n", "self.connection.commit()\n", "print('posted successfully!')\n", "t2 = type(value[0])\n", "\"\"\"\n Creates a timescaledb table with at least a timestamp field. 
Partitions table\n by time.\n Takes:\n tableName - string of the table being inserted into\n tableObj - a dict of key/value pairs to start the table with\n \"\"\"\n", "params.append(t)\n", "if t2 is str:\n", "def createTable(self, tableName, tableObj):...\n", "return 'TEXT[]'\n", "if t2 is bool:\n", "cols = ''\n", "return 'BOOLEAN[]'\n", "if t2 is int:\n", "for key in tableObj:\n", "return 'DOUBLE PRECISION[]'\n", "if t2 is float:\n", "cols = cols + ', %s %s'\n", "nameList = []\n", "return 'DOUBLE PRECISION[]'\n", "nameList.append(tableName)\n", "for key in tableObj:\n", "cursor = self.connection.cursor()\n", "t = self.__getType(tableObj[key])\n", "print('Error with object %s at key %s with value %s'.format(tableObj, key,\n tableObj[key]))\n", "cursor.execute('CREATE TABLE %s (TIMESTAMP TIMESTAMPTZ NOT NULL' + cols +\n ')', nameList)\n", "print('CREATE TABLE Error: %s'.format(e))\n", "self.connection.commit()\n", "nameList.append(key)\n", "print('Caught error %s'.format(e))\n", "\"\"\"\n Checks if a table exists\n takes:\n tableName - name of the table\n \"\"\"\n", "nameList.append(t)\n", "def tableExists(self, tableName):...\n", "cursor = self.connection.cursor()\n", "cursor.execute(\n 'SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = %s)'\n , tableName)\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 4, 0, 4, 4, 0, 0, 4, 0, 0, 4, 4, 0, 0, 4, 0, 4, 4, 0, 4, 0, 4, 0, 0, 0, 0, 4, 0, 0, 0, 0, 4, 4, 0, 4, 0, 0, 0, 4, 4, 4, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 4, 4, 4, 0, 0, 4, 0, 4, 0, 0, 0, 0, 0 ]
[ "Import'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "For", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Condition", "Assign'", "Expr'", "Expr'", "Return'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Condition", "FunctionDef'", "Return'", "Condition", "Assign'", "Return'", "Condition", "For", "Return'", "Condition", "Assign'", "Assign'", "Return'", "Expr'", "For", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Return'", "Return'" ]
[ "def __init__(self, VAR_1):...\n", "self.debug = VAR_1.debug\n", "self.path = VAR_1.path\n", "self.repos = VAR_1.repos\n" ]
[ "def __init__(self, args):...\n", "self.debug = args.debug\n", "self.path = args.path\n", "self.repos = args.repos\n" ]
[ 0, 0, 1, 1 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_0(self, VAR_2):...\n", "self.log_func(self.log_src, VAR_2)\n" ]
[ "def log(self, msg):...\n", "self.log_func(self.log_src, msg)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __init__(self, VAR_0):...\n", "os.umask(63)\n", "self.config = VAR_0\n", "self.client_list = []\n", "self.node_list = []\n", "self.master = False\n", "self.retrieved = 0\n", "self.need_local_sudo = False\n", "self.clusters = self.config['cluster_types']\n", "if not self.config['list_options']:\n", "if not self.config['tmp_dir']:\n", "self._exit('Exiting on user cancel', 130)\n", "self.create_tmp_dir()\n", "self._setup_logging()\n", "self.log_debug('Executing %s' % ' '.join(s for s in sys.argv))\n", "self.log_debug('Found cluster profiles: %s' % self.clusters.keys())\n", "self.log_debug('Found supported host types: %s' % self.config['host_types']\n .keys())\n", "self._parse_options()\n", "self.prep()\n" ]
[ "def __init__(self, config):...\n", "os.umask(63)\n", "self.config = config\n", "self.client_list = []\n", "self.node_list = []\n", "self.master = False\n", "self.retrieved = 0\n", "self.need_local_sudo = False\n", "self.clusters = self.config['cluster_types']\n", "if not self.config['list_options']:\n", "if not self.config['tmp_dir']:\n", "self._exit('Exiting on user cancel', 130)\n", "self.create_tmp_dir()\n", "self._setup_logging()\n", "self.log_debug('Executing %s' % ' '.join(s for s in sys.argv))\n", "self.log_debug('Found cluster profiles: %s' % self.clusters.keys())\n", "self.log_debug('Found supported host types: %s' % self.config['host_types']\n .keys())\n", "self._parse_options()\n", "self.prep()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(self, **VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "VAR_40 = 'SELECT id FROM wins_completed_wins_fy'\n", "if self.end_date:\n", "VAR_40 = f\"{VAR_40} where created <= '{self.end_date.strftime('%m-%d-%Y')}'\"\n", "VAR_31.execute(VAR_40)\n", "VAR_49 = VAR_31.fetchall()\n", "VAR_25 = Win.objects.filter(id__in=[id[0] for id in ids]).values()\n", "for VAR_5 in VAR_25:\n", "yield self._get_win_data(VAR_5)\n" ]
[ "def _make_flat_wins_csv(self, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "sql_str = 'SELECT id FROM wins_completed_wins_fy'\n", "if self.end_date:\n", "sql_str = f\"{sql_str} where created <= '{self.end_date.strftime('%m-%d-%Y')}'\"\n", "cursor.execute(sql_str)\n", "ids = cursor.fetchall()\n", "wins = Win.objects.filter(id__in=[id[0] for id in ids]).values()\n", "for win in wins:\n", "yield self._get_win_data(win)\n" ]
[ 0, 0, 4, 0, 4, 4, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Expr'" ]
[ "def FUNC_5(self):...\n", "if keepassgtk.config_manager.has_group('history'\n", "self.logging_manager.log_debug('Found last opened database entry (' +\n keepassgtk.config_manager.get_string('history', 'last-opened-db') + ')')\n", "self.logging_manager.log_debug(\n 'No / Not valid last opened database entry found.')\n", "VAR_15 = ntpath.basename(keepassgtk.config_manager.get_string('history',\n 'last-opened-db'))\n", "VAR_28 = Gtk.Builder()\n", "self.start_database_opening_routine(VAR_15, keepassgtk.config_manager.\n get_string('history', 'last-opened-db'))\n", "VAR_28.add_from_resource('/run/terminal/KeepassGtk/main_window.ui')\n", "self.first_start_grid = VAR_28.get_object('first_start_grid')\n", "self.add(self.first_start_grid)\n" ]
[ "def first_start_screen(self):...\n", "if keepassgtk.config_manager.has_group('history'\n", "self.logging_manager.log_debug('Found last opened database entry (' +\n keepassgtk.config_manager.get_string('history', 'last-opened-db') + ')')\n", "self.logging_manager.log_debug(\n 'No / Not valid last opened database entry found.')\n", "tab_title = ntpath.basename(keepassgtk.config_manager.get_string('history',\n 'last-opened-db'))\n", "builder = Gtk.Builder()\n", "self.start_database_opening_routine(tab_title, keepassgtk.config_manager.\n get_string('history', 'last-opened-db'))\n", "builder.add_from_resource('/run/terminal/KeepassGtk/main_window.ui')\n", "self.first_start_grid = builder.get_object('first_start_grid')\n", "self.add(self.first_start_grid)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_0(VAR_1=None):...\n", "\"\"\"docstring\"\"\"\n", "if __grains__['kernel'] == 'Linux':\n", "VAR_4 = 'df -P'\n", "if __grains__['kernel'] == 'OpenBSD':\n", "if VAR_1:\n", "VAR_4 = 'df -kP'\n", "VAR_4 = 'df'\n", "VAR_4 = VAR_4 + ' -' + VAR_1\n", "VAR_2 = {}\n", "VAR_3 = __salt__['cmd.run'](VAR_4).splitlines()\n", "for line in VAR_3:\n", "if not line:\n", "return VAR_2\n", "if line.startswith('Filesystem'):\n", "VAR_5 = line.split()\n", "while not VAR_5[1].isdigit():\n", "VAR_5[0] = '{0} {1}'.format(VAR_5[0], VAR_5[1])\n", "if __grains__['kernel'] == 'Darwin':\n", "VAR_0.warn('Problem parsing disk usage information')\n", "VAR_5.pop(1)\n", "VAR_2[VAR_5[8]] = {'filesystem': VAR_5[0], '512-blocks': VAR_5[1], 'used':\n VAR_5[2], 'available': VAR_5[3], 'capacity': VAR_5[4], 'iused': VAR_5[5\n ], 'ifree': VAR_5[6], '%iused': VAR_5[7]}\n", "VAR_2[VAR_5[5]] = {'filesystem': VAR_5[0], '1K-blocks': VAR_5[1], 'used':\n VAR_5[2], 'available': VAR_5[3], 'capacity': VAR_5[4]}\n", "VAR_2 = {}\n" ]
[ "def usage(args=None):...\n", "\"\"\"docstring\"\"\"\n", "if __grains__['kernel'] == 'Linux':\n", "cmd = 'df -P'\n", "if __grains__['kernel'] == 'OpenBSD':\n", "if args:\n", "cmd = 'df -kP'\n", "cmd = 'df'\n", "cmd = cmd + ' -' + args\n", "ret = {}\n", "out = __salt__['cmd.run'](cmd).splitlines()\n", "for line in out:\n", "if not line:\n", "return ret\n", "if line.startswith('Filesystem'):\n", "comps = line.split()\n", "while not comps[1].isdigit():\n", "comps[0] = '{0} {1}'.format(comps[0], comps[1])\n", "if __grains__['kernel'] == 'Darwin':\n", "log.warn('Problem parsing disk usage information')\n", "comps.pop(1)\n", "ret[comps[8]] = {'filesystem': comps[0], '512-blocks': comps[1], 'used':\n comps[2], 'available': comps[3], 'capacity': comps[4], 'iused': comps[5\n ], 'ifree': comps[6], '%iused': comps[7]}\n", "ret[comps[5]] = {'filesystem': comps[0], '1K-blocks': comps[1], 'used':\n comps[2], 'available': comps[3], 'capacity': comps[4]}\n", "ret = {}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_5(self, VAR_10=None, VAR_11=None, VAR_1=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_10 is None:\n", "VAR_10 = str(len(self._rules) + 1)\n", "if self.is_rule(VAR_10):\n", "VAR_13 = Rule(VAR_10, self, VAR_11=lineno, VAR_1=snakefile)\n", "self._rules[VAR_13.name] = VAR_13\n", "self.rule_count += 1\n", "if not self.first_rule:\n", "self.first_rule = VAR_13.name\n", "return VAR_10\n" ]
[ "def add_rule(self, name=None, lineno=None, snakefile=None):...\n", "\"\"\"docstring\"\"\"\n", "if name is None:\n", "name = str(len(self._rules) + 1)\n", "if self.is_rule(name):\n", "rule = Rule(name, self, lineno=lineno, snakefile=snakefile)\n", "self._rules[rule.name] = rule\n", "self.rule_count += 1\n", "if not self.first_rule:\n", "self.first_rule = rule.name\n", "return name\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "AugAssign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_8(self, VAR_12=False):...\n", "VAR_86 = self.rules\n", "if VAR_12:\n", "VAR_86 = filterfalse(Rule.has_wildcards, VAR_86)\n", "for VAR_13 in VAR_86:\n", "logger.rule_info(VAR_10=rule.name, docstring=rule.docstring)\n" ]
[ "def list_rules(self, only_targets=False):...\n", "rules = self.rules\n", "if only_targets:\n", "rules = filterfalse(Rule.has_wildcards, rules)\n", "for rule in rules:\n", "logger.rule_info(name=rule.name, docstring=rule.docstring)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "For", "Docstring" ]
[ "def FUNC_0(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "if len(VAR_1) > 2:\n", "VAR_1[0] = VAR_1[1]\n", "VAR_1[0] = []\n", "VAR_1[0].append(VAR_1[2])\n" ]
[ "def p_main(t):...\n", "\"\"\"docstring\"\"\"\n", "if len(t) > 2:\n", "t[0] = t[1]\n", "t[0] = []\n", "t[0].append(t[2])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_9(VAR_4, VAR_3):...\n", "VAR_9 = {}\n", "VAR_9['date'] = '1-Jan-' + str(VAR_4)\n", "VAR_9['close'] = VAR_3\n", "return VAR_9\n" ]
[ "def makeObj2(y, x):...\n", "z = {}\n", "z['date'] = '1-Jan-' + str(y)\n", "z['close'] = x\n", "return z\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(self, VAR_6):...\n", "\"\"\"docstring\"\"\"\n", "log.info('Figuring out the least active users...')\n", "VAR_14 = tuple(self.users.keys())\n", "VAR_10 = (\n f'SELECT chat_id FROM photo_queries_table2 WHERE chat_id in {VAR_14} GROUP BY chat_id ORDER BY MAX(time) LIMIT {VAR_6}'\n )\n", "VAR_19 = db.execute_query(VAR_10)\n", "log.error(\"Can't figure out the least active users...\")\n", "if not VAR_19.rowcount:\n", "return\n", "log.warning('There are no users in the db')\n", "VAR_15 = [VAR_0[0] for VAR_0 in VAR_19.fetchall()]\n", "return\n", "log.info('Removing %d least active users from cache...', VAR_6)\n", "VAR_16 = 0\n", "for entry in VAR_15:\n", "log.debug('Deleting %s...', entry)\n", "log.debug('%d users were removed from cache.', VAR_16)\n", "VAR_20 = self.users.pop(entry, None)\n", "if VAR_20:\n", "VAR_16 += 1\n" ]
[ "def clean_cache(self, limit):...\n", "\"\"\"docstring\"\"\"\n", "log.info('Figuring out the least active users...')\n", "user_ids = tuple(self.users.keys())\n", "query = (\n f'SELECT chat_id FROM photo_queries_table2 WHERE chat_id in {user_ids} GROUP BY chat_id ORDER BY MAX(time) LIMIT {limit}'\n )\n", "cursor = db.execute_query(query)\n", "log.error(\"Can't figure out the least active users...\")\n", "if not cursor.rowcount:\n", "return\n", "log.warning('There are no users in the db')\n", "least_active_users = [chat_id[0] for chat_id in cursor.fetchall()]\n", "return\n", "log.info('Removing %d least active users from cache...', limit)\n", "num_deleted_entries = 0\n", "for entry in least_active_users:\n", "log.debug('Deleting %s...', entry)\n", "log.debug('%d users were removed from cache.', num_deleted_entries)\n", "deleted_entry = self.users.pop(entry, None)\n", "if deleted_entry:\n", "num_deleted_entries += 1\n" ]
[ 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Expr'", "Assign'", "Return'", "Expr'", "Assign'", "For", "Expr'", "Expr'", "Assign'", "Condition", "AugAssign'" ]
[ "def FUNC_8(self, VAR_6):...\n", "\"\"\"docstring\"\"\"\n", "if not self._any_targets_have_native_sources(VAR_6):\n", "return False\n", "VAR_7 = pex_build_util.targets_by_platform(VAR_6, self._python_setup)\n", "VAR_8 = list(VAR_7.keys())\n", "if not VAR_8 or VAR_8 == ['current']:\n", "return True\n", "VAR_9 = set()\n", "for platform, VAR_6 in VAR_7.items():\n", "if platform == 'current':\n", "VAR_9.update(VAR_6)\n" ]
[ "def check_build_for_current_platform_only(self, targets):...\n", "\"\"\"docstring\"\"\"\n", "if not self._any_targets_have_native_sources(targets):\n", "return False\n", "platforms_with_sources = pex_build_util.targets_by_platform(targets, self.\n _python_setup)\n", "platform_names = list(platforms_with_sources.keys())\n", "if not platform_names or platform_names == ['current']:\n", "return True\n", "bad_targets = set()\n", "for platform, targets in platforms_with_sources.items():\n", "if platform == 'current':\n", "bad_targets.update(targets)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "For", "For", "Expr'" ]
[ "@staticmethod...\n", "if not VAR_1 or not VAR_6:\n", "if not VAR_5:\n", "VAR_5 = CLASS_0.get_report(VAR_1)['analysis']\n", "VAR_16 = VAR_5['behavior']['generic']\n", "VAR_17 = [z for z in VAR_16 if z['pid'] == VAR_6]\n", "if not VAR_17:\n", "VAR_17 = VAR_17[0]\n", "VAR_9 = {}\n", "for VAR_30, VAR_24 in CLASS_0.behavioral_mapping().iteritems():\n", "for VAR_7 in VAR_24:\n", "return VAR_9\n", "if VAR_7 in VAR_17['summary']:\n", "if VAR_30 not in VAR_9:\n", "VAR_9[VAR_30] = [VAR_7]\n", "VAR_9[VAR_30].append(VAR_7)\n" ]
[ "@staticmethod...\n", "if not task_id or not pid:\n", "if not report:\n", "report = AnalysisController.get_report(task_id)['analysis']\n", "behavior_generic = report['behavior']['generic']\n", "process = [z for z in behavior_generic if z['pid'] == pid]\n", "if not process:\n", "process = process[0]\n", "data = {}\n", "for category, watchers in AnalysisController.behavioral_mapping().iteritems():\n", "for watcher in watchers:\n", "return data\n", "if watcher in process['summary']:\n", "if category not in data:\n", "data[category] = [watcher]\n", "data[category].append(watcher)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "For", "Return'", "Condition", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_19(self):...\n", "return VAR_54.as_json(self.as_dict())\n" ]
[ "def as_json(self):...\n", "return frappe.as_json(self.as_dict())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_4(self, VAR_5):...\n", "for oldg in VAR_5.get_ancestors():\n", "if oldg not in self.groups:\n", "if VAR_5 not in self.groups:\n", "self.add_group(oldg)\n", "self.groups.append(VAR_5)\n" ]
[ "def add_group(self, group):...\n", "for oldg in group.get_ancestors():\n", "if oldg not in self.groups:\n", "if group not in self.groups:\n", "self.add_group(oldg)\n", "self.groups.append(group)\n" ]
[ 0, 0, 0, 0, 1, 0 ]
[ "FunctionDef'", "For", "Condition", "Condition", "Expr'", "Expr'" ]
[ "import pymysql\n", "import dbconfig\n", "def FUNC_0(self, VAR_0='crimemap'):...\n", "return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n" ]
[ "import pymysql\n", "import dbconfig\n", "def connect(self, database='crimemap'):...\n", "return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n" ]
[ 0, 0, 0, 0 ]
[ "Import'", "Import'", "FunctionDef'", "Return'" ]
[ "@gallery.route('/gallery/benwa/add', methods=['GET', 'POST'])...\n", "VAR_6 = PostForm()\n", "if VAR_6.validate_on_submit():\n", "VAR_11 = VAR_6.image.data\n", "flash('There was an issue with adding the benwa')\n", "VAR_12 = secure_filename(VAR_11.filename)\n", "return render_template('image_upload.html', VAR_6=form)\n", "VAR_11.save(join(current_app.static_folder, current_app.config[\n 'STATIC_BENWA_DIR'], VAR_12))\n", "VAR_13 = '/'.join(['thumbs', VAR_12])\n", "VAR_14 = datetime.utcnow()\n", "VAR_15 = Preview(filepath=fpath, VAR_14=created)\n", "db.session.add(VAR_15)\n", "VAR_13 = '/'.join(['imgs', VAR_12])\n", "VAR_16 = Image(filepath=fpath, VAR_14=created, VAR_15=preview)\n", "db.session.add(VAR_16)\n", "VAR_0 = [Tag.query.get(1)]\n", "VAR_17 = [FUNC_5(db.session, tag)[0] for tag in VAR_6.tags.data if tag]\n", "VAR_0.extend(VAR_17)\n", "VAR_5 = Post(title=fname, VAR_14=datetime.utcnow(), VAR_16=image, VAR_0=tags)\n", "db.session.add(VAR_5)\n", "current_user.posts.append(VAR_5)\n", "db.session.commit()\n", "return redirect(url_for('gallery.show_post', VAR_1=post.id))\n" ]
[ "@gallery.route('/gallery/benwa/add', methods=['GET', 'POST'])...\n", "form = PostForm()\n", "if form.validate_on_submit():\n", "f = form.image.data\n", "flash('There was an issue with adding the benwa')\n", "fname = secure_filename(f.filename)\n", "return render_template('image_upload.html', form=form)\n", "f.save(join(current_app.static_folder, current_app.config[\n 'STATIC_BENWA_DIR'], fname))\n", "fpath = '/'.join(['thumbs', fname])\n", "created = datetime.utcnow()\n", "preview = Preview(filepath=fpath, created=created)\n", "db.session.add(preview)\n", "fpath = '/'.join(['imgs', fname])\n", "image = Image(filepath=fpath, created=created, preview=preview)\n", "db.session.add(image)\n", "tags = [Tag.query.get(1)]\n", "added_tags = [get_or_create_tag(db.session, tag)[0] for tag in form.tags.\n data if tag]\n", "tags.extend(added_tags)\n", "post = Post(title=fname, created=datetime.utcnow(), image=image, tags=tags)\n", "db.session.add(post)\n", "current_user.posts.append(post)\n", "db.session.commit()\n", "return redirect(url_for('gallery.show_post', post_id=post.id))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Return'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "VAR_0 = pickle.load(FUNC_14(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7, VAR_8, VAR_9, VAR_10 = list(\n range(0, 10))\n", "VAR_11, VAR_12, VAR_13, VAR_14, VAR_15, VAR_16, VAR_17 = list(range(0, 7))\n", "\"\"\"string\"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, VAR_18, VAR_19):...\n", "self.fs = VAR_18\n", "self.cfg = VAR_19\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def FUNC_0(self, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_21, directories, filenames in os.walk(VAR_20):\n", "for VAR_32 in filenames:\n", "def FUNC_1(self, VAR_21, VAR_22):...\n", "VAR_72 = os.path.join(VAR_21, VAR_32)\n", "\"\"\"docstring\"\"\"\n", "VAR_73 = '/' + os.path.relpath(VAR_72, VAR_20)\n", "VAR_55 = VAR_21.rstrip('/').split('/')\n", "VAR_24 = self.getfile(VAR_73, VAR_23=False)\n", "if VAR_21[0] == '/':\n", "if VAR_24 and VAR_24[VAR_2] == VAR_13:\n", "VAR_22 = []\n", "VAR_22 = [x for x in VAR_22.split('/') if len(x) and x is not None]\n", "self.update_realfile(VAR_24, VAR_72)\n", "while 1:\n", "if not len(VAR_55):\n", "VAR_66 = VAR_55.pop(0)\n", "return '/%s' % ('/'.join(VAR_22),)\n", "if VAR_66 == '..':\n", "if len(VAR_22):\n", "if VAR_66 in ('.', ''):\n", "VAR_22.pop()\n", "VAR_22.append(VAR_66)\n" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "PICKLE = pickle.load(open(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "(A_NAME, A_TYPE, A_UID, A_GID, A_SIZE, A_MODE, A_CTIME, A_CONTENTS,\n A_TARGET, A_REALFILE) = list(range(0, 10))\n", "T_LINK, T_DIR, T_FILE, T_BLK, T_CHR, T_SOCK, T_FIFO = list(range(0, 7))\n", "\"\"\"\n 62 ELOOP Too many levels of symbolic links. A path name lookup involved more than 8 symbolic links.\n raise OSError(errno.ELOOP, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, fs, cfg):...\n", "self.fs = fs\n", "self.cfg = cfg\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def init_honeyfs(self, honeyfs_path):...\n", "\"\"\"docstring\"\"\"\n", "for path, directories, filenames in os.walk(honeyfs_path):\n", "for filename in filenames:\n", "def resolve_path(self, path, cwd):...\n", "realfile_path = os.path.join(path, filename)\n", "\"\"\"docstring\"\"\"\n", "virtual_path = '/' + os.path.relpath(realfile_path, honeyfs_path)\n", "pieces = path.rstrip('/').split('/')\n", "f = self.getfile(virtual_path, follow_symlinks=False)\n", "if path[0] == '/':\n", "if f and f[A_TYPE] == T_FILE:\n", "cwd = []\n", "cwd = [x for x in cwd.split('/') if len(x) and x is not None]\n", "self.update_realfile(f, realfile_path)\n", "while 1:\n", "if not len(pieces):\n", "piece = pieces.pop(0)\n", "return '/%s' % ('/'.join(cwd),)\n", "if piece == '..':\n", "if len(cwd):\n", "if piece in ('.', ''):\n", "cwd.pop()\n", "cwd.append(piece)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "For", "For", "FunctionDef'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Return'", "Condition", "Condition", "Condition", "Expr'", "Expr'" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = 'koester@jimmy.harvard.edu'\n", "__license__ = 'MIT'\n", "import re\n", "import os\n", "import sys\n", "import signal\n", "import json\n", "import urllib\n", "from collections import OrderedDict\n", "from itertools import filterfalse, chain\n", "from functools import partial\n", "from operator import attrgetter\n", "from snakemake.logging import logger, format_resources, format_resource_names\n", "from snakemake.rules import Rule, Ruleorder\n", "from snakemake.exceptions import RuleException, CreateRuleException, UnknownRuleException, NoRulesException, print_exception, WorkflowError\n", "from snakemake.shell import shell\n", "from snakemake.dag import DAG\n", "from snakemake.scheduler import JobScheduler\n", "from snakemake.parser import parse\n", "import snakemake.io\n", "from snakemake.io import protected, temp, temporary, expand, dynamic, glob_wildcards, flag, not_iterable, touch\n", "from snakemake.persistence import Persistence\n", "from snakemake.utils import update_config\n", "def __init__(self, VAR_1=None, VAR_2=None, VAR_3=None, VAR_4=None, VAR_5=...\n", "\"\"\"docstring\"\"\"\n", "self._rules = OrderedDict()\n", "self.first_rule = None\n", "self._workdir = None\n", "self.overwrite_workdir = VAR_6\n", "self.workdir_init = os.path.abspath(os.curdir)\n", "self._ruleorder = Ruleorder()\n", "self._localrules = set()\n", "self.linemaps = dict()\n", "self.rule_count = 0\n", "self.basedir = os.path.dirname(VAR_1)\n", "self.snakefile = os.path.abspath(VAR_1)\n", "self.snakemakepath = VAR_2\n", "self.included = []\n", "self.included_stack = []\n", "self.jobscript = VAR_3\n", "self.persistence = None\n", "self.global_resources = None\n", "self.globals = globals()\n", "self._subworkflows = dict()\n", "self.overwrite_shellcmd = VAR_4\n", "self.overwrite_config = VAR_5\n", "self.overwrite_configfile = VAR_7\n", "self.config_args = VAR_8\n", "self._onsuccess = lambda log: None\n", "self._onerror = lambda log: None\n", "self.debug = VAR_9\n", "VAR_85 = dict()\n", "VAR_85.update(self.overwrite_config)\n", "VAR_86 = CLASS_3()\n", "@property...\n", "return self._subworkflows.values()\n" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = 'koester@jimmy.harvard.edu'\n", "__license__ = 'MIT'\n", "import re\n", "import os\n", "import sys\n", "import signal\n", "import json\n", "import urllib\n", "from collections import OrderedDict\n", "from itertools import filterfalse, chain\n", "from functools import partial\n", "from operator import attrgetter\n", "from snakemake.logging import logger, format_resources, format_resource_names\n", "from snakemake.rules import Rule, Ruleorder\n", "from snakemake.exceptions import RuleException, CreateRuleException, UnknownRuleException, NoRulesException, print_exception, WorkflowError\n", "from snakemake.shell import shell\n", "from snakemake.dag import DAG\n", "from snakemake.scheduler import JobScheduler\n", "from snakemake.parser import parse\n", "import snakemake.io\n", "from snakemake.io import protected, temp, temporary, expand, dynamic, glob_wildcards, flag, not_iterable, touch\n", "from snakemake.persistence import Persistence\n", "from snakemake.utils import update_config\n", "def __init__(self, snakefile=None, snakemakepath=None, jobscript=None,...\n", "\"\"\"docstring\"\"\"\n", "self._rules = OrderedDict()\n", "self.first_rule = None\n", "self._workdir = None\n", "self.overwrite_workdir = overwrite_workdir\n", "self.workdir_init = os.path.abspath(os.curdir)\n", "self._ruleorder = Ruleorder()\n", "self._localrules = set()\n", "self.linemaps = dict()\n", "self.rule_count = 0\n", "self.basedir = os.path.dirname(snakefile)\n", "self.snakefile = os.path.abspath(snakefile)\n", "self.snakemakepath = snakemakepath\n", "self.included = []\n", "self.included_stack = []\n", "self.jobscript = jobscript\n", "self.persistence = None\n", "self.global_resources = None\n", "self.globals = globals()\n", "self._subworkflows = dict()\n", "self.overwrite_shellcmd = overwrite_shellcmd\n", "self.overwrite_config = overwrite_config\n", "self.overwrite_configfile = overwrite_configfile\n", "self.config_args = config_args\n", "self._onsuccess = lambda log: None\n", "self._onerror = lambda log: None\n", "self.debug = debug\n", "config = dict()\n", "config.update(self.overwrite_config)\n", "rules = Rules()\n", "@property...\n", "return self._subworkflows.values()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Assign'", "Assign'", "Assign'", "Assign'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_0(VAR_2, VAR_3):...\n", "VAR_11 = VAR_2 - sum(d['points'] for d in VAR_3)\n", "if VAR_11 < 0:\n", "VAR_11 = 0\n", "print(json.dumps({'score': VAR_11, 'deductions': VAR_3}))\n" ]
[ "def output_json(points, deductions):...\n", "score = points - sum(d['points'] for d in deductions)\n", "if score < 0:\n", "score = 0\n", "print(json.dumps({'score': score, 'deductions': deductions}))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_5(self):...\n", "" ]
[ "def run(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self):...\n", "super().get_common_objects()\n", "self.page = {'is_wait': 'wait' in self.request.GET}\n", "self.note('page')\n", "self.get_summary_submissions()\n" ]
[ "def get_common_objects(self):...\n", "super().get_common_objects()\n", "self.page = {'is_wait': 'wait' in self.request.GET}\n", "self.note('page')\n", "self.get_summary_submissions()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_31(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'LOW': 7}, 'CONFIDENCE': {'HIGH': 7}}\n", "self.check_example('popen_wrappers.py', VAR_2)\n" ]
[ "def test_popen_wrappers(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'LOW': 7}, 'CONFIDENCE': {'HIGH': 7}}\n", "self.check_example('popen_wrappers.py', expect)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "@VAR_0.route('/chpass')...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_3('change-password-action', VAR_2=True)\n" ]
[ "@authn_views.route('/chpass')...\n", "\"\"\"docstring\"\"\"\n", "return _authn('change-password-action', force_authn=True)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "@staticmethod...\n", "return [col[0] for col in VAR_5.description]\n" ]
[ "@staticmethod...\n", "return [col[0] for col in cursor.description]\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@wraps(VAR_6)...\n", "if 'logged_in' in VAR_10:\n", "return VAR_6(*VAR_8, **kwargs)\n", "flash('Unauthorized, Please login', 'danger')\n", "return redirect(url_for('login'))\n" ]
[ "@wraps(f)...\n", "if 'logged_in' in session:\n", "return f(*args, **kwargs)\n", "flash('Unauthorized, Please login', 'danger')\n", "return redirect(url_for('login'))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_6(self, VAR_11='', VAR_15=None, VAR_12=None):...\n", "return self.url_post(VAR_11, VAR_15, VAR_12=headers)\n" ]
[ "def create(self, resource='', body=None, headers=None):...\n", "return self.url_post(resource, body, headers=headers)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@classmethod...\n", "super(CLASS_2, VAR_0).setUpTestData()\n", "VAR_0.many_comments_url = reverse('ajax-comment_case_runs')\n" ]
[ "@classmethod...\n", "super(TestCommentCaseRuns, cls).setUpTestData()\n", "cls.many_comments_url = reverse('ajax-comment_case_runs')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'" ]
[ "import pymysql\n", "from config import create_connection\n", "VAR_0 = 'products'\n", "VAR_1 = 'product_types'\n", "VAR_2 = 'reviews'\n", "VAR_3 = 'users'\n", "def FUNC_0(VAR_4):...\n", "VAR_10 = (VAR_4['page'] - 1) * VAR_4['perPage']\n", "VAR_11 = f\"\"\"string{VAR_0} AS p INNER JOIN {VAR_1} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {VAR_4['rating']}\n ORDER BY p.id LIMIT {VAR_10}, {VAR_4['perPage']}\n \"\"\"\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_1(VAR_4, VAR_5):...\n", "VAR_13 = VAR_12.cursor()\n", "VAR_10 = (VAR_4['page'] - 1) * VAR_4['perPage']\n", "VAR_13.execute(VAR_11)\n", "VAR_11 = f\"\"\"string{VAR_0} AS p INNER JOIN {VAR_1} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {VAR_4['rating']}\n AND pt.name = '{VAR_5}'\n ORDER BY p.id LIMIT {VAR_10}, {VAR_4['perPage']}\n \"\"\"\n", "return VAR_13.fetchall()\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_2(VAR_4, VAR_6):...\n", "VAR_13 = VAR_12.cursor()\n", "VAR_10 = (VAR_4['page'] - 1) * VAR_4['perPage']\n", "VAR_13.execute(VAR_11)\n", "VAR_11 = f\"\"\"string{VAR_0} AS p INNER JOIN {VAR_1} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {VAR_4['rating']}\n AND p.name LIKE '%{VAR_6}%'\n ORDER BY p.id LIMIT {VAR_10}, {VAR_4['perPage']}\n \"\"\"\n", "return VAR_13.fetchall()\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_3(VAR_4):...\n", "VAR_13 = VAR_12.cursor()\n", "VAR_11 = f\"string{VAR_4['rating']}\\n \"\n", "VAR_13.execute(VAR_11)\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_4(VAR_4, VAR_5):...\n", "return VAR_13.fetchall()\n", "VAR_13 = VAR_12.cursor()\n", "VAR_11 = f\"\"\"\n SELECT COUNT(p.id) AS total\n FROM products AS p INNER JOIN {VAR_1} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {VAR_4['rating']}\n AND pt.name = '{VAR_5}'\n \"\"\"\n", "VAR_13.execute(VAR_11)\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_5(VAR_7):...\n", "return int(VAR_13.fetchone()['total'])\n", "VAR_13 = VAR_12.cursor()\n", "VAR_11 = f\"\"\"string{VAR_0} AS p INNER JOIN {VAR_1} AS pt ON\n p.product_type_id=pt.id WHERE p.id={VAR_7}\n \"\"\"\n", "VAR_13.execute(VAR_11)\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_6(VAR_7, VAR_4):...\n", "return int(VAR_13.fetchone()['total'])\n", "VAR_13 = VAR_12.cursor()\n", "VAR_10 = (VAR_4['page'] - 1) * VAR_4['perPage']\n", "VAR_13.execute(VAR_11)\n", "VAR_11 = f\"\"\"\n SELECT u.username, r.id, r.title, r.comment, r.rating, r.timestamp\n FROM {VAR_0} p, {VAR_2} r, {VAR_3} u\n WHERE p.id={VAR_7} && r.user_id=u.id && r.product_id=p.id\n ORDER BY r.id DESC \n LIMIT {VAR_10}, {VAR_4['perPage']}\n \"\"\"\n", "return VAR_13.fetchone()\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_7(VAR_4, VAR_6):...\n", "VAR_13 = VAR_12.cursor()\n", "VAR_11 = f\"\"\"\n SELECT COUNT(p.id) AS total\n FROM {VAR_0} AS p INNER JOIN {VAR_1} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {VAR_4['rating']}\n AND p.name LIKE '%{VAR_6}%'\n \"\"\"\n", "VAR_13.execute(VAR_11)\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_8(VAR_7):...\n", "return VAR_13.fetchall()\n", "VAR_13 = VAR_12.cursor()\n", "VAR_11 = f\"\"\"\n SELECT COUNT(r.id) AS total\n FROM {VAR_2} AS r\n WHERE r.product_id={VAR_7}\n \"\"\"\n", "VAR_13.execute(VAR_11)\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "def FUNC_9(VAR_7, VAR_8, VAR_9):...\n", "return int(VAR_13.fetchone()['total'])\n", "VAR_13 = VAR_12.cursor()\n", "VAR_11 = f\"\"\"\n INSERT 
INTO {VAR_2} (user_id, product_id, title, comment, rating)\n VALUES({VAR_8}, {VAR_7}, '{VAR_9['title']}',\n '{VAR_9['comment']}', {VAR_9['rating']})\n \"\"\"\n", "VAR_13.execute(VAR_11)\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "return int(VAR_13.fetchone()['total'])\n", "VAR_13 = VAR_12.cursor()\n", "VAR_13.execute(VAR_11)\n", "VAR_12.commit()\n" ]
[ "import pymysql\n", "from config import create_connection\n", "PRODUCTS_TABLE = 'products'\n", "PRODUCTS_TYPES_TABLE = 'product_types'\n", "REVIEWS_TABLE = 'reviews'\n", "USERS_TABLE = 'users'\n", "def get_products(filters):...\n", "offset = (filters['page'] - 1) * filters['perPage']\n", "sql_query = f\"\"\"\n SELECT p.id, p.ean, p.name, p.description, pt.name AS type, p.company, p.price, p.rating, p.weight, p.quantity, p.image_url\n FROM {PRODUCTS_TABLE} AS p INNER JOIN {PRODUCTS_TYPES_TABLE} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {filters['rating']}\n ORDER BY p.id LIMIT {offset}, {filters['perPage']}\n \"\"\"\n", "connection = create_connection()\n", "connection.close()\n", "def get_department_products(filters, department):...\n", "cursor = connection.cursor()\n", "offset = (filters['page'] - 1) * filters['perPage']\n", "cursor.execute(sql_query)\n", "sql_query = f\"\"\"\n SELECT p.id, p.ean, p.name, p.description, pt.name AS type, p.company, p.price, p.rating, p.weight, p.quantity, p.image_url\n FROM {PRODUCTS_TABLE} AS p INNER JOIN {PRODUCTS_TYPES_TABLE} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {filters['rating']}\n AND pt.name = '{department}'\n ORDER BY p.id LIMIT {offset}, {filters['perPage']}\n \"\"\"\n", "return cursor.fetchall()\n", "connection = create_connection()\n", "connection.close()\n", "def search_products(filters, search):...\n", "cursor = connection.cursor()\n", "offset = (filters['page'] - 1) * filters['perPage']\n", "cursor.execute(sql_query)\n", "sql_query = f\"\"\"\n SELECT p.id, p.ean, p.name, p.description, pt.name AS type, p.company, p.price, p.rating, p.weight, p.quantity, p.image_url\n FROM {PRODUCTS_TABLE} AS p INNER JOIN {PRODUCTS_TYPES_TABLE} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {filters['rating']}\n AND p.name LIKE '%{search}%'\n ORDER BY p.id LIMIT {offset}, {filters['perPage']}\n \"\"\"\n", "return cursor.fetchall()\n", "connection = create_connection()\n", "connection.close()\n", "def get_total_products(filters):...\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT COUNT(p.id) AS total\n FROM products AS p\n WHERE p.rating >= {filters['rating']}\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def get_total_departments_products(filters, department):...\n", "return cursor.fetchall()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT COUNT(p.id) AS total\n FROM products AS p INNER JOIN {PRODUCTS_TYPES_TABLE} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {filters['rating']}\n AND pt.name = '{department}'\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def get_product(productId):...\n", "return int(cursor.fetchone()['total'])\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT p.ean, p.name, p.description, pt.name AS type, p.company, p.price, p.rating, p.weight, p.quantity, p.image_url\n FROM {PRODUCTS_TABLE} AS p INNER JOIN {PRODUCTS_TYPES_TABLE} AS pt ON\n p.product_type_id=pt.id WHERE p.id={productId}\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def get_product_reviews(productId, filters):...\n", "return int(cursor.fetchone()['total'])\n", "cursor = connection.cursor()\n", "offset = (filters['page'] - 1) * filters['perPage']\n", "cursor.execute(sql_query)\n", "sql_query = f\"\"\"\n SELECT u.username, r.id, r.title, r.comment, r.rating, r.timestamp\n FROM {PRODUCTS_TABLE} 
p, {REVIEWS_TABLE} r, {USERS_TABLE} u\n WHERE p.id={productId} && r.user_id=u.id && r.product_id=p.id\n ORDER BY r.id DESC \n LIMIT {offset}, {filters['perPage']}\n \"\"\"\n", "return cursor.fetchone()\n", "connection = create_connection()\n", "connection.close()\n", "def get_total_searched_products(filters, search):...\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT COUNT(p.id) AS total\n FROM {PRODUCTS_TABLE} AS p INNER JOIN {PRODUCTS_TYPES_TABLE} AS pt ON\n p.product_type_id=pt.id\n WHERE p.rating >= {filters['rating']}\n AND p.name LIKE '%{search}%'\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def get_total_product_reviews(productId):...\n", "return cursor.fetchall()\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n SELECT COUNT(r.id) AS total\n FROM {REVIEWS_TABLE} AS r\n WHERE r.product_id={productId}\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "def add_product_review(productId, userId, review):...\n", "return int(cursor.fetchone()['total'])\n", "cursor = connection.cursor()\n", "sql_query = f\"\"\"\n INSERT INTO {REVIEWS_TABLE} (user_id, product_id, title, comment, rating)\n VALUES({userId}, {productId}, '{review['title']}',\n '{review['comment']}', {review['rating']})\n \"\"\"\n", "cursor.execute(sql_query)\n", "connection = create_connection()\n", "connection.close()\n", "return int(cursor.fetchone()['total'])\n", "cursor = connection.cursor()\n", "cursor.execute(sql_query)\n", "connection.commit()\n" ]
[ 0, 0, 4, 4, 4, 4, 0, 0, 4, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Return'", "Assign'", "Expr'", "Expr'" ]
[ "@auth.require(acl.is_bot)...\n", "\"\"\"docstring\"\"\"\n", "VAR_28, VAR_14, VAR_9, VAR_25, VAR_24, VAR_26 = self._process()\n", "VAR_15 = VAR_25.get('sleep_streak', 0)\n", "VAR_16 = bool(VAR_26)\n", "VAR_30 = 'bot_inactive' if VAR_16 else 'bot_active'\n", "stats.add_entry(VAR_30=action, VAR_14=bot_id, VAR_24=dimensions)\n", "def FUNC_14(VAR_31, VAR_17=None, VAR_32=None):...\n", "bot_management.bot_event(VAR_31=event_type, VAR_14=bot_id, external_ip=self\n .request.remote_addr, VAR_24=dimensions, VAR_25=state, VAR_9=version,\n VAR_16=quarantined, VAR_17=task_id, VAR_32=task_name, VAR_8=quarantined_msg\n )\n", "VAR_18 = bot_code.get_bot_version(self.request.host_url)\n", "if VAR_9 != VAR_18:\n", "FUNC_14('request_update')\n", "if VAR_16:\n", "self._cmd_update(VAR_18)\n", "FUNC_14('request_sleep')\n", "VAR_33, VAR_34 = bot_management.should_restart_bot(VAR_14, VAR_25)\n", "return\n", "self._cmd_sleep(VAR_15, VAR_16)\n", "if VAR_33:\n", "return\n", "FUNC_14('request_restart')\n", "VAR_4, VAR_54 = task_scheduler.bot_reap_task(VAR_24, VAR_14, VAR_9, VAR_25.\n get('lease_expiration_ts'))\n", "self.abort(500, 'Deadline')\n", "self._cmd_restart(VAR_34)\n", "if not VAR_4:\n", "return\n", "FUNC_14('request_sleep')\n", "if VAR_4.properties.is_terminate:\n", "logging.exception('Dang, exception after reaping')\n", "self._cmd_sleep(VAR_15, VAR_16)\n", "FUNC_14('bot_terminate', VAR_17=run_result.task_id)\n", "FUNC_14('request_task', VAR_17=run_result.task_id, VAR_32=request.name)\n", "return\n", "self._cmd_terminate(VAR_54.task_id)\n", "self._cmd_run(VAR_4, VAR_54.key, VAR_14)\n" ]
[ "@auth.require(acl.is_bot)...\n", "\"\"\"docstring\"\"\"\n", "_request, bot_id, version, state, dimensions, quarantined_msg = self._process()\n", "sleep_streak = state.get('sleep_streak', 0)\n", "quarantined = bool(quarantined_msg)\n", "action = 'bot_inactive' if quarantined else 'bot_active'\n", "stats.add_entry(action=action, bot_id=bot_id, dimensions=dimensions)\n", "def bot_event(event_type, task_id=None, task_name=None):...\n", "bot_management.bot_event(event_type=event_type, bot_id=bot_id, external_ip=\n self.request.remote_addr, dimensions=dimensions, state=state, version=\n version, quarantined=quarantined, task_id=task_id, task_name=task_name,\n message=quarantined_msg)\n", "expected_version = bot_code.get_bot_version(self.request.host_url)\n", "if version != expected_version:\n", "bot_event('request_update')\n", "if quarantined:\n", "self._cmd_update(expected_version)\n", "bot_event('request_sleep')\n", "needs_restart, restart_message = bot_management.should_restart_bot(bot_id,\n state)\n", "return\n", "self._cmd_sleep(sleep_streak, quarantined)\n", "if needs_restart:\n", "return\n", "bot_event('request_restart')\n", "request, run_result = task_scheduler.bot_reap_task(dimensions, bot_id,\n version, state.get('lease_expiration_ts'))\n", "self.abort(500, 'Deadline')\n", "self._cmd_restart(restart_message)\n", "if not request:\n", "return\n", "bot_event('request_sleep')\n", "if request.properties.is_terminate:\n", "logging.exception('Dang, exception after reaping')\n", "self._cmd_sleep(sleep_streak, quarantined)\n", "bot_event('bot_terminate', task_id=run_result.task_id)\n", "bot_event('request_task', task_id=run_result.task_id, task_name=request.name)\n", "return\n", "self._cmd_terminate(run_result.task_id)\n", "self._cmd_run(request, run_result.key, bot_id)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Return'", "Expr'", "Condition", "Return'", "Expr'", "Assign'", "Expr'", "Expr'", "Condition", "Return'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Expr'", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_1 = '/api/apps'\n", "VAR_2 = self.client.post(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_3 = VAR_2.data['id']\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 1)\n", "VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_4 = {'web': 0}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 204)\n", "chaos.START_ERROR_RATE = 0.5\n", "VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_4 = {'web': 20}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 204)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 20)\n", "VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n", "self.assertEqual(VAR_5, set(['crashed', 'up']))\n" ]
[ "def test_start_chaos(self):...\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 0}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "chaos.START_ERROR_RATE = 0.5\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 20}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 20)\n", "states = set([c['state'] for c in response.data['results']])\n", "self.assertEqual(states, set(['crashed', 'up']))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_7(self):...\n", "self.mock(time, 'time', lambda : 126.0)\n", "self.mock(logging, 'error', lambda *VAR_15, **_kw: None)\n", "self.mock(bot_main, 'get_remote', lambda : self.server)\n", "self.mock(os_utilities, 'get_state', lambda : {'foo': 'bar'})\n", "VAR_13 = bot_main.get_attributes(None)\n", "self.expected_requests([(\n 'https://localhost:1/auth/api/v1/accounts/self/xsrf_token', {'data':\n VAR_13, 'headers': {'X-XSRF-Token-Request': '1'}}, {'xsrf_token':\n 'token'}), ('https://localhost:1/swarming/api/v1/bot/task_error/23', {\n 'data': {'id': VAR_13['dimensions']['id'][0], 'message': 'error',\n 'task_id': 23}, 'headers': {'X-XSRF-Token': 'token'}}, {})])\n", "VAR_14 = bot_main.get_bot()\n", "bot_main.post_error_task(VAR_14, 'error', 23)\n" ]
[ "def test_post_error_task(self):...\n", "self.mock(time, 'time', lambda : 126.0)\n", "self.mock(logging, 'error', lambda *_, **_kw: None)\n", "self.mock(bot_main, 'get_remote', lambda : self.server)\n", "self.mock(os_utilities, 'get_state', lambda : {'foo': 'bar'})\n", "expected_attribs = bot_main.get_attributes(None)\n", "self.expected_requests([(\n 'https://localhost:1/auth/api/v1/accounts/self/xsrf_token', {'data':\n expected_attribs, 'headers': {'X-XSRF-Token-Request': '1'}}, {\n 'xsrf_token': 'token'}), (\n 'https://localhost:1/swarming/api/v1/bot/task_error/23', {'data': {'id':\n expected_attribs['dimensions']['id'][0], 'message': 'error', 'task_id':\n 23}, 'headers': {'X-XSRF-Token': 'token'}}, {})])\n", "botobj = bot_main.get_bot()\n", "bot_main.post_error_task(botobj, 'error', 23)\n" ]
[ 0, 0, 0, 0, 5, 0, 5, 0, 5 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_11(**VAR_19):...\n", "VAR_39 = {'name': VAR_19.get('name'), 'description': VAR_19.get('description')}\n", "VAR_40 = FUNC_8('contests/add', VAR_16=add_args)\n", "VAR_41 = VAR_40.text\n", "VAR_42 = re.search('string', VAR_41)\n", "if VAR_42 is not None:\n", "VAR_18 = int(VAR_42.groups()[0])\n", "FUNC_8('contest/%s' % VAR_18, VAR_16=kwargs)\n", "return VAR_18\n" ]
[ "def add_contest(**kwargs):...\n", "add_args = {'name': kwargs.get('name'), 'description': kwargs.get(\n 'description')}\n", "resp = admin_req('contests/add', args=add_args)\n", "page = resp.text\n", "match = re.search(\n '<form enctype=\"multipart/form-data\" action=\"../contest/([0-9]+)\" method=\"POST\" name=\"edit_contest\" style=\"display:inline;\">'\n , page)\n", "if match is not None:\n", "contest_id = int(match.groups()[0])\n", "admin_req('contest/%s' % contest_id, args=kwargs)\n", "return contest_id\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_1(self, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = 0\n", "while VAR_11 < len(self.observers):\n", "VAR_12 = self.observers[VAR_11][0]\n", "if VAR_12 == VAR_1:\n", "VAR_11 += 1\n" ]
[ "def remove_observer(self, func):...\n", "\"\"\"docstring\"\"\"\n", "i = 0\n", "while i < len(self.observers):\n", "ofunc = self.observers[i][0]\n", "if ofunc == func:\n", "i += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "AugAssign'" ]
[ "def FUNC_1(self, VAR_4, VAR_10, VAR_11, VAR_12, VAR_13=None, VAR_14=None):...\n", "VAR_27, VAR_28, VAR_19, VAR_29 = (self.wz_sock, self.poll, self.\n sleep_ticker, self.wz)\n", "VAR_14 = VAR_14 if VAR_14 else self.wz_poll_timeout\n", "VAR_30 = wzrpc.RequestState(VAR_4)\n", "VAR_23 = self.wz.make_req_msg(VAR_10, VAR_11, VAR_12, VAR_30.accept, VAR_13)\n", "VAR_23.insert(0, b'')\n", "VAR_27.send_multipart(VAR_23)\n", "VAR_19.tick()\n", "while self.running.is_set():\n", "VAR_28(VAR_14 * 1000)\n", "if VAR_30.finished:\n", "if VAR_30.retry:\n", "VAR_34 = VAR_19.elapsed(False)\n", "VAR_23 = self.wz.make_req_msg(VAR_10, VAR_11, VAR_12, VAR_30.accept, VAR_13)\n", "return\n", "if VAR_34 >= VAR_14:\n", "VAR_23.insert(0, b'')\n", "VAR_19.tick()\n", "VAR_27.send_multipart(VAR_23)\n", "VAR_30.accept(None, 0, 255, [VAR_34])\n", "VAR_30.finished = False\n", "VAR_30.retry = False\n" ]
[ "def wz_wait_reply(self, fun, interface, method, data, reqid=None, timeout=None...\n", "s, p, t, wz = self.wz_sock, self.poll, self.sleep_ticker, self.wz\n", "timeout = timeout if timeout else self.wz_poll_timeout\n", "rs = wzrpc.RequestState(fun)\n", "msg = self.wz.make_req_msg(interface, method, data, rs.accept, reqid)\n", "msg.insert(0, b'')\n", "s.send_multipart(msg)\n", "t.tick()\n", "while self.running.is_set():\n", "p(timeout * 1000)\n", "if rs.finished:\n", "if rs.retry:\n", "elapsed = t.elapsed(False)\n", "msg = self.wz.make_req_msg(interface, method, data, rs.accept, reqid)\n", "return\n", "if elapsed >= timeout:\n", "msg.insert(0, b'')\n", "t.tick()\n", "s.send_multipart(msg)\n", "rs.accept(None, 0, 255, [elapsed])\n", "rs.finished = False\n", "rs.retry = False\n" ]
[ 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_1(self, VAR_4, VAR_5, VAR_7=True):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5['output'] = 'XML'\n", "VAR_16, VAR_17 = self._cliq_run(VAR_4, VAR_5, VAR_7)\n", "VAR_0.debug(_('CLIQ command returned %s'), VAR_16)\n", "VAR_18 = etree.fromstring(VAR_16)\n", "if VAR_7:\n", "VAR_41 = VAR_18.find('response')\n", "return VAR_18\n", "if VAR_41 is None:\n", "VAR_22 = _(\n 'Malformed response to CLIQ command %(verb)s %(cliq_args)s. Result=%(out)s'\n ) % {'verb': VAR_4, 'cliq_args': VAR_5, 'out': VAR_16}\n", "VAR_42 = VAR_41.attrib.get('result')\n", "if VAR_42 != '0':\n", "VAR_22 = _('Error running CLIQ command %(verb)s %(cliq_args)s. Result=%(out)s'\n ) % {'verb': VAR_4, 'cliq_args': VAR_5, 'out': VAR_16}\n" ]
[ "def _cliq_run_xml(self, verb, cliq_args, check_cliq_result=True):...\n", "\"\"\"docstring\"\"\"\n", "cliq_args['output'] = 'XML'\n", "out, _err = self._cliq_run(verb, cliq_args, check_cliq_result)\n", "LOG.debug(_('CLIQ command returned %s'), out)\n", "result_xml = etree.fromstring(out)\n", "if check_cliq_result:\n", "response_node = result_xml.find('response')\n", "return result_xml\n", "if response_node is None:\n", "msg = _(\n 'Malformed response to CLIQ command %(verb)s %(cliq_args)s. Result=%(out)s'\n ) % {'verb': verb, 'cliq_args': cliq_args, 'out': out}\n", "result_code = response_node.attrib.get('result')\n", "if result_code != '0':\n", "msg = _('Error running CLIQ command %(verb)s %(cliq_args)s. Result=%(out)s'\n ) % {'verb': verb, 'cliq_args': cliq_args, 'out': out}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "\"\"\"A helper script for wrapping url calls.\"\"\"\n", "import hashlib\n", "import httplib\n", "import logging\n", "import math\n", "import os\n", "import random\n", "import socket\n", "import time\n", "import urllib\n", "import urllib2\n", "import urlparse\n", "from common import swarm_constants\n", "VAR_0 = 4\n", "VAR_1 = 5 * 60\n", "def FUNC_0(VAR_2, VAR_3=None, VAR_4=None, VAR_5=5, VAR_6=None, VAR_7='POST'):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_5 <= 0:\n", "logging.error('UrlOpen(%s): Invalid number of tries: %d', VAR_2, VAR_5)\n", "if VAR_6 and VAR_6 < 0:\n", "return None\n", "logging.error('UrlOpen(%s): Invalid wait duration: %d', VAR_2, VAR_6)\n", "VAR_3 = VAR_3 or {}\n", "return None\n", "if VAR_17.COUNT_KEY in VAR_3:\n", "logging.error(\"UrlOpen(%s): key '%s' is duplicate.\", VAR_2, VAR_17.COUNT_KEY)\n", "VAR_11 = None\n", "return None\n", "for attempt in range(VAR_5):\n", "VAR_3[VAR_17.COUNT_KEY] = attempt\n", "logging.error('UrlOpen(%s): Unable to open after %d attempts', VAR_2, VAR_5)\n", "for VAR_18, VAR_9 in VAR_3.iteritems():\n", "if e.code >= 500:\n", "if VAR_11 is not None:\n", "return None\n", "if isinstance(VAR_9, basestring):\n", "VAR_20 = urllib.urlencode(VAR_3)\n", "logging.warning('UrlOpen(%s): attempt %d: %s ', VAR_2, attempt, e)\n", "logging.exception('UrlOpen(%s): %s', VAR_2, e)\n", "logging.info('UrlOpen(%s) got %d bytes.', VAR_2, len(VAR_11))\n", "if attempt != VAR_5 - 1:\n", "VAR_3[VAR_18] = VAR_9.encode('utf-8', 'xmlcharrefreplace')\n", "if VAR_7 == 'POSTFORM':\n", "logging.warning('UrlOpen(%s): attempt %d: %s', VAR_2, attempt, e)\n", "return None\n", "return VAR_11\n", "if VAR_6 is None:\n", "VAR_16, VAR_15 = FUNC_3(VAR_10=data.iteritems(), VAR_4=files)\n", "if VAR_7 == 'POST':\n", "VAR_23 = random.random() * 3 + math.pow(1.5, attempt + 1)\n", "VAR_23 = VAR_6\n", "VAR_15 = VAR_15 or ''\n", "VAR_11 = urllib2.urlopen(VAR_2, VAR_20, timeout=URL_OPEN_TIMEOUT).read()\n", "VAR_22 = list(urlparse.urlparse(VAR_2))\n", "VAR_23 = min(10, max(0.1, VAR_23))\n", "time.sleep(VAR_23)\n", "VAR_21 = urllib2.Request(VAR_2, VAR_3=body)\n", "VAR_22[VAR_0] = VAR_20\n", "VAR_21.add_header('Content-Type', VAR_16)\n", "VAR_2 = urlparse.urlunparse(VAR_22)\n", "VAR_21.add_header('Content-Length', len(VAR_15))\n", "VAR_11 = urllib2.urlopen(VAR_2, timeout=URL_OPEN_TIMEOUT).read()\n", "VAR_11 = urllib2.urlopen(VAR_21, timeout=URL_OPEN_TIMEOUT).read()\n" ]
[ "\"\"\"A helper script for wrapping url calls.\"\"\"\n", "import hashlib\n", "import httplib\n", "import logging\n", "import math\n", "import os\n", "import random\n", "import socket\n", "import time\n", "import urllib\n", "import urllib2\n", "import urlparse\n", "from common import swarm_constants\n", "QUERY_INDEX = 4\n", "URL_OPEN_TIMEOUT = 5 * 60\n", "def UrlOpen(url, data=None, files=None, max_tries=5, wait_duration=None,...\n", "\"\"\"docstring\"\"\"\n", "if max_tries <= 0:\n", "logging.error('UrlOpen(%s): Invalid number of tries: %d', url, max_tries)\n", "if wait_duration and wait_duration < 0:\n", "return None\n", "logging.error('UrlOpen(%s): Invalid wait duration: %d', url, wait_duration)\n", "data = data or {}\n", "return None\n", "if swarm_constants.COUNT_KEY in data:\n", "logging.error(\"UrlOpen(%s): key '%s' is duplicate.\", url, swarm_constants.\n COUNT_KEY)\n", "url_response = None\n", "return None\n", "for attempt in range(max_tries):\n", "data[swarm_constants.COUNT_KEY] = attempt\n", "logging.error('UrlOpen(%s): Unable to open after %d attempts', url, max_tries)\n", "for key, value in data.iteritems():\n", "if e.code >= 500:\n", "if url_response is not None:\n", "return None\n", "if isinstance(value, basestring):\n", "encoded_data = urllib.urlencode(data)\n", "logging.warning('UrlOpen(%s): attempt %d: %s ', url, attempt, e)\n", "logging.exception('UrlOpen(%s): %s', url, e)\n", "logging.info('UrlOpen(%s) got %d bytes.', url, len(url_response))\n", "if attempt != max_tries - 1:\n", "data[key] = value.encode('utf-8', 'xmlcharrefreplace')\n", "if method == 'POSTFORM':\n", "logging.warning('UrlOpen(%s): attempt %d: %s', url, attempt, e)\n", "return None\n", "return url_response\n", "if wait_duration is None:\n", "content_type, body = EncodeMultipartFormData(fields=data.iteritems(), files\n =files)\n", "if method == 'POST':\n", "duration = random.random() * 3 + math.pow(1.5, attempt + 1)\n", "duration = wait_duration\n", "body = body or ''\n", "url_response = urllib2.urlopen(url, encoded_data, timeout=URL_OPEN_TIMEOUT\n ).read()\n", "url_parts = list(urlparse.urlparse(url))\n", "duration = min(10, max(0.1, duration))\n", "time.sleep(duration)\n", "request = urllib2.Request(url, data=body)\n", "url_parts[QUERY_INDEX] = encoded_data\n", "request.add_header('Content-Type', content_type)\n", "url = urlparse.urlunparse(url_parts)\n", "request.add_header('Content-Length', len(body))\n", "url_response = urllib2.urlopen(url, timeout=URL_OPEN_TIMEOUT).read()\n", "url_response = urllib2.urlopen(request, timeout=URL_OPEN_TIMEOUT).read()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Condition", "Expr'", "Condition", "Return'", "Expr'", "Assign'", "Return'", "Condition", "Expr'", "Assign'", "Return'", "For", "Assign'", "Expr'", "For", "Condition", "Condition", "Return'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Condition", "Expr'", "Return'", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_19(self):...\n", "print('INFO: Reconnecting...')\n", "return DeadConnectionDetector.reconnect(self)\n" ]
[ "def reconnect(self):...\n", "print('INFO: Reconnecting...')\n", "return DeadConnectionDetector.reconnect(self)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "\"\"\"Handlers for customizing oauthclient endpoints.\"\"\"\n", "import six\n", "from flask import current_app, flash, redirect, render_template, request, session, url_for\n", "from flask.ext.login import current_user\n", "from functools import partial, wraps\n", "from werkzeug.utils import import_string\n", "from invenio.base.globals import cfg\n", "from .client import oauth, signup_handlers\n", "from .errors import OAuthClientError, OAuthError, OAuthRejectedRequestError, OAuthResponseError\n", "from .forms import EmailSignUpForm\n", "from .models import RemoteAccount, RemoteToken\n", "from .utils import oauth_authenticate, oauth_get_user, oauth_register\n", "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], VAR_0)\n" ]
[ "\"\"\"Handlers for customizing oauthclient endpoints.\"\"\"\n", "import six\n", "from flask import current_app, flash, redirect, render_template, request, session, url_for\n", "from flask.ext.login import current_user\n", "from functools import partial, wraps\n", "from werkzeug.utils import import_string\n", "from invenio.base.globals import cfg\n", "from .client import oauth, signup_handlers\n", "from .errors import OAuthClientError, OAuthError, OAuthRejectedRequestError, OAuthResponseError\n", "from .forms import EmailSignUpForm\n", "from .models import RemoteAccount, RemoteToken\n", "from .utils import oauth_authenticate, oauth_get_user, oauth_register\n", "def token_session_key(remote_app):...\n", "\"\"\"docstring\"\"\"\n", "return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], remote_app)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_8(self):...\n", "self.assertEquals('123456 test course: Fall 2011 day 1 -> test_hook_url',\n str(self.course_hook))\n" ]
[ "def test_course_hook_unicode_string(self):...\n", "self.assertEquals('123456 test course: Fall 2011 day 1 -> test_hook_url',\n str(self.course_hook))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@property...\n", "return self._subworkflows.values()\n" ]
[ "@property...\n", "return self._subworkflows.values()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_2(self):...\n", "VAR_30 = [b'GLOBAL']\n", "VAR_30.extend(wzrpc.make_sig_msg(b'WZWorker', b'terminate', []))\n", "if hasattr(self, 'th_sock'):\n", "self.th_sock.send_multipart(VAR_30)\n", "if hasattr(self, 'pr_sock'):\n", "self.pr_sock.send_multipart(VAR_30)\n" ]
[ "def terminate(self):...\n", "msg = [b'GLOBAL']\n", "msg.extend(wzrpc.make_sig_msg(b'WZWorker', b'terminate', []))\n", "if hasattr(self, 'th_sock'):\n", "self.th_sock.send_multipart(msg)\n", "if hasattr(self, 'pr_sock'):\n", "self.pr_sock.send_multipart(msg)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Expr'" ]