lines
sequencelengths 1
444
| raw_lines
sequencelengths 1
444
| label
sequencelengths 1
444
| type
sequencelengths 1
444
|
---|---|---|---|
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'VENIZIA', VAR_7='1')\n",
"FUNC_3(VAR_2, 'VENEZIA', VAR_7='2')\n",
"FUNC_3(VAR_2, 'VANSEA', VAR_7='3')\n",
"FUNC_3(VAR_2, 'WENSO', VAR_7='4')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='VENIZIA', VAR_10=[{'name': '----VENIZIA'}, {\n 'name': 'VENEZIA'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'VENIZIA', id='1')\n",
"seed_database_with(solr, 'VENEZIA', id='2')\n",
"seed_database_with(solr, 'VANSEA', id='3')\n",
"seed_database_with(solr, 'WENSO', id='4')\n",
"verify_results(client, jwt, query='VENIZIA', expected=[{'name':\n '----VENIZIA'}, {'name': 'VENEZIA'}])\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(self, VAR_64):...\n",
"self._onerror = VAR_64\n"
] | [
"def onerror(self, func):...\n",
"self._onerror = func\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_9(self, VAR_6):...\n",
"VAR_22 = \"DELETE FROM article_reference WHERE query_hash = '%s'\" % VAR_6\n",
"self.cur.execute(VAR_22)\n",
"self.conn.commit()\n"
] | [
"def del_reference_by_qhash(self, qhash):...\n",
"sql = \"DELETE FROM article_reference WHERE query_hash = '%s'\" % qhash\n",
"self.cur.execute(sql)\n",
"self.conn.commit()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@scenario('../features/pods_alive.feature', 'Exec in Pods')...\n",
""
] | [
"@scenario('../features/pods_alive.feature', 'Exec in Pods')...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_1(self):...\n",
"VAR_7 = request.form['Name']\n",
"VAR_8 = request.form['Password']\n",
"VAR_3 = VAR_2.execute(\"SELECT PASSWORD FROM USERS WHERE NAME = '\" + VAR_7 + \"'\"\n )\n",
"VAR_9 = ''\n",
"for row in VAR_3:\n",
"VAR_9 = row[0]\n",
"if VAR_9 is '':\n",
"print(VAR_9)\n",
"return 'user does not exist', 403\n",
"if VAR_9 != VAR_8:\n",
"return 'Wrong password', 403\n",
"VAR_10 = jwt.encode({'name': '' + VAR_7 + ''}, 'scalable', algorithm='HS256')\n",
"VAR_10 = VAR_10.decode('UTF-8')\n",
"return {'token': '' + VAR_10 + ''}\n"
] | [
"def post(self):...\n",
"name = request.form['Name']\n",
"password = request.form['Password']\n",
"query = conn.execute(\"SELECT PASSWORD FROM USERS WHERE NAME = '\" + name + \"'\")\n",
"realPassword = ''\n",
"for row in query:\n",
"realPassword = row[0]\n",
"if realPassword is '':\n",
"print(realPassword)\n",
"return 'user does not exist', 403\n",
"if realPassword != password:\n",
"return 'Wrong password', 403\n",
"encoded = jwt.encode({'name': '' + name + ''}, 'scalable', algorithm='HS256')\n",
"encoded = encoded.decode('UTF-8')\n",
"return {'token': '' + encoded + ''}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_43(self, VAR_41, VAR_43):...\n",
"self[VAR_41:VAR_41 + 1] = VAR_43\n",
"VAR_55 = len(VAR_43) - 1\n",
"for VAR_40, (i, j) in self._names.items():\n",
"if i > VAR_41:\n",
"self._names[VAR_40] = i + VAR_55, j + VAR_55\n",
"if i == VAR_41:\n",
"self.set_name(VAR_40, i, VAR_42=i + len(items))\n"
] | [
"def insert_items(self, index, items):...\n",
"self[index:index + 1] = items\n",
"add = len(items) - 1\n",
"for name, (i, j) in self._names.items():\n",
"if i > index:\n",
"self._names[name] = i + add, j + add\n",
"if i == index:\n",
"self.set_name(name, i, end=i + len(items))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.eng is not None and self.backend == 'Simulator' or self.backend == 'IBMBackend':\n",
"pq.ops.All(pq.ops.Measure) | self.reg\n"
] | [
"def _deallocate(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.eng is not None and self.backend == 'Simulator' or self.backend == 'IBMBackend':\n",
"pq.ops.All(pq.ops.Measure) | self.reg\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'"
] |
[
"def FUNC_13(VAR_7, VAR_1, VAR_12=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(VAR_7, six.text_type):\n",
"VAR_7 = import_string(VAR_7)\n",
"@wraps(VAR_7)...\n",
"if VAR_12:\n",
"return VAR_7(VAR_8[0], VAR_1, *VAR_8[1:], **kwargs)\n",
"return VAR_7(VAR_1, *VAR_8, **kwargs)\n"
] | [
"def make_handler(f, remote, with_response=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(f, six.text_type):\n",
"f = import_string(f)\n",
"@wraps(f)...\n",
"if with_response:\n",
"return f(args[0], remote, *args[1:], **kwargs)\n",
"return f(remote, *args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6):...\n",
"self._attributes = VAR_2\n",
"self._base_dir = VAR_5\n",
"self._remote = VAR_1\n",
"self._server = VAR_3\n",
"self._server_version = VAR_4\n",
"self._shutdown_hook = VAR_6\n",
"self._timers = []\n",
"self._timers_dying = False\n",
"self._timers_lock = threading.Lock()\n"
] | [
"def __init__(self, remote, attributes, server, server_version, base_dir,...\n",
"self._attributes = attributes\n",
"self._base_dir = base_dir\n",
"self._remote = remote\n",
"self._server = server\n",
"self._server_version = server_version\n",
"self._shutdown_hook = shutdown_hook\n",
"self._timers = []\n",
"self._timers_dying = False\n",
"self._timers_lock = threading.Lock()\n"
] | [
0,
0,
0,
5,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(self, VAR_10):...\n",
"self.window.set_headerbar()\n",
"self.window.close_tab(self.parent_widget)\n"
] | [
"def on_headerbar_back_button_clicked(self, widget):...\n",
"self.window.set_headerbar()\n",
"self.window.close_tab(self.parent_widget)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"from flask import Blueprint, request\n",
"from modules.MessagesManager.api.functions import db_getMessage, db_sendMessage\n",
"VAR_0 = Blueprint('messages', __name__)\n",
"@VAR_0.route('/message/send')...\n",
"if request.method == 'PUT':\n",
"return db_sendMessage(request.get_json())\n",
"@VAR_0.route('/messages/<int:dialog_id>')...\n",
"if request.method == 'GET':\n",
"return db_getMessage(VAR_1)\n",
"@VAR_0.route('/messages')...\n",
"if request.method == 'GET':\n",
"return db_getMessage(VAR_1)\n"
] | [
"from flask import Blueprint, request\n",
"from modules.MessagesManager.api.functions import db_getMessage, db_sendMessage\n",
"messages_module = Blueprint('messages', __name__)\n",
"@messages_module.route('/message/send')...\n",
"if request.method == 'PUT':\n",
"return db_sendMessage(request.get_json())\n",
"@messages_module.route('/messages/<int:dialog_id>')...\n",
"if request.method == 'GET':\n",
"return db_getMessage(dialog_id)\n",
"@messages_module.route('/messages')...\n",
"if request.method == 'GET':\n",
"return db_getMessage(dialog_id)\n"
] | [
4,
0,
0,
4,
0,
4,
4,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Condition",
"Condition",
"Return'",
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_6(self, VAR_53):...\n",
"if VAR_53:\n",
"self.error()\n",
"VAR_23 = _force_utf8(VAR_53)\n",
"return self.error(errors.USER_DOESNT_EXIST)\n",
"return Account._by_name(VAR_23)\n"
] | [
"def run(self, username):...\n",
"if username:\n",
"self.error()\n",
"name = _force_utf8(username)\n",
"return self.error(errors.USER_DOESNT_EXIST)\n",
"return Account._by_name(name)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Return'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.delete_snapshot(VAR_8)\n",
"self.common.client_logout()\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.delete_snapshot(snapshot)\n",
"self.common.client_logout()\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(VAR_5, VAR_6, VAR_7, VAR_8, VAR_3, VAR_9):...\n",
"VAR_0, VAR_1 = GetCLInfo(VAR_8)\n",
"VAR_2 = build_util.CreateBuildId(VAR_5, VAR_6, VAR_7)\n",
"VAR_18 = FUNC_0(VAR_0, VAR_1, VAR_2, VAR_3) and FUNC_1(VAR_5, VAR_6, VAR_7,\n VAR_0, VAR_1, VAR_3)\n",
"if VAR_18:\n",
"FUNC_2(VAR_5, VAR_6, VAR_7, VAR_8, VAR_3, VAR_9)\n",
"return VAR_18\n"
] | [
"def _UpdateSuspectedCLAndAnalysis(master_name, builder_name, build_number,...\n",
"repo_name, revision = GetCLInfo(cl_info)\n",
"build_key = build_util.CreateBuildId(master_name, builder_name, build_number)\n",
"success = _UpdateSuspectedCL(repo_name, revision, build_key, cl_status\n ) and _UpdateAnalysis(master_name, builder_name, build_number,\n repo_name, revision, cl_status)\n",
"if success:\n",
"_AppendTriageHistoryRecord(master_name, builder_name, build_number, cl_info,\n cl_status, user_name)\n",
"return success\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_1(VAR_2):...\n",
"ensure_server()\n",
"logging.disable('INFO')\n",
"PixelatedSite.disable_csp_requests()\n",
"VAR_0 = AppTestClient()\n",
"FUNC_0(VAR_0, UserAgentMode(is_single_user=True))\n",
"VAR_0.listenTCP()\n",
"VAR_6 = Proxy(proxy_port='8889', app_port='4567')\n",
"FeaturesResource.DISABLED_FEATURES.append('autoRefresh')\n",
"VAR_2.client = VAR_0\n",
"VAR_2.call_to_terminate_proxy = VAR_6.run_on_a_thread()\n",
"VAR_7 = AppTestClient()\n",
"FUNC_0(VAR_7, UserAgentMode(is_single_user=False))\n",
"VAR_7.listenTCP(port=MULTI_USER_PORT)\n",
"VAR_2.multi_user_client = VAR_7\n"
] | [
"def before_all(context):...\n",
"ensure_server()\n",
"logging.disable('INFO')\n",
"PixelatedSite.disable_csp_requests()\n",
"client = AppTestClient()\n",
"start_app_test_client(client, UserAgentMode(is_single_user=True))\n",
"client.listenTCP()\n",
"proxy = Proxy(proxy_port='8889', app_port='4567')\n",
"FeaturesResource.DISABLED_FEATURES.append('autoRefresh')\n",
"context.client = client\n",
"context.call_to_terminate_proxy = proxy.run_on_a_thread()\n",
"multi_user_client = AppTestClient()\n",
"start_app_test_client(multi_user_client, UserAgentMode(is_single_user=False))\n",
"multi_user_client.listenTCP(port=MULTI_USER_PORT)\n",
"context.multi_user_client = multi_user_client\n"
] | [
0,
0,
0,
0,
0,
0,
5,
5,
0,
0,
5,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"def FUNC_5(VAR_8):...\n",
"VAR_32 = ['< 1 day', '1 day']\n",
"for VAR_24 in VAR_8[2:-1]:\n",
"VAR_32.append(\n f'{VAR_24.left}-{VAR_24.right - 1 if VAR_24.open_right else VAR_24.right} days'\n )\n",
"VAR_32 = VAR_32 + ['90+ days']\n",
"return VAR_32\n"
] | [
"def days_interval_to_text(interval_list):...\n",
"result = ['< 1 day', '1 day']\n",
"for i in interval_list[2:-1]:\n",
"result.append(f'{i.left}-{i.right - 1 if i.open_right else i.right} days')\n",
"result = result + ['90+ days']\n",
"return result\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"VAR_15 = self._server_popen.poll()\n",
"return VAR_15 is None\n"
] | [
"def _IsServerAlive(self):...\n",
"returncode = self._server_popen.poll()\n",
"return returncode is None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"if VAR_0 is None and VAR_1 is None:\n",
"return None, None\n",
"VAR_0 = VAR_0 if VAR_0 is not None else VAR_1\n",
"VAR_1 = VAR_1 if VAR_1 is not None else VAR_0\n",
"return min(VAR_0, VAR_1), max(VAR_0, VAR_1)\n"
] | [
"def _GetLowerAndUpperBoundCommitPositions(lower_bound, upper_bound):...\n",
"if lower_bound is None and upper_bound is None:\n",
"return None, None\n",
"lower_bound = lower_bound if lower_bound is not None else upper_bound\n",
"upper_bound = upper_bound if upper_bound is not None else lower_bound\n",
"return min(lower_bound, upper_bound), max(lower_bound, upper_bound)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(VAR_3, VAR_7, VAR_8=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = {'currentProvider': None, 'providers': [], 'secondaryProviders': [],\n 'finishAuthUrl': None, 'errorMessage': None,\n 'registerFormSubmitButtonText': _('Create Account')}\n",
"if third_party_auth.is_enabled():\n",
"VAR_20 = enterprise_customer_for_request(VAR_3)\n",
"return VAR_5\n",
"if not VAR_20:\n",
"for enabled in third_party_auth.provider.Registry.displayed_for_login(VAR_8\n",
"VAR_36 = pipeline.get(VAR_3)\n",
"VAR_44 = {'id': enabled.provider_id, 'name': enabled.name, 'iconClass': \n enabled.icon_class or None, 'iconImage': enabled.icon_image.url if\n enabled.icon_image else None, 'loginUrl': pipeline.get_login_url(\n enabled.provider_id, pipeline.AUTH_ENTRY_LOGIN, redirect_url=\n redirect_to), 'registerUrl': pipeline.get_login_url(enabled.provider_id,\n pipeline.AUTH_ENTRY_REGISTER, redirect_url=redirect_to)}\n",
"if VAR_36 is not None:\n",
"VAR_5['providers' if not enabled.secondary else 'secondaryProviders'].append(\n VAR_44)\n",
"VAR_41 = third_party_auth.provider.Registry.get_from_pipeline(VAR_36)\n",
"for msg in messages.get_messages(VAR_3):\n",
"if VAR_41 is not None:\n",
"if msg.extra_tags.split()[0] == 'social-auth':\n",
"VAR_5['currentProvider'] = VAR_41.name\n",
"VAR_5['errorMessage'] = _(unicode(msg))\n",
"VAR_5['finishAuthUrl'] = pipeline.get_complete_url(VAR_41.backend_name)\n",
"if VAR_41.skip_registration_form:\n",
"if not VAR_20:\n",
"VAR_5['autoSubmitRegForm'] = True\n",
"VAR_5['autoRegisterWelcomeMessage'] = (\n 'Thank you for joining {}. Just a couple steps before you start learning!'\n .format(configuration_helpers.get_value('PLATFORM_NAME', settings.\n PLATFORM_NAME)))\n",
"VAR_5['registerFormSubmitButtonText'] = _('Continue')\n"
] | [
"def _third_party_auth_context(request, redirect_to, tpa_hint=None):...\n",
"\"\"\"docstring\"\"\"\n",
"context = {'currentProvider': None, 'providers': [], 'secondaryProviders':\n [], 'finishAuthUrl': None, 'errorMessage': None,\n 'registerFormSubmitButtonText': _('Create Account')}\n",
"if third_party_auth.is_enabled():\n",
"enterprise_customer = enterprise_customer_for_request(request)\n",
"return context\n",
"if not enterprise_customer:\n",
"for enabled in third_party_auth.provider.Registry.displayed_for_login(tpa_hint\n",
"running_pipeline = pipeline.get(request)\n",
"info = {'id': enabled.provider_id, 'name': enabled.name, 'iconClass': \n enabled.icon_class or None, 'iconImage': enabled.icon_image.url if\n enabled.icon_image else None, 'loginUrl': pipeline.get_login_url(\n enabled.provider_id, pipeline.AUTH_ENTRY_LOGIN, redirect_url=\n redirect_to), 'registerUrl': pipeline.get_login_url(enabled.provider_id,\n pipeline.AUTH_ENTRY_REGISTER, redirect_url=redirect_to)}\n",
"if running_pipeline is not None:\n",
"context['providers' if not enabled.secondary else 'secondaryProviders'].append(\n info)\n",
"current_provider = third_party_auth.provider.Registry.get_from_pipeline(\n running_pipeline)\n",
"for msg in messages.get_messages(request):\n",
"if current_provider is not None:\n",
"if msg.extra_tags.split()[0] == 'social-auth':\n",
"context['currentProvider'] = current_provider.name\n",
"context['errorMessage'] = _(unicode(msg))\n",
"context['finishAuthUrl'] = pipeline.get_complete_url(current_provider.\n backend_name)\n",
"if current_provider.skip_registration_form:\n",
"if not enterprise_customer:\n",
"context['autoSubmitRegForm'] = True\n",
"context['autoRegisterWelcomeMessage'] = (\n 'Thank you for joining {}. Just a couple steps before you start learning!'\n .format(configuration_helpers.get_value('PLATFORM_NAME', settings.\n PLATFORM_NAME)))\n",
"context['registerFormSubmitButtonText'] = _('Continue')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@staticmethod...\n",
"return FUNC_1(CLASS_0._TalkToHandlerAsync('', VAR_7, 'GET', VAR_10))\n"
] | [
"@staticmethod...\n",
"return JsonFromFuture(BaseRequest._TalkToHandlerAsync('', handler, 'GET',\n timeout))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_2(VAR_7=None, VAR_8=0, VAR_9=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_7 is None:\n",
"return True\n",
"return False\n",
"VAR_21 = re.match('^([^;]+)(; length=([0-9]+))?$', VAR_7, re.IGNORECASE)\n",
"VAR_22 = parse_http_date(VAR_21.group(1))\n",
"VAR_23 = VAR_21.group(3)\n",
"if VAR_23 and int(VAR_23) != VAR_9:\n",
"if int(VAR_8) > VAR_22:\n"
] | [
"def was_modified_since(header=None, mtime=0, size=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if header is None:\n",
"return True\n",
"return False\n",
"matches = re.match('^([^;]+)(; length=([0-9]+))?$', header, re.IGNORECASE)\n",
"header_mtime = parse_http_date(matches.group(1))\n",
"header_len = matches.group(3)\n",
"if header_len and int(header_len) != size:\n",
"if int(mtime) > header_mtime:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition"
] |
[
"def FUNC_6(self, VAR_47):...\n",
"if VAR_101.user_is_admin:\n",
"return True\n",
"if VAR_101.user_is_loggedin:\n",
"VAR_18 = Thing._by_fullname(VAR_47, data=True)\n",
"abort(403, 'forbidden')\n",
"VAR_109 = VAR_18.subreddit_slow\n",
"if VAR_109.is_special(VAR_101.user):\n",
"return True\n"
] | [
"def run(self, thing_name):...\n",
"if c.user_is_admin:\n",
"return True\n",
"if c.user_is_loggedin:\n",
"item = Thing._by_fullname(thing_name, data=True)\n",
"abort(403, 'forbidden')\n",
"subreddit = item.subreddit_slow\n",
"if subreddit.is_special(c.user):\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"await self.bot.say('https://www.nintendo.co.jp/netinfo/en_US/index.html')\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"await self.bot.say('https://www.nintendo.co.jp/netinfo/en_US/index.html')\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n",
"VAR_7 = []\n",
"VAR_8 = self.pool.get('ir.model.data')\n",
"VAR_9 = self.pool.get('account.bank.statement')\n",
"VAR_10 = self.pool.get('account.journal')\n",
"VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n",
"VAR_11 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_11)))\n",
"VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"for journal in VAR_10.browse(VAR_2, VAR_3, VAR_12):\n",
"VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n",
"VAR_13 = self.pool.get('ir.model.data')\n",
"if not VAR_4:\n",
"VAR_14 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n",
"VAR_7.append(VAR_4[0])\n",
"VAR_15 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if VAR_14:\n",
"VAR_9.button_confirm_cash(VAR_2, VAR_3, VAR_4, VAR_5)\n",
"VAR_14 = VAR_13.browse(VAR_2, VAR_3, VAR_14, VAR_5=context).res_id\n",
"if VAR_15:\n",
"VAR_15 = VAR_13.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(VAR_7) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(VAR_14, 'tree'), (\n VAR_15, 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
"def close_statement(self, cr, uid, ids, context):...\n",
"\"\"\"docstring\"\"\"\n",
"company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n",
"list_statement = []\n",
"mod_obj = self.pool.get('ir.model.data')\n",
"statement_obj = self.pool.get('account.bank.statement')\n",
"journal_obj = self.pool.get('account.journal')\n",
"cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n",
"j_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n",
"journal_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"for journal in journal_obj.browse(cr, uid, journal_ids):\n",
"ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n",
"data_obj = self.pool.get('ir.model.data')\n",
"if not ids:\n",
"id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n",
"list_statement.append(ids[0])\n",
"id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if id2:\n",
"statement_obj.button_confirm_cash(cr, uid, ids, context)\n",
"id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n",
"if id3:\n",
"id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(list_statement) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(id2, 'tree'), (id3,\n 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_16, *VAR_17, **VAR_10):...\n",
"super().__init__(*VAR_17, **kwargs)\n",
"self.key_name = VAR_16\n"
] | [
"def __init__(self, key_name, *args, **kwargs):...\n",
"super().__init__(*args, **kwargs)\n",
"self.key_name = key_name\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def __init__(self, VAR_0):...\n",
"super(CLASS_9, self).__init__(self._init_helper(VAR_0.strip().split()) + '\\n')\n"
] | [
"def __init__(self, txt):...\n",
"super(ScissorEnv, self).__init__(self._init_helper(txt.strip().split()) + '\\n')\n"
] | [
0,
2
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(VAR_15):...\n",
"if VAR_15 == 1:\n",
"return '0.6 AND 1'\n",
"return '0.0 AND 0.4'\n"
] | [
"def get_taste_condition(value):...\n",
"if value == 1:\n",
"return '0.6 AND 1'\n",
"return '0.0 AND 0.4'\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_8(self, VAR_7, VAR_9, VAR_10=False):...\n",
"if isinstance(VAR_9, list) and not VAR_10:\n",
"self.__dict__[VAR_7] = []\n",
"self.__dict__[VAR_7] = VAR_9\n",
"self.extend(VAR_7, VAR_9)\n"
] | [
"def set(self, key, value, as_value=False):...\n",
"if isinstance(value, list) and not as_value:\n",
"self.__dict__[key] = []\n",
"self.__dict__[key] = value\n",
"self.extend(key, value)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = '/api/keys'\n",
"VAR_5 = {'id': 'mykey@box.local', 'public': VAR_3}\n",
"VAR_6 = self.client.post(VAR_4, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_6.status_code, 201)\n",
"VAR_6 = self.client.post(VAR_4, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_6.status_code, 400)\n"
] | [
"def _check_duplicate_key(self, pubkey):...\n",
"\"\"\"docstring\"\"\"\n",
"url = '/api/keys'\n",
"body = {'id': 'mykey@box.local', 'public': pubkey}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 400)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_4, VAR_5=False, VAR_6=False):...\n",
"self.serial = VAR_4\n",
"self.fail_br = VAR_5\n",
"self.fail_br_before_N = VAR_6\n"
] | [
"def __init__(self, serial, fail_br=False, fail_br_before_N=False):...\n",
"self.serial = serial\n",
"self.fail_br = fail_br\n",
"self.fail_br_before_N = fail_br_before_N\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(self, VAR_7):...\n",
"return 'FROM {}'.format(VAR_7)\n"
] | [
"def create_from(self, table_or_view):...\n",
"return 'FROM {}'.format(table_or_view)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, *VAR_2, **VAR_3):...\n",
"super(CLASS_0, self).__init__(*VAR_2, **kwargs)\n",
"self.cluster_vip = None\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(HpSanISCSIDriver, self).__init__(*args, **kwargs)\n",
"self.cluster_vip = None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_29(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.stat(VAR_21, VAR_23=False)\n"
] | [
"def lstat(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.stat(path, follow_symlinks=False)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_19():...\n",
"populate_test_database()\n",
"FUNC_1('first playlist')\n",
"VAR_11 = VAR_3.post('/videos/1/title/thumbnail')\n",
"assert VAR_11.json['status'] == 'OK'\n",
"VAR_11 = VAR_3.delete('/videos/1/2')\n",
"assert VAR_11.json['status'] == 'NOK'\n",
"assert VAR_11.json['message'] != None\n"
] | [
"def test_should_return_a_not_ok_status_when_deleting_a_video_from_an_unknown_playlist_id(...\n",
"populate_test_database()\n",
"create_playlist('first playlist')\n",
"response = test_app.post('/videos/1/title/thumbnail')\n",
"assert response.json['status'] == 'OK'\n",
"response = test_app.delete('/videos/1/2')\n",
"assert response.json['status'] == 'NOK'\n",
"assert response.json['message'] != None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"import urllib.parse as urlparse\n",
"import pytest\n",
"import sqlalchemy as sa\n",
"from pymash import cfg\n",
"from pymash import main\n",
"from pymash import tables\n",
"@pytest.fixture(scope='session')...\n",
"return FUNC_2(VAR_0, 'postgres')\n"
] | [
"import urllib.parse as urlparse\n",
"import pytest\n",
"import sqlalchemy as sa\n",
"from pymash import cfg\n",
"from pymash import main\n",
"from pymash import tables\n",
"@pytest.fixture(scope='session')...\n",
"return _get_engine(request, 'postgres')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Condition",
"Return'"
] |
[
"@VAR_2.errorhandler(404)...\n",
"return {'status': 404, 'msg': str(VAR_15)}\n"
] | [
"@app.errorhandler(404)...\n",
"return {'status': 404, 'msg': str(error)}\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_24(self, VAR_7, VAR_17):...\n",
"self.common.extend_volume(VAR_7, VAR_17)\n"
] | [
"def extend_volume(self, volume, new_size):...\n",
"self.common.extend_volume(volume, new_size)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __call__(self):...\n",
"return self\n"
] | [
"def __call__(self):...\n",
"return self\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6(VAR_2, VAR_5, VAR_4, VAR_7):...\n",
"VAR_8 = VAR_4.connect()\n",
"VAR_9 = VAR_8.cursor()\n",
"VAR_10 = \"SELECT userId FROM Users WHERE userEmail = '{0}'\".format(VAR_2)\n",
"VAR_9.execute(VAR_10)\n",
"VAR_11 = VAR_9.fetchone()\n",
"if VAR_11 is None:\n",
"return False\n",
"VAR_17 = (\n \"SELECT timeSlotId FROM TimeSlots WHERE calendarId = '{0}' AND userId = {1}\"\n .format(VAR_5, VAR_11[0]))\n",
"VAR_9.execute(VAR_17)\n",
"VAR_18 = VAR_9.fetchone()\n",
"if VAR_18:\n",
"VAR_14 = 'string'.format(VAR_7.get('0', ''), VAR_7.get('1', ''), VAR_7.get(\n '2', ''), VAR_7.get('3', ''), VAR_7.get('4', ''), VAR_7.get('5', ''),\n VAR_7.get('6', ''), VAR_7.get('7', ''), VAR_7.get('8', ''), VAR_7.get(\n '9', ''), VAR_7.get('10', ''), VAR_7.get('11', ''), VAR_7.get('12', ''),\n VAR_7.get('13', ''), VAR_7.get('14', ''), VAR_7.get('15', ''), VAR_7.\n get('16', ''), VAR_7.get('17', ''), VAR_7.get('18', ''), VAR_7.get('19',\n ''), VAR_7.get('20', ''), VAR_7.get('21', ''), VAR_7.get('22', ''),\n VAR_7.get('23', ''), VAR_11[0], VAR_5)\n",
"VAR_14 = 'string'.format(VAR_7.get('0', ''), VAR_7.get('1', ''), VAR_7.get(\n '2', ''), VAR_7.get('3', ''), VAR_7.get('4', ''), VAR_7.get('5', ''),\n VAR_7.get('6', ''), VAR_7.get('7', ''), VAR_7.get('8', ''), VAR_7.get(\n '9', ''), VAR_7.get('10', ''), VAR_7.get('11', ''), VAR_7.get('12', ''),\n VAR_7.get('13', ''), VAR_7.get('14', ''), VAR_7.get('15', ''), VAR_7.\n get('16', ''), VAR_7.get('17', ''), VAR_7.get('18', ''), VAR_7.get('19',\n ''), VAR_7.get('20', ''), VAR_7.get('21', ''), VAR_7.get('22', ''),\n VAR_7.get('23', ''), VAR_11[0], VAR_5)\n",
"VAR_9.execute(VAR_14)\n",
"VAR_9.execute(VAR_14)\n",
"VAR_8.commit()\n",
"VAR_8.commit()\n",
"return True\n"
] | [
"def updateAvailability(username, calendarId, sqlInstance, timeList):...\n",
"conn = sqlInstance.connect()\n",
"cursor = conn.cursor()\n",
"userCheckQuery = \"SELECT userId FROM Users WHERE userEmail = '{0}'\".format(\n username)\n",
"cursor.execute(userCheckQuery)\n",
"userResult = cursor.fetchone()\n",
"if userResult is None:\n",
"return False\n",
"timeslotQuery = (\n \"SELECT timeSlotId FROM TimeSlots WHERE calendarId = '{0}' AND userId = {1}\"\n .format(calendarId, userResult[0]))\n",
"cursor.execute(timeslotQuery)\n",
"timeSlotResult = cursor.fetchone()\n",
"if timeSlotResult:\n",
"queryString = (\n \"\"\"UPDATE TimeSlots SET zero='{0}', one='{1}', two='{2}', three='{3}', four='{4}', five='{5}', six='{6}',\n seven='{7}', eight='{8}', nine='{9}', ten='{10}', eleven='{11}', twelve='{12}', thirteen='{13}',\n fourteen='{14}', fifteen='{15}', sixteen='{16}', seventeen='{17}', eighteen='{18}', nineteen='{19}',\n twenty='{20}', twentyone='{21}', twentytwo='{22}', twentythree='{23}' WHERE userId = {24} AND calendarId='{25}'\"\"\"\n .format(timeList.get('0', ''), timeList.get('1', ''), timeList.get('2',\n ''), timeList.get('3', ''), timeList.get('4', ''), timeList.get('5', ''\n ), timeList.get('6', ''), timeList.get('7', ''), timeList.get('8', ''),\n timeList.get('9', ''), timeList.get('10', ''), timeList.get('11', ''),\n timeList.get('12', ''), timeList.get('13', ''), timeList.get('14', ''),\n timeList.get('15', ''), timeList.get('16', ''), timeList.get('17', ''),\n timeList.get('18', ''), timeList.get('19', ''), timeList.get('20', ''),\n timeList.get('21', ''), timeList.get('22', ''), timeList.get('23', ''),\n userResult[0], calendarId))\n",
"queryString = (\n \"\"\"INSERT INTO TimeSlots (zero, one, two, three, four, five, six, seven, eight, nine, ten, eleven, twelve, thirteen,\n fourteen, fifteen, sixteen, seventeen, eighteen, nineteen, twenty, twentyone, twentytwo, twentythree, userId, calendarId) VALUES ('{0}','{1}',\n '{2}','{3}','{4}','{5}','{6}','{7}','{8}','{9}','{10}','{11}','{12}','{13}','{14}','{15}','{16}','{17}','{18}',\n '{19}','{20}','{21}','{22}','{23}',{24},'{25}')\"\"\"\n .format(timeList.get('0', ''), timeList.get('1', ''), timeList.get('2',\n ''), timeList.get('3', ''), timeList.get('4', ''), timeList.get('5', ''\n ), timeList.get('6', ''), timeList.get('7', ''), timeList.get('8', ''),\n timeList.get('9', ''), timeList.get('10', ''), timeList.get('11', ''),\n timeList.get('12', ''), timeList.get('13', ''), timeList.get('14', ''),\n timeList.get('15', ''), timeList.get('16', ''), timeList.get('17', ''),\n timeList.get('18', ''), timeList.get('19', ''), timeList.get('20', ''),\n timeList.get('21', ''), timeList.get('22', ''), timeList.get('23', ''),\n userResult[0], calendarId))\n",
"cursor.execute(queryString)\n",
"cursor.execute(queryString)\n",
"conn.commit()\n",
"conn.commit()\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"return self.get_serializer_class().setup_eager_loading(State.objects.all())\n"
] | [
"def get_queryset(self):...\n",
"return self.get_serializer_class().setup_eager_loading(State.objects.all())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_39(VAR_101):...\n",
"VAR_101.threads = VAR_75\n",
"return VAR_101\n"
] | [
"def decorate(ruleinfo):...\n",
"ruleinfo.threads = threads\n",
"return ruleinfo\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"config.set(xsrf_token_key='abcdef')\n",
"VAR_1 = utils.XsrfTool()\n",
"VAR_3 = utils.get_timestamp(CLASS_0.TEST_NOW)\n",
"self.assertFalse(VAR_1.verify_token('NotTheRightDigest/%f' % VAR_3, 12345,\n 'test_action'))\n"
] | [
"def test_rejects_invalid_token(self):...\n",
"\"\"\"docstring\"\"\"\n",
"config.set(xsrf_token_key='abcdef')\n",
"tool = utils.XsrfTool()\n",
"timestamp = utils.get_timestamp(XsrfToolTests.TEST_NOW)\n",
"self.assertFalse(tool.verify_token('NotTheRightDigest/%f' % timestamp, \n 12345, 'test_action'))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_0, VAR_1):...\n",
"self.campaign_data = VAR_0\n",
"self.options = VAR_1\n",
"self.result_data = {'campaign_id': self.campaign_data['id'], 'aux_output':\n '', 'data_diff': None, 'debugger_output': '', 'detected_errors': None,\n 'dut_output': ''}\n",
"if os.path.exists('campaign-data/' + str(VAR_0['id']) + '/private.key'):\n",
"self.rsakey = RSAKey.from_private_key_file('campaign-data/' + str(VAR_0[\n 'id']) + '/private.key')\n",
"self.rsakey = RSAKey.generate(1024)\n",
"if self.campaign_data['use_simics']:\n",
"self.rsakey.write_private_key_file('campaign-data/' + str(VAR_0['id']) +\n '/private.key')\n",
"self.debugger = simics(VAR_0, self.result_data, VAR_1, self.rsakey)\n",
"if VAR_0['architecture'] == 'p2020':\n",
"if not self.campaign_data['use_simics']:\n",
"self.debugger = bdi_p2020(VAR_0, self.result_data, VAR_1, self.rsakey)\n",
"if VAR_0['architecture'] == 'a9':\n",
"if self.campaign_data['use_aux']:\n",
"self.debugger = openocd(VAR_0, self.result_data, VAR_1, self.rsakey)\n",
"self.debugger.aux.serial.write('\\x03')\n",
"if VAR_1.command == 'new':\n",
"self.debugger.aux.do_login()\n",
"self.debugger.reset_dut()\n",
"if VAR_1.command != 'new':\n",
"self.send_dut_files(VAR_2=True)\n"
] | [
"def __init__(self, campaign_data, options):...\n",
"self.campaign_data = campaign_data\n",
"self.options = options\n",
"self.result_data = {'campaign_id': self.campaign_data['id'], 'aux_output':\n '', 'data_diff': None, 'debugger_output': '', 'detected_errors': None,\n 'dut_output': ''}\n",
"if os.path.exists('campaign-data/' + str(campaign_data['id']) + '/private.key'\n",
"self.rsakey = RSAKey.from_private_key_file('campaign-data/' + str(\n campaign_data['id']) + '/private.key')\n",
"self.rsakey = RSAKey.generate(1024)\n",
"if self.campaign_data['use_simics']:\n",
"self.rsakey.write_private_key_file('campaign-data/' + str(campaign_data[\n 'id']) + '/private.key')\n",
"self.debugger = simics(campaign_data, self.result_data, options, self.rsakey)\n",
"if campaign_data['architecture'] == 'p2020':\n",
"if not self.campaign_data['use_simics']:\n",
"self.debugger = bdi_p2020(campaign_data, self.result_data, options, self.rsakey\n )\n",
"if campaign_data['architecture'] == 'a9':\n",
"if self.campaign_data['use_aux']:\n",
"self.debugger = openocd(campaign_data, self.result_data, options, self.rsakey)\n",
"self.debugger.aux.serial.write('\\x03')\n",
"if options.command == 'new':\n",
"self.debugger.aux.do_login()\n",
"self.debugger.reset_dut()\n",
"if options.command != 'new':\n",
"self.send_dut_files(aux=True)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_1(VAR_3):...\n",
"if VAR_3 in ('friends', 'all', ' reddit.com'):\n",
"return False\n",
"return VAR_100(VAR_3) if VAR_3 and VAR_2.match(VAR_3) else None\n",
"return None\n"
] | [
"def chksrname(x):...\n",
"if x in ('friends', 'all', ' reddit.com'):\n",
"return False\n",
"return str(x) if x and subreddit_rx.match(x) else None\n",
"return None\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_20(VAR_18, VAR_20, VAR_10, VAR_26, VAR_27, VAR_28):...\n",
"VAR_48 = VAR_20, VAR_4[VAR_20]['name']\n",
"VAR_34 = FUNC_1(VAR_10)\n",
"VAR_49 = SubmitUserTestRequest(VAR_34, VAR_48, base_url=CWS_BASE_URL,\n VAR_26=submission_format, VAR_27=filenames)\n",
"VAR_49.execute()\n",
"VAR_31 = VAR_49.get_user_test_id()\n",
"if VAR_31 is None:\n",
"return VAR_31\n"
] | [
"def cws_submit_user_test(contest_id, task_id, user_id, submission_format,...\n",
"task = task_id, created_tasks[task_id]['name']\n",
"browser = get_cws_browser(user_id)\n",
"sr = SubmitUserTestRequest(browser, task, base_url=CWS_BASE_URL,\n submission_format=submission_format, filenames=filenames)\n",
"sr.execute()\n",
"user_test_id = sr.get_user_test_id()\n",
"if user_test_id is None:\n",
"return user_test_id\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'"
] |
[
"@property...\n",
"return self.data_group.group_type.code in ['CP', 'HH', 'CO']\n"
] | [
"@property...\n",
"return self.data_group.group_type.code in ['CP', 'HH', 'CO']\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_1(VAR_0):...\n",
"return 'Meetup : %s' % VAR_0.title\n"
] | [
"def meetup_article_title(meetup):...\n",
"return 'Meetup : %s' % meetup.title\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = api.network.server_security_groups(VAR_1, VAR_3)\n",
"return {'items': [s.to_dict() for s in VAR_14]}\n"
] | [
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"groups = api.network.server_security_groups(request, server_id)\n",
"return {'items': [s.to_dict() for s in groups]}\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self, VAR_3):...\n",
"VAR_2 = self.frames[VAR_3]\n",
"VAR_2.tkraise()\n"
] | [
"def show_frame(self, cont):...\n",
"frame = self.frames[cont]\n",
"frame.tkraise()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_10(self):...\n",
"VAR_12 = 'result/' + self.cate_str + '_scope.json'\n",
"json.dump(self.scope, f)\n"
] | [
"def saveScope(self):...\n",
"file_name = 'result/' + self.cate_str + '_scope.json'\n",
"json.dump(self.scope, f)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_10):...\n",
"VAR_10 = VAR_10.strip()\n",
"debug('Txt picked up by col:', VAR_10)\n",
"VAR_30 = VAR_10.find('\\n')\n",
"VAR_36 = VAR_10[1:VAR_30].strip()\n",
"VAR_10 = VAR_10[VAR_30 + 1:]\n",
"self.percentage = self.units = 0.0\n",
"self.unspecified = 0\n",
"if len(VAR_36) == 0:\n",
"self.unspecified = 1\n",
"if VAR_36[-1:] == '%':\n",
"def FUNC_2():...\n",
"self.percentage = float(VAR_36[:-1]) * 0.01\n",
"self.units = float(VAR_36)\n",
"super(CLASS_4, self).__init__(slideParser.parse(VAR_10, slideLexer), VAR_2='\\n'\n )\n",
"CLASS_2.parsingQ.insert(0, FUNC_2)\n"
] | [
"def __init__(self, txt):...\n",
"txt = txt.strip()\n",
"debug('Txt picked up by col:', txt)\n",
"i = txt.find('\\n')\n",
"head = txt[1:i].strip()\n",
"txt = txt[i + 1:]\n",
"self.percentage = self.units = 0.0\n",
"self.unspecified = 0\n",
"if len(head) == 0:\n",
"self.unspecified = 1\n",
"if head[-1:] == '%':\n",
"def innerFunc():...\n",
"self.percentage = float(head[:-1]) * 0.01\n",
"self.units = float(head)\n",
"super(Column, self).__init__(slideParser.parse(txt, slideLexer), after='\\n')\n",
"Slide.parsingQ.insert(0, innerFunc)\n"
] | [
0,
0,
2,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = discord.Embed(VAR_2='Why you should not use video guides', VAR_3=\n discord.Color.dark_orange())\n",
"VAR_7.description = 'string'\n",
"VAR_7.add_field(name='Recommended', value=\n \"The recommended thing to do is to use [Plailect's written complete guide for boot9strap](https://3ds.guide). It is the most up to date one and is recommended for everyone.\"\n )\n",
"await self.bot.say('', VAR_7=embed)\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"embed = discord.Embed(title='Why you should not use video guides', color=\n discord.Color.dark_orange())\n",
"embed.description = \"\"\"\"Video guides\" for custom firmware and arm9loaderhax/boot9strap are not recommended for use. Their contents generally become outdated very quickly for them to be of any use, and they are harder to update unlike a written guide.\n\nWhen this happens, video guides become more complicated than current methods, having users do certain tasks which may not be required anymore.\n\nThere is also a risk of the uploader spreading misinformation or including potentially harmful files, sometimes unintentionally. Using other people's files to install arm9loaderhax can cause serious issues and even brick your system.\"\"\"\n",
"embed.add_field(name='Recommended', value=\n \"The recommended thing to do is to use [Plailect's written complete guide for boot9strap](https://3ds.guide). It is the most up to date one and is recommended for everyone.\"\n )\n",
"await self.bot.say('', embed=embed)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_24=None):...\n",
"self.logger = logging.getLogger(__name__)\n",
"self.logger.setLevel(logging.DEBUG)\n",
"self.configfile = VAR_24\n",
"self.nodes = {}\n",
"self.server = []\n",
"self.host_list = []\n",
"if VAR_24:\n",
"self.load_config(VAR_24)\n",
"self.config = None\n",
"self.session_name = self.config['name']\n",
"dump(self.config, outfile, default_flow_style=False)\n",
"self.logger.debug('Loading config was successful')\n",
"self.server = Server()\n",
"if self.server.has_session(self.session_name):\n",
"self.session = self.server.find_where({'session_name': self.session_name})\n",
"self.logger.info('starting new session by name \"%s\" on server' % self.\n session_name)\n",
"self.logger.info('found running session by name \"%s\" on server' % self.\n session_name)\n",
"self.session = self.server.new_session(VAR_30=self.session_name, VAR_15='Main')\n"
] | [
"def __init__(self, configfile=None):...\n",
"self.logger = logging.getLogger(__name__)\n",
"self.logger.setLevel(logging.DEBUG)\n",
"self.configfile = configfile\n",
"self.nodes = {}\n",
"self.server = []\n",
"self.host_list = []\n",
"if configfile:\n",
"self.load_config(configfile)\n",
"self.config = None\n",
"self.session_name = self.config['name']\n",
"dump(self.config, outfile, default_flow_style=False)\n",
"self.logger.debug('Loading config was successful')\n",
"self.server = Server()\n",
"if self.server.has_session(self.session_name):\n",
"self.session = self.server.find_where({'session_name': self.session_name})\n",
"self.logger.info('starting new session by name \"%s\" on server' % self.\n session_name)\n",
"self.logger.info('found running session by name \"%s\" on server' % self.\n session_name)\n",
"self.session = self.server.new_session(session_name=self.session_name,\n window_name='Main')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe import _\n",
"from frappe.website.website_generator import WebsiteGenerator\n",
"from frappe.website.render import clear_cache\n",
"from frappe.utils import today, cint, global_date_format, get_fullname, strip_html_tags, markdown\n",
"from frappe.website.utils import find_first_image, get_comment_list\n",
"VAR_8 = frappe._dict(VAR_7='published_on desc')\n",
"def FUNC_5(self):...\n",
"if not self.route:\n",
"return frappe.db.get_value('Blog Category', self.blog_category, 'route'\n ) + '/' + self.scrub(self.title)\n",
"def FUNC_6(self):...\n",
"return self.title\n"
] | [
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe import _\n",
"from frappe.website.website_generator import WebsiteGenerator\n",
"from frappe.website.render import clear_cache\n",
"from frappe.utils import today, cint, global_date_format, get_fullname, strip_html_tags, markdown\n",
"from frappe.website.utils import find_first_image, get_comment_list\n",
"website = frappe._dict(order_by='published_on desc')\n",
"def make_route(self):...\n",
"if not self.route:\n",
"return frappe.db.get_value('Blog Category', self.blog_category, 'route'\n ) + '/' + self.scrub(self.title)\n",
"def get_feed(self):...\n",
"return self.title\n"
] | [
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_21, **VAR_1):...\n",
"super().__init__(**kwargs)\n",
"if VAR_44 is None:\n",
"logging.warn('Timezone support disabled, install pytz to enable.')\n",
"self._timezone = VAR_44.timezone(VAR_21)\n",
"self._timezone = None\n"
] | [
"def __init__(self, timezone, **kwargs):...\n",
"super().__init__(**kwargs)\n",
"if pytz is None:\n",
"logging.warn('Timezone support disabled, install pytz to enable.')\n",
"self._timezone = pytz.timezone(timezone)\n",
"self._timezone = None\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(self):...\n",
"VAR_2 = FUNC_0(self.window)\n",
"VAR_3 = FUNC_1(self.window)\n",
"VAR_16 = []\n",
"for VAR_6 in VAR_3:\n",
"VAR_20 = []\n",
"self.sorted_menu = sorted(VAR_16, key=lambda item: item[1], reverse=True)\n",
"VAR_12 = Urtext.meta.NodeMetadata(os.path.join(VAR_2, VAR_6))\n",
"self.display_menu = []\n",
"VAR_20.append(VAR_12.get_tag('title')[0])\n",
"for VAR_20 in self.sorted_menu:\n",
"VAR_21 = re.search('\\\\b\\\\d{14}\\\\b', VAR_6).group(0)\n",
"VAR_22 = [VAR_20[0], VAR_20[1].strftime('<%a., %b. %d, %Y, %I:%M %p>')]\n",
"def FUNC_8(VAR_17):...\n",
"print(VAR_21)\n",
"self.display_menu.append(VAR_22)\n",
"if VAR_17 != -1:\n",
"VAR_20.append(Urtext.datestimes.date_from_reverse_date(VAR_21))\n",
"VAR_24 = CLASS_0(self.sorted_menu[VAR_17][2])\n",
"self.window.show_quick_panel(self.display_menu, FUNC_8)\n",
"VAR_20.append(VAR_12.filename)\n",
"VAR_25 = self.window.open_file(self.sorted_menu[VAR_17][2])\n",
"VAR_16.append(VAR_20)\n"
] | [
"def run(self):...\n",
"path = get_path(self.window)\n",
"files = get_all_files(self.window)\n",
"menu = []\n",
"for filename in files:\n",
"item = []\n",
"self.sorted_menu = sorted(menu, key=lambda item: item[1], reverse=True)\n",
"metadata = Urtext.meta.NodeMetadata(os.path.join(path, filename))\n",
"self.display_menu = []\n",
"item.append(metadata.get_tag('title')[0])\n",
"for item in self.sorted_menu:\n",
"node_id = re.search('\\\\b\\\\d{14}\\\\b', filename).group(0)\n",
"new_item = [item[0], item[1].strftime('<%a., %b. %d, %Y, %I:%M %p>')]\n",
"def open_the_file(index):...\n",
"print(node_id)\n",
"self.display_menu.append(new_item)\n",
"if index != -1:\n",
"item.append(Urtext.datestimes.date_from_reverse_date(node_id))\n",
"urtext_file = UrtextFile(self.sorted_menu[index][2])\n",
"self.window.show_quick_panel(self.display_menu, open_the_file)\n",
"item.append(metadata.filename)\n",
"new_view = self.window.open_file(self.sorted_menu[index][2])\n",
"menu.append(item)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
1,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(VAR_0):...\n",
"VAR_2 = []\n",
"VAR_5 = {}\n",
"VAR_6 = 's.transaction_date' if VAR_0['based_on'\n ] == 'Sales Order' else 's.posting_date'\n",
"VAR_7 = frappe.db.sql('string'.format(VAR_6=date_field, doctype=filters[\n 'based_on']), as_dict=1)\n",
"for d in VAR_7:\n",
"VAR_5.setdefault(d.item_name, d)\n",
"return VAR_5\n"
] | [
"def get_sales_details(filters):...\n",
"data = []\n",
"item_details_map = {}\n",
"date_field = 's.transaction_date' if filters['based_on'\n ] == 'Sales Order' else 's.posting_date'\n",
"sales_data = frappe.db.sql(\n \"\"\"\n\t\tselect s.territory, s.customer, si.item_group, si.item_name, si.qty, {date_field} as last_order_date,\n\t\tDATEDIFF(CURDATE(), {date_field}) as days_since_last_order\n\t\tfrom `tab{doctype}` s, `tab{doctype} Item` si\n\t\twhere s.name = si.parent and s.docstatus = 1\n\t\tgroup by si.name order by days_since_last_order \"\"\"\n .format(date_field=date_field, doctype=filters['based_on']), as_dict=1)\n",
"for d in sales_data:\n",
"item_details_map.setdefault(d.item_name, d)\n",
"return item_details_map\n"
] | [
0,
0,
0,
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"import requests\n",
"import sqlite3\n",
"import os\n",
"from bs4 import BeautifulSoup\n",
"def FUNC_0(VAR_0):...\n",
"if VAR_0 == '':\n",
"return True\n",
"if len(VAR_0.split()) > 1:\n",
"return True\n",
"VAR_2 = requests.get('http://codeforces.com/submissions/' + VAR_0)\n",
"VAR_3 = BeautifulSoup(VAR_2.text, 'lxml')\n",
"if VAR_3.find(attrs={'class': 'verdict'}) == None:\n",
"return True\n",
"return False\n"
] | [
"import requests\n",
"import sqlite3\n",
"import os\n",
"from bs4 import BeautifulSoup\n",
"def check_username(username):...\n",
"if username == '':\n",
"return True\n",
"if len(username.split()) > 1:\n",
"return True\n",
"r = requests.get('http://codeforces.com/submissions/' + username)\n",
"soup = BeautifulSoup(r.text, 'lxml')\n",
"if soup.find(attrs={'class': 'verdict'}) == None:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_1(**VAR_12) ->typing.Callable[[typing.Any], commands.Group]:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12.setdefault('cls', CLASS_2)\n",
"return commands.command(**kwargs)\n"
] | [
"def group(**kwargs) ->typing.Callable[[typing.Any], commands.Group]:...\n",
"\"\"\"docstring\"\"\"\n",
"kwargs.setdefault('cls', NekoGroup)\n",
"return commands.command(**kwargs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
"def check_for_setup_error(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"@functools.wraps(VAR_0)...\n",
"if not self.request.headers.get('X-XSRF-Token-Request'):\n",
"return VAR_0(self, *VAR_5, **kwargs)\n"
] | [
"@functools.wraps(f)...\n",
"if not self.request.headers.get('X-XSRF-Token-Request'):\n",
"return f(self, *args, **kwargs)\n"
] | [
0,
0,
0
] | [
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"return ''.join(VAR_8 + '/\\n' if os.path.isdir(os.path.join(self.filename,\n VAR_8)) else VAR_8 + '\\n' for VAR_8 in os.listdir(self.filename))\n"
] | [
"def index(self):...\n",
"return ''.join(filename + '/\\n' if os.path.isdir(os.path.join(self.filename,\n filename)) else filename + '\\n' for filename in os.listdir(self.filename))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_25(self):...\n",
"self.compilation_ko(\n \"\"\"\nif header :is \"Sender\" \"me@example.com\" \n discard;\n}\n\"\"\")\n"
] | [
"def test_nonopened_block(self):...\n",
"self.compilation_ko(\n \"\"\"\nif header :is \"Sender\" \"me@example.com\" \n discard;\n}\n\"\"\")\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_8(self, VAR_12, VAR_13, VAR_14=None):...\n",
""
] | [
"def insert(self, table, values, updater=None):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_29(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.config['tmp_dir_created']:\n",
"self.delete_tmp_dir()\n",
"for f in os.listdir(self.config['tmp_dir']):\n",
"if re.search('*sosreport-*tar*', f):\n",
"os.remove(os.path.join(self.config['tmp_dir'], f))\n"
] | [
"def cleanup(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.config['tmp_dir_created']:\n",
"self.delete_tmp_dir()\n",
"for f in os.listdir(self.config['tmp_dir']):\n",
"if re.search('*sosreport-*tar*', f):\n",
"os.remove(os.path.join(self.config['tmp_dir'], f))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"For",
"Condition",
"Expr'"
] |
[
"def FUNC_0(VAR_0, VAR_1, VAR_2):...\n",
""
] | [
"def SaveDocumentCollection(dc, filenameedges, filenamedata):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_1(self, VAR_13, VAR_11, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.gives_corrected:\n",
"return self._process_corrected(VAR_13, VAR_11, VAR_12)\n",
"return self._process_issues(VAR_13, VAR_11)\n"
] | [
"def process_output(self, output, filename, file):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.gives_corrected:\n",
"return self._process_corrected(output, filename, file)\n",
"return self._process_issues(output, filename)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"@app.route('/api/indi_service', methods=['GET'])...\n",
"return controller.indi_service.to_map()\n"
] | [
"@app.route('/api/indi_service', methods=['GET'])...\n",
"return controller.indi_service.to_map()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_6(self, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_24 = VAR_16['errata_list']\n",
"VAR_24 = filter(None, VAR_24)\n",
"VAR_25 = {}\n",
"if not VAR_24:\n",
"return VAR_25\n",
"VAR_26 = 'SELECT errata.id, errata.name, synopsis, severity.name, description,'\n",
"VAR_26 += ' solution, issued, updated'\n",
"VAR_26 += ' FROM errata'\n",
"VAR_26 += ' LEFT JOIN severity ON severity_id = severity.id'\n",
"VAR_26 += ' WHERE errata.name IN %s'\n",
"self.cursor.execute(VAR_26, [tuple(VAR_24)])\n",
"VAR_27 = self.cursor.fetchall()\n",
"VAR_28 = []\n",
"for VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7 in VAR_27:\n",
"VAR_30 = CLASS_0(VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7)\n",
"VAR_29 = {}\n",
"VAR_30.set_cve_names(self.get_cve_names_for_erratum_id(VAR_0))\n",
"for VAR_31 in VAR_28:\n",
"VAR_30.set_packages(self.get_package_list_for_erratum_id(VAR_0))\n",
"VAR_29[VAR_31.get_val('name')] = VAR_31.get_val('mydict')\n",
"VAR_25['errata_list'] = VAR_29\n",
"VAR_28.append(VAR_30)\n",
"return VAR_25\n"
] | [
"def process_list(self, data):...\n",
"\"\"\"docstring\"\"\"\n",
"errata_to_process = data['errata_list']\n",
"errata_to_process = filter(None, errata_to_process)\n",
"answer = {}\n",
"if not errata_to_process:\n",
"return answer\n",
"errata_query = (\n 'SELECT errata.id, errata.name, synopsis, severity.name, description,')\n",
"errata_query += ' solution, issued, updated'\n",
"errata_query += ' FROM errata'\n",
"errata_query += ' LEFT JOIN severity ON severity_id = severity.id'\n",
"errata_query += ' WHERE errata.name IN %s'\n",
"self.cursor.execute(errata_query, [tuple(errata_to_process)])\n",
"errata = self.cursor.fetchall()\n",
"erratum_list = []\n",
"for id, name, synopsis, severity, description, solution, issued, updated in errata:\n",
"new_erratum = Errata(id, name, synopsis, severity, description, solution,\n issued, updated)\n",
"errata_dict = {}\n",
"new_erratum.set_cve_names(self.get_cve_names_for_erratum_id(id))\n",
"for e in erratum_list:\n",
"new_erratum.set_packages(self.get_package_list_for_erratum_id(id))\n",
"errata_dict[e.get_val('name')] = e.get_val('mydict')\n",
"answer['errata_list'] = errata_dict\n",
"erratum_list.append(new_erratum)\n",
"return answer\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
4,
4,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_2(self, VAR_3, VAR_4):...\n",
"api.nova.keypair_delete(VAR_3, VAR_4)\n"
] | [
"def delete(self, request, obj_id):...\n",
"api.nova.keypair_delete(request, obj_id)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_8(VAR_5=20):...\n",
"\"\"\"docstring\"\"\"\n",
"for i in range(VAR_5):\n",
"VAR_1.info('Waiting for JupyterHub to come up ({}/{} tries)'.format(i + 1,\n VAR_5))\n",
"if h.code in [404, 502, 503]:\n",
"urlopen('http://127.0.0.1')\n",
"time.sleep(1)\n",
"if isinstance(e.reason, ConnectionRefusedError):\n",
"return\n",
"time.sleep(1)\n"
] | [
"def ensure_jupyterhub_running(times=20):...\n",
"\"\"\"docstring\"\"\"\n",
"for i in range(times):\n",
"logger.info('Waiting for JupyterHub to come up ({}/{} tries)'.format(i + 1,\n times))\n",
"if h.code in [404, 502, 503]:\n",
"urlopen('http://127.0.0.1')\n",
"time.sleep(1)\n",
"if isinstance(e.reason, ConnectionRefusedError):\n",
"return\n",
"time.sleep(1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'"
] |
[
"@VAR_2.route('/web')...\n",
"if VAR_0 == None:\n",
"FUNC_16()\n",
"return json.dumps(get_web(VAR_5, VAR_0=db))\n"
] | [
"@endpoints.route('/web')...\n",
"if db == None:\n",
"init()\n",
"return json.dumps(get_web(tag, db=db))\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_27, VAR_28, VAR_19=None):...\n",
"super(CLASS_4, self).__init__(VAR_27, VAR_19)\n",
"self._result_getter = VAR_28\n"
] | [
"def __init__(self, name, result_getter, event=None):...\n",
"super(AwaitableEvent, self).__init__(name, event)\n",
"self._result_getter = result_getter\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_15(self, VAR_14):...\n",
""
] | [
"def is_date(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_11(VAR_34):...\n",
"if VAR_34.startswith('!'):\n",
"return VAR_34[1:]\n",
"return VAR_34.format(VAR_11=url, VAR_36=dest.name)\n"
] | [
"def formatCommand(e):...\n",
"if e.startswith('!'):\n",
"return e[1:]\n",
"return e.format(url=url, dest=dest.name)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"VAR_3 = {'name': 'fakesnap', 'volume_name': 'fakevolume_name'}\n",
"self.driver._eql_execute('volume', 'select', VAR_3['volume_name'],\n 'snapshot', 'delete', VAR_3['name'])\n",
"self.mox.ReplayAll()\n",
"self.driver.delete_snapshot(VAR_3)\n"
] | [
"def test_delete_snapshot(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"snapshot = {'name': 'fakesnap', 'volume_name': 'fakevolume_name'}\n",
"self.driver._eql_execute('volume', 'select', snapshot['volume_name'],\n 'snapshot', 'delete', snapshot['name'])\n",
"self.mox.ReplayAll()\n",
"self.driver.delete_snapshot(snapshot)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"self.assertFalse(DatabaseQuery('DocType').execute(filters={'name': ['in',\n None]}))\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['not in', None]}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertTrue(result in DatabaseQuery('DocType').execute(filters={'name':\n ['in', 'DocType,DocField']}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertFalse(result in DatabaseQuery('DocType').execute(filters={'name':\n ['not in', 'DocType,DocField']}))\n"
] | [
"def test_in_not_in_filters(self):...\n",
"self.assertFalse(DatabaseQuery('DocType').execute(filters={'name': ['in',\n None]}))\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['not in', None]}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertTrue(result in DatabaseQuery('DocType').execute(filters={'name':\n ['in', 'DocType,DocField']}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertFalse(result in DatabaseQuery('DocType').execute(filters={'name':\n ['not in', 'DocType,DocField']}))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"For",
"Expr'",
"For",
"Expr'"
] |
[
"def __init__(self, VAR_2, VAR_3, VAR_4='member', VAR_5=100, VAR_6=0):...\n",
"self.id = VAR_2\n",
"self.nickname = VAR_3\n",
"self.rank = VAR_4\n",
"self.balance = VAR_5\n",
"self.events_attd = VAR_6\n"
] | [
"def __init__(self, id, nickname, rank='member', balance=100, events_attd=0):...\n",
"self.id = id\n",
"self.nickname = nickname\n",
"self.rank = rank\n",
"self.balance = balance\n",
"self.events_attd = events_attd\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@VAR_0.route('/add', methods=['POST'])...\n",
""
] | [
"@app.route('/add', methods=['POST'])...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe.utils import getdate, add_days, today, cint\n",
"from frappe import _\n",
"def FUNC_0(VAR_0=None):...\n",
"VAR_1 = FUNC_1()\n",
"VAR_2 = FUNC_2(VAR_0)\n",
"return VAR_1, VAR_2\n"
] | [
"from __future__ import unicode_literals\n",
"import frappe\n",
"from frappe.utils import getdate, add_days, today, cint\n",
"from frappe import _\n",
"def execute(filters=None):...\n",
"columns = get_columns()\n",
"data = get_data(filters)\n",
"return columns, data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"if self.test_server:\n",
"if hasattr(self.test_server, 'kill'):\n",
"self.test_server.kill()\n",
"os.kill(self.test_server.pid, signal.SIGKILL)\n",
"self.test_server = None\n",
"self.port = None\n",
"self.url = None\n",
"if self.tmp_db:\n",
"os.remove(self.tmp_db)\n",
"self.tmp_db = None\n"
] | [
"def stop_server(self):...\n",
"if self.test_server:\n",
"if hasattr(self.test_server, 'kill'):\n",
"self.test_server.kill()\n",
"os.kill(self.test_server.pid, signal.SIGKILL)\n",
"self.test_server = None\n",
"self.port = None\n",
"self.url = None\n",
"if self.tmp_db:\n",
"os.remove(self.tmp_db)\n",
"self.tmp_db = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'"
] |
[
"@VAR_0.route('/submit/', methods=['POST'])...\n",
"VAR_8 = getConnexion()\n",
"if flask.request.method == 'POST':\n",
"VAR_20 = flask.request.json['general']['observateur']\n",
"return Response(flask.json.dumps('success'), mimetype='application/json')\n",
"VAR_21 = flask.request.json['general']['taxon']['cd_ref']\n",
"VAR_22 = flask.request.json['general']['loc_exact']\n",
"VAR_23 = str()\n",
"VAR_24 = flask.request.json['general']['coord']\n",
"VAR_25 = str(VAR_24['lng'])\n",
"VAR_26 = str(VAR_24['lat'])\n",
"VAR_27 = 'POINT(' + VAR_25 + ' ' + VAR_26 + ')'\n",
"VAR_23 = flask.request.json['general']['code_maille']\n",
"VAR_28 = flask.request.json['general']['date']\n",
"VAR_29 = flask.request.json['general']['commentaire']\n",
"VAR_30 = flask.request.json['general']['comm_loc']\n",
"VAR_31 = flask.request.json['protocole']\n",
"VAR_32 = VAR_31['nom_schema'] + '.' + VAR_31['nom_table']\n",
"VAR_33 = VAR_31['nom_table']\n",
"VAR_34 = VAR_31['id_projet']\n",
"VAR_35 = None\n",
"if not VAR_22:\n",
"VAR_27 = None\n",
"VAR_36 = 'string'\n",
"VAR_9 = 'string'\n",
"if VAR_22:\n",
"VAR_10 = [VAR_23]\n",
"VAR_10 = [VAR_27, config['MAP']['PROJECTION']]\n",
"VAR_10 = [VAR_35, config['MAP']['PROJECTION']]\n",
"VAR_8.cur.execute(VAR_9, VAR_10)\n",
"VAR_8.cur.execute(VAR_36, VAR_10)\n",
"VAR_11 = VAR_8.cur.fetchone()\n",
"VAR_11 = VAR_8.cur.fetchone()\n",
"if VAR_11 != None:\n",
"VAR_37 = None\n",
"VAR_35 = VAR_11[0]\n",
"if VAR_11 != None:\n",
"VAR_37 = VAR_11[0]\n",
"VAR_38 = 'string'\n",
"if VAR_22:\n",
"VAR_10 = [VAR_27, config['MAP']['PROJECTION']]\n",
"VAR_10 = [VAR_35, config['MAP']['PROJECTION']]\n",
"VAR_8.cur.execute(VAR_38, VAR_10)\n",
"VAR_11 = VAR_8.cur.fetchone()\n",
"VAR_39 = None\n",
"if VAR_11 != None:\n",
"VAR_39 = VAR_11[0]\n",
"VAR_40 = session['id_structure']\n",
"VAR_41 = False\n",
"VAR_42 = [VAR_34, VAR_20, VAR_28, VAR_21, VAR_27, VAR_39, VAR_29, VAR_41,\n VAR_37, VAR_22, VAR_23, VAR_40, VAR_30]\n",
"VAR_43 = 'INSERT INTO ' + VAR_32 + 'string'\n",
"VAR_44 = ''\n",
"if VAR_22:\n",
"VAR_44 = (\n 'VALUES (%s, %s, %s, %s, ST_Transform(ST_PointFromText(%s, 4326),' +\n str(config['MAP']['PROJECTION']) + '), %s, %s, %s, %s, %s, %s, %s, %s')\n",
"VAR_44 = 'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s'\n",
"VAR_45 = FUNC_7()['keys']\n",
"VAR_46 = FUNC_7()['values']\n",
"for k in VAR_45:\n",
"VAR_43 += ', ' + k\n",
"VAR_43 += ')'\n",
"VAR_44 += ', %s'\n",
"VAR_44 += ')'\n",
"for v in VAR_46:\n",
"VAR_42.append(v)\n",
"VAR_10 = VAR_42\n",
"VAR_9 = VAR_43 + VAR_44\n",
"VAR_8.cur.execute(VAR_9, VAR_10)\n",
"VAR_8.conn.commit()\n",
"VAR_8.closeAll()\n"
] | [
"@addObs.route('/submit/', methods=['POST'])...\n",
"db = getConnexion()\n",
"if flask.request.method == 'POST':\n",
"observateur = flask.request.json['general']['observateur']\n",
"return Response(flask.json.dumps('success'), mimetype='application/json')\n",
"cd_nom = flask.request.json['general']['taxon']['cd_ref']\n",
"loc_exact = flask.request.json['general']['loc_exact']\n",
"code_maille = str()\n",
"loc = flask.request.json['general']['coord']\n",
"x = str(loc['lng'])\n",
"y = str(loc['lat'])\n",
"point = 'POINT(' + x + ' ' + y + ')'\n",
"code_maille = flask.request.json['general']['code_maille']\n",
"date = flask.request.json['general']['date']\n",
"commentaire = flask.request.json['general']['commentaire']\n",
"comm_loc = flask.request.json['general']['comm_loc']\n",
"protocoleObject = flask.request.json['protocole']\n",
"fullTableName = protocoleObject['nom_schema'] + '.' + protocoleObject[\n 'nom_table']\n",
"protocoleName = protocoleObject['nom_table']\n",
"id_projet = protocoleObject['id_projet']\n",
"centroid = None\n",
"if not loc_exact:\n",
"point = None\n",
"sql_foret = (\n ' SELECT ccod_frt FROM layers.perimetre_forets WHERE ST_INTERSECTS(geom,(ST_Transform(ST_GeomFromText(%s, 4326),%s)))'\n )\n",
"sql = (\n 'SELECT ST_AsText(ST_Centroid(ST_TRANSFORM(geom, 4326))) FROM layers.maille_1_2 WHERE id_maille = %s '\n )\n",
"if loc_exact:\n",
"params = [code_maille]\n",
"params = [point, config['MAP']['PROJECTION']]\n",
"params = [centroid, config['MAP']['PROJECTION']]\n",
"db.cur.execute(sql, params)\n",
"db.cur.execute(sql_foret, params)\n",
"res = db.cur.fetchone()\n",
"res = db.cur.fetchone()\n",
"if res != None:\n",
"ccod_frt = None\n",
"centroid = res[0]\n",
"if res != None:\n",
"ccod_frt = res[0]\n",
"sql_insee = (\n ' SELECT code_insee FROM layers.commune WHERE ST_INTERSECTS(geom,(ST_Transform(ST_GeomFromText(%s, 4326),%s)))'\n )\n",
"if loc_exact:\n",
"params = [point, config['MAP']['PROJECTION']]\n",
"params = [centroid, config['MAP']['PROJECTION']]\n",
"db.cur.execute(sql_insee, params)\n",
"res = db.cur.fetchone()\n",
"insee = None\n",
"if res != None:\n",
"insee = res[0]\n",
"id_structure = session['id_structure']\n",
"valide = False\n",
"generalValues = [id_projet, observateur, date, cd_nom, point, insee,\n commentaire, valide, ccod_frt, loc_exact, code_maille, id_structure,\n comm_loc]\n",
"stringInsert = ('INSERT INTO ' + fullTableName +\n '(id_projet, observateur, date, cd_nom, geom_point, insee, commentaire, valide, ccod_frt, loc_exact, code_maille, id_structure, comm_loc'\n )\n",
"stringValues = ''\n",
"if loc_exact:\n",
"stringValues = (\n 'VALUES (%s, %s, %s, %s, ST_Transform(ST_PointFromText(%s, 4326),' +\n str(config['MAP']['PROJECTION']) + '), %s, %s, %s, %s, %s, %s, %s, %s')\n",
"stringValues = 'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s'\n",
"keys = getParmeters()['keys']\n",
"values = getParmeters()['values']\n",
"for k in keys:\n",
"stringInsert += ', ' + k\n",
"stringInsert += ')'\n",
"stringValues += ', %s'\n",
"stringValues += ')'\n",
"for v in values:\n",
"generalValues.append(v)\n",
"params = generalValues\n",
"sql = stringInsert + stringValues\n",
"db.cur.execute(sql, params)\n",
"db.conn.commit()\n",
"db.closeAll()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"import os\n",
"import re\n",
"import operator\n",
"from functools import partial\n",
"import pyramid.events\n",
"import pyramid.request\n",
"import pyramid.config\n",
"from pyramid.session import SignedCookieSessionFactory\n",
"from pyramid.i18n import get_localizer, TranslationStringFactory\n",
"from externals.lib.misc import convert_str_with_type, read_json, extract_subkeys, json_serializer, file_scan\n",
"from externals.lib.pyramid_helpers.auto_format2 import setup_pyramid_autoformater\n",
"from externals.lib.pyramid_helpers.session_identity2 import session_identity\n",
"from externals.lib.social._login import NullLoginProvider, FacebookLogin, GoogleLogin\n",
"from externals.lib.multisocket.auth_echo_server import AuthEchoServerManager\n",
"from .traversal import TraversalGlobalRootFactory\n",
"from .templates import helpers as template_helpers\n",
"from .auth import ComunityUserStore, NullComunityUserStore\n",
"from .model import init_DBSession\n",
"import logging\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = TranslationStringFactory('karakara')\n",
"def FUNC_0(VAR_2, **VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"init_DBSession(VAR_3)\n",
"VAR_5 = pyramid.config.Configurator(VAR_3=settings, root_factory=\n TraversalGlobalRootFactory)\n",
"def FUNC_3(VAR_6):...\n",
"for settings_key in VAR_12:\n",
"assert VAR_5.registry.settings.get(settings_key)\n",
"VAR_5.include('pyramid_mako')\n",
"for VAR_12 in VAR_5.registry.settings.keys():\n",
"VAR_18 = os.getenv(VAR_12.replace('.', '_').upper(), ''\n ) or VAR_5.registry.settings[VAR_12]\n",
"VAR_5.add_request_method(partial(session_identity, session_keys={'id',\n 'admin', 'faves', 'user'}), 'session_identity', reify=True)\n",
"VAR_5.registry.settings[VAR_12] = convert_str_with_type(VAR_18)\n",
"setup_pyramid_autoformater(VAR_5)\n",
"VAR_5.add_translation_dirs(VAR_5.registry.settings['i18n.translation_dirs'])\n",
"VAR_7 = extract_subkeys(VAR_5.registry.settings, 'session.')\n",
"VAR_8 = SignedCookieSessionFactory(serializer=json_serializer, **\n session_settings)\n",
"VAR_5.set_session_factory(VAR_8)\n",
"if not VAR_5.registry.settings['server.etag.cache_buster']:\n",
"from .model.actions import last_update\n",
"import karakara.views.search\n",
"VAR_5.registry.settings['server.etag.cache_buster'] = 'last_update:{0}'.format(\n str(last_update()))\n",
"karakara.views.search.search_config = read_json(VAR_5.registry.settings[\n 'karakara.search.view.config'])\n",
"assert karakara.views.search.search_config, 'search_config data required'\n",
"def FUNC_7(self, *VAR_16, **VAR_17):...\n",
"VAR_9 = CLASS_0()\n",
"if VAR_5.registry.settings.get('karakara.websocket.port'):\n",
"def FUNC_8(VAR_12):...\n",
"VAR_5.registry['socket_manager'] = VAR_9\n",
"\"\"\"docstring\"\"\"\n",
"from .views.comunity_login import social_login\n",
"VAR_14 = pyramid.request.Request({'HTTP_COOKIE': '{0}={1}'.format(VAR_5.\n registry.settings['session.cookie_name'], VAR_12)})\n",
"social_login.user_store = ComunityUserStore()\n",
"VAR_20 = VAR_8(VAR_14)\n",
"VAR_10 = VAR_5.registry.settings.get('login.provider.enabled')\n",
"return VAR_20 and VAR_20.get('admin')\n",
"if 'facebook' in VAR_10:\n",
"FUNC_3(('login.facebook.appid', 'login.facebook.secret'), message=\n 'To use facebook as a login provider appid and secret must be provided')\n",
"if 'google' in VAR_10:\n",
"social_login.add_login_provider(FacebookLogin(appid=config.registry.\n settings.get('login.facebook.appid'), secret=config.registry.settings.\n get('login.facebook.secret'), permissions=config.registry.settings.get(\n 'login.facebook.permissions')))\n",
"social_login.add_login_provider(GoogleLogin(client_secret_file=config.\n registry.settings.get('login.google.client_secret_file')))\n",
"if not VAR_10 and VAR_5.registry.settings.get('karakara.server.mode'\n",
"social_login.add_login_provider(NullLoginProvider())\n",
"VAR_11.javascript_inline['comunity'] = social_login.html_includes\n",
"social_login.user_store = NullComunityUserStore()\n",
"def FUNC_4(VAR_12):...\n",
"VAR_19 = os.path.join(os.getcwd(), VAR_5.registry.settings[VAR_12])\n",
"if not os.path.isdir(VAR_19):\n",
"VAR_0.error(f'Unable to add_static_view {VAR_12}:{VAR_19}')\n",
"return VAR_19\n"
] | [
"import os\n",
"import re\n",
"import operator\n",
"from functools import partial\n",
"import pyramid.events\n",
"import pyramid.request\n",
"import pyramid.config\n",
"from pyramid.session import SignedCookieSessionFactory\n",
"from pyramid.i18n import get_localizer, TranslationStringFactory\n",
"from externals.lib.misc import convert_str_with_type, read_json, extract_subkeys, json_serializer, file_scan\n",
"from externals.lib.pyramid_helpers.auto_format2 import setup_pyramid_autoformater\n",
"from externals.lib.pyramid_helpers.session_identity2 import session_identity\n",
"from externals.lib.social._login import NullLoginProvider, FacebookLogin, GoogleLogin\n",
"from externals.lib.multisocket.auth_echo_server import AuthEchoServerManager\n",
"from .traversal import TraversalGlobalRootFactory\n",
"from .templates import helpers as template_helpers\n",
"from .auth import ComunityUserStore, NullComunityUserStore\n",
"from .model import init_DBSession\n",
"import logging\n",
"log = logging.getLogger(__name__)\n",
"translation_string_factory = TranslationStringFactory('karakara')\n",
"def main(global_config, **settings):...\n",
"\"\"\"docstring\"\"\"\n",
"init_DBSession(settings)\n",
"config = pyramid.config.Configurator(settings=settings, root_factory=\n TraversalGlobalRootFactory)\n",
"def assert_settings_keys(keys):...\n",
"for settings_key in key:\n",
"assert config.registry.settings.get(settings_key)\n",
"config.include('pyramid_mako')\n",
"for key in config.registry.settings.keys():\n",
"value = os.getenv(key.replace('.', '_').upper(), ''\n ) or config.registry.settings[key]\n",
"config.add_request_method(partial(session_identity, session_keys={'id',\n 'admin', 'faves', 'user'}), 'session_identity', reify=True)\n",
"config.registry.settings[key] = convert_str_with_type(value)\n",
"setup_pyramid_autoformater(config)\n",
"config.add_translation_dirs(config.registry.settings['i18n.translation_dirs'])\n",
"session_settings = extract_subkeys(config.registry.settings, 'session.')\n",
"session_factory = SignedCookieSessionFactory(serializer=json_serializer, **\n session_settings)\n",
"config.set_session_factory(session_factory)\n",
"if not config.registry.settings['server.etag.cache_buster']:\n",
"from .model.actions import last_update\n",
"import karakara.views.search\n",
"config.registry.settings['server.etag.cache_buster'\n ] = 'last_update:{0}'.format(str(last_update()))\n",
"karakara.views.search.search_config = read_json(config.registry.settings[\n 'karakara.search.view.config'])\n",
"assert karakara.views.search.search_config, 'search_config data required'\n",
"def recv(self, *args, **kwargs):...\n",
"socket_manager = NullAuthEchoServerManager()\n",
"if config.registry.settings.get('karakara.websocket.port'):\n",
"def authenticator(key):...\n",
"config.registry['socket_manager'] = socket_manager\n",
"\"\"\"docstring\"\"\"\n",
"from .views.comunity_login import social_login\n",
"request = pyramid.request.Request({'HTTP_COOKIE': '{0}={1}'.format(config.\n registry.settings['session.cookie_name'], key)})\n",
"social_login.user_store = ComunityUserStore()\n",
"session_data = session_factory(request)\n",
"login_providers = config.registry.settings.get('login.provider.enabled')\n",
"return session_data and session_data.get('admin')\n",
"if 'facebook' in login_providers:\n",
"assert_settings_keys(('login.facebook.appid', 'login.facebook.secret'),\n message=\n 'To use facebook as a login provider appid and secret must be provided')\n",
"if 'google' in login_providers:\n",
"social_login.add_login_provider(FacebookLogin(appid=config.registry.\n settings.get('login.facebook.appid'), secret=config.registry.settings.\n get('login.facebook.secret'), permissions=config.registry.settings.get(\n 'login.facebook.permissions')))\n",
"social_login.add_login_provider(GoogleLogin(client_secret_file=config.\n registry.settings.get('login.google.client_secret_file')))\n",
"if not login_providers and config.registry.settings.get('karakara.server.mode'\n",
"social_login.add_login_provider(NullLoginProvider())\n",
"template_helpers.javascript_inline['comunity'] = social_login.html_includes\n",
"social_login.user_store = NullComunityUserStore()\n",
"def settings_path(key):...\n",
"path = os.path.join(os.getcwd(), config.registry.settings[key])\n",
"if not os.path.isdir(path):\n",
"log.error(f'Unable to add_static_view {key}:{path}')\n",
"return path\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"FunctionDef'",
"For",
"Assert'",
"Expr'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"Assert'",
"FunctionDef'",
"Assign'",
"Condition",
"FunctionDef'",
"Assign'",
"Docstring",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_5(self, VAR_5, VAR_6, VAR_18, VAR_13=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_55 = sorted(self.get_flat_tree(VAR_5, VAR_6, VAR_18), key=itemgetter(\n 'sequence'))\n",
"VAR_15 = [VAR_14['id'] for VAR_14 in VAR_55]\n",
"return self.browse(VAR_5, VAR_6, VAR_15, VAR_13=context)\n"
] | [
"def get_sorted_list(self, cr, uid, root_id, context=None):...\n",
"\"\"\"docstring\"\"\"\n",
"flat_tree = sorted(self.get_flat_tree(cr, uid, root_id), key=itemgetter(\n 'sequence'))\n",
"item_ids = [item['id'] for item in flat_tree]\n",
"return self.browse(cr, uid, item_ids, context=context)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_44(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'LOW': 1, 'MEDIUM': 2}, 'CONFIDENCE': {'MEDIUM': 3}}\n",
"self.check_example('secret-config-option.py', VAR_2)\n"
] | [
"def test_secret_config_option(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'LOW': 1, 'MEDIUM': 2}, 'CONFIDENCE': {'MEDIUM': 3}}\n",
"self.check_example('secret-config-option.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(VAR_1):...\n",
"return frappe.db.get_value('Blog Category', {'name': VAR_1}, 'title') or VAR_1\n"
] | [
"def get_blog_category(route):...\n",
"return frappe.db.get_value('Blog Category', {'name': route}, 'title') or route\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@login_required...\n",
"VAR_15 = get_object_or_404(Candidate, id=candidate_id)\n",
"if VAR_15.person.netid != VAR_2.user.username:\n",
"if not VAR_2.user.has_perm('etd_app.change_candidate'):\n",
"if not VAR_15.thesis.current_file_name:\n",
"return HttpResponseForbidden(\n \"You don't have permission to view this candidate's thesis.\")\n",
"return HttpResponse(\n \"Couldn't find a file: please email %s if there should be one.\" % VAR_0)\n",
"VAR_25 = os.path.join(settings.MEDIA_ROOT, VAR_15.thesis.current_file_name)\n",
"VAR_26 = FileResponse(open(VAR_25, 'rb'), content_type='application/pdf')\n",
"VAR_26['Content-Disposition'\n ] = 'attachment; filename=\"%s\"' % VAR_15.thesis.original_file_name\n",
"return VAR_26\n"
] | [
"@login_required...\n",
"candidate = get_object_or_404(Candidate, id=candidate_id)\n",
"if candidate.person.netid != request.user.username:\n",
"if not request.user.has_perm('etd_app.change_candidate'):\n",
"if not candidate.thesis.current_file_name:\n",
"return HttpResponseForbidden(\n \"You don't have permission to view this candidate's thesis.\")\n",
"return HttpResponse(\n \"Couldn't find a file: please email %s if there should be one.\" % BDR_EMAIL\n )\n",
"file_path = os.path.join(settings.MEDIA_ROOT, candidate.thesis.\n current_file_name)\n",
"response = FileResponse(open(file_path, 'rb'), content_type='application/pdf')\n",
"response['Content-Disposition'\n ] = 'attachment; filename=\"%s\"' % candidate.thesis.original_file_name\n",
"return response\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_0, VAR_7):...\n",
"self.name = VAR_0\n",
"self.href = VAR_7\n"
] | [
"def __init__(self, name, href):...\n",
"self.name = name\n",
"self.href = href\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(self, VAR_3):...\n",
"VAR_5 = 'select userid from comment_like where commentid=%d' % VAR_3\n",
"VAR_6 = sql.queryDB(self.conn, VAR_5)\n",
"return VAR_6\n"
] | [
"def getCommentsLike(self, commentid):...\n",
"sqlText = 'select userid from comment_like where commentid=%d' % commentid\n",
"result = sql.queryDB(self.conn, sqlText)\n",
"return result\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_1):...\n",
"return CLASS_3(VAR_1, VAR_2=True)\n"
] | [
"def r_strict(item):...\n",
"return Roamer(item, _raise=True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2(VAR_3):...\n",
"@wraps(VAR_3)...\n",
"VAR_6 = VAR_5.headers.get(VAR_1)\n",
"VAR_11 = VAR_5.session.get_csrf_token()\n",
"if VAR_6 == VAR_11:\n",
"return VAR_3(VAR_9, VAR_5)\n",
"return FUNC_5\n"
] | [
"def csrf(fn):...\n",
"@wraps(fn)...\n",
"token = request.headers.get(HEADER_NAME)\n",
"session_token = request.session.get_csrf_token()\n",
"if token == session_token:\n",
"return fn(context, request)\n",
"return wrapper\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_27(VAR_13):...\n",
"self.assertEqual('/request', VAR_13.path)\n",
"VAR_12.append('not_applicable')\n",
"return None\n"
] | [
"def not_applicable(request):...\n",
"self.assertEqual('/request', request.path)\n",
"calls.append('not_applicable')\n",
"return None\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"self.runner.policy.strict_check = True\n",
"self.runall(self.checks)\n",
"VAR_15 = self.runner.stats\n",
"self.assertEqual(7, VAR_15.num_cases())\n",
"self.assertEqual(5, len(VAR_15.failures()))\n",
"self.assertEqual(2, self._num_failures_stage('setup'))\n",
"self.assertEqual(1, self._num_failures_stage('sanity'))\n",
"self.assertEqual(2, self._num_failures_stage('performance'))\n"
] | [
"def test_strict_performance_check(self):...\n",
"self.runner.policy.strict_check = True\n",
"self.runall(self.checks)\n",
"stats = self.runner.stats\n",
"self.assertEqual(7, stats.num_cases())\n",
"self.assertEqual(5, len(stats.failures()))\n",
"self.assertEqual(2, self._num_failures_stage('setup'))\n",
"self.assertEqual(1, self._num_failures_stage('sanity'))\n",
"self.assertEqual(2, self._num_failures_stage('performance'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(self):...\n",
"if self._hosts_cache is None:\n",
"self._hosts_cache = self._get_hosts()\n",
"return self._hosts_cache\n"
] | [
"def get_hosts(self):...\n",
"if self._hosts_cache is None:\n",
"self._hosts_cache = self._get_hosts()\n",
"return self._hosts_cache\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_17(self):...\n",
"self._test_strtype('varchar', u'')\n"
] | [
"def test_text_upperlatin(self):...\n",
"self._test_strtype('varchar', u'')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_0):...\n",
"VAR_7 = psycopg2.connect(dbname=pg_connection['database'], user=\n pg_connection['user'], password=pg_connection['password'], host=\n pg_connection['host'])\n",
"VAR_8 = VAR_7.cursor()\n",
"VAR_8.execute('string')\n",
"VAR_7.commit()\n",
"VAR_8.close()\n",
"VAR_7.close()\n"
] | [
"def create_tables(pg_connection):...\n",
"conn = psycopg2.connect(dbname=pg_connection['database'], user=\n pg_connection['user'], password=pg_connection['password'], host=\n pg_connection['host'])\n",
"cur = conn.cursor()\n",
"cur.execute(\n \"\"\"\n CREATE TABLE IF NOT EXISTS quests\n (id SERIAL PRIMARY KEY, tier VARCHAR, description VARCHAR, creator VARCHAR, completed BOOLEAN);\n \"\"\"\n )\n",
"conn.commit()\n",
"cur.close()\n",
"conn.close()\n"
] | [
0,
4,
4,
4,
0,
4,
4
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not site_settings.OPTIONAL_PATH_PREFIX:\n",
"return False\n",
"VAR_14 = self.request.path[1:]\n",
"if VAR_14 == site_settings.OPTIONAL_PATH_PREFIX:\n",
"return True\n",
"return VAR_14.startswith('%s/' % site_settings.OPTIONAL_PATH_PREFIX)\n"
] | [
"def _request_is_for_prefixed_path(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not site_settings.OPTIONAL_PATH_PREFIX:\n",
"return False\n",
"req_path = self.request.path[1:]\n",
"if req_path == site_settings.OPTIONAL_PATH_PREFIX:\n",
"return True\n",
"return req_path.startswith('%s/' % site_settings.OPTIONAL_PATH_PREFIX)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"from django.views.generic import TemplateView, FormView, DetailView\n",
"from django.urls import reverse\n",
"from .entryform import EntryForm, entry_form_config, build_question_flag\n",
"from .models import LifeCondition, Benefit, BenefitRequirement\n",
"VAR_0 = 'core/benefit_overview.html'\n",
"def FUNC_0(self):...\n",
"VAR_7 = super().get_context_data()\n",
"VAR_7['life_conditions'] = LifeCondition.objects.with_benefits()\n",
"return VAR_7\n"
] | [
"from django.views.generic import TemplateView, FormView, DetailView\n",
"from django.urls import reverse\n",
"from .entryform import EntryForm, entry_form_config, build_question_flag\n",
"from .models import LifeCondition, Benefit, BenefitRequirement\n",
"template_name = 'core/benefit_overview.html'\n",
"def get_context_data(self):...\n",
"data = super().get_context_data()\n",
"data['life_conditions'] = LifeCondition.objects.with_benefits()\n",
"return data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@api.depends('transitions_to', 'automaton')...\n",
"for record in self:\n",
"if len(record.transitions_to) == 0 or record.transitions_to is False:\n",
"record.is_start_state = True\n",
"record.is_start_state = False\n"
] | [
"@api.depends('transitions_to', 'automaton')...\n",
"for record in self:\n",
"if len(record.transitions_to) == 0 or record.transitions_to is False:\n",
"record.is_start_state = True\n",
"record.is_start_state = False\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"For",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_3(self, VAR_2, VAR_4, VAR_5):...\n",
"VAR_17 = VAR_2.user\n",
"return not isinstance(VAR_5, self.model\n ) or VAR_17.is_staff or VAR_17.is_superuser or self.is_object_visible(VAR_2\n , VAR_4, VAR_5)\n"
] | [
"def has_object_permission(self, request, view, obj):...\n",
"user = request.user\n",
"return not isinstance(obj, self.model\n ) or user.is_staff or user.is_superuser or self.is_object_visible(request,\n view, obj)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
End of preview. Expand
in Dataset Viewer.
Dataset Card for "vul_lines"
Original Paper: https://www.sciencedirect.com/science/article/abs/pii/S0167739X24004680
```bibtex
@article{TRAN2024107504,
title = {DetectVul: A statement-level code vulnerability detection for Python},
journal = {Future Generation Computer Systems},
pages = {107504},
year = {2024},
issn = {0167-739X},
doi = {https://doi.org/10.1016/j.future.2024.107504},
url = {https://www.sciencedirect.com/science/article/pii/S0167739X24004680},
author = {Hoai-Chau Tran and Anh-Duy Tran and Kim-Hung Le},
keywords = {Source code vulnerability detection, Deep learning, Natural language processing},
abstract = {Detecting vulnerabilities in source code using graph neural networks (GNN) has gained significant attention in recent years. However, the detection performance of these approaches relies highly on the graph structure, and constructing meaningful graphs is expensive. Moreover, they often operate at a coarse level of granularity (such as function-level), which limits their applicability to other scripting languages like Python and their effectiveness in identifying vulnerabilities. To address these limitations, we propose DetectVul, a new approach that accurately detects vulnerable patterns in Python source code at the statement level. DetectVul applies self-attention to directly learn patterns and interactions between statements in a raw Python function; thus, it eliminates the complicated graph extraction process without sacrificing model performance. In addition, the information about each type of statement is also leveraged to enhance the model’s detection accuracy. In our experiments, we used two datasets, CVEFixes and Vudenc, with 211,317 Python statements in 21,571 functions from real-world projects on GitHub, covering seven vulnerability types. Our experiments show that DetectVul outperforms GNN-based models using control flow graphs, achieving the best F1 score of 74.47%, which is 25.45% and 18.05% higher than the best GCN and GAT models, respectively.}
}
```
- Downloads last month
- 59