id_within_dataset: int64 (1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: sequence (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 value)
is_duplicated: bool (2 classes)
410
def floating_ip_get_pools(context): return IMPL.floating_ip_get_pools(context)
[ "def", "floating_ip_get_pools", "(", "context", ")", ":", "return", "IMPL", ".", "floating_ip_get_pools", "(", "context", ")" ]
returns a list of floating ip pools .
train
false
411
@with_device
def getprop(name=None):
    with context.quiet:
        if name:
            return process(['getprop', name]).recvall().strip()
        result = process(['getprop']).recvall()
        expr = '\\[([^\\]]+)\\]: \\[(.*)\\]'
        props = {}
        for line in result.splitlines():
            if (not line.startswith('[')):
                continue
            (name, value) = re.search(expr, line).groups()
            if value.isdigit():
                value = int(value)
            props[name] = value
        return props
[ "@", "with_device", "def", "getprop", "(", "name", "=", "None", ")", ":", "with", "context", ".", "quiet", ":", "if", "name", ":", "return", "process", "(", "[", "'getprop'", ",", "name", "]", ")", ".", "recvall", "(", ")", ".", "strip", "(", ")", "result", "=", "process", "(", "[", "'getprop'", "]", ")", ".", "recvall", "(", ")", "expr", "=", "'\\\\[([^\\\\]]+)\\\\]: \\\\[(.*)\\\\]'", "props", "=", "{", "}", "for", "line", "in", "result", ".", "splitlines", "(", ")", ":", "if", "(", "not", "line", ".", "startswith", "(", "'['", ")", ")", ":", "continue", "(", "name", ",", "value", ")", "=", "re", ".", "search", "(", "expr", ",", "line", ")", ".", "groups", "(", ")", "if", "value", ".", "isdigit", "(", ")", ":", "value", "=", "int", "(", "value", ")", "props", "[", "name", "]", "=", "value", "return", "props" ]
reads properties from the system property store .
train
false
412
def list_remote_pythons(host):
    result = host.run('ls /usr/bin/python[0-9]*')
    return result.stdout.splitlines()
[ "def", "list_remote_pythons", "(", "host", ")", ":", "result", "=", "host", ".", "run", "(", "'ls /usr/bin/python[0-9]*'", ")", "return", "result", ".", "stdout", ".", "splitlines", "(", ")" ]
list out installed pythons on host .
train
false
413
def _FindAncestorAtIndent(node, indent):
    if (node.parent.parent is None):
        return node
    parent_indent = pytree_utils.GetNodeAnnotation(node.parent, pytree_utils.Annotation.CHILD_INDENT)
    if ((parent_indent is not None) and indent.startswith(parent_indent)):
        return node
    else:
        return _FindAncestorAtIndent(node.parent, indent)
[ "def", "_FindAncestorAtIndent", "(", "node", ",", "indent", ")", ":", "if", "(", "node", ".", "parent", ".", "parent", "is", "None", ")", ":", "return", "node", "parent_indent", "=", "pytree_utils", ".", "GetNodeAnnotation", "(", "node", ".", "parent", ",", "pytree_utils", ".", "Annotation", ".", "CHILD_INDENT", ")", "if", "(", "(", "parent_indent", "is", "not", "None", ")", "and", "indent", ".", "startswith", "(", "parent_indent", ")", ")", ":", "return", "node", "else", ":", "return", "_FindAncestorAtIndent", "(", "node", ".", "parent", ",", "indent", ")" ]
find an ancestor of node with the given indentation .
train
false
414
def _detect_os():
    os_family = __grains__['os_family']
    if (os_family == 'RedHat'):
        return 'apachectl'
    elif ((os_family == 'Debian') or (os_family == 'SUSE')):
        return 'apache2ctl'
    else:
        return 'apachectl'
[ "def", "_detect_os", "(", ")", ":", "os_family", "=", "__grains__", "[", "'os_family'", "]", "if", "(", "os_family", "==", "'RedHat'", ")", ":", "return", "'apachectl'", "elif", "(", "(", "os_family", "==", "'Debian'", ")", "or", "(", "os_family", "==", "'SUSE'", ")", ")", ":", "return", "'apache2ctl'", "else", ":", "return", "'apachectl'" ]
apache commands and paths differ depending on packaging .
train
false
416
def __ipv4_netmask(value):
    (valid, errmsg) = (False, 'dotted quad or integer CIDR (0->32)')
    (valid, value, _) = __int(value)
    if (not (valid and (0 <= value <= 32))):
        valid = salt.utils.validate.net.netmask(value)
    return (valid, value, errmsg)
[ "def", "__ipv4_netmask", "(", "value", ")", ":", "(", "valid", ",", "errmsg", ")", "=", "(", "False", ",", "'dotted quad or integer CIDR (0->32)'", ")", "(", "valid", ",", "value", ",", "_", ")", "=", "__int", "(", "value", ")", "if", "(", "not", "(", "valid", "and", "(", "0", "<=", "value", "<=", "32", ")", ")", ")", ":", "valid", "=", "salt", ".", "utils", ".", "validate", ".", "net", ".", "netmask", "(", "value", ")", "return", "(", "valid", ",", "value", ",", "errmsg", ")" ]
validate an ipv4 dotted quad or integer cidr netmask .
train
true
417
@frappe.whitelist()
def get_contact_list(txt):
    txt = txt.replace(u'%', u'')

    def get_users():
        return filter(None, frappe.db.sql_list(u'select email from tabUser where email like %s', ((u'%' + txt) + u'%')))
    try:
        out = filter(None, frappe.db.sql_list(u'select email_id from `tabContact`\n\t\t\twhere `email_id` like %(txt)s order by\n\t\t\tif (locate( %(_txt)s, email_id), locate( %(_txt)s, email_id), 99999)', {u'txt': (u'%%%s%%' % frappe.db.escape(txt)), u'_txt': txt.replace(u'%', u'')}))
        if (not out):
            out = get_users()
    except Exception as e:
        if (e.args[0] == 1146):
            out = get_users()
        else:
            raise
    return out
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_contact_list", "(", "txt", ")", ":", "txt", "=", "txt", ".", "replace", "(", "u'%'", ",", "u''", ")", "def", "get_users", "(", ")", ":", "return", "filter", "(", "None", ",", "frappe", ".", "db", ".", "sql_list", "(", "u'select email from tabUser where email like %s'", ",", "(", "(", "u'%'", "+", "txt", ")", "+", "u'%'", ")", ")", ")", "try", ":", "out", "=", "filter", "(", "None", ",", "frappe", ".", "db", ".", "sql_list", "(", "u'select email_id from `tabContact`\\n DCTB DCTB DCTB where `email_id` like %(txt)s order by\\n DCTB DCTB DCTB if (locate( %(_txt)s, email_id), locate( %(_txt)s, email_id), 99999)'", ",", "{", "u'txt'", ":", "(", "u'%%%s%%'", "%", "frappe", ".", "db", ".", "escape", "(", "txt", ")", ")", ",", "u'_txt'", ":", "txt", ".", "replace", "(", "u'%'", ",", "u''", ")", "}", ")", ")", "if", "(", "not", "out", ")", ":", "out", "=", "get_users", "(", ")", "except", "Exception", "as", "e", ":", "if", "(", "e", ".", "args", "[", "0", "]", "==", "1146", ")", ":", "out", "=", "get_users", "(", ")", "else", ":", "raise", "return", "out" ]
returns contacts .
train
false
418
def nginx_restart(nginx_ctl, nginx_conf='/etc/nginx.conf'):
    try:
        proc = subprocess.Popen([nginx_ctl, '-c', nginx_conf, '-s', 'reload'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
        if (proc.returncode != 0):
            nginx_proc = subprocess.Popen([nginx_ctl, '-c', nginx_conf], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (stdout, stderr) = nginx_proc.communicate()
            if (nginx_proc.returncode != 0):
                raise errors.MisconfigurationError(('nginx restart failed:\n%s\n%s' % (stdout, stderr)))
    except (OSError, ValueError):
        raise errors.MisconfigurationError('nginx restart failed')
    time.sleep(1)
[ "def", "nginx_restart", "(", "nginx_ctl", ",", "nginx_conf", "=", "'/etc/nginx.conf'", ")", ":", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "[", "nginx_ctl", ",", "'-c'", ",", "nginx_conf", ",", "'-s'", ",", "'reload'", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "stdout", ",", "stderr", ")", "=", "proc", ".", "communicate", "(", ")", "if", "(", "proc", ".", "returncode", "!=", "0", ")", ":", "nginx_proc", "=", "subprocess", ".", "Popen", "(", "[", "nginx_ctl", ",", "'-c'", ",", "nginx_conf", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "stdout", ",", "stderr", ")", "=", "nginx_proc", ".", "communicate", "(", ")", "if", "(", "nginx_proc", ".", "returncode", "!=", "0", ")", ":", "raise", "errors", ".", "MisconfigurationError", "(", "(", "'nginx restart failed:\\n%s\\n%s'", "%", "(", "stdout", ",", "stderr", ")", ")", ")", "except", "(", "OSError", ",", "ValueError", ")", ":", "raise", "errors", ".", "MisconfigurationError", "(", "'nginx restart failed'", ")", "time", ".", "sleep", "(", "1", ")" ]
restarts the nginx server .
train
false
419
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
420
def match_hostname(cert, hostname):
    if (not cert):
        raise ValueError('empty or no certificate')
    dnsnames = []
    san = cert.get('subjectAltName', ())
    for (key, value) in san:
        if (key == 'DNS'):
            if _dnsname_to_pat(value).match(hostname):
                return
            dnsnames.append(value)
    if (not dnsnames):
        for sub in cert.get('subject', ()):
            for (key, value) in sub:
                if (key == 'commonName'):
                    if _dnsname_to_pat(value).match(hostname):
                        return
                    dnsnames.append(value)
    if (len(dnsnames) > 1):
        raise CertificateError(("hostname %r doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames)))))
    elif (len(dnsnames) == 1):
        raise CertificateError(("hostname %r doesn't match %r" % (hostname, dnsnames[0])))
    else:
        raise CertificateError('no appropriate commonName or subjectAltName fields were found')
[ "def", "match_hostname", "(", "cert", ",", "hostname", ")", ":", "if", "(", "not", "cert", ")", ":", "raise", "ValueError", "(", "'empty or no certificate'", ")", "dnsnames", "=", "[", "]", "san", "=", "cert", ".", "get", "(", "'subjectAltName'", ",", "(", ")", ")", "for", "(", "key", ",", "value", ")", "in", "san", ":", "if", "(", "key", "==", "'DNS'", ")", ":", "if", "_dnsname_to_pat", "(", "value", ")", ".", "match", "(", "hostname", ")", ":", "return", "dnsnames", ".", "append", "(", "value", ")", "if", "(", "not", "dnsnames", ")", ":", "for", "sub", "in", "cert", ".", "get", "(", "'subject'", ",", "(", ")", ")", ":", "for", "(", "key", ",", "value", ")", "in", "sub", ":", "if", "(", "key", "==", "'commonName'", ")", ":", "if", "_dnsname_to_pat", "(", "value", ")", ".", "match", "(", "hostname", ")", ":", "return", "dnsnames", ".", "append", "(", "value", ")", "if", "(", "len", "(", "dnsnames", ")", ">", "1", ")", ":", "raise", "CertificateError", "(", "(", "\"hostname %r doesn't match either of %s\"", "%", "(", "hostname", ",", "', '", ".", "join", "(", "map", "(", "repr", ",", "dnsnames", ")", ")", ")", ")", ")", "elif", "(", "len", "(", "dnsnames", ")", "==", "1", ")", ":", "raise", "CertificateError", "(", "(", "\"hostname %r doesn't match %r\"", "%", "(", "hostname", ",", "dnsnames", "[", "0", "]", ")", ")", ")", "else", ":", "raise", "CertificateError", "(", "'no appropriate commonName or subjectAltName fields were found'", ")" ]
verify that *cert* (in decoded format as returned by sslsocket .
train
true
422
def hydrate_bundles(bundles_field, files_digest_list, excluded_files_list):
    bundles = []
    zipped = zip(bundles_field.bundles, bundles_field.filespecs_list, files_digest_list, excluded_files_list)
    for (bundle, filespecs, files_digest, excluded_files) in zipped:
        spec_path = bundles_field.address.spec_path
        kwargs = bundle.kwargs()
        kwargs[u'fileset'] = _eager_fileset_with_spec(getattr(bundle, u'rel_path', spec_path), filespecs, files_digest, excluded_files)
        bundles.append(BundleAdaptor(**kwargs))
    return HydratedField(u'bundles', bundles)
[ "def", "hydrate_bundles", "(", "bundles_field", ",", "files_digest_list", ",", "excluded_files_list", ")", ":", "bundles", "=", "[", "]", "zipped", "=", "zip", "(", "bundles_field", ".", "bundles", ",", "bundles_field", ".", "filespecs_list", ",", "files_digest_list", ",", "excluded_files_list", ")", "for", "(", "bundle", ",", "filespecs", ",", "files_digest", ",", "excluded_files", ")", "in", "zipped", ":", "spec_path", "=", "bundles_field", ".", "address", ".", "spec_path", "kwargs", "=", "bundle", ".", "kwargs", "(", ")", "kwargs", "[", "u'fileset'", "]", "=", "_eager_fileset_with_spec", "(", "getattr", "(", "bundle", ",", "u'rel_path'", ",", "spec_path", ")", ",", "filespecs", ",", "files_digest", ",", "excluded_files", ")", "bundles", ".", "append", "(", "BundleAdaptor", "(", "**", "kwargs", ")", ")", "return", "HydratedField", "(", "u'bundles'", ",", "bundles", ")" ]
given a bundlesfield and filesdigest for each of its filesets create a list of bundleadaptors .
train
false
423
def test_disk_store_alt_name_abspath():
    with TestConfig(DISK_TEST_CONFIG) as (directory, object_store):
        empty_dataset = MockDataset(1)
        directory.write('', 'files1/000/dataset_1.dat')
        absfoo = os.path.abspath(os.path.join(directory.temp_directory, 'foo.txt'))
        with open(absfoo, 'w') as f:
            f.write('foo')
        try:
            assert (object_store.get_data(empty_dataset, extra_dir='dataset_1_files', alt_name=absfoo) != 'foo')
        except ObjectInvalid:
            pass
[ "def", "test_disk_store_alt_name_abspath", "(", ")", ":", "with", "TestConfig", "(", "DISK_TEST_CONFIG", ")", "as", "(", "directory", ",", "object_store", ")", ":", "empty_dataset", "=", "MockDataset", "(", "1", ")", "directory", ".", "write", "(", "''", ",", "'files1/000/dataset_1.dat'", ")", "absfoo", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "directory", ".", "temp_directory", ",", "'foo.txt'", ")", ")", "with", "open", "(", "absfoo", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'foo'", ")", "try", ":", "assert", "(", "object_store", ".", "get_data", "(", "empty_dataset", ",", "extra_dir", "=", "'dataset_1_files'", ",", "alt_name", "=", "absfoo", ")", "!=", "'foo'", ")", "except", "ObjectInvalid", ":", "pass" ]
test that alt_name cannot be used to access arbitrary paths using a absolute path .
train
false
425
def split_axis(x, indices_or_sections, axis, force_tuple=False):
    res = SplitAxis(indices_or_sections, axis)(x)
    if (force_tuple and isinstance(res, chainer.Variable)):
        res = (res,)
    return res
[ "def", "split_axis", "(", "x", ",", "indices_or_sections", ",", "axis", ",", "force_tuple", "=", "False", ")", ":", "res", "=", "SplitAxis", "(", "indices_or_sections", ",", "axis", ")", "(", "x", ")", "if", "(", "force_tuple", "and", "isinstance", "(", "res", ",", "chainer", ".", "Variable", ")", ")", ":", "res", "=", "(", "res", ",", ")", "return", "res" ]
splits given variables along an axis .
train
false
427
def expr_to_config(expr, conditions, hps):
    expr = as_apply(expr)
    if (conditions is None):
        conditions = ()
    assert isinstance(expr, Apply)
    _expr_to_config(expr, conditions, hps)
    _remove_allpaths(hps, conditions)
[ "def", "expr_to_config", "(", "expr", ",", "conditions", ",", "hps", ")", ":", "expr", "=", "as_apply", "(", "expr", ")", "if", "(", "conditions", "is", "None", ")", ":", "conditions", "=", "(", ")", "assert", "isinstance", "(", "expr", ",", "Apply", ")", "_expr_to_config", "(", "expr", ",", "conditions", ",", "hps", ")", "_remove_allpaths", "(", "hps", ",", "conditions", ")" ]
populate dictionary hps with the hyperparameters in pyll graph expr and conditions for participation in the evaluation of expr .
train
false
428
def create_vs(lb, name, ip, port, protocol, profile, pool_name):
    if __opts__['load_balancers'].get(lb, None):
        (username, password) = list(__opts__['load_balancers'][lb].values())
    else:
        raise Exception('Unable to find `{0}` load balancer'.format(lb))
    F5 = F5Mgmt(lb, username, password)
    F5.create_vs(name, ip, port, protocol, profile, pool_name)
    return True
[ "def", "create_vs", "(", "lb", ",", "name", ",", "ip", ",", "port", ",", "protocol", ",", "profile", ",", "pool_name", ")", ":", "if", "__opts__", "[", "'load_balancers'", "]", ".", "get", "(", "lb", ",", "None", ")", ":", "(", "username", ",", "password", ")", "=", "list", "(", "__opts__", "[", "'load_balancers'", "]", "[", "lb", "]", ".", "values", "(", ")", ")", "else", ":", "raise", "Exception", "(", "'Unable to find `{0}` load balancer'", ".", "format", "(", "lb", ")", ")", "F5", "=", "F5Mgmt", "(", "lb", ",", "username", ",", "password", ")", "F5", ".", "create_vs", "(", "name", ",", "ip", ",", "port", ",", "protocol", ",", "profile", ",", "pool_name", ")", "return", "True" ]
create a virtual server . cli examples: .
train
true
429
def QuoteForRspFile(arg):
    arg = windows_quoter_regex.sub((lambda mo: ((2 * mo.group(1)) + '\\"')), arg)
    arg = arg.replace('%', '%%')
    return (('"' + arg) + '"')
[ "def", "QuoteForRspFile", "(", "arg", ")", ":", "arg", "=", "windows_quoter_regex", ".", "sub", "(", "(", "lambda", "mo", ":", "(", "(", "2", "*", "mo", ".", "group", "(", "1", ")", ")", "+", "'\\\\\"'", ")", ")", ",", "arg", ")", "arg", "=", "arg", ".", "replace", "(", "'%'", ",", "'%%'", ")", "return", "(", "(", "'\"'", "+", "arg", ")", "+", "'\"'", ")" ]
quote a command line argument so that it appears as one argument when processed via cmd .
train
false
430
def decode_embedded_strs(src):
    if (not six.PY3):
        return src
    if isinstance(src, dict):
        return _decode_embedded_dict(src)
    elif isinstance(src, list):
        return _decode_embedded_list(src)
    elif isinstance(src, bytes):
        try:
            return src.decode()
        except UnicodeError:
            return src
    else:
        return src
[ "def", "decode_embedded_strs", "(", "src", ")", ":", "if", "(", "not", "six", ".", "PY3", ")", ":", "return", "src", "if", "isinstance", "(", "src", ",", "dict", ")", ":", "return", "_decode_embedded_dict", "(", "src", ")", "elif", "isinstance", "(", "src", ",", "list", ")", ":", "return", "_decode_embedded_list", "(", "src", ")", "elif", "isinstance", "(", "src", ",", "bytes", ")", ":", "try", ":", "return", "src", ".", "decode", "(", ")", "except", "UnicodeError", ":", "return", "src", "else", ":", "return", "src" ]
convert embedded bytes to strings if possible .
train
true
432
def should_switch_to_postcopy(memory_iteration, current_data_remaining, previous_data_remaining, migration_status):
    if ((migration_status == 'running (post-copy)') or (previous_data_remaining <= 0)):
        return False
    if (memory_iteration > 1):
        progress_percentage = round((((previous_data_remaining - current_data_remaining) * 100) / previous_data_remaining))
        if (progress_percentage < 10):
            return True
    return False
[ "def", "should_switch_to_postcopy", "(", "memory_iteration", ",", "current_data_remaining", ",", "previous_data_remaining", ",", "migration_status", ")", ":", "if", "(", "(", "migration_status", "==", "'running (post-copy)'", ")", "or", "(", "previous_data_remaining", "<=", "0", ")", ")", ":", "return", "False", "if", "(", "memory_iteration", ">", "1", ")", ":", "progress_percentage", "=", "round", "(", "(", "(", "(", "previous_data_remaining", "-", "current_data_remaining", ")", "*", "100", ")", "/", "previous_data_remaining", ")", ")", "if", "(", "progress_percentage", "<", "10", ")", ":", "return", "True", "return", "False" ]
determine if the migration should be switched to postcopy mode .
train
false
433
def decode_ascii(s): return s
[ "def", "decode_ascii", "(", "s", ")", ":", "return", "s" ]
in python 2 this is a no-op .
train
false
434
def pulled(name, tag='latest', force=False, insecure_registry=False, *args, **kwargs):
    inspect_image = __salt__['docker.inspect_image']
    image_name = _get_image_name(name, tag)
    image_infos = inspect_image(image_name)
    if (image_infos['status'] and (not force)):
        return _valid(name=name, comment='Image already pulled: {0}'.format(image_name))
    if __opts__['test']:
        comment = 'Image {0} will be pulled'.format(image_name)
        return _ret_status(name=name, comment=comment)
    previous_id = (image_infos['out']['Id'] if image_infos['status'] else None)
    pull = __salt__['docker.pull']
    returned = pull(name, tag=tag, insecure_registry=insecure_registry)
    if (previous_id != returned['id']):
        changes = {name: {'old': previous_id, 'new': returned['id']}}
        comment = 'Image {0} pulled'.format(image_name)
    else:
        changes = {}
        comment = ''
    return _ret_status(returned, name, changes=changes, comment=comment)
[ "def", "pulled", "(", "name", ",", "tag", "=", "'latest'", ",", "force", "=", "False", ",", "insecure_registry", "=", "False", ",", "*", "args", ",", "**", "kwargs", ")", ":", "inspect_image", "=", "__salt__", "[", "'docker.inspect_image'", "]", "image_name", "=", "_get_image_name", "(", "name", ",", "tag", ")", "image_infos", "=", "inspect_image", "(", "image_name", ")", "if", "(", "image_infos", "[", "'status'", "]", "and", "(", "not", "force", ")", ")", ":", "return", "_valid", "(", "name", "=", "name", ",", "comment", "=", "'Image already pulled: {0}'", ".", "format", "(", "image_name", ")", ")", "if", "__opts__", "[", "'test'", "]", ":", "comment", "=", "'Image {0} will be pulled'", ".", "format", "(", "image_name", ")", "return", "_ret_status", "(", "name", "=", "name", ",", "comment", "=", "comment", ")", "previous_id", "=", "(", "image_infos", "[", "'out'", "]", "[", "'Id'", "]", "if", "image_infos", "[", "'status'", "]", "else", "None", ")", "pull", "=", "__salt__", "[", "'docker.pull'", "]", "returned", "=", "pull", "(", "name", ",", "tag", "=", "tag", ",", "insecure_registry", "=", "insecure_registry", ")", "if", "(", "previous_id", "!=", "returned", "[", "'id'", "]", ")", ":", "changes", "=", "{", "name", ":", "{", "'old'", ":", "previous_id", ",", "'new'", ":", "returned", "[", "'id'", "]", "}", "}", "comment", "=", "'Image {0} pulled'", ".", "format", "(", "image_name", ")", "else", ":", "changes", "=", "{", "}", "comment", "=", "''", "return", "_ret_status", "(", "returned", ",", "name", ",", "changes", "=", "changes", ",", "comment", "=", "comment", ")" ]
pull an image from a docker registry .
train
false
435
def find_tag_definition(block, offset):
    (block, boundary) = next_tag_boundary(block, offset, forward=False)
    if ((not boundary) or (not boundary.is_start)):
        return (None, False)
    tag_start = boundary
    closing = tag_start.closing
    tag = tag_start.name
    if tag_start.prefix:
        tag = ((tag_start.prefix + u':') + tag)
    return (tag, closing)
[ "def", "find_tag_definition", "(", "block", ",", "offset", ")", ":", "(", "block", ",", "boundary", ")", "=", "next_tag_boundary", "(", "block", ",", "offset", ",", "forward", "=", "False", ")", "if", "(", "(", "not", "boundary", ")", "or", "(", "not", "boundary", ".", "is_start", ")", ")", ":", "return", "(", "None", ",", "False", ")", "tag_start", "=", "boundary", "closing", "=", "tag_start", ".", "closing", "tag", "=", "tag_start", ".", "name", "if", "tag_start", ".", "prefix", ":", "tag", "=", "(", "(", "tag_start", ".", "prefix", "+", "u':'", ")", "+", "tag", ")", "return", "(", "tag", ",", "closing", ")" ]
return the <tag | > definition .
train
false
436
def space2depth(X, r): return Space2Depth(r)(X)
[ "def", "space2depth", "(", "X", ",", "r", ")", ":", "return", "Space2Depth", "(", "r", ")", "(", "X", ")" ]
computes the space2depth transformation for subpixel calculations .
train
false
437
def random_complex_number(a=2, b=(-1), c=3, d=1, rational=False):
    (A, B) = (uniform(a, c), uniform(b, d))
    if (not rational):
        return (A + (I * B))
    return (nsimplify(A, rational=True) + (I * nsimplify(B, rational=True)))
[ "def", "random_complex_number", "(", "a", "=", "2", ",", "b", "=", "(", "-", "1", ")", ",", "c", "=", "3", ",", "d", "=", "1", ",", "rational", "=", "False", ")", ":", "(", "A", ",", "B", ")", "=", "(", "uniform", "(", "a", ",", "c", ")", ",", "uniform", "(", "b", ",", "d", ")", ")", "if", "(", "not", "rational", ")", ":", "return", "(", "A", "+", "(", "I", "*", "B", ")", ")", "return", "(", "nsimplify", "(", "A", ",", "rational", "=", "True", ")", "+", "(", "I", "*", "nsimplify", "(", "B", ",", "rational", "=", "True", ")", ")", ")" ]
return a random complex number .
train
false
438
def fetch_image_stream_optimized(context, instance, session, vm_name, ds_name, vm_folder_ref, res_pool_ref):
    image_ref = instance.image_ref
    LOG.debug("Downloading image file data %(image_ref)s to the ESX as VM named '%(vm_name)s'", {'image_ref': image_ref, 'vm_name': vm_name}, instance=instance)
    metadata = IMAGE_API.get(context, image_ref)
    file_size = int(metadata['size'])
    vm_import_spec = _build_import_spec_for_import_vapp(session, vm_name, ds_name)
    read_iter = IMAGE_API.download(context, image_ref)
    read_handle = rw_handles.ImageReadHandle(read_iter)
    write_handle = rw_handles.VmdkWriteHandle(session, session._host, session._port, res_pool_ref, vm_folder_ref, vm_import_spec, file_size)
    image_transfer(read_handle, write_handle)
    imported_vm_ref = write_handle.get_imported_vm()
    LOG.info(_LI('Downloaded image file data %(image_ref)s'), {'image_ref': instance.image_ref}, instance=instance)
    vmdk = vm_util.get_vmdk_info(session, imported_vm_ref, vm_name)
    session._call_method(session.vim, 'UnregisterVM', imported_vm_ref)
    LOG.info(_LI('The imported VM was unregistered'), instance=instance)
    return vmdk.capacity_in_bytes
[ "def", "fetch_image_stream_optimized", "(", "context", ",", "instance", ",", "session", ",", "vm_name", ",", "ds_name", ",", "vm_folder_ref", ",", "res_pool_ref", ")", ":", "image_ref", "=", "instance", ".", "image_ref", "LOG", ".", "debug", "(", "\"Downloading image file data %(image_ref)s to the ESX as VM named '%(vm_name)s'\"", ",", "{", "'image_ref'", ":", "image_ref", ",", "'vm_name'", ":", "vm_name", "}", ",", "instance", "=", "instance", ")", "metadata", "=", "IMAGE_API", ".", "get", "(", "context", ",", "image_ref", ")", "file_size", "=", "int", "(", "metadata", "[", "'size'", "]", ")", "vm_import_spec", "=", "_build_import_spec_for_import_vapp", "(", "session", ",", "vm_name", ",", "ds_name", ")", "read_iter", "=", "IMAGE_API", ".", "download", "(", "context", ",", "image_ref", ")", "read_handle", "=", "rw_handles", ".", "ImageReadHandle", "(", "read_iter", ")", "write_handle", "=", "rw_handles", ".", "VmdkWriteHandle", "(", "session", ",", "session", ".", "_host", ",", "session", ".", "_port", ",", "res_pool_ref", ",", "vm_folder_ref", ",", "vm_import_spec", ",", "file_size", ")", "image_transfer", "(", "read_handle", ",", "write_handle", ")", "imported_vm_ref", "=", "write_handle", ".", "get_imported_vm", "(", ")", "LOG", ".", "info", "(", "_LI", "(", "'Downloaded image file data %(image_ref)s'", ")", ",", "{", "'image_ref'", ":", "instance", ".", "image_ref", "}", ",", "instance", "=", "instance", ")", "vmdk", "=", "vm_util", ".", "get_vmdk_info", "(", "session", ",", "imported_vm_ref", ",", "vm_name", ")", "session", ".", "_call_method", "(", "session", ".", "vim", ",", "'UnregisterVM'", ",", "imported_vm_ref", ")", "LOG", ".", "info", "(", "_LI", "(", "'The imported VM was unregistered'", ")", ",", "instance", "=", "instance", ")", "return", "vmdk", ".", "capacity_in_bytes" ]
fetch image from glance to esx datastore .
train
false
439
def jsonp_is_valid(funcname):
    func_regex = re.compile('\n ^[a-zA-Z_\\$]\n [a-zA-Z0-9_\\$]*\n (\\[[a-zA-Z0-9_\\$]*\\])*\n (\\.[a-zA-Z0-9_\\$]+\n (\\[[a-zA-Z0-9_\\$]*\\])*\n )*$\n ', re.VERBOSE)
    return bool(func_regex.match(funcname))
[ "def", "jsonp_is_valid", "(", "funcname", ")", ":", "func_regex", "=", "re", ".", "compile", "(", "'\\n ^[a-zA-Z_\\\\$]\\n [a-zA-Z0-9_\\\\$]*\\n (\\\\[[a-zA-Z0-9_\\\\$]*\\\\])*\\n (\\\\.[a-zA-Z0-9_\\\\$]+\\n (\\\\[[a-zA-Z0-9_\\\\$]*\\\\])*\\n )*$\\n '", ",", "re", ".", "VERBOSE", ")", "return", "bool", "(", "func_regex", ".", "match", "(", "funcname", ")", ")" ]
returns whether the jsonp function name is valid :arg funcname: the name of the jsonp function :returns: true or false .
train
false
441
def _create_whitelist_set(whitelist_path):
    f = open(whitelist_path, 'r')
    whitelist_set = set([line.strip() for line in f])
    f.close()
    return whitelist_set
[ "def", "_create_whitelist_set", "(", "whitelist_path", ")", ":", "f", "=", "open", "(", "whitelist_path", ",", "'r'", ")", "whitelist_set", "=", "set", "(", "[", "line", ".", "strip", "(", ")", "for", "line", "in", "f", "]", ")", "f", ".", "close", "(", ")", "return", "whitelist_set" ]
create a set with contents from a whitelist file for membership testing .
train
false
443
def commit():
    connection._commit()
    set_clean()
[ "def", "commit", "(", ")", ":", "connection", ".", "_commit", "(", ")", "set_clean", "(", ")" ]
commit the current directory .
train
false
444
def in6_6to4ExtractAddr(addr):
    try:
        addr = inet_pton(socket.AF_INET6, addr)
    except:
        return None
    if (addr[:2] != ' \x02'):
        return None
    return inet_ntop(socket.AF_INET, addr[2:6])
[ "def", "in6_6to4ExtractAddr", "(", "addr", ")", ":", "try", ":", "addr", "=", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "addr", ")", "except", ":", "return", "None", "if", "(", "addr", "[", ":", "2", "]", "!=", "' \\x02'", ")", ":", "return", "None", "return", "inet_ntop", "(", "socket", ".", "AF_INET", ",", "addr", "[", "2", ":", "6", "]", ")" ]
extract ipv4 address embedded in 6to4 address .
train
true
445
def create_commands_eb(all_files, ispaired, base_output_dir, optional_params='', leading_text='', trailing_text='', include_input_dir_path=False, remove_filepath_in_name=False):
    commands = []
    extensions = ['.fastq.gz', '.fastq', '.fq.gz', '.fq']
    for curr_fp in all_files:
        if include_input_dir_path:
            added_output_str = curr_fp.split('/')[(-2)]
        else:
            added_output_str = ''
        if (not remove_filepath_in_name):
            for extension in extensions:
                if (extension in curr_fp):
                    curr_ext = extension
            added_output_str += basename(curr_fp).split(curr_ext)[0]
        curr_outputdir = join(base_output_dir, added_output_str)
        if ispaired:
            command = ('%sextract_barcodes.py %s -f %s -r %s -o %s %s' % (_clean_leading_text(leading_text), optional_params, curr_fp, all_files[curr_fp], curr_outputdir, trailing_text))
        else:
            command = ('%sextract_barcodes.py %s -f %s -o %s %s' % (_clean_leading_text(leading_text), optional_params, curr_fp, curr_outputdir, trailing_text))
        commands.append([(('extract_barcodes.py: %s' % curr_fp), command)])
    return commands
[ "def", "create_commands_eb", "(", "all_files", ",", "ispaired", ",", "base_output_dir", ",", "optional_params", "=", "''", ",", "leading_text", "=", "''", ",", "trailing_text", "=", "''", ",", "include_input_dir_path", "=", "False", ",", "remove_filepath_in_name", "=", "False", ")", ":", "commands", "=", "[", "]", "extensions", "=", "[", "'.fastq.gz'", ",", "'.fastq'", ",", "'.fq.gz'", ",", "'.fq'", "]", "for", "curr_fp", "in", "all_files", ":", "if", "include_input_dir_path", ":", "added_output_str", "=", "curr_fp", ".", "split", "(", "'/'", ")", "[", "(", "-", "2", ")", "]", "else", ":", "added_output_str", "=", "''", "if", "(", "not", "remove_filepath_in_name", ")", ":", "for", "extension", "in", "extensions", ":", "if", "(", "extension", "in", "curr_fp", ")", ":", "curr_ext", "=", "extension", "added_output_str", "+=", "basename", "(", "curr_fp", ")", ".", "split", "(", "curr_ext", ")", "[", "0", "]", "curr_outputdir", "=", "join", "(", "base_output_dir", ",", "added_output_str", ")", "if", "ispaired", ":", "command", "=", "(", "'%sextract_barcodes.py %s -f %s -r %s -o %s %s'", "%", "(", "_clean_leading_text", "(", "leading_text", ")", ",", "optional_params", ",", "curr_fp", ",", "all_files", "[", "curr_fp", "]", ",", "curr_outputdir", ",", "trailing_text", ")", ")", "else", ":", "command", "=", "(", "'%sextract_barcodes.py %s -f %s -o %s %s'", "%", "(", "_clean_leading_text", "(", "leading_text", ")", ",", "optional_params", ",", "curr_fp", ",", "curr_outputdir", ",", "trailing_text", ")", ")", "commands", ".", "append", "(", "[", "(", "(", "'extract_barcodes.py: %s'", "%", "curr_fp", ")", ",", "command", ")", "]", ")", "return", "commands" ]
creates commands for extract_barcodes .
train
false
446
def get_lexer_by_name(_alias, **options):
    for (module_name, name, aliases, _, _) in LEXERS.itervalues():
        if (_alias in aliases):
            if (name not in _lexer_cache):
                _load_lexers(module_name)
            return _lexer_cache[name](**options)
    for cls in find_plugin_lexers():
        if (_alias in cls.aliases):
            return cls(**options)
    raise ClassNotFound(('no lexer for alias %r found' % _alias))
[ "def", "get_lexer_by_name", "(", "_alias", ",", "**", "options", ")", ":", "for", "(", "module_name", ",", "name", ",", "aliases", ",", "_", ",", "_", ")", "in", "LEXERS", ".", "itervalues", "(", ")", ":", "if", "(", "_alias", "in", "aliases", ")", ":", "if", "(", "name", "not", "in", "_lexer_cache", ")", ":", "_load_lexers", "(", "module_name", ")", "return", "_lexer_cache", "[", "name", "]", "(", "**", "options", ")", "for", "cls", "in", "find_plugin_lexers", "(", ")", ":", "if", "(", "_alias", "in", "cls", ".", "aliases", ")", ":", "return", "cls", "(", "**", "options", ")", "raise", "ClassNotFound", "(", "(", "'no lexer for alias %r found'", "%", "_alias", ")", ")" ]
get a lexer by an alias .
train
false
448
def get_starttime(length, nseconds, padding):
    maximum = ((length - padding) - nseconds)
    if (padding > maximum):
        return 0
    return random.randint(padding, maximum)
[ "def", "get_starttime", "(", "length", ",", "nseconds", ",", "padding", ")", ":", "maximum", "=", "(", "(", "length", "-", "padding", ")", "-", "nseconds", ")", "if", "(", "padding", ">", "maximum", ")", ":", "return", "0", "return", "random", ".", "randint", "(", "padding", ",", "maximum", ")" ]
length is total audio length in seconds . nseconds is amount of time to sample in seconds . padding is off-limits seconds at beginning and ending .
train
false
449
def write_cache_entry(f, entry):
    beginoffset = f.tell()
    (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = entry
    write_cache_time(f, ctime)
    write_cache_time(f, mtime)
    flags = (len(name) | (flags & (~ 4095)))
    f.write(struct.pack('>LLLLLL20sH', (dev & 4294967295), (ino & 4294967295), mode, uid, gid, size, hex_to_sha(sha), flags))
    f.write(name)
    real_size = (((f.tell() - beginoffset) + 8) & (~ 7))
    f.write(('\x00' * ((beginoffset + real_size) - f.tell())))
[ "def", "write_cache_entry", "(", "f", ",", "entry", ")", ":", "beginoffset", "=", "f", ".", "tell", "(", ")", "(", "name", ",", "ctime", ",", "mtime", ",", "dev", ",", "ino", ",", "mode", ",", "uid", ",", "gid", ",", "size", ",", "sha", ",", "flags", ")", "=", "entry", "write_cache_time", "(", "f", ",", "ctime", ")", "write_cache_time", "(", "f", ",", "mtime", ")", "flags", "=", "(", "len", "(", "name", ")", "|", "(", "flags", "&", "(", "~", "4095", ")", ")", ")", "f", ".", "write", "(", "struct", ".", "pack", "(", "'>LLLLLL20sH'", ",", "(", "dev", "&", "4294967295", ")", ",", "(", "ino", "&", "4294967295", ")", ",", "mode", ",", "uid", ",", "gid", ",", "size", ",", "hex_to_sha", "(", "sha", ")", ",", "flags", ")", ")", "f", ".", "write", "(", "name", ")", "real_size", "=", "(", "(", "(", "f", ".", "tell", "(", ")", "-", "beginoffset", ")", "+", "8", ")", "&", "(", "~", "7", ")", ")", "f", ".", "write", "(", "(", "'\\x00'", "*", "(", "(", "beginoffset", "+", "real_size", ")", "-", "f", ".", "tell", "(", ")", ")", ")", ")" ]
write an index entry to a file .
train
false
450
def nullspace(A, atol=1e-13, rtol=0):
    A = np.atleast_2d(A)
    (u, s, vh) = svd(A)
    tol = max(atol, (rtol * s[0]))
    nnz = (s >= tol).sum()
    ns = vh[nnz:].conj().T
    return ns
[ "def", "nullspace", "(", "A", ",", "atol", "=", "1e-13", ",", "rtol", "=", "0", ")", ":", "A", "=", "np", ".", "atleast_2d", "(", "A", ")", "(", "u", ",", "s", ",", "vh", ")", "=", "svd", "(", "A", ")", "tol", "=", "max", "(", "atol", ",", "(", "rtol", "*", "s", "[", "0", "]", ")", ")", "nnz", "=", "(", "s", ">=", "tol", ")", ".", "sum", "(", ")", "ns", "=", "vh", "[", "nnz", ":", "]", ".", "conj", "(", ")", ".", "T", "return", "ns" ]
compute an approximate basis for the nullspace of a .
train
true
451
def has_pbzip2():
    try:
        os_dep.command('pbzip2')
    except ValueError:
        return False
    return True
[ "def", "has_pbzip2", "(", ")", ":", "try", ":", "os_dep", ".", "command", "(", "'pbzip2'", ")", "except", "ValueError", ":", "return", "False", "return", "True" ]
check if parallel bzip2 is available on this system .
train
false
453
def test_Ellipse2D_circular():
    amplitude = 7.5
    radius = 10
    size = ((radius * 2) + 1)
    (y, x) = np.mgrid[0:size, 0:size]
    ellipse = models.Ellipse2D(amplitude, radius, radius, radius, radius, theta=0)(x, y)
    disk = models.Disk2D(amplitude, radius, radius, radius)(x, y)
    assert np.all((ellipse == disk))
[ "def", "test_Ellipse2D_circular", "(", ")", ":", "amplitude", "=", "7.5", "radius", "=", "10", "size", "=", "(", "(", "radius", "*", "2", ")", "+", "1", ")", "(", "y", ",", "x", ")", "=", "np", ".", "mgrid", "[", "0", ":", "size", ",", "0", ":", "size", "]", "ellipse", "=", "models", ".", "Ellipse2D", "(", "amplitude", ",", "radius", ",", "radius", ",", "radius", ",", "radius", ",", "theta", "=", "0", ")", "(", "x", ",", "y", ")", "disk", "=", "models", ".", "Disk2D", "(", "amplitude", ",", "radius", ",", "radius", ",", "radius", ")", "(", "x", ",", "y", ")", "assert", "np", ".", "all", "(", "(", "ellipse", "==", "disk", ")", ")" ]
test that circular ellipse2d agrees with disk2d [3736] .
train
false
454
def _get_fragments_coord(frags):
    if (not frags):
        return []
    init = [0]
    return reduce((lambda acc, frag: (acc + [(acc[(-1)] + len(frag))])), frags[:(-1)], init)
[ "def", "_get_fragments_coord", "(", "frags", ")", ":", "if", "(", "not", "frags", ")", ":", "return", "[", "]", "init", "=", "[", "0", "]", "return", "reduce", "(", "(", "lambda", "acc", ",", "frag", ":", "(", "acc", "+", "[", "(", "acc", "[", "(", "-", "1", ")", "]", "+", "len", "(", "frag", ")", ")", "]", ")", ")", ",", "frags", "[", ":", "(", "-", "1", ")", "]", ",", "init", ")" ]
returns the letter coordinate of the given list of fragments .
train
false
455
def nesting_exception_view(request):
    client = Client()
    client.get('/get_view/')
    raise Exception('exception message')
[ "def", "nesting_exception_view", "(", "request", ")", ":", "client", "=", "Client", "(", ")", "client", ".", "get", "(", "'/get_view/'", ")", "raise", "Exception", "(", "'exception message'", ")" ]
a view that uses a nested client to call another view and then raises an exception .
train
false
456
def search_pairs(item):
    (title, artist) = (item.title, item.artist)
    titles = [title]
    artists = [artist]
    pattern = '(.*?) {0}'.format(plugins.feat_tokens())
    match = re.search(pattern, artist, re.IGNORECASE)
    if match:
        artists.append(match.group(1))
    pattern = '(.+?)\\s+[(].*[)]$'
    match = re.search(pattern, title, re.IGNORECASE)
    if match:
        titles.append(match.group(1))
    pattern = '(.*?) {0}'.format(plugins.feat_tokens(for_artist=False))
    for title in titles[:]:
        match = re.search(pattern, title, re.IGNORECASE)
        if match:
            titles.append(match.group(1))
    multi_titles = []
    for title in titles:
        multi_titles.append([title])
        if ('/' in title):
            multi_titles.append([x.strip() for x in title.split('/')])
    return itertools.product(artists, multi_titles)
[ "def", "search_pairs", "(", "item", ")", ":", "(", "title", ",", "artist", ")", "=", "(", "item", ".", "title", ",", "item", ".", "artist", ")", "titles", "=", "[", "title", "]", "artists", "=", "[", "artist", "]", "pattern", "=", "'(.*?) {0}'", ".", "format", "(", "plugins", ".", "feat_tokens", "(", ")", ")", "match", "=", "re", ".", "search", "(", "pattern", ",", "artist", ",", "re", ".", "IGNORECASE", ")", "if", "match", ":", "artists", ".", "append", "(", "match", ".", "group", "(", "1", ")", ")", "pattern", "=", "'(.+?)\\\\s+[(].*[)]$'", "match", "=", "re", ".", "search", "(", "pattern", ",", "title", ",", "re", ".", "IGNORECASE", ")", "if", "match", ":", "titles", ".", "append", "(", "match", ".", "group", "(", "1", ")", ")", "pattern", "=", "'(.*?) {0}'", ".", "format", "(", "plugins", ".", "feat_tokens", "(", "for_artist", "=", "False", ")", ")", "for", "title", "in", "titles", "[", ":", "]", ":", "match", "=", "re", ".", "search", "(", "pattern", ",", "title", ",", "re", ".", "IGNORECASE", ")", "if", "match", ":", "titles", ".", "append", "(", "match", ".", "group", "(", "1", ")", ")", "multi_titles", "=", "[", "]", "for", "title", "in", "titles", ":", "multi_titles", ".", "append", "(", "[", "title", "]", ")", "if", "(", "'/'", "in", "title", ")", ":", "multi_titles", ".", "append", "(", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "title", ".", "split", "(", "'/'", ")", "]", ")", "return", "itertools", ".", "product", "(", "artists", ",", "multi_titles", ")" ]
yield pairs of artists and titles to search for .
train
false
460
def read_header(ofile):
    i = next(ofile)
    while r_comment.match(i):
        i = next(ofile)
    relation = None
    attributes = []
    while (not r_datameta.match(i)):
        m = r_headerline.match(i)
        if m:
            isattr = r_attribute.match(i)
            if isattr:
                (name, type, i) = tokenize_attribute(ofile, i)
                attributes.append((name, type))
            else:
                isrel = r_relation.match(i)
                if isrel:
                    relation = isrel.group(1)
                else:
                    raise ValueError(('Error parsing line %s' % i))
                i = next(ofile)
        else:
            i = next(ofile)
    return (relation, attributes)
[ "def", "read_header", "(", "ofile", ")", ":", "i", "=", "next", "(", "ofile", ")", "while", "r_comment", ".", "match", "(", "i", ")", ":", "i", "=", "next", "(", "ofile", ")", "relation", "=", "None", "attributes", "=", "[", "]", "while", "(", "not", "r_datameta", ".", "match", "(", "i", ")", ")", ":", "m", "=", "r_headerline", ".", "match", "(", "i", ")", "if", "m", ":", "isattr", "=", "r_attribute", ".", "match", "(", "i", ")", "if", "isattr", ":", "(", "name", ",", "type", ",", "i", ")", "=", "tokenize_attribute", "(", "ofile", ",", "i", ")", "attributes", ".", "append", "(", "(", "name", ",", "type", ")", ")", "else", ":", "isrel", "=", "r_relation", ".", "match", "(", "i", ")", "if", "isrel", ":", "relation", "=", "isrel", ".", "group", "(", "1", ")", "else", ":", "raise", "ValueError", "(", "(", "'Error parsing line %s'", "%", "i", ")", ")", "i", "=", "next", "(", "ofile", ")", "else", ":", "i", "=", "next", "(", "ofile", ")", "return", "(", "relation", ",", "attributes", ")" ]
parameters f : file or gzip .
train
false
461
def available_attrs(fn): return WRAPPER_ASSIGNMENTS
[ "def", "available_attrs", "(", "fn", ")", ":", "return", "WRAPPER_ASSIGNMENTS" ]
return the list of functools-wrappable attributes on a callable .
train
false
462
def _run_code(code, run_globals, init_globals=None, mod_name=None, mod_fname=None, mod_loader=None, pkg_name=None):
    if (init_globals is not None):
        run_globals.update(init_globals)
    run_globals.update(__name__=mod_name, __file__=mod_fname, __loader__=mod_loader, __package__=pkg_name)
    exec code in run_globals
    return run_globals
[ "def", "_run_code", "(", "code", ",", "run_globals", ",", "init_globals", "=", "None", ",", "mod_name", "=", "None", ",", "mod_fname", "=", "None", ",", "mod_loader", "=", "None", ",", "pkg_name", "=", "None", ")", ":", "if", "(", "init_globals", "is", "not", "None", ")", ":", "run_globals", ".", "update", "(", "init_globals", ")", "run_globals", ".", "update", "(", "__name__", "=", "mod_name", ",", "__file__", "=", "mod_fname", ",", "__loader__", "=", "mod_loader", ",", "__package__", "=", "pkg_name", ")", "exec", "code", "in", "run_globals", "return", "run_globals" ]
helper to run code in nominated namespace .
train
true
463
def exists_in(dirname, indexname=None):
    if os.path.exists(dirname):
        try:
            ix = open_dir(dirname, indexname=indexname)
            return (ix.latest_generation() > (-1))
        except EmptyIndexError:
            pass
    return False
[ "def", "exists_in", "(", "dirname", ",", "indexname", "=", "None", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "dirname", ")", ":", "try", ":", "ix", "=", "open_dir", "(", "dirname", ",", "indexname", "=", "indexname", ")", "return", "(", "ix", ".", "latest_generation", "(", ")", ">", "(", "-", "1", ")", ")", "except", "EmptyIndexError", ":", "pass", "return", "False" ]
returns true if dirname contains a whoosh index .
train
false
464
@app.task
def error_handler(task_id, task_name):
    logger = get_task_logger(__name__)
    result = app.AsyncResult(task_id)
    excep = result.get(propagate=False)
    logger.error('#####FAILURE LOG BEGIN#####\nTask {0} raised exception: {0}\n\\{0}\n#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
[ "@", "app", ".", "task", "def", "error_handler", "(", "task_id", ",", "task_name", ")", ":", "logger", "=", "get_task_logger", "(", "__name__", ")", "result", "=", "app", ".", "AsyncResult", "(", "task_id", ")", "excep", "=", "result", ".", "get", "(", "propagate", "=", "False", ")", "logger", ".", "error", "(", "'#####FAILURE LOG BEGIN#####\\nTask {0} raised exception: {0}\\n\\\\{0}\\n#####FAILURE LOG STOP#####'", ".", "format", "(", "task_name", ",", "excep", ",", "result", ".", "traceback", ")", ")" ]
logs detailed message about tasks that raise exceptions .
train
false
465
def test_import_invalid_module():
    invalid_module_names = {'foo-bar', 'foo:bar', '10foo'}
    valid_module_names = {'foobar'}
    with TemporaryDirectory() as tmpdir:
        sys.path.insert(0, tmpdir)
        for name in (invalid_module_names | valid_module_names):
            filename = os.path.join(tmpdir, (name + '.py'))
            open(filename, 'w').close()
        s = set(module_completion('import foo'))
        intersection = s.intersection(invalid_module_names)
        nt.assert_equal(intersection, set())
        assert valid_module_names.issubset(s), valid_module_names.intersection(s)
[ "def", "test_import_invalid_module", "(", ")", ":", "invalid_module_names", "=", "{", "'foo-bar'", ",", "'foo:bar'", ",", "'10foo'", "}", "valid_module_names", "=", "{", "'foobar'", "}", "with", "TemporaryDirectory", "(", ")", "as", "tmpdir", ":", "sys", ".", "path", ".", "insert", "(", "0", ",", "tmpdir", ")", "for", "name", "in", "(", "invalid_module_names", "|", "valid_module_names", ")", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "(", "name", "+", "'.py'", ")", ")", "open", "(", "filename", ",", "'w'", ")", ".", "close", "(", ")", "s", "=", "set", "(", "module_completion", "(", "'import foo'", ")", ")", "intersection", "=", "s", ".", "intersection", "(", "invalid_module_names", ")", "nt", ".", "assert_equal", "(", "intersection", ",", "set", "(", ")", ")", "assert", "valid_module_names", ".", "issubset", "(", "s", ")", ",", "valid_module_names", ".", "intersection", "(", "s", ")" ]
testing of issue URL .
train
false
467
def ArgList(args, lparen=LParen(), rparen=RParen()):
    node = Node(syms.trailer, [lparen.clone(), rparen.clone()])
    if args:
        node.insert_child(1, Node(syms.arglist, args))
    return node
[ "def", "ArgList", "(", "args", ",", "lparen", "=", "LParen", "(", ")", ",", "rparen", "=", "RParen", "(", ")", ")", ":", "node", "=", "Node", "(", "syms", ".", "trailer", ",", "[", "lparen", ".", "clone", "(", ")", ",", "rparen", ".", "clone", "(", ")", "]", ")", "if", "args", ":", "node", ".", "insert_child", "(", "1", ",", "Node", "(", "syms", ".", "arglist", ",", "args", ")", ")", "return", "node" ]
a parenthesised argument list .
train
true
468
def _cm_send_json(registration_ids, data, cloud_type='GCM', **kwargs):
    values = ({'registration_ids': registration_ids} if registration_ids else {})
    if (data is not None):
        values['data'] = data
    for (k, v) in kwargs.items():
        if v:
            values[k] = v
    data = json.dumps(values, separators=(',', ':'), sort_keys=True).encode('utf-8')
    if (cloud_type == 'GCM'):
        response = json.loads(_gcm_send(data, 'application/json'))
    elif (cloud_type == 'FCM'):
        response = json.loads(_fcm_send(data, 'application/json'))
    else:
        raise ImproperlyConfigured(('cloud_type must be GCM or FCM not %s' % str(cloud_type)))
    return _handler_cm_message_json(registration_ids, response, cloud_type)
[ "def", "_cm_send_json", "(", "registration_ids", ",", "data", ",", "cloud_type", "=", "'GCM'", ",", "**", "kwargs", ")", ":", "values", "=", "(", "{", "'registration_ids'", ":", "registration_ids", "}", "if", "registration_ids", "else", "{", "}", ")", "if", "(", "data", "is", "not", "None", ")", ":", "values", "[", "'data'", "]", "=", "data", "for", "(", "k", ",", "v", ")", "in", "kwargs", ".", "items", "(", ")", ":", "if", "v", ":", "values", "[", "k", "]", "=", "v", "data", "=", "json", ".", "dumps", "(", "values", ",", "separators", "=", "(", "','", ",", "':'", ")", ",", "sort_keys", "=", "True", ")", ".", "encode", "(", "'utf-8'", ")", "if", "(", "cloud_type", "==", "'GCM'", ")", ":", "response", "=", "json", ".", "loads", "(", "_gcm_send", "(", "data", ",", "'application/json'", ")", ")", "elif", "(", "cloud_type", "==", "'FCM'", ")", ":", "response", "=", "json", ".", "loads", "(", "_fcm_send", "(", "data", ",", "'application/json'", ")", ")", "else", ":", "raise", "ImproperlyConfigured", "(", "(", "'cloud_type must be GCM or FCM not %s'", "%", "str", "(", "cloud_type", ")", ")", ")", "return", "_handler_cm_message_json", "(", "registration_ids", ",", "response", ",", "cloud_type", ")" ]
sends a gcm notification to one or more registration_ids .
train
false
469
@requires_application()
def test_image():
    size = (100, 50)
    with TestingCanvas(size=size, bgcolor='w') as c:
        image = Image(cmap='grays', clim=[0, 1], parent=c.scene)
        for three_d in (True, False):
            shape = (((size[1] - 10), (size[0] - 10)) + ((3,) if three_d else ()))
            np.random.seed(379823)
            data = np.random.rand(*shape)
            image.set_data(data)
            assert_image_approved(c.render(), ('visuals/image%s.png' % ('_rgb' if three_d else '_mono')))
[ "@", "requires_application", "(", ")", "def", "test_image", "(", ")", ":", "size", "=", "(", "100", ",", "50", ")", "with", "TestingCanvas", "(", "size", "=", "size", ",", "bgcolor", "=", "'w'", ")", "as", "c", ":", "image", "=", "Image", "(", "cmap", "=", "'grays'", ",", "clim", "=", "[", "0", ",", "1", "]", ",", "parent", "=", "c", ".", "scene", ")", "for", "three_d", "in", "(", "True", ",", "False", ")", ":", "shape", "=", "(", "(", "(", "size", "[", "1", "]", "-", "10", ")", ",", "(", "size", "[", "0", "]", "-", "10", ")", ")", "+", "(", "(", "3", ",", ")", "if", "three_d", "else", "(", ")", ")", ")", "np", ".", "random", ".", "seed", "(", "379823", ")", "data", "=", "np", ".", "random", ".", "rand", "(", "*", "shape", ")", "image", ".", "set_data", "(", "data", ")", "assert_image_approved", "(", "c", ".", "render", "(", ")", ",", "(", "'visuals/image%s.png'", "%", "(", "'_rgb'", "if", "three_d", "else", "'_mono'", ")", ")", ")" ]
ensure that it's possible to output images with hug .
train
false
470
def _parse_pool_options(options):
    max_pool_size = options.get('maxpoolsize', common.MAX_POOL_SIZE)
    min_pool_size = options.get('minpoolsize', common.MIN_POOL_SIZE)
    max_idle_time_ms = options.get('maxidletimems', common.MAX_IDLE_TIME_MS)
    if ((max_pool_size is not None) and (min_pool_size > max_pool_size)):
        raise ValueError('minPoolSize must be smaller or equal to maxPoolSize')
    connect_timeout = options.get('connecttimeoutms', common.CONNECT_TIMEOUT)
    socket_keepalive = options.get('socketkeepalive', False)
    socket_timeout = options.get('sockettimeoutms')
    wait_queue_timeout = options.get('waitqueuetimeoutms')
    wait_queue_multiple = options.get('waitqueuemultiple')
    event_listeners = options.get('event_listeners')
    appname = options.get('appname')
    (ssl_context, ssl_match_hostname) = _parse_ssl_options(options)
    return PoolOptions(max_pool_size, min_pool_size, max_idle_time_ms, connect_timeout, socket_timeout, wait_queue_timeout, wait_queue_multiple, ssl_context, ssl_match_hostname, socket_keepalive, _EventListeners(event_listeners), appname)
[ "def", "_parse_pool_options", "(", "options", ")", ":", "max_pool_size", "=", "options", ".", "get", "(", "'maxpoolsize'", ",", "common", ".", "MAX_POOL_SIZE", ")", "min_pool_size", "=", "options", ".", "get", "(", "'minpoolsize'", ",", "common", ".", "MIN_POOL_SIZE", ")", "max_idle_time_ms", "=", "options", ".", "get", "(", "'maxidletimems'", ",", "common", ".", "MAX_IDLE_TIME_MS", ")", "if", "(", "(", "max_pool_size", "is", "not", "None", ")", "and", "(", "min_pool_size", ">", "max_pool_size", ")", ")", ":", "raise", "ValueError", "(", "'minPoolSize must be smaller or equal to maxPoolSize'", ")", "connect_timeout", "=", "options", ".", "get", "(", "'connecttimeoutms'", ",", "common", ".", "CONNECT_TIMEOUT", ")", "socket_keepalive", "=", "options", ".", "get", "(", "'socketkeepalive'", ",", "False", ")", "socket_timeout", "=", "options", ".", "get", "(", "'sockettimeoutms'", ")", "wait_queue_timeout", "=", "options", ".", "get", "(", "'waitqueuetimeoutms'", ")", "wait_queue_multiple", "=", "options", ".", "get", "(", "'waitqueuemultiple'", ")", "event_listeners", "=", "options", ".", "get", "(", "'event_listeners'", ")", "appname", "=", "options", ".", "get", "(", "'appname'", ")", "(", "ssl_context", ",", "ssl_match_hostname", ")", "=", "_parse_ssl_options", "(", "options", ")", "return", "PoolOptions", "(", "max_pool_size", ",", "min_pool_size", ",", "max_idle_time_ms", ",", "connect_timeout", ",", "socket_timeout", ",", "wait_queue_timeout", ",", "wait_queue_multiple", ",", "ssl_context", ",", "ssl_match_hostname", ",", "socket_keepalive", ",", "_EventListeners", "(", "event_listeners", ")", ",", "appname", ")" ]
parse connection pool options .
train
true
471
def origin_from_request(request):
    rv = request.META.get('HTTP_ORIGIN', 'null')
    if (rv in ('', 'null')):
        rv = origin_from_url(request.META.get('HTTP_REFERER'))
    return rv
[ "def", "origin_from_request", "(", "request", ")", ":", "rv", "=", "request", ".", "META", ".", "get", "(", "'HTTP_ORIGIN'", ",", "'null'", ")", "if", "(", "rv", "in", "(", "''", ",", "'null'", ")", ")", ":", "rv", "=", "origin_from_url", "(", "request", ".", "META", ".", "get", "(", "'HTTP_REFERER'", ")", ")", "return", "rv" ]
returns either the origin or referer value from the request headers .
train
false
472
def disable_curdir_processing(method):

    def decorated(*args, **kwargs):
        original = populators.PROCESS_CURDIR
        populators.PROCESS_CURDIR = False
        try:
            return method(*args, **kwargs)
        finally:
            populators.PROCESS_CURDIR = original
    return decorated
[ "def", "disable_curdir_processing", "(", "method", ")", ":", "def", "decorated", "(", "*", "args", ",", "**", "kwargs", ")", ":", "original", "=", "populators", ".", "PROCESS_CURDIR", "populators", ".", "PROCESS_CURDIR", "=", "False", "try", ":", "return", "method", "(", "*", "args", ",", "**", "kwargs", ")", "finally", ":", "populators", ".", "PROCESS_CURDIR", "=", "original", "return", "decorated" ]
decorator to disable processing ${curdir} variable .
train
false
473
def p_struct_or_union_specifier_1(t): pass
[ "def", "p_struct_or_union_specifier_1", "(", "t", ")", ":", "pass" ]
struct_or_union_specifier : struct_or_union id lbrace struct_declaration_list rbrace .
train
false
474
def last_visible_window():
    try:
        return get('last-visible-main-window')
    except KeyError:
        return last_focused_window()
[ "def", "last_visible_window", "(", ")", ":", "try", ":", "return", "get", "(", "'last-visible-main-window'", ")", "except", "KeyError", ":", "return", "last_focused_window", "(", ")" ]
get the last visible window .
train
false
475
def change_NZB_DIR(nzb_dir):
    if (nzb_dir == ''):
        sickbeard.NZB_DIR = ''
        return True
    if (ek(os.path.normpath, sickbeard.NZB_DIR) != ek(os.path.normpath, nzb_dir)):
        if helpers.makeDir(nzb_dir):
            sickbeard.NZB_DIR = ek(os.path.normpath, nzb_dir)
            logger.log((u'Changed NZB folder to ' + nzb_dir))
        else:
            return False
    return True
[ "def", "change_NZB_DIR", "(", "nzb_dir", ")", ":", "if", "(", "nzb_dir", "==", "''", ")", ":", "sickbeard", ".", "NZB_DIR", "=", "''", "return", "True", "if", "(", "ek", "(", "os", ".", "path", ".", "normpath", ",", "sickbeard", ".", "NZB_DIR", ")", "!=", "ek", "(", "os", ".", "path", ".", "normpath", ",", "nzb_dir", ")", ")", ":", "if", "helpers", ".", "makeDir", "(", "nzb_dir", ")", ":", "sickbeard", ".", "NZB_DIR", "=", "ek", "(", "os", ".", "path", ".", "normpath", ",", "nzb_dir", ")", "logger", ".", "log", "(", "(", "u'Changed NZB folder to '", "+", "nzb_dir", ")", ")", "else", ":", "return", "False", "return", "True" ]
change nzb folder .
train
false
476
@open_file(0, mode='rb')
def read_graphml(path, node_type=str):
    reader = GraphMLReader(node_type=node_type)
    glist = list(reader(path=path))
    return glist[0]
[ "@", "open_file", "(", "0", ",", "mode", "=", "'rb'", ")", "def", "read_graphml", "(", "path", ",", "node_type", "=", "str", ")", ":", "reader", "=", "GraphMLReader", "(", "node_type", "=", "node_type", ")", "glist", "=", "list", "(", "reader", "(", "path", "=", "path", ")", ")", "return", "glist", "[", "0", "]" ]
read graph in graphml format from path .
train
false
478
def vserver_sslcert_delete(v_name, sc_name, **connection_args):
    ret = True
    if (not vserver_sslcert_exists(v_name, sc_name, **connection_args)):
        return False
    nitro = _connect(**connection_args)
    if (nitro is None):
        return False
    sslcert = NSSSLVServerSSLCertKeyBinding()
    sslcert.set_vservername(v_name)
    sslcert.set_certkeyname(sc_name)
    try:
        NSSSLVServerSSLCertKeyBinding.delete(nitro, sslcert)
    except NSNitroError as error:
        log.debug('netscaler module error - NSSSLVServerSSLCertKeyBinding.delete() failed: {0}'.format(error))
        ret = False
    _disconnect(nitro)
    return ret
[ "def", "vserver_sslcert_delete", "(", "v_name", ",", "sc_name", ",", "**", "connection_args", ")", ":", "ret", "=", "True", "if", "(", "not", "vserver_sslcert_exists", "(", "v_name", ",", "sc_name", ",", "**", "connection_args", ")", ")", ":", "return", "False", "nitro", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "nitro", "is", "None", ")", ":", "return", "False", "sslcert", "=", "NSSSLVServerSSLCertKeyBinding", "(", ")", "sslcert", ".", "set_vservername", "(", "v_name", ")", "sslcert", ".", "set_certkeyname", "(", "sc_name", ")", "try", ":", "NSSSLVServerSSLCertKeyBinding", ".", "delete", "(", "nitro", ",", "sslcert", ")", "except", "NSNitroError", "as", "error", ":", "log", ".", "debug", "(", "'netscaler module error - NSSSLVServerSSLCertKeyBinding.delete() failed: {0}'", ".", "format", "(", "error", ")", ")", "ret", "=", "False", "_disconnect", "(", "nitro", ")", "return", "ret" ]
unbinds a ssl certificate from a vserver .
train
true
479
def observe_value(observation_key, target_func): @extension.make_extension(trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER) def _observe_value(trainer): trainer.observation[observation_key] = target_func(trainer) return _observe_value
[ "def", "observe_value", "(", "observation_key", ",", "target_func", ")", ":", "@", "extension", ".", "make_extension", "(", "trigger", "=", "(", "1", ",", "'epoch'", ")", ",", "priority", "=", "extension", ".", "PRIORITY_WRITER", ")", "def", "_observe_value", "(", "trainer", ")", ":", "trainer", ".", "observation", "[", "observation_key", "]", "=", "target_func", "(", "trainer", ")", "return", "_observe_value" ]
returns a trainer extension to continuously record a value .
train
false
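A hedged usage fragment for the extension factory above, assuming an existing chainer.training.Trainer named trainer whose main optimizer exposes an lr attribute (true for SGD-style optimizers); this mirrors how chainer builds its stock observe_lr extension:

# record the main optimizer's learning rate into trainer.observation each epoch
trainer.extend(observe_value(
    'lr', lambda trainer: trainer.updater.get_optimizer('main').lr))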
480
def _hadoop_prefix_from_bin(hadoop_bin): if ('/' not in hadoop_bin): hadoop_bin = which(hadoop_bin) if (not hadoop_bin): return None hadoop_home = posixpath.abspath(posixpath.join(posixpath.realpath(posixpath.dirname(hadoop_bin)), '..')) if (hadoop_home in _BAD_HADOOP_HOMES): return None return hadoop_home
[ "def", "_hadoop_prefix_from_bin", "(", "hadoop_bin", ")", ":", "if", "(", "'/'", "not", "in", "hadoop_bin", ")", ":", "hadoop_bin", "=", "which", "(", "hadoop_bin", ")", "if", "(", "not", "hadoop_bin", ")", ":", "return", "None", "hadoop_home", "=", "posixpath", ".", "abspath", "(", "posixpath", ".", "join", "(", "posixpath", ".", "realpath", "(", "posixpath", ".", "dirname", "(", "hadoop_bin", ")", ")", ",", "'..'", ")", ")", "if", "(", "hadoop_home", "in", "_BAD_HADOOP_HOMES", ")", ":", "return", "None", "return", "hadoop_home" ]
given a path to the hadoop binary , infer the corresponding hadoop home directory .
train
false
481
@cli.command() @click.argument('result-file', type=click.Path(exists=True), required=True) def plot(result_file): results_df = pd.read_pickle(result_file) show_draw_result(result_file, results_df)
[ "@", "cli", ".", "command", "(", ")", "@", "click", ".", "argument", "(", "'result-file'", ",", "type", "=", "click", ".", "Path", "(", "exists", "=", "True", ")", ",", "required", "=", "True", ")", "def", "plot", "(", "result_file", ")", ":", "results_df", "=", "pd", ".", "read_pickle", "(", "result_file", ")", "show_draw_result", "(", "result_file", ",", "results_df", ")" ]
plot results from a pickled results file .
train
false
483
def _neg_impl(expr, op, **kw): return UnaryExpression(expr, operator=operators.neg, type_=expr.type)
[ "def", "_neg_impl", "(", "expr", ",", "op", ",", "**", "kw", ")", ":", "return", "UnaryExpression", "(", "expr", ",", "operator", "=", "operators", ".", "neg", ",", "type_", "=", "expr", ".", "type", ")" ]
see :meth: .
train
false
485
def get_all_bears_names(): return [bear.name for bear in get_all_bears()]
[ "def", "get_all_bears_names", "(", ")", ":", "return", "[", "bear", ".", "name", "for", "bear", "in", "get_all_bears", "(", ")", "]" ]
get a list of names of all available bears .
train
false
486
def p_set_type(p): p[0] = (TType.SET, p[3])
[ "def", "p_set_type", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "TType", ".", "SET", ",", "p", "[", "3", "]", ")" ]
set_type : set < field_type > .
train
false
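The grammar-rule records in this section (p_struct_or_union_specifier_1, p_set_type, and p_command_print_bad below) only make sense given how PLY works: the docstring holds the production and p[i] indexes the matched symbols, with p[0] receiving the result. A minimal self-contained sketch, assuming the ply package is installed:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_expr_plus(p):
    'expr : expr PLUS NUMBER'   # the production lives in the docstring
    p[0] = p[1] + p[3]

def p_expr_number(p):
    'expr : NUMBER'
    p[0] = p[1]

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc(write_tables=False, debug=False)
print(parser.parse('1 + 2 + 3'))   # -> 6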
487
def _git_diff_names_only(left, right='HEAD'): diff_cmd = (GIT_CMD_DIFF_NAMES_ONLY_FORMAT_STRING % (left, right)) return _run_cmd(diff_cmd).splitlines()
[ "def", "_git_diff_names_only", "(", "left", ",", "right", "=", "'HEAD'", ")", ":", "diff_cmd", "=", "(", "GIT_CMD_DIFF_NAMES_ONLY_FORMAT_STRING", "%", "(", "left", ",", "right", ")", ")", "return", "_run_cmd", "(", "diff_cmd", ")", ".", "splitlines", "(", ")" ]
get names of changed files from git .
train
false
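The snippet's GIT_CMD_DIFF_NAMES_ONLY_FORMAT_STRING and _run_cmd helpers are not shown; presumably the underlying command is git diff --name-only. A self-contained sketch under that assumption (must run inside a git checkout):

import subprocess

def git_diff_names_only(left, right='HEAD'):
    out = subprocess.check_output(['git', 'diff', '--name-only', left, right])
    return out.decode('utf-8').splitlines()

# e.g. git_diff_names_only('HEAD~1') might return ['README.md', 'setup.py']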
488
def requestHandler(config_hint, path_info, query_string=None): (status_code, headers, content) = requestHandler2(config_hint, path_info, query_string) mimetype = headers.get('Content-Type') return (mimetype, content)
[ "def", "requestHandler", "(", "config_hint", ",", "path_info", ",", "query_string", "=", "None", ")", ":", "(", "status_code", ",", "headers", ",", "content", ")", "=", "requestHandler2", "(", "config_hint", ",", "path_info", ",", "query_string", ")", "mimetype", "=", "headers", ".", "get", "(", "'Content-Type'", ")", "return", "(", "mimetype", ",", "content", ")" ]
generate a mime-type and response body for a given request .
train
false
489
def save_topic_similarities(topic_similarities): topic_similarities_entity = recommendations_models.TopicSimilaritiesModel.get(recommendations_models.TOPIC_SIMILARITIES_ID, strict=False) if (topic_similarities_entity is None): topic_similarities_entity = recommendations_models.TopicSimilaritiesModel(id=recommendations_models.TOPIC_SIMILARITIES_ID, content=json.dumps(topic_similarities)) else: topic_similarities_entity.content = json.dumps(topic_similarities) topic_similarities_entity.put() return topic_similarities_entity
[ "def", "save_topic_similarities", "(", "topic_similarities", ")", ":", "topic_similarities_entity", "=", "recommendations_models", ".", "TopicSimilaritiesModel", ".", "get", "(", "recommendations_models", ".", "TOPIC_SIMILARITIES_ID", ",", "strict", "=", "False", ")", "if", "(", "topic_similarities_entity", "is", "None", ")", ":", "topic_similarities_entity", "=", "recommendations_models", ".", "TopicSimilaritiesModel", "(", "id", "=", "recommendations_models", ".", "TOPIC_SIMILARITIES_ID", ",", "content", "=", "json", ".", "dumps", "(", "topic_similarities", ")", ")", "else", ":", "topic_similarities_entity", ".", "content", "=", "json", ".", "dumps", "(", "topic_similarities", ")", "topic_similarities_entity", ".", "put", "(", ")", "return", "topic_similarities_entity" ]
stores topic similarities in the datastore .
train
false
492
def migrate_oauth_template_facts(facts): if (('master' in facts) and ('oauth_template' in facts['master'])): if ('oauth_templates' not in facts['master']): facts['master']['oauth_templates'] = {'login': facts['master']['oauth_template']} elif ('login' not in facts['master']['oauth_templates']): facts['master']['oauth_templates']['login'] = facts['master']['oauth_template'] return facts
[ "def", "migrate_oauth_template_facts", "(", "facts", ")", ":", "if", "(", "(", "'master'", "in", "facts", ")", "and", "(", "'oauth_template'", "in", "facts", "[", "'master'", "]", ")", ")", ":", "if", "(", "'oauth_templates'", "not", "in", "facts", "[", "'master'", "]", ")", ":", "facts", "[", "'master'", "]", "[", "'oauth_templates'", "]", "=", "{", "'login'", ":", "facts", "[", "'master'", "]", "[", "'oauth_template'", "]", "}", "elif", "(", "'login'", "not", "in", "facts", "[", "'master'", "]", "[", "'oauth_templates'", "]", ")", ":", "facts", "[", "'master'", "]", "[", "'oauth_templates'", "]", "[", "'login'", "]", "=", "facts", "[", "'master'", "]", "[", "'oauth_template'", "]", "return", "facts" ]
migrate an old oauth template fact to a newer format if it's present .
train
false
493
@db_api.api_context_manager.writer def _set_inventory(context, rp, inv_list): _ensure_rc_cache(context) conn = context.session.connection() existing_resources = _get_current_inventory_resources(conn, rp) these_resources = set([_RC_CACHE.id_from_string(r.resource_class) for r in inv_list.objects]) to_add = (these_resources - existing_resources) to_delete = (existing_resources - these_resources) to_update = (these_resources & existing_resources) exceeded = [] with conn.begin(): if to_delete: _delete_inventory_from_provider(conn, rp, to_delete) if to_add: _add_inventory_to_provider(conn, rp, inv_list, to_add) if to_update: exceeded = _update_inventory_for_provider(conn, rp, inv_list, to_update) rp.generation = _increment_provider_generation(conn, rp) return exceeded
[ "@", "db_api", ".", "api_context_manager", ".", "writer", "def", "_set_inventory", "(", "context", ",", "rp", ",", "inv_list", ")", ":", "_ensure_rc_cache", "(", "context", ")", "conn", "=", "context", ".", "session", ".", "connection", "(", ")", "existing_resources", "=", "_get_current_inventory_resources", "(", "conn", ",", "rp", ")", "these_resources", "=", "set", "(", "[", "_RC_CACHE", ".", "id_from_string", "(", "r", ".", "resource_class", ")", "for", "r", "in", "inv_list", ".", "objects", "]", ")", "to_add", "=", "(", "these_resources", "-", "existing_resources", ")", "to_delete", "=", "(", "existing_resources", "-", "these_resources", ")", "to_update", "=", "(", "these_resources", "&", "existing_resources", ")", "exceeded", "=", "[", "]", "with", "conn", ".", "begin", "(", ")", ":", "if", "to_delete", ":", "_delete_inventory_from_provider", "(", "conn", ",", "rp", ",", "to_delete", ")", "if", "to_add", ":", "_add_inventory_to_provider", "(", "conn", ",", "rp", ",", "inv_list", ",", "to_add", ")", "if", "to_update", ":", "exceeded", "=", "_update_inventory_for_provider", "(", "conn", ",", "rp", ",", "inv_list", ",", "to_update", ")", "rp", ".", "generation", "=", "_increment_provider_generation", "(", "conn", ",", "rp", ")", "return", "exceeded" ]
given an inventorylist object , replace the resource provider's inventory with it .
train
false
494
def plot_test_results(net, test_inputs): fig = plt.figure() ax = fig.add_subplot(111) images_in = [test_inputs[j].reshape((-1), 28) for j in range(10)] images_out = [net.feedforward(test_inputs[j]).reshape((-1), 28) for j in range(10)] image_in = np.concatenate(images_in, axis=1) image_out = np.concatenate(images_out, axis=1) image = np.concatenate([image_in, image_out]) ax.matshow(image, cmap=matplotlib.cm.binary) plt.xticks(np.array([])) plt.yticks(np.array([])) plt.show()
[ "def", "plot_test_results", "(", "net", ",", "test_inputs", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "images_in", "=", "[", "test_inputs", "[", "j", "]", ".", "reshape", "(", "(", "-", "1", ")", ",", "28", ")", "for", "j", "in", "range", "(", "10", ")", "]", "images_out", "=", "[", "net", ".", "feedforward", "(", "test_inputs", "[", "j", "]", ")", ".", "reshape", "(", "(", "-", "1", ")", ",", "28", ")", "for", "j", "in", "range", "(", "10", ")", "]", "image_in", "=", "np", ".", "concatenate", "(", "images_in", ",", "axis", "=", "1", ")", "image_out", "=", "np", ".", "concatenate", "(", "images_out", ",", "axis", "=", "1", ")", "image", "=", "np", ".", "concatenate", "(", "[", "image_in", ",", "image_out", "]", ")", "ax", ".", "matshow", "(", "image", ",", "cmap", "=", "matplotlib", ".", "cm", ".", "binary", ")", "plt", ".", "xticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "yticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "show", "(", ")" ]
plot the results after passing the first ten test mnist digits through the autoencoder net .
train
false
498
def assert_permissions(path, permission, log): if (not check_permissions(util.syspath(path), permission)): log.warning(u'could not set permissions on {}', util.displayable_path(path)) log.debug(u'set permissions to {}, but permissions are now {}', permission, (os.stat(util.syspath(path)).st_mode & 511))
[ "def", "assert_permissions", "(", "path", ",", "permission", ",", "log", ")", ":", "if", "(", "not", "check_permissions", "(", "util", ".", "syspath", "(", "path", ")", ",", "permission", ")", ")", ":", "log", ".", "warning", "(", "u'could not set permissions on {}'", ",", "util", ".", "displayable_path", "(", "path", ")", ")", "log", ".", "debug", "(", "u'set permissions to {}, but permissions are now {}'", ",", "permission", ",", "(", "os", ".", "stat", "(", "util", ".", "syspath", "(", "path", ")", ")", ".", "st_mode", "&", "511", ")", ")" ]
check whether the file's permissions are as expected .
train
false
500
def _sphere_constraint(rd, r0, R_adj): return (R_adj - np.sqrt(np.sum(((rd - r0) ** 2))))
[ "def", "_sphere_constraint", "(", "rd", ",", "r0", ",", "R_adj", ")", ":", "return", "(", "R_adj", "-", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "(", "rd", "-", "r0", ")", "**", "2", ")", ")", ")", ")" ]
sphere fitting constraint .
train
false
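A runnable sketch of how such a constraint is typically consumed by scipy.optimize; the centre, radius, and target values below are made up. Minimizing the distance to a point outside the sphere pulls the solution back to the surface:

import numpy as np
from scipy.optimize import minimize

def _sphere_constraint(rd, r0, R_adj):
    return R_adj - np.sqrt(np.sum((rd - r0) ** 2))

r0, R_adj = np.zeros(3), 0.09                 # assumed centre and radius
target = np.array([0.2, 0.0, 0.0])            # unconstrained optimum lies outside
res = minimize(lambda rd: np.sum((rd - target) ** 2), x0=np.zeros(3),
               method='SLSQP',
               constraints=[{'type': 'ineq', 'fun': _sphere_constraint,
                             'args': (r0, R_adj)}])
print(np.linalg.norm(res.x - r0))             # ~0.09: clipped to the sphere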
501
def _make_bucket_resp(project=None, now=None): now_time = _datetime_to_gcptime(now) return {u'etag': u'CAE=', u'kind': u'storage#bucket', u'location': u'US', u'metageneration': u'1', u'owner': {u'entity': u'project-owners-1234567890'}, u'projectNumber': u'1234567890', u'storageClass': u'STANDARD', u'timeCreated': now_time, u'updated': now_time, u'_projectName': project}
[ "def", "_make_bucket_resp", "(", "project", "=", "None", ",", "now", "=", "None", ")", ":", "now_time", "=", "_datetime_to_gcptime", "(", "now", ")", "return", "{", "u'etag'", ":", "u'CAE='", ",", "u'kind'", ":", "u'storage#bucket'", ",", "u'location'", ":", "u'US'", ",", "u'metageneration'", ":", "u'1'", ",", "u'owner'", ":", "{", "u'entity'", ":", "u'project-owners-1234567890'", "}", ",", "u'projectNumber'", ":", "u'1234567890'", ",", "u'storageClass'", ":", "u'STANDARD'", ",", "u'timeCreated'", ":", "now_time", ",", "u'updated'", ":", "now_time", ",", "u'_projectName'", ":", "project", "}" ]
fake gcs bucket metadata .
train
false
502
def p_command_print_bad(p): p[0] = 'MALFORMED PRINT STATEMENT'
[ "def", "p_command_print_bad", "(", "p", ")", ":", "p", "[", "0", "]", "=", "'MALFORMED PRINT STATEMENT'" ]
command : print error .
train
false
503
def pchip_interpolate(xi, yi, x, der=0, axis=0): P = PchipInterpolator(xi, yi, axis=axis) if (der == 0): return P(x) elif _isscalar(der): return P.derivative(der)(x) else: return [P.derivative(nu)(x) for nu in der]
[ "def", "pchip_interpolate", "(", "xi", ",", "yi", ",", "x", ",", "der", "=", "0", ",", "axis", "=", "0", ")", ":", "P", "=", "PchipInterpolator", "(", "xi", ",", "yi", ",", "axis", "=", "axis", ")", "if", "(", "der", "==", "0", ")", ":", "return", "P", "(", "x", ")", "elif", "_isscalar", "(", "der", ")", ":", "return", "P", ".", "derivative", "(", "der", ")", "(", "x", ")", "else", ":", "return", "[", "P", ".", "derivative", "(", "nu", ")", "(", "x", ")", "for", "nu", "in", "der", "]" ]
convenience function for pchip interpolation .
train
false
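A quick runnable demo of the convenience wrapper, assuming scipy is installed; der accepts a scalar or a list, matching the three branches in the snippet:

import numpy as np
from scipy.interpolate import pchip_interpolate

xi = np.array([0.0, 1.0, 2.0, 3.0])
yi = xi ** 2
x = np.linspace(0.0, 3.0, 7)
print(pchip_interpolate(xi, yi, x))              # values, der=0
print(pchip_interpolate(xi, yi, x, der=1))       # first derivative
print(pchip_interpolate(xi, yi, x, der=[0, 1]))  # list -> list of arrays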
504
def timeoutDeferred(reactor, deferred, seconds): def timedOutCall(): deferred.cancel() delayedTimeOutCall = reactor.callLater(seconds, timedOutCall) def cancelTimeout(result): if delayedTimeOutCall.active(): delayedTimeOutCall.cancel() return result deferred.addBoth(cancelTimeout) return delayedTimeOutCall
[ "def", "timeoutDeferred", "(", "reactor", ",", "deferred", ",", "seconds", ")", ":", "def", "timedOutCall", "(", ")", ":", "deferred", ".", "cancel", "(", ")", "delayedTimeOutCall", "=", "reactor", ".", "callLater", "(", "seconds", ",", "timedOutCall", ")", "def", "cancelTimeout", "(", "result", ")", ":", "if", "delayedTimeOutCall", ".", "active", "(", ")", ":", "delayedTimeOutCall", ".", "cancel", "(", ")", "return", "result", "deferred", ".", "addBoth", "(", "cancelTimeout", ")", "return", "delayedTimeOutCall" ]
cancel a deferred if it does not have a result available within the given amount of time .
train
false
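A deterministic usage sketch using twisted.internet.task.Clock as the reactor, assuming the timeoutDeferred snippet above is in scope; cancelling the deferred fires its errback with CancelledError:

from twisted.internet.defer import Deferred, CancelledError
from twisted.internet.task import Clock

clock = Clock()
d = Deferred()
timeoutDeferred(clock, d, 5)

def on_cancel(failure):
    failure.trap(CancelledError)
    print('timed out')

d.addErrback(on_cancel)
clock.advance(5)   # fires the delayed call, which cancels d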
506
def _item_to_variable(iterator, resource): return Variable.from_api_repr(resource, iterator.config)
[ "def", "_item_to_variable", "(", "iterator", ",", "resource", ")", ":", "return", "Variable", ".", "from_api_repr", "(", "resource", ",", "iterator", ".", "config", ")" ]
convert a json variable to the native object .
train
false
507
def prob_words(context, vocab, temperature=1.0): dot = np.dot(vocab, context) prob = _softmax((dot / temperature)) return prob
[ "def", "prob_words", "(", "context", ",", "vocab", ",", "temperature", "=", "1.0", ")", ":", "dot", "=", "np", ".", "dot", "(", "vocab", ",", "context", ")", "prob", "=", "_softmax", "(", "(", "dot", "/", "temperature", ")", ")", "return", "prob" ]
this calculates a softmax over the vocabulary as a function of the dot product of context and word .
train
false
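A self-contained sketch; _softmax is not shown in the snippet, so a standard numerically stable definition is assumed here. Lower temperature sharpens the distribution:

import numpy as np

def _softmax(x):
    e = np.exp(x - np.max(x))   # assumed helper: stable softmax
    return e / e.sum()

def prob_words(context, vocab, temperature=1.0):
    dot = np.dot(vocab, context)
    return _softmax(dot / temperature)

vocab = np.random.randn(1000, 50)   # 1000 word vectors, 50-d (made-up sizes)
context = np.random.randn(50)
p = prob_words(context, vocab, temperature=0.7)
print(p.shape, round(float(p.sum()), 6))   # (1000,) 1.0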
508
def jt_ha(funct): def decorate(api, *args, **kwargs): try: return funct(api, *args, **kwargs) except Exception as ex: if ('Could not connect to' in str(ex)): LOG.info(('JobTracker not available, trying JT plugin HA: %s.' % ex)) jt_ha = get_next_ha_mrcluster() if (jt_ha is not None): if (jt_ha[1].host == api.jt.host): raise ex (config, api.jt) = jt_ha return funct(api, *args, **kwargs) raise ex return wraps(funct)(decorate)
[ "def", "jt_ha", "(", "funct", ")", ":", "def", "decorate", "(", "api", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "funct", "(", "api", ",", "*", "args", ",", "**", "kwargs", ")", "except", "Exception", "as", "ex", ":", "if", "(", "'Could not connect to'", "in", "str", "(", "ex", ")", ")", ":", "LOG", ".", "info", "(", "(", "'JobTracker not available, trying JT plugin HA: %s.'", "%", "ex", ")", ")", "jt_ha", "=", "get_next_ha_mrcluster", "(", ")", "if", "(", "jt_ha", "is", "not", "None", ")", ":", "if", "(", "jt_ha", "[", "1", "]", ".", "host", "==", "api", ".", "jt", ".", "host", ")", ":", "raise", "ex", "(", "config", ",", "api", ".", "jt", ")", "=", "jt_ha", "return", "funct", "(", "api", ",", "*", "args", ",", "**", "kwargs", ")", "raise", "ex", "return", "wraps", "(", "funct", ")", "(", "decorate", ")" ]
support jt plugin ha by trying another mr cluster .
train
false
509
@with_setup(step_runner_environ) def test_skipped_steps_can_be_retrieved_as_steps(): f = Feature.from_string(FEATURE1) feature_result = f.run() scenario_result = feature_result.scenario_results[0] for step in scenario_result.steps_skipped: assert_equals(type(step), Step)
[ "@", "with_setup", "(", "step_runner_environ", ")", "def", "test_skipped_steps_can_be_retrieved_as_steps", "(", ")", ":", "f", "=", "Feature", ".", "from_string", "(", "FEATURE1", ")", "feature_result", "=", "f", ".", "run", "(", ")", "scenario_result", "=", "feature_result", ".", "scenario_results", "[", "0", "]", "for", "step", "in", "scenario_result", ".", "steps_skipped", ":", "assert_equals", "(", "type", "(", "step", ")", ",", "Step", ")" ]
skipped steps can be retrieved as steps .
train
false
510
def resource_show(resource_id, extra_args=None, cibfile=None): return item_show(item='resource', item_id=resource_id, extra_args=extra_args, cibfile=cibfile)
[ "def", "resource_show", "(", "resource_id", ",", "extra_args", "=", "None", ",", "cibfile", "=", "None", ")", ":", "return", "item_show", "(", "item", "=", "'resource'", ",", "item_id", "=", "resource_id", ",", "extra_args", "=", "extra_args", ",", "cibfile", "=", "cibfile", ")" ]
return the metadata of a resource .
train
true
513
@then(u'the command output should not contain "{text}"') def step_command_output_should_not_contain_text(context, text): expected_text = text if (('{__WORKDIR__}' in text) or ('{__CWD__}' in text)): expected_text = textutil.template_substitute(text, __WORKDIR__=posixpath_normpath(context.workdir), __CWD__=posixpath_normpath(os.getcwd())) actual_output = context.command_result.output with on_assert_failed_print_details(actual_output, expected_text): textutil.assert_normtext_should_not_contain(actual_output, expected_text)
[ "@", "then", "(", "u'the command output should not contain \"{text}\"'", ")", "def", "step_command_output_should_not_contain_text", "(", "context", ",", "text", ")", ":", "expected_text", "=", "text", "if", "(", "(", "'{__WORKDIR__}'", "in", "text", ")", "or", "(", "'{__CWD__}'", "in", "text", ")", ")", ":", "expected_text", "=", "textutil", ".", "template_substitute", "(", "text", ",", "__WORKDIR__", "=", "posixpath_normpath", "(", "context", ".", "workdir", ")", ",", "__CWD__", "=", "posixpath_normpath", "(", "os", ".", "getcwd", "(", ")", ")", ")", "actual_output", "=", "context", ".", "command_result", ".", "output", "with", "on_assert_failed_print_details", "(", "actual_output", ",", "expected_text", ")", ":", "textutil", ".", "assert_normtext_should_not_contain", "(", "actual_output", ",", "expected_text", ")" ]
example: then the command output should not contain "text" .
train
true
514
def roi_pooling_2d(x, rois, outh, outw, spatial_scale): return ROIPooling2D(outh, outw, spatial_scale)(x, rois)
[ "def", "roi_pooling_2d", "(", "x", ",", "rois", ",", "outh", ",", "outw", ",", "spatial_scale", ")", ":", "return", "ROIPooling2D", "(", "outh", ",", "outw", ",", "spatial_scale", ")", "(", "x", ",", "rois", ")" ]
spatial region of interest pooling function .
train
false
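A hedged shape-check for the wrapper above, assuming chainer is installed; per chainer's convention each ROI row is (batch_index, x_min, y_min, x_max, y_max) in input coordinates:

import numpy as np
import chainer.functions as F

x = np.random.randn(1, 3, 12, 8).astype(np.float32)   # (batch, channels, h, w)
rois = np.array([[0, 1, 1, 6, 6]], dtype=np.float32)
y = F.roi_pooling_2d(x, rois, outh=7, outw=7, spatial_scale=1.0)
print(y.shape)   # (1, 3, 7, 7): one ROI, pooled to 7x7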
515
def normalize_gce_facts(metadata, facts): for interface in metadata['instance']['networkInterfaces']: int_info = dict(ips=[interface['ip']], network_type='gce') int_info['public_ips'] = [ac['externalIp'] for ac in interface['accessConfigs']] int_info['public_ips'].extend(interface['forwardedIps']) (_, _, network_id) = interface['network'].rpartition('/') int_info['network_id'] = network_id facts['network']['interfaces'].append(int_info) (_, _, zone) = metadata['instance']['zone'].rpartition('/') facts['zone'] = zone facts['network']['ip'] = facts['network']['interfaces'][0]['ips'][0] pub_ip = facts['network']['interfaces'][0]['public_ips'][0] facts['network']['public_ip'] = pub_ip facts['network']['hostname'] = metadata['instance']['hostname'] facts['network']['public_hostname'] = facts['network']['public_ip'] return facts
[ "def", "normalize_gce_facts", "(", "metadata", ",", "facts", ")", ":", "for", "interface", "in", "metadata", "[", "'instance'", "]", "[", "'networkInterfaces'", "]", ":", "int_info", "=", "dict", "(", "ips", "=", "[", "interface", "[", "'ip'", "]", "]", ",", "network_type", "=", "'gce'", ")", "int_info", "[", "'public_ips'", "]", "=", "[", "ac", "[", "'externalIp'", "]", "for", "ac", "in", "interface", "[", "'accessConfigs'", "]", "]", "int_info", "[", "'public_ips'", "]", ".", "extend", "(", "interface", "[", "'forwardedIps'", "]", ")", "(", "_", ",", "_", ",", "network_id", ")", "=", "interface", "[", "'network'", "]", ".", "rpartition", "(", "'/'", ")", "int_info", "[", "'network_id'", "]", "=", "network_id", "facts", "[", "'network'", "]", "[", "'interfaces'", "]", ".", "append", "(", "int_info", ")", "(", "_", ",", "_", ",", "zone", ")", "=", "metadata", "[", "'instance'", "]", "[", "'zone'", "]", ".", "rpartition", "(", "'/'", ")", "facts", "[", "'zone'", "]", "=", "zone", "facts", "[", "'network'", "]", "[", "'ip'", "]", "=", "facts", "[", "'network'", "]", "[", "'interfaces'", "]", "[", "0", "]", "[", "'ips'", "]", "[", "0", "]", "pub_ip", "=", "facts", "[", "'network'", "]", "[", "'interfaces'", "]", "[", "0", "]", "[", "'public_ips'", "]", "[", "0", "]", "facts", "[", "'network'", "]", "[", "'public_ip'", "]", "=", "pub_ip", "facts", "[", "'network'", "]", "[", "'hostname'", "]", "=", "metadata", "[", "'instance'", "]", "[", "'hostname'", "]", "facts", "[", "'network'", "]", "[", "'public_hostname'", "]", "=", "facts", "[", "'network'", "]", "[", "'public_ip'", "]", "return", "facts" ]
normalize gce facts . args : metadata ( provider metadata ) and facts ( facts to update ) . returns : dict , the result of adding the normalized metadata to the provided facts dict .
train
false
516
def get_vertical_shift_value(label): return _check_range_and_return('vertical shift', label, (-5), 5)
[ "def", "get_vertical_shift_value", "(", "label", ")", ":", "return", "_check_range_and_return", "(", "'vertical shift'", ",", "label", ",", "(", "-", "5", ")", ",", "5", ")" ]
returns the int value corresponding to a vertical shift label .
train
false
519
def _handle_job_set(function): def call(self, job_set=taskhandle.NullJobSet()): job_set.started_job(str(self)) function(self) job_set.finished_job() return call
[ "def", "_handle_job_set", "(", "function", ")", ":", "def", "call", "(", "self", ",", "job_set", "=", "taskhandle", ".", "NullJobSet", "(", ")", ")", ":", "job_set", ".", "started_job", "(", "str", "(", "self", ")", ")", "function", "(", "self", ")", "job_set", ".", "finished_job", "(", ")", "return", "call" ]
a decorator for handling taskhandle .
train
true
520
@sync_performer def perform_put(dispatcher, intent): def create_put_command(content, path): content = content.replace('\\', '\\\\').replace('%', '%%') return ('printf -- %s > %s' % (shell_quote(content), shell_quote(path))) return Effect(Run(command=create_put_command(intent.content, intent.path), log_command_filter=(lambda _: create_put_command(intent.log_content_filter(intent.content), intent.path))))
[ "@", "sync_performer", "def", "perform_put", "(", "dispatcher", ",", "intent", ")", ":", "def", "create_put_command", "(", "content", ",", "path", ")", ":", "content", "=", "content", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", ".", "replace", "(", "'%'", ",", "'%%'", ")", "return", "(", "'printf -- %s > %s'", "%", "(", "shell_quote", "(", "content", ")", ",", "shell_quote", "(", "path", ")", ")", ")", "return", "Effect", "(", "Run", "(", "command", "=", "create_put_command", "(", "intent", ".", "content", ",", "intent", ".", "path", ")", ",", "log_command_filter", "=", "(", "lambda", "_", ":", "create_put_command", "(", "intent", ".", "log_content_filter", "(", "intent", ".", "content", ")", ",", "intent", ".", "path", ")", ")", ")", ")" ]
default implementation of put .
train
false
521
def healthy(): zpool_cmd = _check_zpool() cmd = '{zpool_cmd} status -x'.format(zpool_cmd=zpool_cmd) res = __salt__['cmd.run_all'](cmd, python_shell=False) return (res['stdout'] == 'all pools are healthy')
[ "def", "healthy", "(", ")", ":", "zpool_cmd", "=", "_check_zpool", "(", ")", "cmd", "=", "'{zpool_cmd} status -x'", ".", "format", "(", "zpool_cmd", "=", "zpool_cmd", ")", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "return", "(", "res", "[", "'stdout'", "]", "==", "'all pools are healthy'", ")" ]
return whether all zpools are healthy .
train
false
522
def create_upload_url_async(success_path, max_bytes_per_blob=None, max_bytes_total=None, rpc=None, gs_bucket_name=None): request = blobstore_service_pb.CreateUploadURLRequest() response = blobstore_service_pb.CreateUploadURLResponse() request.set_success_path(success_path) if (max_bytes_per_blob is not None): if (not isinstance(max_bytes_per_blob, (int, long))): raise TypeError('max_bytes_per_blob must be integer.') if (max_bytes_per_blob < 1): raise ValueError('max_bytes_per_blob must be positive.') request.set_max_upload_size_per_blob_bytes(max_bytes_per_blob) if (max_bytes_total is not None): if (not isinstance(max_bytes_total, (int, long))): raise TypeError('max_bytes_total must be integer.') if (max_bytes_total < 1): raise ValueError('max_bytes_total must be positive.') request.set_max_upload_size_bytes(max_bytes_total) if (request.has_max_upload_size_bytes() and request.has_max_upload_size_per_blob_bytes()): if (request.max_upload_size_bytes() < request.max_upload_size_per_blob_bytes()): raise ValueError('max_bytes_total can not be less than max_upload_size_per_blob_bytes') if (gs_bucket_name is not None): if (not isinstance(gs_bucket_name, basestring)): raise TypeError('gs_bucket_name must be a string.') request.set_gs_bucket_name(gs_bucket_name) return _make_async_call(rpc, 'CreateUploadURL', request, response, _get_result_hook, (lambda rpc: rpc.response.url()))
[ "def", "create_upload_url_async", "(", "success_path", ",", "max_bytes_per_blob", "=", "None", ",", "max_bytes_total", "=", "None", ",", "rpc", "=", "None", ",", "gs_bucket_name", "=", "None", ")", ":", "request", "=", "blobstore_service_pb", ".", "CreateUploadURLRequest", "(", ")", "response", "=", "blobstore_service_pb", ".", "CreateUploadURLResponse", "(", ")", "request", ".", "set_success_path", "(", "success_path", ")", "if", "(", "max_bytes_per_blob", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "max_bytes_per_blob", ",", "(", "int", ",", "long", ")", ")", ")", ":", "raise", "TypeError", "(", "'max_bytes_per_blob must be integer.'", ")", "if", "(", "max_bytes_per_blob", "<", "1", ")", ":", "raise", "ValueError", "(", "'max_bytes_per_blob must be positive.'", ")", "request", ".", "set_max_upload_size_per_blob_bytes", "(", "max_bytes_per_blob", ")", "if", "(", "max_bytes_total", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "max_bytes_total", ",", "(", "int", ",", "long", ")", ")", ")", ":", "raise", "TypeError", "(", "'max_bytes_total must be integer.'", ")", "if", "(", "max_bytes_total", "<", "1", ")", ":", "raise", "ValueError", "(", "'max_bytes_total must be positive.'", ")", "request", ".", "set_max_upload_size_bytes", "(", "max_bytes_total", ")", "if", "(", "request", ".", "has_max_upload_size_bytes", "(", ")", "and", "request", ".", "has_max_upload_size_per_blob_bytes", "(", ")", ")", ":", "if", "(", "request", ".", "max_upload_size_bytes", "(", ")", "<", "request", ".", "max_upload_size_per_blob_bytes", "(", ")", ")", ":", "raise", "ValueError", "(", "'max_bytes_total can not be less than max_upload_size_per_blob_bytes'", ")", "if", "(", "gs_bucket_name", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "gs_bucket_name", ",", "basestring", ")", ")", ":", "raise", "TypeError", "(", "'gs_bucket_name must be a string.'", ")", "request", ".", "set_gs_bucket_name", "(", "gs_bucket_name", ")", "return", "_make_async_call", "(", "rpc", ",", "'CreateUploadURL'", ",", "request", ",", "response", ",", "_get_result_hook", ",", "(", "lambda", "rpc", ":", "rpc", ".", "response", ".", "url", "(", ")", ")", ")" ]
async version of create_upload_url() .
train
false
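A hedged sketch of the standard app engine async pattern this function follows: start the RPC early, collect the result later via get_result(); the success path and size limit here are hypothetical.

rpc = create_upload_url_async('/upload_done', max_bytes_per_blob=1024 * 1024)
# ... overlap other work while the RPC is in flight ...
upload_url = rpc.get_result()   # URL to point the upload form's action at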
524
def create_token(username, ttl=None, metadata=None, add_missing_user=True): if ttl: if (ttl > cfg.CONF.auth.token_ttl): msg = ('TTL specified %s is greater than max allowed %s.' % (ttl, cfg.CONF.auth.token_ttl)) raise TTLTooLargeException(msg) else: ttl = cfg.CONF.auth.token_ttl if username: try: User.get_by_name(username) except: if add_missing_user: user_db = UserDB(name=username) User.add_or_update(user_db) extra = {'username': username, 'user': user_db} LOG.audit(('Registered new user "%s".' % username), extra=extra) else: raise UserNotFoundError() token = uuid.uuid4().hex expiry = (date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=ttl)) token = TokenDB(user=username, token=token, expiry=expiry, metadata=metadata) Token.add_or_update(token) username_string = (username if username else 'an anonymous user') token_expire_string = isotime.format(expiry, offset=False) extra = {'username': username, 'token_expiration': token_expire_string} LOG.audit(('Access granted to "%s" with the token set to expire at "%s".' % (username_string, token_expire_string)), extra=extra) return token
[ "def", "create_token", "(", "username", ",", "ttl", "=", "None", ",", "metadata", "=", "None", ",", "add_missing_user", "=", "True", ")", ":", "if", "ttl", ":", "if", "(", "ttl", ">", "cfg", ".", "CONF", ".", "auth", ".", "token_ttl", ")", ":", "msg", "=", "(", "'TTL specified %s is greater than max allowed %s.'", "%", "(", "ttl", ",", "cfg", ".", "CONF", ".", "auth", ".", "token_ttl", ")", ")", "raise", "TTLTooLargeException", "(", "msg", ")", "else", ":", "ttl", "=", "cfg", ".", "CONF", ".", "auth", ".", "token_ttl", "if", "username", ":", "try", ":", "User", ".", "get_by_name", "(", "username", ")", "except", ":", "if", "add_missing_user", ":", "user_db", "=", "UserDB", "(", "name", "=", "username", ")", "User", ".", "add_or_update", "(", "user_db", ")", "extra", "=", "{", "'username'", ":", "username", ",", "'user'", ":", "user_db", "}", "LOG", ".", "audit", "(", "(", "'Registered new user \"%s\".'", "%", "username", ")", ",", "extra", "=", "extra", ")", "else", ":", "raise", "UserNotFoundError", "(", ")", "token", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "expiry", "=", "(", "date_utils", ".", "get_datetime_utc_now", "(", ")", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "ttl", ")", ")", "token", "=", "TokenDB", "(", "user", "=", "username", ",", "token", "=", "token", ",", "expiry", "=", "expiry", ",", "metadata", "=", "metadata", ")", "Token", ".", "add_or_update", "(", "token", ")", "username_string", "=", "(", "username", "if", "username", "else", "'an anonymous user'", ")", "token_expire_string", "=", "isotime", ".", "format", "(", "expiry", ",", "offset", "=", "False", ")", "extra", "=", "{", "'username'", ":", "username", ",", "'token_expiration'", ":", "token_expire_string", "}", "LOG", ".", "audit", "(", "(", "'Access granted to \"%s\" with the token set to expire at \"%s\".'", "%", "(", "username_string", ",", "token_expire_string", ")", ")", ",", "extra", "=", "extra", ")", "return", "token" ]
log in and request a new api token .
train
false
525
@check_login_required @check_local_site_access def download_modified_file(*args, **kwargs): return _download_diff_file(True, *args, **kwargs)
[ "@", "check_login_required", "@", "check_local_site_access", "def", "download_modified_file", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "_download_diff_file", "(", "True", ",", "*", "args", ",", "**", "kwargs", ")" ]
downloads a modified file from a diff .
train
false
526
def sublime_format_path(pth): m = re.match('^([A-Za-z]{1}):(?:/|\\\\)(.*)', pth) if ((sublime.platform() == 'windows') and (m is not None)): pth = ((m.group(1) + '/') + m.group(2)) return pth.replace('\\', '/')
[ "def", "sublime_format_path", "(", "pth", ")", ":", "m", "=", "re", ".", "match", "(", "'^([A-Za-z]{1}):(?:/|\\\\\\\\)(.*)'", ",", "pth", ")", "if", "(", "(", "sublime", ".", "platform", "(", ")", "==", "'windows'", ")", "and", "(", "m", "is", "not", "None", ")", ")", ":", "pth", "=", "(", "(", "m", ".", "group", "(", "1", ")", "+", "'/'", ")", "+", "m", ".", "group", "(", "2", ")", ")", "return", "pth", ".", "replace", "(", "'\\\\'", ",", "'/'", ")" ]
format path for sublime internally .
train
false
527
def __cache_expire(cachedir): if (not os.path.exists(cachedir)): return for ent in os.listdir(cachedir): p = os.path.join(cachedir, ent) if os.path.isdir(p): for (root, dirs, files) in os.walk(p, topdown=False): map((lambda f: os.unlink(os.path.join(root, f))), files) map((lambda d: os.rmdir(os.path.join(root, d))), dirs) else: os.unlink(p)
[ "def", "__cache_expire", "(", "cachedir", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "cachedir", ")", ")", ":", "return", "for", "ent", "in", "os", ".", "listdir", "(", "cachedir", ")", ":", "p", "=", "os", ".", "path", ".", "join", "(", "cachedir", ",", "ent", ")", "if", "os", ".", "path", ".", "isdir", "(", "p", ")", ":", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "p", ",", "topdown", "=", "False", ")", ":", "map", "(", "(", "lambda", "f", ":", "os", ".", "unlink", "(", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", ")", ")", ",", "files", ")", "map", "(", "(", "lambda", "d", ":", "os", ".", "rmdir", "(", "os", ".", "path", ".", "join", "(", "root", ",", "d", ")", ")", ")", ",", "dirs", ")", "else", ":", "os", ".", "unlink", "(", "p", ")" ]
nuke everything under cachedir .
train
false
528
def option(value, default='', omit_opts=False, omit_master=False, omit_pillar=False): if (not omit_opts): if (value in __opts__): return __opts__[value] if (not omit_master): if (value in __pillar__.get('master', {})): return __pillar__['master'][value] if (not omit_pillar): if (value in __pillar__): return __pillar__[value] if (value in DEFAULTS): return DEFAULTS[value] return default
[ "def", "option", "(", "value", ",", "default", "=", "''", ",", "omit_opts", "=", "False", ",", "omit_master", "=", "False", ",", "omit_pillar", "=", "False", ")", ":", "if", "(", "not", "omit_opts", ")", ":", "if", "(", "value", "in", "__opts__", ")", ":", "return", "__opts__", "[", "value", "]", "if", "(", "not", "omit_master", ")", ":", "if", "(", "value", "in", "__pillar__", ".", "get", "(", "'master'", ",", "{", "}", ")", ")", ":", "return", "__pillar__", "[", "'master'", "]", "[", "value", "]", "if", "(", "not", "omit_pillar", ")", ":", "if", "(", "value", "in", "__pillar__", ")", ":", "return", "__pillar__", "[", "value", "]", "if", "(", "value", "in", "DEFAULTS", ")", ":", "return", "DEFAULTS", "[", "value", "]", "return", "default" ]
return the value of a named option from opts , the master config in pillar , pillar , or the defaults .
train
true
529
@image_comparison(baseline_images=[u'EventCollection_plot__set_positions']) def test__EventCollection__set_positions(): (splt, coll, props) = generate_EventCollection_plot() new_positions = np.hstack([props[u'positions'], props[u'extra_positions']]) coll.set_positions(new_positions) np.testing.assert_array_equal(new_positions, coll.get_positions()) check_segments(coll, new_positions, props[u'linelength'], props[u'lineoffset'], props[u'orientation']) splt.set_title(u'EventCollection: set_positions') splt.set_xlim((-1), 90)
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'EventCollection_plot__set_positions'", "]", ")", "def", "test__EventCollection__set_positions", "(", ")", ":", "(", "splt", ",", "coll", ",", "props", ")", "=", "generate_EventCollection_plot", "(", ")", "new_positions", "=", "np", ".", "hstack", "(", "[", "props", "[", "u'positions'", "]", ",", "props", "[", "u'extra_positions'", "]", "]", ")", "coll", ".", "set_positions", "(", "new_positions", ")", "np", ".", "testing", ".", "assert_array_equal", "(", "new_positions", ",", "coll", ".", "get_positions", "(", ")", ")", "check_segments", "(", "coll", ",", "new_positions", ",", "props", "[", "u'linelength'", "]", ",", "props", "[", "u'lineoffset'", "]", ",", "props", "[", "u'orientation'", "]", ")", "splt", ".", "set_title", "(", "u'EventCollection: set_positions'", ")", "splt", ".", "set_xlim", "(", "(", "-", "1", ")", ",", "90", ")" ]
check to make sure set_positions works properly .
train
false
531
def delete_course_and_groups(course_key, user_id): module_store = modulestore() with module_store.bulk_operations(course_key): module_store.delete_course(course_key, user_id) print 'removing User permissions from course....' try: remove_all_instructors(course_key) except Exception as err: log.error('Error in deleting course groups for {0}: {1}'.format(course_key, err))
[ "def", "delete_course_and_groups", "(", "course_key", ",", "user_id", ")", ":", "module_store", "=", "modulestore", "(", ")", "with", "module_store", ".", "bulk_operations", "(", "course_key", ")", ":", "module_store", ".", "delete_course", "(", "course_key", ",", "user_id", ")", "print", "'removing User permissions from course....'", "try", ":", "remove_all_instructors", "(", "course_key", ")", "except", "Exception", "as", "err", ":", "log", ".", "error", "(", "'Error in deleting course groups for {0}: {1}'", ".", "format", "(", "course_key", ",", "err", ")", ")" ]
this deletes the courseware associated with a course_key as well as cleaning up the various user table stuff .
train
false
532
def getenv(name, default=None): return os.environ.get(name, default)
[ "def", "getenv", "(", "name", ",", "default", "=", "None", ")", ":", "return", "os", ".", "environ", ".", "get", "(", "name", ",", "default", ")" ]
get an environment variable .
train
false
534
@api_versions.wraps('2.24') @utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) @utils.arg('migration', metavar='<migration>', help=_('ID of migration.')) def do_live_migration_abort(cs, args): server = _find_server(cs, args.server) cs.server_migrations.live_migration_abort(server, args.migration)
[ "@", "api_versions", ".", "wraps", "(", "'2.24'", ")", "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "@", "utils", ".", "arg", "(", "'migration'", ",", "metavar", "=", "'<migration>'", ",", "help", "=", "_", "(", "'ID of migration.'", ")", ")", "def", "do_live_migration_abort", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "args", ".", "server", ")", "cs", ".", "server_migrations", ".", "live_migration_abort", "(", "server", ",", "args", ".", "migration", ")" ]
abort an on-going live migration .
train
false
535
def pairwise(iterable, cyclic=False): (a, b) = tee(iterable) first = next(b, None) if (cyclic is True): return zip(a, chain(b, (first,))) return zip(a, b)
[ "def", "pairwise", "(", "iterable", ",", "cyclic", "=", "False", ")", ":", "(", "a", ",", "b", ")", "=", "tee", "(", "iterable", ")", "first", "=", "next", "(", "b", ",", "None", ")", "if", "(", "cyclic", "is", "True", ")", ":", "return", "zip", "(", "a", ",", "chain", "(", "b", ",", "(", "first", ",", ")", ")", ")", "return", "zip", "(", "a", ",", "b", ")" ]
return the items of an iterable paired with its next item .
train
false
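A self-contained demo of both branches, with the snippet restated alongside its itertools imports so it runs as-is:

from itertools import tee, chain

def pairwise(iterable, cyclic=False):
    a, b = tee(iterable)
    first = next(b, None)
    if cyclic is True:
        return zip(a, chain(b, (first,)))
    return zip(a, b)

print(list(pairwise([1, 2, 3, 4])))               # [(1, 2), (2, 3), (3, 4)]
print(list(pairwise([1, 2, 3, 4], cyclic=True)))  # [(1, 2), (2, 3), (3, 4), (4, 1)]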
536
def get_enabled(): return _get_svc_list('YES')
[ "def", "get_enabled", "(", ")", ":", "return", "_get_svc_list", "(", "'YES'", ")" ]
return a list of services that are enabled on boot .
train
false
537
@raise_if_offline def schema_has_column(table_name, column_name): bind = op.get_bind() insp = sa.engine.reflection.Inspector.from_engine(bind) if (not schema_has_table(table_name)): return return (column_name in [column['name'] for column in insp.get_columns(table_name)])
[ "@", "raise_if_offline", "def", "schema_has_column", "(", "table_name", ",", "column_name", ")", ":", "bind", "=", "op", ".", "get_bind", "(", ")", "insp", "=", "sa", ".", "engine", ".", "reflection", ".", "Inspector", ".", "from_engine", "(", "bind", ")", "if", "(", "not", "schema_has_table", "(", "table_name", ")", ")", ":", "return", "return", "(", "column_name", "in", "[", "column", "[", "'name'", "]", "for", "column", "in", "insp", ".", "get_columns", "(", "table_name", ")", "]", ")" ]
check whether the specified column exists in the current schema .
train
false
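A hedged fragment showing the intended use inside an alembic migration; the table and column names are made up. The helper keeps the migration idempotent:

import sqlalchemy as sa
from alembic import op

def upgrade():
    if not schema_has_column('users', 'age'):   # helper from the snippet above
        op.add_column('users', sa.Column('age', sa.Integer(), nullable=True))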
538
def rwx(mode, aclBit=False): bools = expand_mode(mode) s = list('rwxrwxrwxt') for (i, v) in enumerate(bools[:(-1)]): if (not v): s[i] = '-' if (not bools[(-1)]): s = s[:(-1)] return ((rwxtype(mode) + ''.join(s)) + ('+' if aclBit else ''))
[ "def", "rwx", "(", "mode", ",", "aclBit", "=", "False", ")", ":", "bools", "=", "expand_mode", "(", "mode", ")", "s", "=", "list", "(", "'rwxrwxrwxt'", ")", "for", "(", "i", ",", "v", ")", "in", "enumerate", "(", "bools", "[", ":", "(", "-", "1", ")", "]", ")", ":", "if", "(", "not", "v", ")", ":", "s", "[", "i", "]", "=", "'-'", "if", "(", "not", "bools", "[", "(", "-", "1", ")", "]", ")", ":", "s", "=", "s", "[", ":", "(", "-", "1", ")", "]", "return", "(", "(", "rwxtype", "(", "mode", ")", "+", "''", ".", "join", "(", "s", ")", ")", "+", "(", "'+'", "if", "aclBit", "else", "''", ")", ")" ]
returns "rwx"-style string like that ls would give you .
train
false
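The snippet depends on expand_mode and rwxtype helpers that are not shown; for comparison, the standard library produces the same ls-style string:

import stat

print(stat.filemode(0o100755))   # '-rwxr-xr-x' (regular file, 0755)
print(stat.filemode(0o040750))   # 'drwxr-x---' (directory, 0750)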
539
def xy_to_array_origin(image): return rgb_transpose(image[:, ::(-1)])
[ "def", "xy_to_array_origin", "(", "image", ")", ":", "return", "rgb_transpose", "(", "image", "[", ":", ",", ":", ":", "(", "-", "1", ")", "]", ")" ]
return view of image transformed from cartesian to array origin .
train
false
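A self-contained sketch; rgb_transpose is not shown in the snippet, so it is assumed here to swap the two spatial axes of an rgb image:

import numpy as np

def rgb_transpose(image):
    return np.transpose(image, (1, 0, 2))   # assumed helper: swap x and y axes

def xy_to_array_origin(image):
    return rgb_transpose(image[:, ::-1])

img = np.arange(24).reshape(2, 4, 3)    # (rows, cols, rgb)
print(xy_to_array_origin(img).shape)    # (4, 2, 3): flipped and transposed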