Dataset schema (column, type, observed range):

    id_within_dataset     int64       values 1 to 55.5k
    snippet               string      lengths 19 to 14.2k
    tokens                sequence    lengths 6 to 1.63k
    nl                    string      lengths 6 to 352
    split_within_dataset  string      1 distinct value
    is_duplicated         bool        2 classes
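For orientation, a minimal sketch of how records with this schema could be iterated. This is an assumption-laden illustration: the `datasets.load_dataset` call is a real API, but the dataset identifier 'user/code-nl-pairs' is a placeholder, since this table does not name the dataset.

    # Hypothetical loader for the schema above; the dataset name is a placeholder.
    from datasets import load_dataset

    ds = load_dataset('user/code-nl-pairs', split='train')  # matches split_within_dataset
    for row in ds:
        code = row['snippet']      # raw source text (string, 19 to 14.2k chars)
        tokens = row['tokens']     # lexer tokens for the snippet (6 to 1.63k items)
        summary = row['nl']        # lower-cased docstring summary (6 to 352 chars)
        if not row['is_duplicated']:
            print(row['id_within_dataset'], summary)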
55,479
def is_var(exp): return isinstance(exp, Var)
[ "def", "is_var", "(", "exp", ")", ":", "return", "isinstance", "(", "exp", ",", "Var", ")" ]
is exp a var node .
train
false
55,480
def make_middleware(app, global_conf=None, warning_level=None):
    if warning_level:
        warning_level = int(warning_level)
    return HTTPExceptionHandler(app, warning_level=warning_level)
[ "def", "make_middleware", "(", "app", ",", "global_conf", "=", "None", ",", "warning_level", "=", "None", ")", ":", "if", "warning_level", ":", "warning_level", "=", "int", "(", "warning_level", ")", "return", "HTTPExceptionHandler", "(", "app", ",", "warning_level", "=", "warning_level", ")" ]
httpexceptions middleware; this catches any paste .
train
false
55,481
def fromRoman(s):
    if (not s):
        raise InvalidRomanNumeralError, 'Input can not be blank'
    if (not romanNumeralPattern.search(s)):
        raise InvalidRomanNumeralError, ('Invalid Roman numeral: %s' % s)
    result = 0
    index = 0
    for (numeral, integer) in romanNumeralMap:
        while (s[index:(index + len(numeral))] == numeral):
            result += integer
            index += len(numeral)
    return result
[ "def", "fromRoman", "(", "s", ")", ":", "if", "(", "not", "s", ")", ":", "raise", "InvalidRomanNumeralError", ",", "'Input can not be blank'", "if", "(", "not", "romanNumeralPattern", ".", "search", "(", "s", ")", ")", ":", "raise", "InvalidRomanNumeralError", ",", "(", "'Invalid Roman numeral: %s'", "%", "s", ")", "result", "=", "0", "index", "=", "0", "for", "(", "numeral", ",", "integer", ")", "in", "romanNumeralMap", ":", "while", "(", "s", "[", "index", ":", "(", "index", "+", "len", "(", "numeral", ")", ")", "]", "==", "numeral", ")", ":", "result", "+=", "integer", "index", "+=", "len", "(", "numeral", ")", "return", "result" ]
convert roman numeral to integer .
train
false
55,482
def _py_convert_agg_to_wx_bitmap(agg, bbox):
    if (bbox is None):
        return wx.BitmapFromImage(_py_convert_agg_to_wx_image(agg, None))
    else:
        return _clipped_image_as_bitmap(_py_convert_agg_to_wx_image(agg, None), bbox)
[ "def", "_py_convert_agg_to_wx_bitmap", "(", "agg", ",", "bbox", ")", ":", "if", "(", "bbox", "is", "None", ")", ":", "return", "wx", ".", "BitmapFromImage", "(", "_py_convert_agg_to_wx_image", "(", "agg", ",", "None", ")", ")", "else", ":", "return", "_clipped_image_as_bitmap", "(", "_py_convert_agg_to_wx_image", "(", "agg", ",", "None", ")", ",", "bbox", ")" ]
convert the region of the agg buffer bounded by bbox to a wx .
train
false
55,485
@frappe.whitelist()
def take_backup():
    enqueue(u'frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup_to_dropbox', queue=u'long')
    frappe.msgprint(_(u'Queued for backup. It may take a few minutes to an hour.'))
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "take_backup", "(", ")", ":", "enqueue", "(", "u'frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup_to_dropbox'", ",", "queue", "=", "u'long'", ")", "frappe", ".", "msgprint", "(", "_", "(", "u'Queued for backup. It may take a few minutes to an hour.'", ")", ")" ]
enqueue longjob for taking backup to dropbox .
train
false
55,486
def device_memory_size(devmem):
    sz = getattr(devmem, '_cuda_memsize_', None)
    if (sz is None):
        (s, e) = device_extents(devmem)
        sz = (e - s)
        devmem._cuda_memsize_ = sz
    assert (sz > 0), 'zero length array'
    return sz
[ "def", "device_memory_size", "(", "devmem", ")", ":", "sz", "=", "getattr", "(", "devmem", ",", "'_cuda_memsize_'", ",", "None", ")", "if", "(", "sz", "is", "None", ")", ":", "(", "s", ",", "e", ")", "=", "device_extents", "(", "devmem", ")", "sz", "=", "(", "e", "-", "s", ")", "devmem", ".", "_cuda_memsize_", "=", "sz", "assert", "(", "sz", ">", "0", ")", ",", "'zero length array'", "return", "sz" ]
check the memory size of the device memory .
train
false
55,488
def get_queue_names(app_id=None, max_rows=100):
    rpc = apiproxy_stub_map.UserRPC('taskqueue')
    request = taskqueue_service_pb.TaskQueueFetchQueuesRequest()
    response = taskqueue_service_pb.TaskQueueFetchQueuesResponse()
    if app_id:
        request.set_app_id(app_id)
    request.set_max_rows(max_rows)
    queues = ['default']
    try:
        rpc.make_call('FetchQueues', request, response)
        rpc.check_success()
        for queue in response.queue_list():
            if ((queue.mode() == taskqueue_service_pb.TaskQueueMode.PUSH) and (not queue.queue_name().startswith('__')) and (queue.queue_name() != 'default')):
                queues.append(queue.queue_name())
    except Exception:
        logging.exception('Failed to get queue names.')
    return queues
[ "def", "get_queue_names", "(", "app_id", "=", "None", ",", "max_rows", "=", "100", ")", ":", "rpc", "=", "apiproxy_stub_map", ".", "UserRPC", "(", "'taskqueue'", ")", "request", "=", "taskqueue_service_pb", ".", "TaskQueueFetchQueuesRequest", "(", ")", "response", "=", "taskqueue_service_pb", ".", "TaskQueueFetchQueuesResponse", "(", ")", "if", "app_id", ":", "request", ".", "set_app_id", "(", "app_id", ")", "request", ".", "set_max_rows", "(", "max_rows", ")", "queues", "=", "[", "'default'", "]", "try", ":", "rpc", ".", "make_call", "(", "'FetchQueues'", ",", "request", ",", "response", ")", "rpc", ".", "check_success", "(", ")", "for", "queue", "in", "response", ".", "queue_list", "(", ")", ":", "if", "(", "(", "queue", ".", "mode", "(", ")", "==", "taskqueue_service_pb", ".", "TaskQueueMode", ".", "PUSH", ")", "and", "(", "not", "queue", ".", "queue_name", "(", ")", ".", "startswith", "(", "'__'", ")", ")", "and", "(", "queue", ".", "queue_name", "(", ")", "!=", "'default'", ")", ")", ":", "queues", ".", "append", "(", "queue", ".", "queue_name", "(", ")", ")", "except", "Exception", ":", "logging", ".", "exception", "(", "'Failed to get queue names.'", ")", "return", "queues" ]
returns a list with all non-special queue names for app_id .
train
false
55,489
def calculateDeltaSeconds(start): return (time.time() - start)
[ "def", "calculateDeltaSeconds", "(", "start", ")", ":", "return", "(", "time", ".", "time", "(", ")", "-", "start", ")" ]
returns elapsed time from start till now .
train
false
55,490
def _collect_dirs(start_dir, blacklist=set(['conftest.py', 'nox.py']), suffix='_test.py'):
    for (parent, subdirs, files) in os.walk(start_dir):
        if any((f for f in files if (f.endswith(suffix) and (f not in blacklist)))):
            del subdirs[:]
            (yield parent)
        else:
            subdirs[:] = [s for s in subdirs if (s[0].isalpha() and (os.path.join(parent, s) not in blacklist))]
[ "def", "_collect_dirs", "(", "start_dir", ",", "blacklist", "=", "set", "(", "[", "'conftest.py'", ",", "'nox.py'", "]", ")", ",", "suffix", "=", "'_test.py'", ")", ":", "for", "(", "parent", ",", "subdirs", ",", "files", ")", "in", "os", ".", "walk", "(", "start_dir", ")", ":", "if", "any", "(", "(", "f", "for", "f", "in", "files", "if", "(", "f", ".", "endswith", "(", "suffix", ")", "and", "(", "f", "not", "in", "blacklist", ")", ")", ")", ")", ":", "del", "subdirs", "[", ":", "]", "(", "yield", "parent", ")", "else", ":", "subdirs", "[", ":", "]", "=", "[", "s", "for", "s", "in", "subdirs", "if", "(", "s", "[", "0", "]", ".", "isalpha", "(", ")", "and", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "s", ")", "not", "in", "blacklist", ")", ")", "]" ]
recursively collects a list of dirs that contain a file matching the given suffix .
train
false
55,492
def _get_TV(codon_lst1, codon_lst2, codon_table=default_codon_table):
    purine = ('A', 'G')
    pyrimidine = ('C', 'T')
    TV = [0, 0]
    sites = 0
    for (codon1, codon2) in zip(codon_lst1, codon_lst2):
        if ('---' not in (codon1, codon2)):
            for (i, j) in zip(codon1, codon2):
                if (i == j):
                    pass
                elif ((i in purine) and (j in purine)):
                    TV[0] += 1
                elif ((i in pyrimidine) and (j in pyrimidine)):
                    TV[0] += 1
                else:
                    TV[1] += 1
                sites += 1
    return ((TV[0] / sites), (TV[1] / sites))
[ "def", "_get_TV", "(", "codon_lst1", ",", "codon_lst2", ",", "codon_table", "=", "default_codon_table", ")", ":", "purine", "=", "(", "'A'", ",", "'G'", ")", "pyrimidine", "=", "(", "'C'", ",", "'T'", ")", "TV", "=", "[", "0", ",", "0", "]", "sites", "=", "0", "for", "(", "codon1", ",", "codon2", ")", "in", "zip", "(", "codon_lst1", ",", "codon_lst2", ")", ":", "if", "(", "'---'", "not", "in", "(", "codon1", ",", "codon2", ")", ")", ":", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "codon1", ",", "codon2", ")", ":", "if", "(", "i", "==", "j", ")", ":", "pass", "elif", "(", "(", "i", "in", "purine", ")", "and", "(", "j", "in", "purine", ")", ")", ":", "TV", "[", "0", "]", "+=", "1", "elif", "(", "(", "i", "in", "pyrimidine", ")", "and", "(", "j", "in", "pyrimidine", ")", ")", ":", "TV", "[", "0", "]", "+=", "1", "else", ":", "TV", "[", "1", "]", "+=", "1", "sites", "+=", "1", "return", "(", "(", "TV", "[", "0", "]", "/", "sites", ")", ",", "(", "TV", "[", "1", "]", "/", "sites", ")", ")" ]
get tv .
train
false
55,493
def failing_checks(node_address, timeout=30):
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.load_verify_locations(cafile='ssl/cacert.pem')
    ctx.verify_mode = ssl.CERT_REQUIRED
    url = 'https://{}:8500/v1/health/state/any'.format(node_address)
    request = urllib2.Request(url)
    auth = ('Basic ' + base64.b64encode(get_credentials()))
    request.add_header('Authorization', auth)
    f = urllib2.urlopen(request, None, timeout, context=ctx)
    checks = json.loads(f.read().decode('utf8'))
    return [c for c in checks if (c.get('Status', '').lower() != 'passing')]
[ "def", "failing_checks", "(", "node_address", ",", "timeout", "=", "30", ")", ":", "ctx", "=", "ssl", ".", "create_default_context", "(", ")", "ctx", ".", "check_hostname", "=", "False", "ctx", ".", "load_verify_locations", "(", "cafile", "=", "'ssl/cacert.pem'", ")", "ctx", ".", "verify_mode", "=", "ssl", ".", "CERT_REQUIRED", "url", "=", "'https://{}:8500/v1/health/state/any'", ".", "format", "(", "node_address", ")", "request", "=", "urllib2", ".", "Request", "(", "url", ")", "auth", "=", "(", "'Basic '", "+", "base64", ".", "b64encode", "(", "get_credentials", "(", ")", ")", ")", "request", ".", "add_header", "(", "'Authorization'", ",", "auth", ")", "f", "=", "urllib2", ".", "urlopen", "(", "request", ",", "None", ",", "timeout", ",", "context", "=", "ctx", ")", "checks", "=", "json", ".", "loads", "(", "f", ".", "read", "(", ")", ".", "decode", "(", "'utf8'", ")", ")", "return", "[", "c", "for", "c", "in", "checks", "if", "(", "c", ".", "get", "(", "'Status'", ",", "''", ")", ".", "lower", "(", ")", "!=", "'passing'", ")", "]" ]
returns a list of failing checks .
train
false
55,495
def set_using_network_time(enable):
    state = salt.utils.mac_utils.validate_enabled(enable)
    cmd = 'systemsetup -setusingnetworktime {0}'.format(state)
    salt.utils.mac_utils.execute_return_success(cmd)
    return (state == salt.utils.mac_utils.validate_enabled(get_using_network_time()))
[ "def", "set_using_network_time", "(", "enable", ")", ":", "state", "=", "salt", ".", "utils", ".", "mac_utils", ".", "validate_enabled", "(", "enable", ")", "cmd", "=", "'systemsetup -setusingnetworktime {0}'", ".", "format", "(", "state", ")", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_success", "(", "cmd", ")", "return", "(", "state", "==", "salt", ".", "utils", ".", "mac_utils", ".", "validate_enabled", "(", "get_using_network_time", "(", ")", ")", ")" ]
set whether network time is on or off .
train
true
55,496
def test_construction(): s3_deleter.Deleter()
[ "def", "test_construction", "(", ")", ":", "s3_deleter", ".", "Deleter", "(", ")" ]
the constructor basically works .
train
false
55,499
def _set_rpm_probes(probes): return __salt__['probes.set_probes'](_ordered_dict_to_dict(probes), commit=False)
[ "def", "_set_rpm_probes", "(", "probes", ")", ":", "return", "__salt__", "[", "'probes.set_probes'", "]", "(", "_ordered_dict_to_dict", "(", "probes", ")", ",", "commit", "=", "False", ")" ]
calls the salt module "probes" to configure the probes on the device .
train
false
55,500
def _get_deep(data_structure, dot_path_or_list, default_value=None):
    search_path = None
    param_type = type(dot_path_or_list)
    if (param_type in (tuple, list)):
        search_path = dot_path_or_list
    elif (param_type == str):
        search_path = dot_path_or_list.split('.')
    assert (len(search_path) > 0), 'Missing valid search path'
    try:
        current_item = data_structure
        for search_key in search_path:
            current_item = current_item[search_key]
    except (KeyError, IndexError, TypeError):
        return default_value
    return current_item
[ "def", "_get_deep", "(", "data_structure", ",", "dot_path_or_list", ",", "default_value", "=", "None", ")", ":", "search_path", "=", "None", "param_type", "=", "type", "(", "dot_path_or_list", ")", "if", "(", "param_type", "in", "(", "tuple", ",", "list", ")", ")", ":", "search_path", "=", "dot_path_or_list", "elif", "(", "param_type", "==", "str", ")", ":", "search_path", "=", "dot_path_or_list", ".", "split", "(", "'.'", ")", "assert", "(", "len", "(", "search_path", ")", ">", "0", ")", ",", "'Missing valid search path'", "try", ":", "current_item", "=", "data_structure", "for", "search_key", "in", "search_path", ":", "current_item", "=", "current_item", "[", "search_key", "]", "except", "(", "KeyError", ",", "IndexError", ",", "TypeError", ")", ":", "return", "default_value", "return", "current_item" ]
attempts access nested data structures and not blow up on a gross key error "hello": { "hi": 5 .
train
false
55,501
def strip_unneeded(bkts, sufficient_funds):
    bkts = sorted(bkts, key=(lambda bkt: bkt.value))
    for i in range(len(bkts)):
        if (not sufficient_funds(bkts[(i + 1):])):
            return bkts[i:]
    return bkts
[ "def", "strip_unneeded", "(", "bkts", ",", "sufficient_funds", ")", ":", "bkts", "=", "sorted", "(", "bkts", ",", "key", "=", "(", "lambda", "bkt", ":", "bkt", ".", "value", ")", ")", "for", "i", "in", "range", "(", "len", "(", "bkts", ")", ")", ":", "if", "(", "not", "sufficient_funds", "(", "bkts", "[", "(", "i", "+", "1", ")", ":", "]", ")", ")", ":", "return", "bkts", "[", "i", ":", "]", "return", "bkts" ]
remove buckets that are unnecessary in achieving the spend amount .
train
false
55,502
@treeio_login_required
def ajax_object_lookup(request, response_format='html'):
    objects = []
    if (request.GET and ('term' in request.GET)):
        objects = Object.filter_permitted(request.user.profile, Object.objects.filter(object_name__icontains=request.GET['term']), mode='x')[:10]
    return render_to_response('core/ajax_object_lookup', {'objects': objects}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "def", "ajax_object_lookup", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "objects", "=", "[", "]", "if", "(", "request", ".", "GET", "and", "(", "'term'", "in", "request", ".", "GET", ")", ")", ":", "objects", "=", "Object", ".", "filter_permitted", "(", "request", ".", "user", ".", "profile", ",", "Object", ".", "objects", ".", "filter", "(", "object_name__icontains", "=", "request", ".", "GET", "[", "'term'", "]", ")", ",", "mode", "=", "'x'", ")", "[", ":", "10", "]", "return", "render_to_response", "(", "'core/ajax_object_lookup'", ",", "{", "'objects'", ":", "objects", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
returns a list of matching objects .
train
false
55,503
def scope2index(scope, descr, where=None):
    try:
        return scopes.index(scope)
    except ValueError:
        raise ValueError("{0} {1}has an unsupported scope value '{2}'".format(descr, ('from {0} '.format(where) if where else ''), scope))
[ "def", "scope2index", "(", "scope", ",", "descr", ",", "where", "=", "None", ")", ":", "try", ":", "return", "scopes", ".", "index", "(", "scope", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"{0} {1}has an unsupported scope value '{2}'\"", ".", "format", "(", "descr", ",", "(", "'from {0} '", ".", "format", "(", "where", ")", "if", "where", "else", "''", ")", ",", "scope", ")", ")" ]
look up the index of scope and raise a descriptive value error if not defined .
train
false
55,505
def publish_cmdline(reader=None, reader_name='standalone', parser=None, parser_name='restructuredtext', writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=True, argv=None, usage=default_usage, description=default_description):
    pub = Publisher(reader, parser, writer, settings=settings)
    pub.set_components(reader_name, parser_name, writer_name)
    output = pub.publish(argv, usage, description, settings_spec, settings_overrides, config_section=config_section, enable_exit_status=enable_exit_status)
    return output
[ "def", "publish_cmdline", "(", "reader", "=", "None", ",", "reader_name", "=", "'standalone'", ",", "parser", "=", "None", ",", "parser_name", "=", "'restructuredtext'", ",", "writer", "=", "None", ",", "writer_name", "=", "'pseudoxml'", ",", "settings", "=", "None", ",", "settings_spec", "=", "None", ",", "settings_overrides", "=", "None", ",", "config_section", "=", "None", ",", "enable_exit_status", "=", "True", ",", "argv", "=", "None", ",", "usage", "=", "default_usage", ",", "description", "=", "default_description", ")", ":", "pub", "=", "Publisher", "(", "reader", ",", "parser", ",", "writer", ",", "settings", "=", "settings", ")", "pub", ".", "set_components", "(", "reader_name", ",", "parser_name", ",", "writer_name", ")", "output", "=", "pub", ".", "publish", "(", "argv", ",", "usage", ",", "description", ",", "settings_spec", ",", "settings_overrides", ",", "config_section", "=", "config_section", ",", "enable_exit_status", "=", "enable_exit_status", ")", "return", "output" ]
set up & run a publisher for command-line-based file i/o .
train
false
55,506
@requires_sklearn
def test_ica_reject_buffer():
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
    ica = ICA(n_components=3, max_pca_components=4, n_pca_components=4)
    raw._data[2, 1000:1005] = 5e-12
    with catch_logging() as drop_log:
        with warnings.catch_warnings(record=True):
            ica.fit(raw, picks[:5], reject=dict(mag=2.5e-12), decim=2, tstep=0.01, verbose=True)
    assert_true(((raw._data[:5, ::2].shape[1] - 4) == ica.n_samples_))
    log = [l for l in drop_log.getvalue().split('\n') if ('detected' in l)]
    assert_equal(len(log), 1)
[ "@", "requires_sklearn", "def", "test_ica_reject_buffer", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", ".", "crop", "(", "1.5", ",", "stop", ")", ".", "load_data", "(", ")", "picks", "=", "pick_types", "(", "raw", ".", "info", ",", "meg", "=", "True", ",", "stim", "=", "False", ",", "ecg", "=", "False", ",", "eog", "=", "False", ",", "exclude", "=", "'bads'", ")", "ica", "=", "ICA", "(", "n_components", "=", "3", ",", "max_pca_components", "=", "4", ",", "n_pca_components", "=", "4", ")", "raw", ".", "_data", "[", "2", ",", "1000", ":", "1005", "]", "=", "5e-12", "with", "catch_logging", "(", ")", "as", "drop_log", ":", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", ":", "ica", ".", "fit", "(", "raw", ",", "picks", "[", ":", "5", "]", ",", "reject", "=", "dict", "(", "mag", "=", "2.5e-12", ")", ",", "decim", "=", "2", ",", "tstep", "=", "0.01", ",", "verbose", "=", "True", ")", "assert_true", "(", "(", "(", "raw", ".", "_data", "[", ":", "5", ",", ":", ":", "2", "]", ".", "shape", "[", "1", "]", "-", "4", ")", "==", "ica", ".", "n_samples_", ")", ")", "log", "=", "[", "l", "for", "l", "in", "drop_log", ".", "getvalue", "(", ")", ".", "split", "(", "'\\n'", ")", "if", "(", "'detected'", "in", "l", ")", "]", "assert_equal", "(", "len", "(", "log", ")", ",", "1", ")" ]
test ica data raw buffer rejection .
train
false
55,507
def constrain_rgb(r, g, b):
    w = (- min([0, r, g, b]))
    if (w > 0):
        r += w
        g += w
        b += w
    return (r, g, b)
[ "def", "constrain_rgb", "(", "r", ",", "g", ",", "b", ")", ":", "w", "=", "(", "-", "min", "(", "[", "0", ",", "r", ",", "g", ",", "b", "]", ")", ")", "if", "(", "w", ">", "0", ")", ":", "r", "+=", "w", "g", "+=", "w", "b", "+=", "w", "return", "(", "r", ",", "g", ",", "b", ")" ]
if the requested rgb shade contains a negative weight for one of the primaries .
train
false
55,508
def _parse_split_test_data_str():
    tuple_class = collections.namedtuple('TestCase', 'input, keep, no_keep')
    for line in test_data_str.splitlines():
        if (not line):
            continue
        data = line.split('/')
        item = tuple_class(input=data[0], keep=data[1].split('|'), no_keep=data[2].split('|'))
        (yield item)
    (yield tuple_class(input='', keep=[], no_keep=[]))
[ "def", "_parse_split_test_data_str", "(", ")", ":", "tuple_class", "=", "collections", ".", "namedtuple", "(", "'TestCase'", ",", "'input, keep, no_keep'", ")", "for", "line", "in", "test_data_str", ".", "splitlines", "(", ")", ":", "if", "(", "not", "line", ")", ":", "continue", "data", "=", "line", ".", "split", "(", "'/'", ")", "item", "=", "tuple_class", "(", "input", "=", "data", "[", "0", "]", ",", "keep", "=", "data", "[", "1", "]", ".", "split", "(", "'|'", ")", ",", "no_keep", "=", "data", "[", "2", "]", ".", "split", "(", "'|'", ")", ")", "(", "yield", "item", ")", "(", "yield", "tuple_class", "(", "input", "=", "''", ",", "keep", "=", "[", "]", ",", "no_keep", "=", "[", "]", ")", ")" ]
parse the test data set into a namedtuple to use in tests .
train
false
55,509
def is_field_shared_factory(field_name):
    class IsFieldShared(permissions.BasePermission, ):
        """
        Grants access if a particular profile field is shared with the requesting user.
        """
        def has_permission(self, request, view):
            url_username = request.parser_context.get('kwargs', {}).get('username', '')
            if (request.user.username.lower() == url_username.lower()):
                return True
            if request.user.is_staff:
                return True
            user = get_object_or_404(User, username__iexact=url_username)
            if (field_name in visible_fields(user.profile, user)):
                return True
            raise Http404()
    return IsFieldShared
[ "def", "is_field_shared_factory", "(", "field_name", ")", ":", "class", "IsFieldShared", "(", "permissions", ".", "BasePermission", ",", ")", ":", "def", "has_permission", "(", "self", ",", "request", ",", "view", ")", ":", "url_username", "=", "request", ".", "parser_context", ".", "get", "(", "'kwargs'", ",", "{", "}", ")", ".", "get", "(", "'username'", ",", "''", ")", "if", "(", "request", ".", "user", ".", "username", ".", "lower", "(", ")", "==", "url_username", ".", "lower", "(", ")", ")", ":", "return", "True", "if", "request", ".", "user", ".", "is_staff", ":", "return", "True", "user", "=", "get_object_or_404", "(", "User", ",", "username__iexact", "=", "url_username", ")", "if", "(", "field_name", "in", "visible_fields", "(", "user", ".", "profile", ",", "user", ")", ")", ":", "return", "True", "raise", "Http404", "(", ")", "return", "IsFieldShared" ]
generates a permission class that grants access if a particular profile field is shared with the requesting user .
train
false
55,510
def fix_epoch(epoch):
    epoch = int(epoch)
    if (len(str(epoch)) <= 10):
        return epoch
    elif (len(str(epoch)) == 13):
        return int((epoch / 1000))
    elif ((len(str(epoch)) > 10) and (len(str(epoch)) < 13)):
        raise ValueError('Unusually formatted epoch timestamp. Should be 10, 13, or more digits')
    else:
        orders_of_magnitude = (len(str(epoch)) - 10)
        powers_of_ten = (10 ** orders_of_magnitude)
        epoch = int((epoch / powers_of_ten))
    return epoch
[ "def", "fix_epoch", "(", "epoch", ")", ":", "epoch", "=", "int", "(", "epoch", ")", "if", "(", "len", "(", "str", "(", "epoch", ")", ")", "<=", "10", ")", ":", "return", "epoch", "elif", "(", "len", "(", "str", "(", "epoch", ")", ")", "==", "13", ")", ":", "return", "int", "(", "(", "epoch", "/", "1000", ")", ")", "elif", "(", "(", "len", "(", "str", "(", "epoch", ")", ")", ">", "10", ")", "and", "(", "len", "(", "str", "(", "epoch", ")", ")", "<", "13", ")", ")", ":", "raise", "ValueError", "(", "'Unusually formatted epoch timestamp. Should be 10, 13, or more digits'", ")", "else", ":", "orders_of_magnitude", "=", "(", "len", "(", "str", "(", "epoch", ")", ")", "-", "10", ")", "powers_of_ten", "=", "(", "10", "**", "orders_of_magnitude", ")", "epoch", "=", "int", "(", "(", "epoch", "/", "powers_of_ten", ")", ")", "return", "epoch" ]
fix value of epoch to be epoch .
train
false
55,512
def make_user_coach(user, master_course_key):
    coach_role_on_master_course = CourseCcxCoachRole(master_course_key)
    coach_role_on_master_course.add_users(user)
[ "def", "make_user_coach", "(", "user", ",", "master_course_key", ")", ":", "coach_role_on_master_course", "=", "CourseCcxCoachRole", "(", "master_course_key", ")", "coach_role_on_master_course", ".", "add_users", "(", "user", ")" ]
makes an user coach on the master course .
train
false
55,513
def weights_uniform(nlags): return np.ones((nlags + 1))
[ "def", "weights_uniform", "(", "nlags", ")", ":", "return", "np", ".", "ones", "(", "(", "nlags", "+", "1", ")", ")" ]
uniform weights for hac this will be moved to another module parameters nlags : int highest lag in the kernel window .
train
false
55,514
def get_clonespec_for_valid_snapshot(config_spec, object_ref, reloc_spec, template, vm_):
    moving = True
    if (QUICK_LINKED_CLONE == vm_['snapshot']['disk_move_type']):
        reloc_spec.diskMoveType = QUICK_LINKED_CLONE
    elif (CURRENT_STATE_LINKED_CLONE == vm_['snapshot']['disk_move_type']):
        reloc_spec.diskMoveType = CURRENT_STATE_LINKED_CLONE
    elif (COPY_ALL_DISKS_FULL_CLONE == vm_['snapshot']['disk_move_type']):
        reloc_spec.diskMoveType = COPY_ALL_DISKS_FULL_CLONE
    elif (FLATTEN_DISK_FULL_CLONE == vm_['snapshot']['disk_move_type']):
        reloc_spec.diskMoveType = FLATTEN_DISK_FULL_CLONE
    else:
        moving = False
    if moving:
        return build_clonespec(config_spec, object_ref, reloc_spec, template)
    else:
        return None
[ "def", "get_clonespec_for_valid_snapshot", "(", "config_spec", ",", "object_ref", ",", "reloc_spec", ",", "template", ",", "vm_", ")", ":", "moving", "=", "True", "if", "(", "QUICK_LINKED_CLONE", "==", "vm_", "[", "'snapshot'", "]", "[", "'disk_move_type'", "]", ")", ":", "reloc_spec", ".", "diskMoveType", "=", "QUICK_LINKED_CLONE", "elif", "(", "CURRENT_STATE_LINKED_CLONE", "==", "vm_", "[", "'snapshot'", "]", "[", "'disk_move_type'", "]", ")", ":", "reloc_spec", ".", "diskMoveType", "=", "CURRENT_STATE_LINKED_CLONE", "elif", "(", "COPY_ALL_DISKS_FULL_CLONE", "==", "vm_", "[", "'snapshot'", "]", "[", "'disk_move_type'", "]", ")", ":", "reloc_spec", ".", "diskMoveType", "=", "COPY_ALL_DISKS_FULL_CLONE", "elif", "(", "FLATTEN_DISK_FULL_CLONE", "==", "vm_", "[", "'snapshot'", "]", "[", "'disk_move_type'", "]", ")", ":", "reloc_spec", ".", "diskMoveType", "=", "FLATTEN_DISK_FULL_CLONE", "else", ":", "moving", "=", "False", "if", "moving", ":", "return", "build_clonespec", "(", "config_spec", ",", "object_ref", ",", "reloc_spec", ",", "template", ")", "else", ":", "return", "None" ]
return clonespec only if values are valid .
train
true
55,515
def set_wake_on_network(enabled):
    state = salt.utils.mac_utils.validate_enabled(enabled)
    cmd = 'systemsetup -setwakeonnetworkaccess {0}'.format(state)
    salt.utils.mac_utils.execute_return_success(cmd)
    return salt.utils.mac_utils.confirm_updated(state, get_wake_on_network)
[ "def", "set_wake_on_network", "(", "enabled", ")", ":", "state", "=", "salt", ".", "utils", ".", "mac_utils", ".", "validate_enabled", "(", "enabled", ")", "cmd", "=", "'systemsetup -setwakeonnetworkaccess {0}'", ".", "format", "(", "state", ")", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_success", "(", "cmd", ")", "return", "salt", ".", "utils", ".", "mac_utils", ".", "confirm_updated", "(", "state", ",", "get_wake_on_network", ")" ]
set whether or not the computer will wake from sleep when network activity is detected .
train
true
55,516
def check_files(files, file_check, exclusions=set(), pattern=None):
    if (not files):
        return
    for fname in files:
        if ((not exists(fname)) or (not isfile(fname))):
            continue
        if any(((ex in fname) for ex in exclusions)):
            continue
        if ((pattern is None) or re.match(pattern, fname)):
            file_check(fname)
[ "def", "check_files", "(", "files", ",", "file_check", ",", "exclusions", "=", "set", "(", ")", ",", "pattern", "=", "None", ")", ":", "if", "(", "not", "files", ")", ":", "return", "for", "fname", "in", "files", ":", "if", "(", "(", "not", "exists", "(", "fname", ")", ")", "or", "(", "not", "isfile", "(", "fname", ")", ")", ")", ":", "continue", "if", "any", "(", "(", "(", "ex", "in", "fname", ")", "for", "ex", "in", "exclusions", ")", ")", ":", "continue", "if", "(", "(", "pattern", "is", "None", ")", "or", "re", ".", "match", "(", "pattern", ",", "fname", ")", ")", ":", "file_check", "(", "fname", ")" ]
checks all files with the file_check function provided .
train
false
55,517
def determine_format(request, serializer, default_format=u'application/json'):
    format = request.GET.get(u'format')
    if format:
        if (format in serializer.formats):
            return serializer.get_mime_for_format(format)
    if ((u'callback' in request.GET) and (u'jsonp' in serializer.formats)):
        return serializer.get_mime_for_format(u'jsonp')
    accept = request.META.get(u'HTTP_ACCEPT', u'*/*')
    if (accept != u'*/*'):
        try:
            best_format = mimeparse.best_match(serializer.supported_formats_reversed, accept)
        except ValueError:
            raise BadRequest(u'Invalid Accept header')
        if best_format:
            return best_format
    return default_format
[ "def", "determine_format", "(", "request", ",", "serializer", ",", "default_format", "=", "u'application/json'", ")", ":", "format", "=", "request", ".", "GET", ".", "get", "(", "u'format'", ")", "if", "format", ":", "if", "(", "format", "in", "serializer", ".", "formats", ")", ":", "return", "serializer", ".", "get_mime_for_format", "(", "format", ")", "if", "(", "(", "u'callback'", "in", "request", ".", "GET", ")", "and", "(", "u'jsonp'", "in", "serializer", ".", "formats", ")", ")", ":", "return", "serializer", ".", "get_mime_for_format", "(", "u'jsonp'", ")", "accept", "=", "request", ".", "META", ".", "get", "(", "u'HTTP_ACCEPT'", ",", "u'*/*'", ")", "if", "(", "accept", "!=", "u'*/*'", ")", ":", "try", ":", "best_format", "=", "mimeparse", ".", "best_match", "(", "serializer", ".", "supported_formats_reversed", ",", "accept", ")", "except", "ValueError", ":", "raise", "BadRequest", "(", "u'Invalid Accept header'", ")", "if", "best_format", ":", "return", "best_format", "return", "default_format" ]
tries to "smartly" determine which output format is desired .
train
false
55,519
def object_id(value):
    _object_id = '{}_{}_{}'.format(slugify(_value_name(value)), value.node.node_id, value.index)
    if (value.instance > 1):
        return '{}_{}'.format(_object_id, value.instance)
    return _object_id
[ "def", "object_id", "(", "value", ")", ":", "_object_id", "=", "'{}_{}_{}'", ".", "format", "(", "slugify", "(", "_value_name", "(", "value", ")", ")", ",", "value", ".", "node", ".", "node_id", ",", "value", ".", "index", ")", "if", "(", "value", ".", "instance", ">", "1", ")", ":", "return", "'{}_{}'", ".", "format", "(", "_object_id", ",", "value", ".", "instance", ")", "return", "_object_id" ]
return the object_id of the device value .
train
false
55,520
def get_pools(client): return [server.pool for server in client._get_topology().select_servers(any_server_selector)]
[ "def", "get_pools", "(", "client", ")", ":", "return", "[", "server", ".", "pool", "for", "server", "in", "client", ".", "_get_topology", "(", ")", ".", "select_servers", "(", "any_server_selector", ")", "]" ]
get all pools .
train
false
55,521
def get_cpu_list(ip, user, passwd):
    cmd = 'statcpu -iter 1 -t'
    showcpu_list = run_ssh_thread(ip, user, passwd, cmd)
    cpu_list = []
    line_num = 0
    for line in showcpu_list:
        line_num += 1
        if (line_num >= 3):
            cpu_stats = line.split()
            if (len(cpu_stats) > 2):
                cpu_list.append(cpu_stats[0].split(',')[0])
    return cpu_list
[ "def", "get_cpu_list", "(", "ip", ",", "user", ",", "passwd", ")", ":", "cmd", "=", "'statcpu -iter 1 -t'", "showcpu_list", "=", "run_ssh_thread", "(", "ip", ",", "user", ",", "passwd", ",", "cmd", ")", "cpu_list", "=", "[", "]", "line_num", "=", "0", "for", "line", "in", "showcpu_list", ":", "line_num", "+=", "1", "if", "(", "line_num", ">=", "3", ")", ":", "cpu_stats", "=", "line", ".", "split", "(", ")", "if", "(", "len", "(", "cpu_stats", ")", ">", "2", ")", ":", "cpu_list", ".", "append", "(", "cpu_stats", "[", "0", "]", ".", "split", "(", "','", ")", "[", "0", "]", ")", "return", "cpu_list" ]
get a list of cpus to build metric definitions with .
train
false
55,522
def test_append(hist, config_stub):
    config_stub.data = CONFIG_NOT_PRIVATE
    hist.append('new item')
    assert ('new item' in hist.history)
    hist.history.remove('new item')
    assert (hist.history == HISTORY)
[ "def", "test_append", "(", "hist", ",", "config_stub", ")", ":", "config_stub", ".", "data", "=", "CONFIG_NOT_PRIVATE", "hist", ".", "append", "(", "'new item'", ")", "assert", "(", "'new item'", "in", "hist", ".", "history", ")", "hist", ".", "history", ".", "remove", "(", "'new item'", ")", "assert", "(", "hist", ".", "history", "==", "HISTORY", ")" ]
test append outside private mode .
train
false
55,523
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
return a signature scheme object pkcs115_sigscheme that can create or verify pkcs#1 v1 .
train
false
55,525
def _collect_filetree_revs(obj_store, tree_sha, kset):
    filetree = obj_store[tree_sha]
    for (name, mode, sha) in filetree.iteritems():
        if ((not S_ISGITLINK(mode)) and (sha not in kset)):
            kset.add(sha)
            if stat.S_ISDIR(mode):
                _collect_filetree_revs(obj_store, sha, kset)
[ "def", "_collect_filetree_revs", "(", "obj_store", ",", "tree_sha", ",", "kset", ")", ":", "filetree", "=", "obj_store", "[", "tree_sha", "]", "for", "(", "name", ",", "mode", ",", "sha", ")", "in", "filetree", ".", "iteritems", "(", ")", ":", "if", "(", "(", "not", "S_ISGITLINK", "(", "mode", ")", ")", "and", "(", "sha", "not", "in", "kset", ")", ")", ":", "kset", ".", "add", "(", "sha", ")", "if", "stat", ".", "S_ISDIR", "(", "mode", ")", ":", "_collect_filetree_revs", "(", "obj_store", ",", "sha", ",", "kset", ")" ]
collect sha1s of files and directories for specified tree .
train
false
55,527
def dirichlet_likelihood(weights, alpha=None):
    if (type(weights) is Variable):
        n_topics = weights.data.shape[1]
    else:
        n_topics = weights.W.data.shape[1]
    if (alpha is None):
        alpha = (1.0 / n_topics)
    if (type(weights) is Variable):
        log_proportions = F.log_softmax(weights)
    else:
        log_proportions = F.log_softmax(weights.W)
    loss = ((alpha - 1.0) * log_proportions)
    return (- F.sum(loss))
[ "def", "dirichlet_likelihood", "(", "weights", ",", "alpha", "=", "None", ")", ":", "if", "(", "type", "(", "weights", ")", "is", "Variable", ")", ":", "n_topics", "=", "weights", ".", "data", ".", "shape", "[", "1", "]", "else", ":", "n_topics", "=", "weights", ".", "W", ".", "data", ".", "shape", "[", "1", "]", "if", "(", "alpha", "is", "None", ")", ":", "alpha", "=", "(", "1.0", "/", "n_topics", ")", "if", "(", "type", "(", "weights", ")", "is", "Variable", ")", ":", "log_proportions", "=", "F", ".", "log_softmax", "(", "weights", ")", "else", ":", "log_proportions", "=", "F", ".", "log_softmax", "(", "weights", ".", "W", ")", "loss", "=", "(", "(", "alpha", "-", "1.0", ")", "*", "log_proportions", ")", "return", "(", "-", "F", ".", "sum", "(", "loss", ")", ")" ]
calculate the log likelihood of the observed topic proportions .
train
false
55,528
def _urlopen_cached(url, cache):
    from_cache = False
    if (cache is not None):
        cache_path = join(cache, (url.split('://')[(-1)].replace('/', ',') + '.zip'))
        try:
            data = _open_cache(cache_path)
            from_cache = True
        except:
            pass
    if (not from_cache):
        data = urlopen(url).read()
        if (cache is not None):
            _cache_it(data, cache_path)
    return (data, from_cache)
[ "def", "_urlopen_cached", "(", "url", ",", "cache", ")", ":", "from_cache", "=", "False", "if", "(", "cache", "is", "not", "None", ")", ":", "cache_path", "=", "join", "(", "cache", ",", "(", "url", ".", "split", "(", "'://'", ")", "[", "(", "-", "1", ")", "]", ".", "replace", "(", "'/'", ",", "','", ")", "+", "'.zip'", ")", ")", "try", ":", "data", "=", "_open_cache", "(", "cache_path", ")", "from_cache", "=", "True", "except", ":", "pass", "if", "(", "not", "from_cache", ")", ":", "data", "=", "urlopen", "(", "url", ")", ".", "read", "(", ")", "if", "(", "cache", "is", "not", "None", ")", ":", "_cache_it", "(", "data", ",", "cache_path", ")", "return", "(", "data", ",", "from_cache", ")" ]
tries to load data from cache location otherwise downloads it .
train
false
55,529
def interface_field(interfaces, **field_kwargs):
    if (not isinstance(interfaces, tuple)):
        raise TypeError('The ``interfaces`` argument must be a tuple. Got: {!r}'.format(interfaces))
    original_invariant = field_kwargs.pop('invariant', None)

    def invariant(value):
        error_messages = []
        if (original_invariant is not None):
            (original_invariant_result, _original_invariant_message) = original_invariant(value)
            if original_invariant_result:
                error_messages.append(original_invariant_result)
        missing_interfaces = []
        for interface in interfaces:
            if (not interface.providedBy(value)):
                missing_interfaces.append(interface.getName())
        if missing_interfaces:
            error_messages.append('The value {!r} did not provide these required interfaces: {}'.format(value, ', '.join(missing_interfaces)))
        if error_messages:
            return (False, '\n'.join(error_messages))
        else:
            return (True, '')

    field_kwargs['invariant'] = invariant
    return field(**field_kwargs)
[ "def", "interface_field", "(", "interfaces", ",", "**", "field_kwargs", ")", ":", "if", "(", "not", "isinstance", "(", "interfaces", ",", "tuple", ")", ")", ":", "raise", "TypeError", "(", "'The ``interfaces`` argument must be a tuple. Got: {!r}'", ".", "format", "(", "interfaces", ")", ")", "original_invariant", "=", "field_kwargs", ".", "pop", "(", "'invariant'", ",", "None", ")", "def", "invariant", "(", "value", ")", ":", "error_messages", "=", "[", "]", "if", "(", "original_invariant", "is", "not", "None", ")", ":", "(", "original_invariant_result", ",", "_original_invariant_message", ")", "=", "original_invariant", "(", "value", ")", "if", "original_invariant_result", ":", "error_messages", ".", "append", "(", "original_invariant_result", ")", "missing_interfaces", "=", "[", "]", "for", "interface", "in", "interfaces", ":", "if", "(", "not", "interface", ".", "providedBy", "(", "value", ")", ")", ":", "missing_interfaces", ".", "append", "(", "interface", ".", "getName", "(", ")", ")", "if", "missing_interfaces", ":", "error_messages", ".", "append", "(", "'The value {!r} did not provide these required interfaces: {}'", ".", "format", "(", "value", ",", "', '", ".", "join", "(", "missing_interfaces", ")", ")", ")", "if", "error_messages", ":", "return", "(", "False", ",", "'\\n'", ".", "join", "(", "error_messages", ")", ")", "else", ":", "return", "(", "True", ",", "''", ")", "field_kwargs", "[", "'invariant'", "]", "=", "invariant", "return", "field", "(", "**", "field_kwargs", ")" ]
a pclass field which checks that the assigned value provides all the interfaces .
train
false
55,532
def _read_uint64(f): return np.uint64(struct.unpack('>Q', f.read(8))[0])
[ "def", "_read_uint64", "(", "f", ")", ":", "return", "np", ".", "uint64", "(", "struct", ".", "unpack", "(", "'>Q'", ",", "f", ".", "read", "(", "8", ")", ")", "[", "0", "]", ")" ]
read an unsigned 64-bit integer .
train
false
55,535
def self_test():
    with tf.Session() as sess:
        print('Self-test for neural translation model.')
        model = seq2seq_model.Seq2SeqModel(10, 10, [(3, 3), (6, 6)], 32, 2, 5.0, 32, 0.3, 0.99, num_samples=8)
        sess.run(tf.global_variables_initializer())
        data_set = ([([1, 1], [2, 2]), ([3, 3], [4]), ([5], [6])], [([1, 1, 1, 1, 1], [2, 2, 2, 2, 2]), ([3, 3, 3], [5, 6])])
        for _ in xrange(5):
            bucket_id = random.choice([0, 1])
            (encoder_inputs, decoder_inputs, target_weights) = model.get_batch(data_set, bucket_id)
            model.step(sess, encoder_inputs, decoder_inputs, target_weights, bucket_id, False)
[ "def", "self_test", "(", ")", ":", "with", "tf", ".", "Session", "(", ")", "as", "sess", ":", "print", "(", "'Self-test for neural translation model.'", ")", "model", "=", "seq2seq_model", ".", "Seq2SeqModel", "(", "10", ",", "10", ",", "[", "(", "3", ",", "3", ")", ",", "(", "6", ",", "6", ")", "]", ",", "32", ",", "2", ",", "5.0", ",", "32", ",", "0.3", ",", "0.99", ",", "num_samples", "=", "8", ")", "sess", ".", "run", "(", "tf", ".", "global_variables_initializer", "(", ")", ")", "data_set", "=", "(", "[", "(", "[", "1", ",", "1", "]", ",", "[", "2", ",", "2", "]", ")", ",", "(", "[", "3", ",", "3", "]", ",", "[", "4", "]", ")", ",", "(", "[", "5", "]", ",", "[", "6", "]", ")", "]", ",", "[", "(", "[", "1", ",", "1", ",", "1", ",", "1", ",", "1", "]", ",", "[", "2", ",", "2", ",", "2", ",", "2", ",", "2", "]", ")", ",", "(", "[", "3", ",", "3", ",", "3", "]", ",", "[", "5", ",", "6", "]", ")", "]", ")", "for", "_", "in", "xrange", "(", "5", ")", ":", "bucket_id", "=", "random", ".", "choice", "(", "[", "0", ",", "1", "]", ")", "(", "encoder_inputs", ",", "decoder_inputs", ",", "target_weights", ")", "=", "model", ".", "get_batch", "(", "data_set", ",", "bucket_id", ")", "model", ".", "step", "(", "sess", ",", "encoder_inputs", ",", "decoder_inputs", ",", "target_weights", ",", "bucket_id", ",", "False", ")" ]
test the translation model .
train
false
55,536
@treeio_login_required
@handle_response_format
def opportunity_view(request, opportunity_id, response_format='html'):
    profile = request.user.profile
    opportunity = get_object_or_404(Opportunity, pk=opportunity_id)
    if ((not profile.has_permission(opportunity)) and (not profile.is_admin('treeio.sales'))):
        return user_denied(request, message="You don't have access to this Opportunity")
    form = _do_update_record(profile, request, opportunity)
    return render_to_response('sales/opportunity_view', {'opportunity': opportunity, 'record_form': form}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "opportunity_view", "(", "request", ",", "opportunity_id", ",", "response_format", "=", "'html'", ")", ":", "profile", "=", "request", ".", "user", ".", "profile", "opportunity", "=", "get_object_or_404", "(", "Opportunity", ",", "pk", "=", "opportunity_id", ")", "if", "(", "(", "not", "profile", ".", "has_permission", "(", "opportunity", ")", ")", "and", "(", "not", "profile", ".", "is_admin", "(", "'treeio.sales'", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Opportunity\"", ")", "form", "=", "_do_update_record", "(", "profile", ",", "request", ",", "opportunity", ")", "return", "render_to_response", "(", "'sales/opportunity_view'", ",", "{", "'opportunity'", ":", "opportunity", ",", "'record_form'", ":", "form", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
opportunity view .
train
false