Column summary (dataset-viewer statistics):
  id_within_dataset     int64      values 1 to 55.5k
  snippet               string     lengths 19 to 14.2k
  tokens                sequence   lengths 6 to 1.63k
  nl                    string     lengths 6 to 352
  split_within_dataset  string     1 distinct value
  is_duplicated         bool       2 distinct values
677
def ensure_point(geom):
    ensure_geometry(geom)
    if (geom.geom_type != u'Point'):
        raise SpatialError((u"Provided geometry '%s' is not a 'Point'." % geom))
    return geom
[ "def", "ensure_point", "(", "geom", ")", ":", "ensure_geometry", "(", "geom", ")", "if", "(", "geom", ".", "geom_type", "!=", "u'Point'", ")", ":", "raise", "SpatialError", "(", "(", "u\"Provided geometry '%s' is not a 'Point'.\"", "%", "geom", ")", ")", "return", "geom" ]
makes sure the parameter passed in looks like a geos point .
train
false
678
@positional(1)
def get_package_for_module(module):
    if isinstance(module, basestring):
        try:
            module = sys.modules[module]
        except KeyError:
            return None
    try:
        return unicode(module.package)
    except AttributeError:
        if (module.__name__ == '__main__'):
            try:
                file_name = module.__file__
            except AttributeError:
                pass
            else:
                base_name = os.path.basename(file_name)
                split_name = os.path.splitext(base_name)
                if (len(split_name) == 1):
                    return unicode(base_name)
                else:
                    return u'.'.join(split_name[:(-1)])
        return unicode(module.__name__)
[ "@", "positional", "(", "1", ")", "def", "get_package_for_module", "(", "module", ")", ":", "if", "isinstance", "(", "module", ",", "basestring", ")", ":", "try", ":", "module", "=", "sys", ".", "modules", "[", "module", "]", "except", "KeyError", ":", "return", "None", "try", ":", "return", "unicode", "(", "module", ".", "package", ")", "except", "AttributeError", ":", "if", "(", "module", ".", "__name__", "==", "'__main__'", ")", ":", "try", ":", "file_name", "=", "module", ".", "__file__", "except", "AttributeError", ":", "pass", "else", ":", "base_name", "=", "os", ".", "path", ".", "basename", "(", "file_name", ")", "split_name", "=", "os", ".", "path", ".", "splitext", "(", "base_name", ")", "if", "(", "len", "(", "split_name", ")", "==", "1", ")", ":", "return", "unicode", "(", "base_name", ")", "else", ":", "return", "u'.'", ".", "join", "(", "split_name", "[", ":", "(", "-", "1", ")", "]", ")", "return", "unicode", "(", "module", ".", "__name__", ")" ]
get package name for a module .
train
false
679
def GetAllPosts():
    posts = [{'content': str(row[1]), 'time': str(row[0])} for row in DB]
    posts.sort(key=(lambda row: row['time']), reverse=True)
    return posts
[ "def", "GetAllPosts", "(", ")", ":", "posts", "=", "[", "{", "'content'", ":", "str", "(", "row", "[", "1", "]", ")", ",", "'time'", ":", "str", "(", "row", "[", "0", "]", ")", "}", "for", "row", "in", "DB", "]", "posts", ".", "sort", "(", "key", "=", "(", "lambda", "row", ":", "row", "[", "'time'", "]", ")", ",", "reverse", "=", "True", ")", "return", "posts" ]
get all the posts from the database .
train
false
680
def split_stem(sentence):
    sentence = re.sub('([a-z])([A-Z])', u'\\1 \\2', sentence)
    return sentence.split()
[ "def", "split_stem", "(", "sentence", ")", ":", "sentence", "=", "re", ".", "sub", "(", "'([a-z])([A-Z])'", ",", "u'\\\\1 \\\\2'", ",", "sentence", ")", "return", "sentence", ".", "split", "(", ")" ]
splits camel cased sentence into words .
train
false
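A minimal usage sketch for split_stem above (self-contained; only the standard-library re module is needed):

import re

def split_stem(sentence):
    sentence = re.sub('([a-z])([A-Z])', u'\\1 \\2', sentence)
    return sentence.split()

print(split_stem('getPriceList'))   # ['get', 'Price', 'List']
print(split_stem('already split'))  # ['already', 'split']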
682
def cookie_app(environ, start_response):
    response = Response(environ.get('HTTP_COOKIE', 'No Cookie'), mimetype='text/plain')
    response.set_cookie('test', 'test')
    return response(environ, start_response)
[ "def", "cookie_app", "(", "environ", ",", "start_response", ")", ":", "response", "=", "Response", "(", "environ", ".", "get", "(", "'HTTP_COOKIE'", ",", "'No Cookie'", ")", ",", "mimetype", "=", "'text/plain'", ")", "response", ".", "set_cookie", "(", "'test'", ",", "'test'", ")", "return", "response", "(", "environ", ",", "start_response", ")" ]
a wsgi application which sets a cookie .
train
false
683
def team_status(): return s3_rest_controller()
[ "def", "team_status", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
team statuses .
train
false
684
def asciiHexDecode(stream):
    eod = '>'
    decodedStream = ''
    char = ''
    index = 0
    while (index < len(stream)):
        c = stream[index]
        if (c == eod):
            if ((len(decodedStream) % 2) != 0):
                char += '0'
                try:
                    decodedStream += chr(int(char, base=16))
                except:
                    return ((-1), 'Error in hexadecimal conversion')
            break
        elif c.isspace():
            index += 1
            continue
        char += c
        if (len(char) == 2):
            try:
                decodedStream += chr(int(char, base=16))
            except:
                return ((-1), 'Error in hexadecimal conversion')
            char = ''
        index += 1
    return (0, decodedStream)
[ "def", "asciiHexDecode", "(", "stream", ")", ":", "eod", "=", "'>'", "decodedStream", "=", "''", "char", "=", "''", "index", "=", "0", "while", "(", "index", "<", "len", "(", "stream", ")", ")", ":", "c", "=", "stream", "[", "index", "]", "if", "(", "c", "==", "eod", ")", ":", "if", "(", "(", "len", "(", "decodedStream", ")", "%", "2", ")", "!=", "0", ")", ":", "char", "+=", "'0'", "try", ":", "decodedStream", "+=", "chr", "(", "int", "(", "char", ",", "base", "=", "16", ")", ")", "except", ":", "return", "(", "(", "-", "1", ")", ",", "'Error in hexadecimal conversion'", ")", "break", "elif", "c", ".", "isspace", "(", ")", ":", "index", "+=", "1", "continue", "char", "+=", "c", "if", "(", "len", "(", "char", ")", "==", "2", ")", ":", "try", ":", "decodedStream", "+=", "chr", "(", "int", "(", "char", ",", "base", "=", "16", ")", ")", "except", ":", "return", "(", "(", "-", "1", ")", ",", "'Error in hexadecimal conversion'", ")", "char", "=", "''", "index", "+=", "1", "return", "(", "0", ",", "decodedStream", ")" ]
method to decode streams using hexadecimal encoding .
train
false
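A behavior sketch, assuming asciiHexDecode above is in scope (values hand-checked against the code; whitespace inside the stream is skipped and '>' marks end-of-data):

print(asciiHexDecode('48 65>'))  # (0, 'He')
print(asciiHexDecode('4G65>'))   # (-1, 'Error in hexadecimal conversion')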
685
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialize all qwebsettings .
train
false
686
def list_stringify(inlist):
    outlist = []
    for item in inlist:
        if (not isinstance(item, (tuple, list))):
            if (not isinstance(item, basestring)):
                item = str(item)
        else:
            item = list_stringify(item)
        outlist.append(item)
    return outlist
[ "def", "list_stringify", "(", "inlist", ")", ":", "outlist", "=", "[", "]", "for", "item", "in", "inlist", ":", "if", "(", "not", "isinstance", "(", "item", ",", "(", "tuple", ",", "list", ")", ")", ")", ":", "if", "(", "not", "isinstance", "(", "item", ",", "basestring", ")", ")", ":", "item", "=", "str", "(", "item", ")", "else", ":", "item", "=", "list_stringify", "(", "item", ")", "outlist", ".", "append", "(", "item", ")", "return", "outlist" ]
recursively rebuilds a list - making sure all the members are strings .
train
false
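A small usage sketch, assuming list_stringify above is in scope (Python 2, since the snippet relies on basestring):

print(list_stringify([1, 'two', [3, 4.5, ['six']]]))
# ['1', 'two', ['3', '4.5', ['six']]]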
688
def distro_release_info(): return _distro.distro_release_info()
[ "def", "distro_release_info", "(", ")", ":", "return", "_distro", ".", "distro_release_info", "(", ")" ]
return a dictionary containing key-value pairs for the information items from the distro release file data source of the current linux distribution .
train
false
689
def set_vif_host_backend_802qbh_config(conf, devname, profileid, tapname=None):
    conf.net_type = 'direct'
    conf.source_dev = devname
    conf.source_mode = 'vepa'
    conf.vporttype = '802.1Qbh'
    conf.add_vport_param('profileid', profileid)
    if tapname:
        conf.target_dev = tapname
[ "def", "set_vif_host_backend_802qbh_config", "(", "conf", ",", "devname", ",", "profileid", ",", "tapname", "=", "None", ")", ":", "conf", ".", "net_type", "=", "'direct'", "conf", ".", "source_dev", "=", "devname", "conf", ".", "source_mode", "=", "'vepa'", "conf", ".", "vporttype", "=", "'802.1Qbh'", "conf", ".", "add_vport_param", "(", "'profileid'", ",", "profileid", ")", "if", "tapname", ":", "conf", ".", "target_dev", "=", "tapname" ]
populate a libvirtconfigguestinterface instance with host backend details for an 802 .
train
false
690
@treeio_login_required
@handle_response_format
def source_add(request, response_format='html'):
    if (not request.user.profile.is_admin('treeio.sales')):
        return user_denied(request, message="You don't have administrator access to the Sales module")
    if request.POST:
        if ('cancel' not in request.POST):
            source = SaleSource()
            form = SaleSourceForm(request.user.profile, request.POST, instance=source)
            if form.is_valid():
                source = form.save()
                source.set_user_from_request(request)
                return HttpResponseRedirect(reverse('sales_source_view', args=[source.id]))
        else:
            return HttpResponseRedirect(reverse('sales_settings_view'))
    else:
        form = SaleSourceForm(request.user.profile)
    all_products = Object.filter_by_request(request, Product.objects.filter(parent__isnull=True))
    all_sources = Object.filter_by_request(request, SaleSource.objects)
    return render_to_response('sales/source_add',
                              {'form': form, 'sources': all_sources, 'products': all_products},
                              context_instance=RequestContext(request),
                              response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "source_add", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio.sales'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have administrator access to the Sales module\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "source", "=", "SaleSource", "(", ")", "form", "=", "SaleSourceForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "source", ")", "if", "form", ".", "is_valid", "(", ")", ":", "source", "=", "form", ".", "save", "(", ")", "source", ".", "set_user_from_request", "(", "request", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_source_view'", ",", "args", "=", "[", "source", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_settings_view'", ")", ")", "else", ":", "form", "=", "SaleSourceForm", "(", "request", ".", "user", ".", "profile", ")", "all_products", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Product", ".", "objects", ".", "filter", "(", "parent__isnull", "=", "True", ")", ")", "all_sources", "=", "Object", ".", "filter_by_request", "(", "request", ",", "SaleSource", ".", "objects", ")", "return", "render_to_response", "(", "'sales/source_add'", ",", "{", "'form'", ":", "form", ",", "'sources'", ":", "all_sources", ",", "'products'", ":", "all_products", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
ticketstatus add .
train
false
691
def _single_spectrum_helper(x, mode, Fs=None, window=None, pad_to=None, sides=None):
    if ((mode is None) or (mode == u'psd') or (mode == u'default')):
        raise ValueError((u'_single_spectrum_helper does not work with %s mode' % mode))
    if (pad_to is None):
        pad_to = len(x)
    (spec, freqs, _) = _spectral_helper(x=x, y=None, NFFT=len(x), Fs=Fs,
                                        detrend_func=detrend_none, window=window,
                                        noverlap=0, pad_to=pad_to, sides=sides,
                                        scale_by_freq=False, mode=mode)
    if (mode != u'complex'):
        spec = spec.real
    if ((spec.ndim == 2) and (spec.shape[1] == 1)):
        spec = spec[:, 0]
    return (spec, freqs)
[ "def", "_single_spectrum_helper", "(", "x", ",", "mode", ",", "Fs", "=", "None", ",", "window", "=", "None", ",", "pad_to", "=", "None", ",", "sides", "=", "None", ")", ":", "if", "(", "(", "mode", "is", "None", ")", "or", "(", "mode", "==", "u'psd'", ")", "or", "(", "mode", "==", "u'default'", ")", ")", ":", "raise", "ValueError", "(", "(", "u'_single_spectrum_helper does not work with %s mode'", "%", "mode", ")", ")", "if", "(", "pad_to", "is", "None", ")", ":", "pad_to", "=", "len", "(", "x", ")", "(", "spec", ",", "freqs", ",", "_", ")", "=", "_spectral_helper", "(", "x", "=", "x", ",", "y", "=", "None", ",", "NFFT", "=", "len", "(", "x", ")", ",", "Fs", "=", "Fs", ",", "detrend_func", "=", "detrend_none", ",", "window", "=", "window", ",", "noverlap", "=", "0", ",", "pad_to", "=", "pad_to", ",", "sides", "=", "sides", ",", "scale_by_freq", "=", "False", ",", "mode", "=", "mode", ")", "if", "(", "mode", "!=", "u'complex'", ")", ":", "spec", "=", "spec", ".", "real", "if", "(", "(", "spec", ".", "ndim", "==", "2", ")", "and", "(", "spec", ".", "shape", "[", "1", "]", "==", "1", ")", ")", ":", "spec", "=", "spec", "[", ":", ",", "0", "]", "return", "(", "spec", ",", "freqs", ")" ]
this is a helper function that implements the commonality between the complex .
train
false
692
def start(name, call=None):
    if (call != 'action'):
        raise SaltCloudSystemExit('The start action must be called with -a or --action.')
    data = show_instance(name, call='action')
    if (data.get('status') == 'active'):
        return {'success': True, 'action': 'start', 'status': 'active', 'msg': 'Machine is already running.'}
    ret = query(droplet_id=data['id'], command='actions', args={'type': 'power_on'}, http_method='post')
    return {'success': True, 'action': ret['action']['type'], 'state': ret['action']['status']}
[ "def", "start", "(", "name", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The start action must be called with -a or --action.'", ")", "data", "=", "show_instance", "(", "name", ",", "call", "=", "'action'", ")", "if", "(", "data", ".", "get", "(", "'status'", ")", "==", "'active'", ")", ":", "return", "{", "'success'", ":", "True", ",", "'action'", ":", "'start'", ",", "'status'", ":", "'active'", ",", "'msg'", ":", "'Machine is already running.'", "}", "ret", "=", "query", "(", "droplet_id", "=", "data", "[", "'id'", "]", ",", "command", "=", "'actions'", ",", "args", "=", "{", "'type'", ":", "'power_on'", "}", ",", "http_method", "=", "'post'", ")", "return", "{", "'success'", ":", "True", ",", "'action'", ":", "ret", "[", "'action'", "]", "[", "'type'", "]", ",", "'state'", ":", "ret", "[", "'action'", "]", "[", "'status'", "]", "}" ]
start a machine by name cli example: .
train
true
694
def django_to_webob_request(django_request): return DjangoWebobRequest(django_request)
[ "def", "django_to_webob_request", "(", "django_request", ")", ":", "return", "DjangoWebobRequest", "(", "django_request", ")" ]
returns a webob request to the django_request .
train
false
695
def end_recording(status, firepython_set_extension_data=None):
    if (firepython_set_extension_data is not None):
        warnings.warn('Firepython is no longer supported')
    rec = recorder_proxy.get_for_current_request()
    recorder_proxy.clear_for_current_request()
    if config.DEBUG:
        logging.debug('Cleared recorder')
    if (rec is not None):
        try:
            rec.record_http_status(status)
            rec.save()
        finally:
            memcache.delete(lock_key(), namespace=config.KEY_NAMESPACE)
[ "def", "end_recording", "(", "status", ",", "firepython_set_extension_data", "=", "None", ")", ":", "if", "(", "firepython_set_extension_data", "is", "not", "None", ")", ":", "warnings", ".", "warn", "(", "'Firepython is no longer supported'", ")", "rec", "=", "recorder_proxy", ".", "get_for_current_request", "(", ")", "recorder_proxy", ".", "clear_for_current_request", "(", ")", "if", "config", ".", "DEBUG", ":", "logging", ".", "debug", "(", "'Cleared recorder'", ")", "if", "(", "rec", "is", "not", "None", ")", ":", "try", ":", "rec", ".", "record_http_status", "(", "status", ")", "rec", ".", "save", "(", ")", "finally", ":", "memcache", ".", "delete", "(", "lock_key", "(", ")", ",", "namespace", "=", "config", ".", "KEY_NAMESPACE", ")" ]
stop recording rpc traces and save all traces to memcache .
train
false
696
def _getitem_from_frame(f_locals, key, default=None):
    try:
        return f_locals[key]
    except Exception:
        return default
[ "def", "_getitem_from_frame", "(", "f_locals", ",", "key", ",", "default", "=", "None", ")", ":", "try", ":", "return", "f_locals", "[", "key", "]", "except", "Exception", ":", "return", "default" ]
f_locals is not guaranteed to have .
train
false
698
def coerce_core(result, dshape, odo_kwargs=None):
    if iscoretype(result):
        return result
    elif isscalar(dshape):
        result = coerce_scalar(result, dshape, odo_kwargs=odo_kwargs)
    elif (istabular(dshape) and isrecord(dshape.measure)):
        result = into(DataFrame, result, **(odo_kwargs or {}))
    elif iscollection(dshape):
        dim = _dimensions(dshape)
        if (dim == 1):
            result = into(Series, result, **(odo_kwargs or {}))
        elif (dim > 1):
            result = into(np.ndarray, result, **(odo_kwargs or {}))
        else:
            msg = 'Expr with dshape dimensions < 1 should have been handled earlier: dim={}'
            raise ValueError(msg.format(str(dim)))
    else:
        msg = 'Expr does not evaluate to a core return type'
        raise ValueError(msg)
    return result
[ "def", "coerce_core", "(", "result", ",", "dshape", ",", "odo_kwargs", "=", "None", ")", ":", "if", "iscoretype", "(", "result", ")", ":", "return", "result", "elif", "isscalar", "(", "dshape", ")", ":", "result", "=", "coerce_scalar", "(", "result", ",", "dshape", ",", "odo_kwargs", "=", "odo_kwargs", ")", "elif", "(", "istabular", "(", "dshape", ")", "and", "isrecord", "(", "dshape", ".", "measure", ")", ")", ":", "result", "=", "into", "(", "DataFrame", ",", "result", ",", "**", "(", "odo_kwargs", "or", "{", "}", ")", ")", "elif", "iscollection", "(", "dshape", ")", ":", "dim", "=", "_dimensions", "(", "dshape", ")", "if", "(", "dim", "==", "1", ")", ":", "result", "=", "into", "(", "Series", ",", "result", ",", "**", "(", "odo_kwargs", "or", "{", "}", ")", ")", "elif", "(", "dim", ">", "1", ")", ":", "result", "=", "into", "(", "np", ".", "ndarray", ",", "result", ",", "**", "(", "odo_kwargs", "or", "{", "}", ")", ")", "else", ":", "msg", "=", "'Expr with dshape dimensions < 1 should have been handled earlier: dim={}'", "raise", "ValueError", "(", "msg", ".", "format", "(", "str", "(", "dim", ")", ")", ")", "else", ":", "msg", "=", "'Expr does not evaluate to a core return type'", "raise", "ValueError", "(", "msg", ")", "return", "result" ]
coerce data to a core data type .
train
false
700
def forget_sr(session, sr_ref):
    LOG.debug(_('Forgetting SR...'))
    unplug_pbds(session, sr_ref)
    session.call_xenapi('SR.forget', sr_ref)
[ "def", "forget_sr", "(", "session", ",", "sr_ref", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'Forgetting SR...'", ")", ")", "unplug_pbds", "(", "session", ",", "sr_ref", ")", "session", ".", "call_xenapi", "(", "'SR.forget'", ",", "sr_ref", ")" ]
forgets the storage repository without destroying the vdis within .
train
false
701
def _get_cycles(graph_dict, path, visited, result, vertice):
    if (vertice in path):
        cycle = [vertice]
        for node in path[::(-1)]:
            if (node == vertice):
                break
            cycle.insert(0, node)
        start_from = min(cycle)
        index = cycle.index(start_from)
        cycle = (cycle[index:] + cycle[0:index])
        if (not (cycle in result)):
            result.append(cycle)
        return
    path.append(vertice)
    try:
        for node in graph_dict[vertice]:
            if (node not in visited):
                _get_cycles(graph_dict, path, visited, result, node)
                visited.add(node)
    except KeyError:
        pass
    path.pop()
[ "def", "_get_cycles", "(", "graph_dict", ",", "path", ",", "visited", ",", "result", ",", "vertice", ")", ":", "if", "(", "vertice", "in", "path", ")", ":", "cycle", "=", "[", "vertice", "]", "for", "node", "in", "path", "[", ":", ":", "(", "-", "1", ")", "]", ":", "if", "(", "node", "==", "vertice", ")", ":", "break", "cycle", ".", "insert", "(", "0", ",", "node", ")", "start_from", "=", "min", "(", "cycle", ")", "index", "=", "cycle", ".", "index", "(", "start_from", ")", "cycle", "=", "(", "cycle", "[", "index", ":", "]", "+", "cycle", "[", "0", ":", "index", "]", ")", "if", "(", "not", "(", "cycle", "in", "result", ")", ")", ":", "result", ".", "append", "(", "cycle", ")", "return", "path", ".", "append", "(", "vertice", ")", "try", ":", "for", "node", "in", "graph_dict", "[", "vertice", "]", ":", "if", "(", "node", "not", "in", "visited", ")", ":", "_get_cycles", "(", "graph_dict", ",", "path", ",", "visited", ",", "result", ",", "node", ")", "visited", ".", "add", "(", "node", ")", "except", "KeyError", ":", "pass", "path", ".", "pop", "(", ")" ]
recursive function doing the real work for get_cycles .
train
true
702
def get_new_otu_id(old_otu_id, tree, dissim):
    create_tip_index(tree)
    cache_tip_names(tree)
    node = tree._tip_index[old_otu_id]
    distance_up_tree = 0
    while ((not node.isRoot()) and ((distance_up_tree + node.Length) < dissim)):
        distance_up_tree += node.Length
        node = node.Parent
    if node.isTip():
        return node.Name
    else:
        return choice(node._tip_names)
[ "def", "get_new_otu_id", "(", "old_otu_id", ",", "tree", ",", "dissim", ")", ":", "create_tip_index", "(", "tree", ")", "cache_tip_names", "(", "tree", ")", "node", "=", "tree", ".", "_tip_index", "[", "old_otu_id", "]", "distance_up_tree", "=", "0", "while", "(", "(", "not", "node", ".", "isRoot", "(", ")", ")", "and", "(", "(", "distance_up_tree", "+", "node", ".", "Length", ")", "<", "dissim", ")", ")", ":", "distance_up_tree", "+=", "node", ".", "Length", "node", "=", "node", ".", "Parent", "if", "node", ".", "isTip", "(", ")", ":", "return", "node", ".", "Name", "else", ":", "return", "choice", "(", "node", ".", "_tip_names", ")" ]
simulates an otu switching to related one input a tipname .
train
false
703
def copy_config(server_root, temp_dir):
    (copied_files, copied_dirs) = ([], [])
    dir_len = len(os.path.dirname(server_root))
    for (config_path, config_dirs, config_files) in os.walk(server_root):
        temp_path = os.path.join(temp_dir, config_path[(dir_len + 1):])
        os.mkdir(temp_path)
        copied_all = True
        copied_files_in_current_dir = []
        for config_file in config_files:
            config_file_path = os.path.join(config_path, config_file)
            temp_file_path = os.path.join(temp_path, config_file)
            if os.path.islink(config_file_path):
                os.symlink(os.readlink(config_file_path), temp_file_path)
            elif safe_config_file(config_file_path):
                copy_file_without_comments(config_file_path, temp_file_path)
                copied_files_in_current_dir.append(config_file_path)
            else:
                copied_all = False
        if (copied_all and (not config_dirs)):
            copied_dirs.append(config_path)
        else:
            copied_files += copied_files_in_current_dir
    return (copied_files, copied_dirs)
[ "def", "copy_config", "(", "server_root", ",", "temp_dir", ")", ":", "(", "copied_files", ",", "copied_dirs", ")", "=", "(", "[", "]", ",", "[", "]", ")", "dir_len", "=", "len", "(", "os", ".", "path", ".", "dirname", "(", "server_root", ")", ")", "for", "(", "config_path", ",", "config_dirs", ",", "config_files", ")", "in", "os", ".", "walk", "(", "server_root", ")", ":", "temp_path", "=", "os", ".", "path", ".", "join", "(", "temp_dir", ",", "config_path", "[", "(", "dir_len", "+", "1", ")", ":", "]", ")", "os", ".", "mkdir", "(", "temp_path", ")", "copied_all", "=", "True", "copied_files_in_current_dir", "=", "[", "]", "for", "config_file", "in", "config_files", ":", "config_file_path", "=", "os", ".", "path", ".", "join", "(", "config_path", ",", "config_file", ")", "temp_file_path", "=", "os", ".", "path", ".", "join", "(", "temp_path", ",", "config_file", ")", "if", "os", ".", "path", ".", "islink", "(", "config_file_path", ")", ":", "os", ".", "symlink", "(", "os", ".", "readlink", "(", "config_file_path", ")", ",", "temp_file_path", ")", "elif", "safe_config_file", "(", "config_file_path", ")", ":", "copy_file_without_comments", "(", "config_file_path", ",", "temp_file_path", ")", "copied_files_in_current_dir", ".", "append", "(", "config_file_path", ")", "else", ":", "copied_all", "=", "False", "if", "(", "copied_all", "and", "(", "not", "config_dirs", ")", ")", ":", "copied_dirs", ".", "append", "(", "config_path", ")", "else", ":", "copied_files", "+=", "copied_files_in_current_dir", "return", "(", "copied_files", ",", "copied_dirs", ")" ]
safely copies server_root to temp_dir and returns copied files .
train
false
704
def convert_filename(value):
    if NORMALIZE_FILENAME:
        chunks = value.split(os.extsep)
        normalized = []
        for v in chunks:
            v = unicodedata.normalize('NFKD', six.text_type(v)).encode('ascii', 'ignore').decode('ascii')
            v = re.sub('[^\\w\\s-]', '', v).strip()
            normalized.append(v)
        if (len(normalized) > 1):
            value = '.'.join(normalized)
        else:
            value = normalized[0]
    if CONVERT_FILENAME:
        value = value.replace(' ', '_').lower()
    return value
[ "def", "convert_filename", "(", "value", ")", ":", "if", "NORMALIZE_FILENAME", ":", "chunks", "=", "value", ".", "split", "(", "os", ".", "extsep", ")", "normalized", "=", "[", "]", "for", "v", "in", "chunks", ":", "v", "=", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "six", ".", "text_type", "(", "v", ")", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", ".", "decode", "(", "'ascii'", ")", "v", "=", "re", ".", "sub", "(", "'[^\\\\w\\\\s-]'", ",", "''", ",", "v", ")", ".", "strip", "(", ")", "normalized", ".", "append", "(", "v", ")", "if", "(", "len", "(", "normalized", ")", ">", "1", ")", ":", "value", "=", "'.'", ".", "join", "(", "normalized", ")", "else", ":", "value", "=", "normalized", "[", "0", "]", "if", "CONVERT_FILENAME", ":", "value", "=", "value", ".", "replace", "(", "' '", ",", "'_'", ")", ".", "lower", "(", ")", "return", "value" ]
convert filename .
train
false
705
def plugin_order():
    p = {}
    for func in plugin_store:
        p[func] = [plugin_name for (plugin_name, f) in plugin_store[func]]
    return p
[ "def", "plugin_order", "(", ")", ":", "p", "=", "{", "}", "for", "func", "in", "plugin_store", ":", "p", "[", "func", "]", "=", "[", "plugin_name", "for", "(", "plugin_name", ",", "f", ")", "in", "plugin_store", "[", "func", "]", "]", "return", "p" ]
return the currently preferred plugin order .
train
false
706
def rehashPassword(password, hashParameters):
    try:
        (digestname, iterations, salt) = hashParameters.split(Delimiter)
    except ValueError:
        raise ValueError(u"Expected hash parameters string in format 'digestmod{0}iterations{0}salt".format(Delimiter))
    if (digestname not in Hashes.keys()):
        raise ValueError(u"Unsupported hash algorithm '{0}' for hash parameters '{1}'.".format(digestname, hash))
    iterations = int(iterations)
    salt = base64.b64decode(salt.encode(u'ascii'))
    password = password.encode(u'utf-8')
    return pbkdf2(password, salt, iterations, Hashes[digestname])
[ "def", "rehashPassword", "(", "password", ",", "hashParameters", ")", ":", "try", ":", "(", "digestname", ",", "iterations", ",", "salt", ")", "=", "hashParameters", ".", "split", "(", "Delimiter", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "u\"Expected hash parameters string in format 'digestmod{0}iterations{0}salt\"", ".", "format", "(", "Delimiter", ")", ")", "if", "(", "digestname", "not", "in", "Hashes", ".", "keys", "(", ")", ")", ":", "raise", "ValueError", "(", "u\"Unsupported hash algorithm '{0}' for hash parameters '{1}'.\"", ".", "format", "(", "digestname", ",", "hash", ")", ")", "iterations", "=", "int", "(", "iterations", ")", "salt", "=", "base64", ".", "b64decode", "(", "salt", ".", "encode", "(", "u'ascii'", ")", ")", "password", "=", "password", ".", "encode", "(", "u'utf-8'", ")", "return", "pbkdf2", "(", "password", ",", "salt", ",", "iterations", ",", "Hashes", "[", "digestname", "]", ")" ]
module function to recreate a password hash given the hash parameters .
train
false
707
def unique_values(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        return unique_everseen(func(*args, **kwargs))
    return wrapper
[ "def", "unique_values", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "unique_everseen", "(", "func", "(", "*", "args", ",", "**", "kwargs", ")", ")", "return", "wrapper" ]
wrap a function returning an iterable such that the resulting iterable only ever yields unique items .
train
true
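A self-contained sketch of the decorator; unique_everseen is not defined in the snippet, so a minimal stand-in (in the spirit of the itertools recipes) is assumed here:

from functools import wraps

def unique_everseen(iterable):
    # minimal stand-in for the helper the snippet assumes
    seen = set()
    for item in iterable:
        if item not in seen:
            seen.add(item)
            yield item

def unique_values(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        return unique_everseen(func(*args, **kwargs))
    return wrapper

@unique_values
def letters():
    return ['a', 'b', 'a', 'c', 'b']

print(list(letters()))  # ['a', 'b', 'c']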
708
def pages(post_objects, request):
    paginator = Paginator(post_objects, 20)
    try:
        current_page = int(request.GET.get('page', '1'))
    except ValueError:
        current_page = 1
    page_range = page_list_return(len(paginator.page_range), current_page)
    try:
        page_objects = paginator.page(current_page)
    except (EmptyPage, InvalidPage):
        page_objects = paginator.page(paginator.num_pages)
    if (current_page >= 5):
        show_first = 1
    else:
        show_first = 0
    if (current_page <= (len(paginator.page_range) - 3)):
        show_end = 1
    else:
        show_end = 0
    return (post_objects, paginator, page_objects, page_range, current_page, show_first, show_end)
[ "def", "pages", "(", "post_objects", ",", "request", ")", ":", "paginator", "=", "Paginator", "(", "post_objects", ",", "20", ")", "try", ":", "current_page", "=", "int", "(", "request", ".", "GET", ".", "get", "(", "'page'", ",", "'1'", ")", ")", "except", "ValueError", ":", "current_page", "=", "1", "page_range", "=", "page_list_return", "(", "len", "(", "paginator", ".", "page_range", ")", ",", "current_page", ")", "try", ":", "page_objects", "=", "paginator", ".", "page", "(", "current_page", ")", "except", "(", "EmptyPage", ",", "InvalidPage", ")", ":", "page_objects", "=", "paginator", ".", "page", "(", "paginator", ".", "num_pages", ")", "if", "(", "current_page", ">=", "5", ")", ":", "show_first", "=", "1", "else", ":", "show_first", "=", "0", "if", "(", "current_page", "<=", "(", "len", "(", "paginator", ".", "page_range", ")", "-", "3", ")", ")", ":", "show_end", "=", "1", "else", ":", "show_end", "=", "0", "return", "(", "post_objects", ",", "paginator", ",", "page_objects", ",", "page_range", ",", "current_page", ",", "show_first", ",", "show_end", ")" ]
page public function .
train
false
709
def verbose_print(arg):
    if support.verbose:
        with _print_mutex:
            print arg
[ "def", "verbose_print", "(", "arg", ")", ":", "if", "support", ".", "verbose", ":", "with", "_print_mutex", ":", "print", "arg" ]
only prints s if verbose is true .
train
false
711
def exc_message(exc_info):
    exc = exc_info[1]
    if (exc is None):
        result = exc_info[0]
    else:
        try:
            result = str(exc)
        except UnicodeEncodeError:
            try:
                result = unicode(exc)
            except UnicodeError:
                result = exc.args[0]
    result = force_unicode(result, 'UTF-8')
    return xml_safe(result)
[ "def", "exc_message", "(", "exc_info", ")", ":", "exc", "=", "exc_info", "[", "1", "]", "if", "(", "exc", "is", "None", ")", ":", "result", "=", "exc_info", "[", "0", "]", "else", ":", "try", ":", "result", "=", "str", "(", "exc", ")", "except", "UnicodeEncodeError", ":", "try", ":", "result", "=", "unicode", "(", "exc", ")", "except", "UnicodeError", ":", "result", "=", "exc", ".", "args", "[", "0", "]", "result", "=", "force_unicode", "(", "result", ",", "'UTF-8'", ")", "return", "xml_safe", "(", "result", ")" ]
return the exceptions message .
train
true
712
def validate_target(func, *args, **kwargs):
    def inner(self, *args, **kwargs):
        target_id = None
        if (('target_id' in kwargs) and (kwargs['target_id'] != None)):
            target_id = kwargs['target_id']
        else:
            target_id = 0
        if (not self.target_is_valid(target_id)):
            raise NoSuchTargetException()
        return func(self, *args, **kwargs)
    return inner
[ "def", "validate_target", "(", "func", ",", "*", "args", ",", "**", "kwargs", ")", ":", "def", "inner", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "target_id", "=", "None", "if", "(", "(", "'target_id'", "in", "kwargs", ")", "and", "(", "kwargs", "[", "'target_id'", "]", "!=", "None", ")", ")", ":", "target_id", "=", "kwargs", "[", "'target_id'", "]", "else", ":", "target_id", "=", "0", "if", "(", "not", "self", ".", "target_is_valid", "(", "target_id", ")", ")", ":", "raise", "NoSuchTargetException", "(", ")", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "inner" ]
a decorator that ensures that the specified target_id exists and is valid .
train
true
713
@handle_response_format
@treeio_login_required
@module_admin_required()
def pagefolder_delete(request, folder_id, response_format='html'):
    folder = get_object_or_404(PageFolder, pk=folder_id)
    pages = Page.objects.filter(folder=folder).order_by('name')
    if request.POST:
        if ('delete' in request.POST):
            folder.delete()
            return HttpResponseRedirect(reverse('core_admin_index_pages'))
        elif ('cancel' in request.POST):
            return HttpResponseRedirect(reverse('core_admin_pagefolder_view', args=[folder.id]))
    return render_to_response('core/administration/pagefolder_delete',
                              {'folder': folder, 'pages': pages},
                              context_instance=RequestContext(request),
                              response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "module_admin_required", "(", ")", "def", "pagefolder_delete", "(", "request", ",", "folder_id", ",", "response_format", "=", "'html'", ")", ":", "folder", "=", "get_object_or_404", "(", "PageFolder", ",", "pk", "=", "folder_id", ")", "pages", "=", "Page", ".", "objects", ".", "filter", "(", "folder", "=", "folder", ")", ".", "order_by", "(", "'name'", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "folder", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'core_admin_index_pages'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'core_admin_pagefolder_view'", ",", "args", "=", "[", "folder", ".", "id", "]", ")", ")", "return", "render_to_response", "(", "'core/administration/pagefolder_delete'", ",", "{", "'folder'", ":", "folder", ",", "'pages'", ":", "pages", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
folder for static pages delete .
train
false
714
def action_events_get(context, action_id): return IMPL.action_events_get(context, action_id)
[ "def", "action_events_get", "(", "context", ",", "action_id", ")", ":", "return", "IMPL", ".", "action_events_get", "(", "context", ",", "action_id", ")" ]
get the events by action id .
train
false
715
def test_double_start_finished(qtbot, proc, py_proc):
    with qtbot.waitSignals([proc.started, proc.finished], timeout=10000, order='strict'):
        argv = py_proc('import sys; sys.exit(0)')
        proc.start(*argv)
    with qtbot.waitSignals([proc.started, proc.finished], timeout=10000, order='strict'):
        argv = py_proc('import sys; sys.exit(0)')
        proc.start(*argv)
[ "def", "test_double_start_finished", "(", "qtbot", ",", "proc", ",", "py_proc", ")", ":", "with", "qtbot", ".", "waitSignals", "(", "[", "proc", ".", "started", ",", "proc", ".", "finished", "]", ",", "timeout", "=", "10000", ",", "order", "=", "'strict'", ")", ":", "argv", "=", "py_proc", "(", "'import sys; sys.exit(0)'", ")", "proc", ".", "start", "(", "*", "argv", ")", "with", "qtbot", ".", "waitSignals", "(", "[", "proc", ".", "started", ",", "proc", ".", "finished", "]", ",", "timeout", "=", "10000", ",", "order", "=", "'strict'", ")", ":", "argv", "=", "py_proc", "(", "'import sys; sys.exit(0)'", ")", "proc", ".", "start", "(", "*", "argv", ")" ]
test starting a guiprocess twice .
train
false
716
def StrJoin(delim, data): return delim.join((str(x) for x in data))
[ "def", "StrJoin", "(", "delim", ",", "data", ")", ":", "return", "delim", ".", "join", "(", "(", "str", "(", "x", ")", "for", "x", "in", "data", ")", ")" ]
string-ize & join data .
train
false
717
def _parse_date_columns(data_frame, parse_dates):
    if ((parse_dates is True) or (parse_dates is None) or (parse_dates is False)):
        parse_dates = []
    if (not hasattr(parse_dates, '__iter__')):
        parse_dates = [parse_dates]
    for col_name in parse_dates:
        df_col = data_frame[col_name]
        try:
            fmt = parse_dates[col_name]
        except TypeError:
            fmt = None
        data_frame[col_name] = _handle_date_column(df_col, format=fmt)
    for (col_name, df_col) in data_frame.iteritems():
        if is_datetime64tz_dtype(df_col):
            data_frame[col_name] = _handle_date_column(df_col)
    return data_frame
[ "def", "_parse_date_columns", "(", "data_frame", ",", "parse_dates", ")", ":", "if", "(", "(", "parse_dates", "is", "True", ")", "or", "(", "parse_dates", "is", "None", ")", "or", "(", "parse_dates", "is", "False", ")", ")", ":", "parse_dates", "=", "[", "]", "if", "(", "not", "hasattr", "(", "parse_dates", ",", "'__iter__'", ")", ")", ":", "parse_dates", "=", "[", "parse_dates", "]", "for", "col_name", "in", "parse_dates", ":", "df_col", "=", "data_frame", "[", "col_name", "]", "try", ":", "fmt", "=", "parse_dates", "[", "col_name", "]", "except", "TypeError", ":", "fmt", "=", "None", "data_frame", "[", "col_name", "]", "=", "_handle_date_column", "(", "df_col", ",", "format", "=", "fmt", ")", "for", "(", "col_name", ",", "df_col", ")", "in", "data_frame", ".", "iteritems", "(", ")", ":", "if", "is_datetime64tz_dtype", "(", "df_col", ")", ":", "data_frame", "[", "col_name", "]", "=", "_handle_date_column", "(", "df_col", ")", "return", "data_frame" ]
force non-datetime columns to be read as such .
train
false
718
def _ptransform(p): return ((-1.0) / (1.0 + (1.5 * _phi(((1.0 + p) / 2.0)))))
[ "def", "_ptransform", "(", "p", ")", ":", "return", "(", "(", "-", "1.0", ")", "/", "(", "1.0", "+", "(", "1.5", "*", "_phi", "(", "(", "(", "1.0", "+", "p", ")", "/", "2.0", ")", ")", ")", ")", ")" ]
function for p-value abcissa transformation .
train
false
720
def get_price_list():
    rate = {}
    price_list = frappe.db.sql(u'select ip.item_code, ip.buying, ip.selling,\n DCTB DCTB concat(ifnull(cu.symbol,ip.currency), " ", round(ip.price_list_rate,2), " - ", ip.price_list) as price\n DCTB DCTB from `tabItem Price` ip, `tabPrice List` pl, `tabCurrency` cu\n DCTB DCTB where ip.price_list=pl.name and pl.currency=cu.name and pl.enabled=1', as_dict=1)
    for j in price_list:
        if j.price:
            rate.setdefault(j.item_code, {}).setdefault((u'Buying' if j.buying else u'Selling'), []).append(j.price)
    item_rate_map = {}
    for item in rate:
        for buying_or_selling in rate[item]:
            item_rate_map.setdefault(item, {}).setdefault(buying_or_selling, u', '.join(rate[item].get(buying_or_selling, [])))
    return item_rate_map
[ "def", "get_price_list", "(", ")", ":", "rate", "=", "{", "}", "price_list", "=", "frappe", ".", "db", ".", "sql", "(", "u'select ip.item_code, ip.buying, ip.selling,\\n DCTB DCTB concat(ifnull(cu.symbol,ip.currency), \" \", round(ip.price_list_rate,2), \" - \", ip.price_list) as price\\n DCTB DCTB from `tabItem Price` ip, `tabPrice List` pl, `tabCurrency` cu\\n DCTB DCTB where ip.price_list=pl.name and pl.currency=cu.name and pl.enabled=1'", ",", "as_dict", "=", "1", ")", "for", "j", "in", "price_list", ":", "if", "j", ".", "price", ":", "rate", ".", "setdefault", "(", "j", ".", "item_code", ",", "{", "}", ")", ".", "setdefault", "(", "(", "u'Buying'", "if", "j", ".", "buying", "else", "u'Selling'", ")", ",", "[", "]", ")", ".", "append", "(", "j", ".", "price", ")", "item_rate_map", "=", "{", "}", "for", "item", "in", "rate", ":", "for", "buying_or_selling", "in", "rate", "[", "item", "]", ":", "item_rate_map", ".", "setdefault", "(", "item", ",", "{", "}", ")", ".", "setdefault", "(", "buying_or_selling", ",", "u', '", ".", "join", "(", "rate", "[", "item", "]", ".", "get", "(", "buying_or_selling", ",", "[", "]", ")", ")", ")", "return", "item_rate_map" ]
get selling & buying price list of every item .
train
false
722
def matrix_tensor_product(*product):
    if isinstance(product[0], Matrix):
        return _sympy_tensor_product(*product)
    elif isinstance(product[0], numpy_ndarray):
        return _numpy_tensor_product(*product)
    elif isinstance(product[0], scipy_sparse_matrix):
        return _scipy_sparse_tensor_product(*product)
[ "def", "matrix_tensor_product", "(", "*", "product", ")", ":", "if", "isinstance", "(", "product", "[", "0", "]", ",", "Matrix", ")", ":", "return", "_sympy_tensor_product", "(", "*", "product", ")", "elif", "isinstance", "(", "product", "[", "0", "]", ",", "numpy_ndarray", ")", ":", "return", "_numpy_tensor_product", "(", "*", "product", ")", "elif", "isinstance", "(", "product", "[", "0", "]", ",", "scipy_sparse_matrix", ")", ":", "return", "_scipy_sparse_tensor_product", "(", "*", "product", ")" ]
compute the matrix tensor product of sympy/numpy/scipy .
train
false
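matrix_tensor_product dispatches on the operand type; for the NumPy branch the underlying operation is a repeated Kronecker product. A sketch under that assumption (numpy.kron shown directly, since _numpy_tensor_product is not part of the snippet):

import numpy as np

a = np.array([[1, 0], [0, 1]])
b = np.array([[0, 1], [1, 0]])
print(np.kron(a, b))
# [[0 1 0 0]
#  [1 0 0 0]
#  [0 0 0 1]
#  [0 0 1 0]]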
723
@hug.post()
def post_here(body):
    return body
[ "@", "hug", ".", "post", "(", ")", "def", "post_here", "(", "body", ")", ":", "return", "body" ]
this example shows how to read in post data w/ hug outside of its automatic param parsing .
train
false
724
def _add_metaclass(metaclass):
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        slots = orig_vars.get('__slots__')
        if (slots is not None):
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper
[ "def", "_add_metaclass", "(", "metaclass", ")", ":", "def", "wrapper", "(", "cls", ")", ":", "orig_vars", "=", "cls", ".", "__dict__", ".", "copy", "(", ")", "slots", "=", "orig_vars", ".", "get", "(", "'__slots__'", ")", "if", "(", "slots", "is", "not", "None", ")", ":", "if", "isinstance", "(", "slots", ",", "str", ")", ":", "slots", "=", "[", "slots", "]", "for", "slots_var", "in", "slots", ":", "orig_vars", ".", "pop", "(", "slots_var", ")", "orig_vars", ".", "pop", "(", "'__dict__'", ",", "None", ")", "orig_vars", ".", "pop", "(", "'__weakref__'", ",", "None", ")", "return", "metaclass", "(", "cls", ".", "__name__", ",", "cls", ".", "__bases__", ",", "orig_vars", ")", "return", "wrapper" ]
class decorator for creating a class with a metaclass .
train
false
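Usage sketch for _add_metaclass (the familiar six.add_metaclass pattern; Meta here is an illustrative metaclass, not from the source):

class Meta(type):
    def __new__(mcls, name, bases, namespace):
        namespace['tag'] = 'made by Meta'  # illustrative side effect
        return super(Meta, mcls).__new__(mcls, name, bases, namespace)

@_add_metaclass(Meta)
class Widget(object):
    pass

print(type(Widget) is Meta)  # True
print(Widget.tag)            # made by Meta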
725
@handle_response_format
@treeio_login_required
def mlist_add(request, response_format='html'):
    user = request.user.profile
    if request.POST:
        mlist = MailingList()
        form = MailingListForm(user, request.POST, instance=mlist)
        if form.is_valid():
            mlist = form.save()
            mlist.set_user_from_request(request)
            return HttpResponseRedirect('/messaging/')
    else:
        form = MailingListForm(user)
    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('messaging/mlist_add', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "mlist_add", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "user", "=", "request", ".", "user", ".", "profile", "if", "request", ".", "POST", ":", "mlist", "=", "MailingList", "(", ")", "form", "=", "MailingListForm", "(", "user", ",", "request", ".", "POST", ",", "instance", "=", "mlist", ")", "if", "form", ".", "is_valid", "(", ")", ":", "mlist", "=", "form", ".", "save", "(", ")", "mlist", ".", "set_user_from_request", "(", "request", ")", "return", "HttpResponseRedirect", "(", "'/messaging/'", ")", "else", ":", "form", "=", "MailingListForm", "(", "user", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'form'", ":", "form", "}", ")", "return", "render_to_response", "(", "'messaging/mlist_add'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
new message mlist .
train
false
726
def Confirm(message): return bool((PresentDialog(message, [u'Ok', u'Cancel']) == 0))
[ "def", "Confirm", "(", "message", ")", ":", "return", "bool", "(", "(", "PresentDialog", "(", "message", ",", "[", "u'Ok'", ",", "u'Cancel'", "]", ")", "==", "0", ")", ")" ]
display |message| with ok/cancel operations .
train
false
727
def fileOpenDlg(tryFilePath='', tryFileName='', prompt=_translate('Select file to open'), allowed=None):
    global qtapp
    qtapp = ensureQtApp()
    if (allowed is None):
        allowed = 'All files (*.*);;PsychoPy Data (*.psydat);;txt (*.txt *.dlm *.csv);;pickled files (*.pickle *.pkl);;shelved files (*.shelf)'
    fdir = os.path.join(tryFilePath, tryFileName)
    filesToOpen = QtWidgets.QFileDialog.getOpenFileNames(parent=None, caption=prompt, directory=fdir, filter=allowed)
    if (type(filesToOpen) == tuple):
        filesToOpen = filesToOpen[0]
    filesToOpen = [unicode(fpath) for fpath in filesToOpen if os.path.exists(fpath)]
    if (len(filesToOpen) == 0):
        return None
    return filesToOpen
[ "def", "fileOpenDlg", "(", "tryFilePath", "=", "''", ",", "tryFileName", "=", "''", ",", "prompt", "=", "_translate", "(", "'Select file to open'", ")", ",", "allowed", "=", "None", ")", ":", "global", "qtapp", "qtapp", "=", "ensureQtApp", "(", ")", "if", "(", "allowed", "is", "None", ")", ":", "allowed", "=", "'All files (*.*);;PsychoPy Data (*.psydat);;txt (*.txt *.dlm *.csv);;pickled files (*.pickle *.pkl);;shelved files (*.shelf)'", "fdir", "=", "os", ".", "path", ".", "join", "(", "tryFilePath", ",", "tryFileName", ")", "filesToOpen", "=", "QtWidgets", ".", "QFileDialog", ".", "getOpenFileNames", "(", "parent", "=", "None", ",", "caption", "=", "prompt", ",", "directory", "=", "fdir", ",", "filter", "=", "allowed", ")", "if", "(", "type", "(", "filesToOpen", ")", "==", "tuple", ")", ":", "filesToOpen", "=", "filesToOpen", "[", "0", "]", "filesToOpen", "=", "[", "unicode", "(", "fpath", ")", "for", "fpath", "in", "filesToOpen", "if", "os", ".", "path", ".", "exists", "(", "fpath", ")", "]", "if", "(", "len", "(", "filesToOpen", ")", "==", "0", ")", ":", "return", "None", "return", "filesToOpen" ]
a simple dialogue allowing read access to the file system .
train
false
728
@task
@timed
def install_node_prereqs():
    if no_prereq_install():
        print NO_PREREQ_MESSAGE
        return
    prereq_cache('Node prereqs', ['package.json'], node_prereqs_installation)
[ "@", "task", "@", "timed", "def", "install_node_prereqs", "(", ")", ":", "if", "no_prereq_install", "(", ")", ":", "print", "NO_PREREQ_MESSAGE", "return", "prereq_cache", "(", "'Node prereqs'", ",", "[", "'package.json'", "]", ",", "node_prereqs_installation", ")" ]
installs node prerequisites .
train
false
729
def generate_task_families(task_class, n):
    ret = {}
    for i in range(n):
        class_name = '{}_{}'.format(task_class.task_family, i)
        ret[class_name] = type(class_name, (task_class,), {})
    return ret
[ "def", "generate_task_families", "(", "task_class", ",", "n", ")", ":", "ret", "=", "{", "}", "for", "i", "in", "range", "(", "n", ")", ":", "class_name", "=", "'{}_{}'", ".", "format", "(", "task_class", ".", "task_family", ",", "i", ")", "ret", "[", "class_name", "]", "=", "type", "(", "class_name", ",", "(", "task_class", ",", ")", ",", "{", "}", ")", "return", "ret" ]
generate n copies of a task with different task_family names .
train
false
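A sketch of generate_task_families with a plain class standing in for a Luigi task (Luigi normally derives task_family itself; it is faked here):

class DummyTask(object):
    task_family = 'DummyTask'  # normally supplied by Luigi

families = generate_task_families(DummyTask, 3)
print(sorted(families))  # ['DummyTask_0', 'DummyTask_1', 'DummyTask_2']
print(issubclass(families['DummyTask_1'], DummyTask))  # True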
730
def _get_corrected_exif(image, original):
    if ('exif' in original.info):
        image_exif = image.info.get('exif', piexif.dump({}))
        original_exif = original.info['exif']
        image_exif = _update_exif_orientation(image_exif, _get_exif_orientation(original_exif))
        return image_exif
[ "def", "_get_corrected_exif", "(", "image", ",", "original", ")", ":", "if", "(", "'exif'", "in", "original", ".", "info", ")", ":", "image_exif", "=", "image", ".", "info", ".", "get", "(", "'exif'", ",", "piexif", ".", "dump", "(", "{", "}", ")", ")", "original_exif", "=", "original", ".", "info", "[", "'exif'", "]", "image_exif", "=", "_update_exif_orientation", "(", "image_exif", ",", "_get_exif_orientation", "(", "original_exif", ")", ")", "return", "image_exif" ]
if the original image contains exif data .
train
false
731
def save_to_disk(db, ids, root, opts=None, callback=None):
    (root, opts, length) = sanitize_args(root, opts)
    failures = []
    for x in ids:
        tb = ''
        try:
            (failed, id, title) = save_book_to_disk(x, db, root, opts, length)
            tb = _('Requested formats not available')
        except:
            (failed, id, title) = (True, x, db.title(x, index_is_id=True))
            tb = traceback.format_exc()
        if failed:
            failures.append((id, title, tb))
        if callable(callback):
            if (not callback(int(id), title, failed, tb)):
                break
    return failures
[ "def", "save_to_disk", "(", "db", ",", "ids", ",", "root", ",", "opts", "=", "None", ",", "callback", "=", "None", ")", ":", "(", "root", ",", "opts", ",", "length", ")", "=", "sanitize_args", "(", "root", ",", "opts", ")", "failures", "=", "[", "]", "for", "x", "in", "ids", ":", "tb", "=", "''", "try", ":", "(", "failed", ",", "id", ",", "title", ")", "=", "save_book_to_disk", "(", "x", ",", "db", ",", "root", ",", "opts", ",", "length", ")", "tb", "=", "_", "(", "'Requested formats not available'", ")", "except", ":", "(", "failed", ",", "id", ",", "title", ")", "=", "(", "True", ",", "x", ",", "db", ".", "title", "(", "x", ",", "index_is_id", "=", "True", ")", ")", "tb", "=", "traceback", ".", "format_exc", "(", ")", "if", "failed", ":", "failures", ".", "append", "(", "(", "id", ",", "title", ",", "tb", ")", ")", "if", "callable", "(", "callback", ")", ":", "if", "(", "not", "callback", "(", "int", "(", "id", ")", ",", "title", ",", "failed", ",", "tb", ")", ")", ":", "break", "return", "failures" ]
save books from the database db to the path specified by root .
train
false
732
def crc_update(crc, data):
    if ((type(data) != array.array) or (data.itemsize != 1)):
        buf = array.array('B', data)
    else:
        buf = data
    crc = (crc ^ _MASK)
    for b in buf:
        table_index = ((crc ^ b) & 255)
        crc = ((CRC_TABLE[table_index] ^ (crc >> 8)) & _MASK)
    return (crc ^ _MASK)
[ "def", "crc_update", "(", "crc", ",", "data", ")", ":", "if", "(", "(", "type", "(", "data", ")", "!=", "array", ".", "array", ")", "or", "(", "data", ".", "itemsize", "!=", "1", ")", ")", ":", "buf", "=", "array", ".", "array", "(", "'B'", ",", "data", ")", "else", ":", "buf", "=", "data", "crc", "=", "(", "crc", "^", "_MASK", ")", "for", "b", "in", "buf", ":", "table_index", "=", "(", "(", "crc", "^", "b", ")", "&", "255", ")", "crc", "=", "(", "(", "CRC_TABLE", "[", "table_index", "]", "^", "(", "crc", ">>", "8", ")", ")", "&", "_MASK", ")", "return", "(", "crc", "^", "_MASK", ")" ]
update crc-32c checksum with data .
train
true
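CRC_TABLE and _MASK are module-level constants the snippet assumes. A hedged reconstruction for CRC-32C (Castagnoli, reflected polynomial 0x82F63B78):

_MASK = 0xFFFFFFFF  # assumed 32-bit mask

# Assumed table construction; one entry per byte value.
CRC_TABLE = []
for i in range(256):
    crc = i
    for _ in range(8):
        crc = ((crc >> 1) ^ 0x82F63B78) if (crc & 1) else (crc >> 1)
    CRC_TABLE.append(crc)

# With these constants in place, crc_update(0, b'123456789') should produce
# the standard CRC-32C check value 0xE3069283.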
733
def asmodule(module):
    if isinstance(module, str):
        module = __import__(module, fromlist=[])
    return module
[ "def", "asmodule", "(", "module", ")", ":", "if", "isinstance", "(", "module", ",", "str", ")", ":", "module", "=", "__import__", "(", "module", ",", "fromlist", "=", "[", "]", ")", "return", "module" ]
return the module references by module name .
train
false
734
def test_suffix():
    transformer = hug.transform.suffix({'.js': int, '.txt': str})

    class FakeRequest(object, ):
        path = 'hey.js'

    request = FakeRequest()
    assert (transformer('1', request) == 1)
    request.path = 'hey.txt'
    assert (transformer(2, request) == '2')
    request.path = 'hey.undefined'
    (transformer({'data': 'value'}, request) == {'data': 'value'})
[ "def", "test_suffix", "(", ")", ":", "transformer", "=", "hug", ".", "transform", ".", "suffix", "(", "{", "'.js'", ":", "int", ",", "'.txt'", ":", "str", "}", ")", "class", "FakeRequest", "(", "object", ",", ")", ":", "path", "=", "'hey.js'", "request", "=", "FakeRequest", "(", ")", "assert", "(", "transformer", "(", "'1'", ",", "request", ")", "==", "1", ")", "request", ".", "path", "=", "'hey.txt'", "assert", "(", "transformer", "(", "2", ",", "request", ")", "==", "'2'", ")", "request", ".", "path", "=", "'hey.undefined'", "(", "transformer", "(", "{", "'data'", ":", "'value'", "}", ",", "request", ")", "==", "{", "'data'", ":", "'value'", "}", ")" ]
ensure that its possible to route the output type format by the suffix of the requested url .
train
false
735
def transcribe(dna):
    if isinstance(dna, Seq):
        return dna.transcribe()
    elif isinstance(dna, MutableSeq):
        return dna.toseq().transcribe()
    else:
        return dna.replace('T', 'U').replace('t', 'u')
[ "def", "transcribe", "(", "dna", ")", ":", "if", "isinstance", "(", "dna", ",", "Seq", ")", ":", "return", "dna", ".", "transcribe", "(", ")", "elif", "isinstance", "(", "dna", ",", "MutableSeq", ")", ":", "return", "dna", ".", "toseq", "(", ")", ".", "transcribe", "(", ")", "else", ":", "return", "dna", ".", "replace", "(", "'T'", ",", "'U'", ")", ".", "replace", "(", "'t'", ",", "'u'", ")" ]
transcribes a dna sequence into rna .
train
false
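On the plain-string branch transcribe is a bare T-to-U substitution, so no Biopython import is needed for this call (Seq and MutableSeq come from Bio.Seq):

print(transcribe('ACTGgt'))  # 'ACUGgu'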
736
def test_prewitt_zeros():
    result = filters.prewitt(np.zeros((10, 10)), np.ones((10, 10), bool))
    assert_allclose(result, 0)
[ "def", "test_prewitt_zeros", "(", ")", ":", "result", "=", "filters", ".", "prewitt", "(", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ")", ",", "np", ".", "ones", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert_allclose", "(", "result", ",", "0", ")" ]
prewitt on an array of all zeros .
train
false
737
def get_requests(name): return reduce((lambda memo, obj: (memo + get_rate(('%srequests_%s_count' % (NAME_PREFIX, obj))))), ['DELETE', 'GET', 'POST', 'PUT'], 0)
[ "def", "get_requests", "(", "name", ")", ":", "return", "reduce", "(", "(", "lambda", "memo", ",", "obj", ":", "(", "memo", "+", "get_rate", "(", "(", "'%srequests_%s_count'", "%", "(", "NAME_PREFIX", ",", "obj", ")", ")", ")", ")", ")", ",", "[", "'DELETE'", ",", "'GET'", ",", "'POST'", ",", "'PUT'", "]", ",", "0", ")" ]
return requests per second .
train
false
738
def get_latest_repository_metadata_if_it_includes_invalid_tools(trans, repository):
    repository_metadata = get_latest_repository_metadata(trans, repository)
    if (repository_metadata is not None):
        metadata = repository_metadata.metadata
        if ((metadata is not None) and ('invalid_tools' in metadata)):
            return repository_metadata
    return None
[ "def", "get_latest_repository_metadata_if_it_includes_invalid_tools", "(", "trans", ",", "repository", ")", ":", "repository_metadata", "=", "get_latest_repository_metadata", "(", "trans", ",", "repository", ")", "if", "(", "repository_metadata", "is", "not", "None", ")", ":", "metadata", "=", "repository_metadata", ".", "metadata", "if", "(", "(", "metadata", "is", "not", "None", ")", "and", "(", "'invalid_tools'", "in", "metadata", ")", ")", ":", "return", "repository_metadata", "return", "None" ]
return the latest repository_metadata record for the received repository that contains invalid tools if one exists .
train
false
740
def decode_int(v):
    if ((len(v) > 0) and ((v[0] == '\x00') or (v[0] == 0))):
        raise Exception('No leading zero bytes allowed for integers')
    return big_endian_to_int(v)
[ "def", "decode_int", "(", "v", ")", ":", "if", "(", "(", "len", "(", "v", ")", ">", "0", ")", "and", "(", "(", "v", "[", "0", "]", "==", "'\\x00'", ")", "or", "(", "v", "[", "0", "]", "==", "0", ")", ")", ")", ":", "raise", "Exception", "(", "'No leading zero bytes allowed for integers'", ")", "return", "big_endian_to_int", "(", "v", ")" ]
decode c{int} .
train
true
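big_endian_to_int is assumed from the surrounding module; a minimal Python 3 stand-in plus usage:

def big_endian_to_int(v):
    # minimal stand-in for the helper the snippet assumes
    return int.from_bytes(v, 'big')

print(decode_int(b'\x01\x00'))  # 256
# decode_int(b'\x00\x01') raises: leading zero bytes are rejected so each
# integer has exactly one canonical encoding.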
742
def detect_encoding(readline):
    bom_found = False
    encoding = None
    default = 'utf-8'

    def read_or_stop():
        try:
            return readline()
        except StopIteration:
            return bytes()

    def find_cookie(line):
        try:
            line_string = line.decode('ascii')
        except UnicodeDecodeError:
            return None
        match = cookie_re.match(line_string)
        if (not match):
            return None
        encoding = _get_normal_name(match.group(1))
        try:
            codec = lookup(encoding)
        except LookupError:
            raise SyntaxError(('unknown encoding: ' + encoding))
        if bom_found:
            if (codec.name != 'utf-8'):
                raise SyntaxError('encoding problem: utf-8')
            encoding += '-sig'
        return encoding

    first = read_or_stop()
    if first.startswith(BOM_UTF8):
        bom_found = True
        first = first[3:]
        default = 'utf-8-sig'
    if (not first):
        return (default, [])
    encoding = find_cookie(first)
    if encoding:
        return (encoding, [first])
    second = read_or_stop()
    if (not second):
        return (default, [first])
    encoding = find_cookie(second)
    if encoding:
        return (encoding, [first, second])
    return (default, [first, second])
[ "def", "detect_encoding", "(", "readline", ")", ":", "bom_found", "=", "False", "encoding", "=", "None", "default", "=", "'utf-8'", "def", "read_or_stop", "(", ")", ":", "try", ":", "return", "readline", "(", ")", "except", "StopIteration", ":", "return", "bytes", "(", ")", "def", "find_cookie", "(", "line", ")", ":", "try", ":", "line_string", "=", "line", ".", "decode", "(", "'ascii'", ")", "except", "UnicodeDecodeError", ":", "return", "None", "match", "=", "cookie_re", ".", "match", "(", "line_string", ")", "if", "(", "not", "match", ")", ":", "return", "None", "encoding", "=", "_get_normal_name", "(", "match", ".", "group", "(", "1", ")", ")", "try", ":", "codec", "=", "lookup", "(", "encoding", ")", "except", "LookupError", ":", "raise", "SyntaxError", "(", "(", "'unknown encoding: '", "+", "encoding", ")", ")", "if", "bom_found", ":", "if", "(", "codec", ".", "name", "!=", "'utf-8'", ")", ":", "raise", "SyntaxError", "(", "'encoding problem: utf-8'", ")", "encoding", "+=", "'-sig'", "return", "encoding", "first", "=", "read_or_stop", "(", ")", "if", "first", ".", "startswith", "(", "BOM_UTF8", ")", ":", "bom_found", "=", "True", "first", "=", "first", "[", "3", ":", "]", "default", "=", "'utf-8-sig'", "if", "(", "not", "first", ")", ":", "return", "(", "default", ",", "[", "]", ")", "encoding", "=", "find_cookie", "(", "first", ")", "if", "encoding", ":", "return", "(", "encoding", ",", "[", "first", "]", ")", "second", "=", "read_or_stop", "(", ")", "if", "(", "not", "second", ")", ":", "return", "(", "default", ",", "[", "first", "]", ")", "encoding", "=", "find_cookie", "(", "second", ")", "if", "encoding", ":", "return", "(", "encoding", ",", "[", "first", ",", "second", "]", ")", "return", "(", "default", ",", "[", "first", ",", "second", "]", ")" ]
return file encoding .
train
true
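The function expects a readline callable over raw bytes, the same contract as the stdlib tokenize.detect_encoding, which can be used to demonstrate the expected result:

import io
import tokenize

src = io.BytesIO(b'# -*- coding: latin-1 -*-\nx = 1\n')
encoding, first_lines = tokenize.detect_encoding(src.readline)
print(encoding)     # iso-8859-1
print(first_lines)  # [b'# -*- coding: latin-1 -*-\n']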
744
def inverse_dicts(*dicts):
    inverse = {}
    for dictionary in dicts:
        for (key, value) in dictionary.items():
            if isinstance(value, Iterable):
                for item in value:
                    add_pair_to_dict(item, key, inverse)
            else:
                add_pair_to_dict(value, key, inverse)
    return inverse
[ "def", "inverse_dicts", "(", "*", "dicts", ")", ":", "inverse", "=", "{", "}", "for", "dictionary", "in", "dicts", ":", "for", "(", "key", ",", "value", ")", "in", "dictionary", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "Iterable", ")", ":", "for", "item", "in", "value", ":", "add_pair_to_dict", "(", "item", ",", "key", ",", "inverse", ")", "else", ":", "add_pair_to_dict", "(", "value", ",", "key", ",", "inverse", ")", "return", "inverse" ]
inverts the dicts , mapping each value back to its keys .
train
false
745
def is_datetimelike(data): try: maybe_to_datetimelike(data) return True except Exception: pass return False
[ "def", "is_datetimelike", "(", "data", ")", ":", "try", ":", "maybe_to_datetimelike", "(", "data", ")", "return", "True", "except", "Exception", ":", "pass", "return", "False" ]
return a boolean indicating whether the data can be successfully converted to a datetimelike .
train
false
746
def delete_usage_plan(plan_id, region=None, key=None, keyid=None, profile=None): try: existing = describe_usage_plans(plan_id=plan_id, region=region, key=key, keyid=keyid, profile=profile) if ('error' in existing): return {'error': existing['error']} if (('plans' in existing) and existing['plans']): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) res = conn.delete_usage_plan(usagePlanId=plan_id) return {'deleted': True, 'usagePlanId': plan_id} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "delete_usage_plan", "(", "plan_id", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "existing", "=", "describe_usage_plans", "(", "plan_id", "=", "plan_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "'error'", "in", "existing", ")", ":", "return", "{", "'error'", ":", "existing", "[", "'error'", "]", "}", "if", "(", "(", "'plans'", "in", "existing", ")", "and", "existing", "[", "'plans'", "]", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "res", "=", "conn", ".", "delete_usage_plan", "(", "usagePlanId", "=", "plan_id", ")", "return", "{", "'deleted'", ":", "True", ",", "'usagePlanId'", ":", "plan_id", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
deletes usage plan identified by plan_id .
train
false
747
def add_qiime_labels(mapping_f, fasta_dir, filename_column, output_dir='.', count_start=0): (headers, mapping_data, run_description, errors, warnings) = process_id_map(mapping_f, has_barcodes=False, disable_primer_check=True, added_demultiplex_field=None, variable_len_barcodes=False) fasta_name_to_sample_id = check_mapping_data(mapping_data, headers, filename_column) fasta_files = get_fasta_fps(fasta_dir, fasta_name_to_sample_id.keys()) write_combined_fasta(fasta_name_to_sample_id, fasta_files, output_dir, counter=count_start)
[ "def", "add_qiime_labels", "(", "mapping_f", ",", "fasta_dir", ",", "filename_column", ",", "output_dir", "=", "'.'", ",", "count_start", "=", "0", ")", ":", "(", "headers", ",", "mapping_data", ",", "run_description", ",", "errors", ",", "warnings", ")", "=", "process_id_map", "(", "mapping_f", ",", "has_barcodes", "=", "False", ",", "disable_primer_check", "=", "True", ",", "added_demultiplex_field", "=", "None", ",", "variable_len_barcodes", "=", "False", ")", "fasta_name_to_sample_id", "=", "check_mapping_data", "(", "mapping_data", ",", "headers", ",", "filename_column", ")", "fasta_files", "=", "get_fasta_fps", "(", "fasta_dir", ",", "fasta_name_to_sample_id", ".", "keys", "(", ")", ")", "write_combined_fasta", "(", "fasta_name_to_sample_id", ",", "fasta_files", ",", "output_dir", ",", "counter", "=", "count_start", ")" ]
main function for combining fasta files .
train
false
748
def qflags_key(base, value, add_base=False, klass=None): if (klass is None): klass = value.__class__ if (klass == int): raise TypeError("Can't guess enum class of an int!") bits = [] names = [] mask = 1 value = int(value) while (mask <= value): if (value & mask): bits.append(mask) mask <<= 1 for bit in bits: names.append(qenum_key(base, klass(bit), add_base)) return '|'.join(names)
[ "def", "qflags_key", "(", "base", ",", "value", ",", "add_base", "=", "False", ",", "klass", "=", "None", ")", ":", "if", "(", "klass", "is", "None", ")", ":", "klass", "=", "value", ".", "__class__", "if", "(", "klass", "==", "int", ")", ":", "raise", "TypeError", "(", "\"Can't guess enum class of an int!\"", ")", "bits", "=", "[", "]", "names", "=", "[", "]", "mask", "=", "1", "value", "=", "int", "(", "value", ")", "while", "(", "mask", "<=", "value", ")", ":", "if", "(", "value", "&", "mask", ")", ":", "bits", ".", "append", "(", "mask", ")", "mask", "<<=", "1", "for", "bit", "in", "bits", ":", "names", ".", "append", "(", "qenum_key", "(", "base", ",", "klass", "(", "bit", ")", ",", "add_base", ")", ")", "return", "'|'", ".", "join", "(", "names", ")" ]
convert a qt qflags value to its keys as string .
train
false
750
def split_title_year(title): if (not title): return if (not re.search(u'\\d{4}', title)): return (title, None) match = re.search(u'(.*?)\\(?(\\d{4})?\\)?$', title) title = match.group(1).strip() if match.group(2): year = int(match.group(2)) else: year = None return (title, year)
[ "def", "split_title_year", "(", "title", ")", ":", "if", "(", "not", "title", ")", ":", "return", "if", "(", "not", "re", ".", "search", "(", "u'\\\\d{4}'", ",", "title", ")", ")", ":", "return", "(", "title", ",", "None", ")", "match", "=", "re", ".", "search", "(", "u'(.*?)\\\\(?(\\\\d{4})?\\\\)?$'", ",", "title", ")", "title", "=", "match", ".", "group", "(", "1", ")", ".", "strip", "(", ")", "if", "match", ".", "group", "(", "2", ")", ":", "year", "=", "int", "(", "match", ".", "group", "(", "2", ")", ")", "else", ":", "year", "=", "None", "return", "(", "title", ",", "year", ")" ]
splits a title containing a year into a title and a year .
train
false
751
def InstallManagementConsole(namespace, users={'admin': 'admin'}, port=503): from twisted.internet import reactor def build_protocol(): p = insults.ServerProtocol(manhole.ColoredManhole, namespace) return p r = manhole_ssh.TerminalRealm() r.chainedProtocolFactory = build_protocol c = checkers.InMemoryUsernamePasswordDatabaseDontUse(**users) p = portal.Portal(r, [c]) factory = manhole_ssh.ConchFactory(p) reactor.listenTCP(port, factory)
[ "def", "InstallManagementConsole", "(", "namespace", ",", "users", "=", "{", "'admin'", ":", "'admin'", "}", ",", "port", "=", "503", ")", ":", "from", "twisted", ".", "internet", "import", "reactor", "def", "build_protocol", "(", ")", ":", "p", "=", "insults", ".", "ServerProtocol", "(", "manhole", ".", "ColoredManhole", ",", "namespace", ")", "return", "p", "r", "=", "manhole_ssh", ".", "TerminalRealm", "(", ")", "r", ".", "chainedProtocolFactory", "=", "build_protocol", "c", "=", "checkers", ".", "InMemoryUsernamePasswordDatabaseDontUse", "(", "**", "users", ")", "p", "=", "portal", ".", "Portal", "(", "r", ",", "[", "c", "]", ")", "factory", "=", "manhole_ssh", ".", "ConchFactory", "(", "p", ")", "reactor", ".", "listenTCP", "(", "port", ",", "factory", ")" ]
helper method to start an ssh management console for the modbus server .
train
false
752
def ip_addrs6(interface=None, include_loopback=False): return salt.utils.network.ip_addrs6(interface=interface, include_loopback=include_loopback)
[ "def", "ip_addrs6", "(", "interface", "=", "None", ",", "include_loopback", "=", "False", ")", ":", "return", "salt", ".", "utils", ".", "network", ".", "ip_addrs6", "(", "interface", "=", "interface", ",", "include_loopback", "=", "include_loopback", ")" ]
returns a list of ipv6 addresses assigned to the host .
train
false
753
def _logged_get(log, *args, **kwargs): req_kwargs = kwargs send_kwargs = {} for arg in ('stream', 'verify', 'proxies', 'cert', 'timeout'): if (arg in kwargs): send_kwargs[arg] = req_kwargs.pop(arg) if ('message' in kwargs): message = kwargs.pop('message') else: message = 'getting URL' req = requests.Request('GET', *args, **req_kwargs) with requests.Session() as s: s.headers = {'User-Agent': 'beets'} prepped = s.prepare_request(req) log.debug('{}: {}', message, prepped.url) return s.send(prepped, **send_kwargs)
[ "def", "_logged_get", "(", "log", ",", "*", "args", ",", "**", "kwargs", ")", ":", "req_kwargs", "=", "kwargs", "send_kwargs", "=", "{", "}", "for", "arg", "in", "(", "'stream'", ",", "'verify'", ",", "'proxies'", ",", "'cert'", ",", "'timeout'", ")", ":", "if", "(", "arg", "in", "kwargs", ")", ":", "send_kwargs", "[", "arg", "]", "=", "req_kwargs", ".", "pop", "(", "arg", ")", "if", "(", "'message'", "in", "kwargs", ")", ":", "message", "=", "kwargs", ".", "pop", "(", "'message'", ")", "else", ":", "message", "=", "'getting URL'", "req", "=", "requests", ".", "Request", "(", "'GET'", ",", "*", "args", ",", "**", "req_kwargs", ")", "with", "requests", ".", "Session", "(", ")", "as", "s", ":", "s", ".", "headers", "=", "{", "'User-Agent'", ":", "'beets'", "}", "prepped", "=", "s", ".", "prepare_request", "(", "req", ")", "log", ".", "debug", "(", "'{}: {}'", ",", "message", ",", "prepped", ".", "url", ")", "return", "s", ".", "send", "(", "prepped", ",", "**", "send_kwargs", ")" ]
like requests.get , but logs the url at debug level before sending .
train
false
756
def _in_gce_environment(): if (SETTINGS.env_name is not None): return (SETTINGS.env_name == 'GCE_PRODUCTION') if ((NO_GCE_CHECK != 'True') and _detect_gce_environment()): SETTINGS.env_name = 'GCE_PRODUCTION' return True return False
[ "def", "_in_gce_environment", "(", ")", ":", "if", "(", "SETTINGS", ".", "env_name", "is", "not", "None", ")", ":", "return", "(", "SETTINGS", ".", "env_name", "==", "'GCE_PRODUCTION'", ")", "if", "(", "(", "NO_GCE_CHECK", "!=", "'True'", ")", "and", "_detect_gce_environment", "(", ")", ")", ":", "SETTINGS", ".", "env_name", "=", "'GCE_PRODUCTION'", "return", "True", "return", "False" ]
detect if the code is running in the compute engine environment .
train
true
757
def rotatePoints(points, prefix, xmlElement): rotateMatrixTetragrid = matrix.getRotateMatrixTetragrid(prefix, xmlElement) if (rotateMatrixTetragrid == None): print 'Warning, rotateMatrixTetragrid was None in rotate so nothing will be done for:' print xmlElement return for point in points: matrix.transformVector3ByMatrix(rotateMatrixTetragrid, point)
[ "def", "rotatePoints", "(", "points", ",", "prefix", ",", "xmlElement", ")", ":", "rotateMatrixTetragrid", "=", "matrix", ".", "getRotateMatrixTetragrid", "(", "prefix", ",", "xmlElement", ")", "if", "(", "rotateMatrixTetragrid", "==", "None", ")", ":", "print", "'Warning, rotateMatrixTetragrid was None in rotate so nothing will be done for:'", "print", "xmlElement", "return", "for", "point", "in", "points", ":", "matrix", ".", "transformVector3ByMatrix", "(", "rotateMatrixTetragrid", ",", "point", ")" ]
rotate the points .
train
false
758
def RotL_64(x, N): return (np.left_shift(x, (N & 63), dtype=np.uint64) | np.right_shift(x, ((64 - N) & 63), dtype=np.uint64))
[ "def", "RotL_64", "(", "x", ",", "N", ")", ":", "return", "(", "np", ".", "left_shift", "(", "x", ",", "(", "N", "&", "63", ")", ",", "dtype", "=", "np", ".", "uint64", ")", "|", "np", ".", "right_shift", "(", "x", ",", "(", "(", "64", "-", "N", ")", "&", "63", ")", ",", "dtype", "=", "np", ".", "uint64", ")", ")" ]
return x rotated left by n .
train
false
759
def align_left(text, length, left_edge='|', right_edge='|', text_length=None, left_padding=2): if (text_length is None): text_length = get_text_length(text) computed_length = (((text_length + left_padding) + get_text_length(left_edge)) + get_text_length(right_edge)) if ((length - computed_length) >= 0): padding = left_padding else: padding = 0 output = [] length_so_far = 0 output.append(left_edge) length_so_far += len(left_edge) output.append((' ' * padding)) length_so_far += padding output.append(text) length_so_far += text_length output.append((' ' * ((length - length_so_far) - len(right_edge)))) output.append(right_edge) return ''.join(output)
[ "def", "align_left", "(", "text", ",", "length", ",", "left_edge", "=", "'|'", ",", "right_edge", "=", "'|'", ",", "text_length", "=", "None", ",", "left_padding", "=", "2", ")", ":", "if", "(", "text_length", "is", "None", ")", ":", "text_length", "=", "get_text_length", "(", "text", ")", "computed_length", "=", "(", "(", "(", "text_length", "+", "left_padding", ")", "+", "get_text_length", "(", "left_edge", ")", ")", "+", "get_text_length", "(", "right_edge", ")", ")", "if", "(", "(", "length", "-", "computed_length", ")", ">=", "0", ")", ":", "padding", "=", "left_padding", "else", ":", "padding", "=", "0", "output", "=", "[", "]", "length_so_far", "=", "0", "output", ".", "append", "(", "left_edge", ")", "length_so_far", "+=", "len", "(", "left_edge", ")", "output", ".", "append", "(", "(", "' '", "*", "padding", ")", ")", "length_so_far", "+=", "padding", "output", ".", "append", "(", "text", ")", "length_so_far", "+=", "text_length", "output", ".", "append", "(", "(", "' '", "*", "(", "(", "length", "-", "length_so_far", ")", "-", "len", "(", "right_edge", ")", ")", ")", ")", "output", ".", "append", "(", "right_edge", ")", "return", "''", ".", "join", "(", "output", ")" ]
left align text .
train
false
760
@docfiller def generic_filter1d(input, function, filter_size, axis=(-1), output=None, mode='reflect', cval=0.0, origin=0, extra_arguments=(), extra_keywords=None): if (extra_keywords is None): extra_keywords = {} input = numpy.asarray(input) if numpy.iscomplexobj(input): raise TypeError('Complex type not supported') (output, return_value) = _ni_support._get_output(output, input) if (filter_size < 1): raise RuntimeError('invalid filter size') axis = _ni_support._check_axis(axis, input.ndim) if ((((filter_size // 2) + origin) < 0) or (((filter_size // 2) + origin) >= filter_size)): raise ValueError('invalid origin') mode = _ni_support._extend_mode_to_code(mode) _nd_image.generic_filter1d(input, function, filter_size, axis, output, mode, cval, origin, extra_arguments, extra_keywords) return return_value
[ "@", "docfiller", "def", "generic_filter1d", "(", "input", ",", "function", ",", "filter_size", ",", "axis", "=", "(", "-", "1", ")", ",", "output", "=", "None", ",", "mode", "=", "'reflect'", ",", "cval", "=", "0.0", ",", "origin", "=", "0", ",", "extra_arguments", "=", "(", ")", ",", "extra_keywords", "=", "None", ")", ":", "if", "(", "extra_keywords", "is", "None", ")", ":", "extra_keywords", "=", "{", "}", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "if", "numpy", ".", "iscomplexobj", "(", "input", ")", ":", "raise", "TypeError", "(", "'Complex type not supported'", ")", "(", "output", ",", "return_value", ")", "=", "_ni_support", ".", "_get_output", "(", "output", ",", "input", ")", "if", "(", "filter_size", "<", "1", ")", ":", "raise", "RuntimeError", "(", "'invalid filter size'", ")", "axis", "=", "_ni_support", ".", "_check_axis", "(", "axis", ",", "input", ".", "ndim", ")", "if", "(", "(", "(", "(", "filter_size", "//", "2", ")", "+", "origin", ")", "<", "0", ")", "or", "(", "(", "(", "filter_size", "//", "2", ")", "+", "origin", ")", ">=", "filter_size", ")", ")", ":", "raise", "ValueError", "(", "'invalid origin'", ")", "mode", "=", "_ni_support", ".", "_extend_mode_to_code", "(", "mode", ")", "_nd_image", ".", "generic_filter1d", "(", "input", ",", "function", ",", "filter_size", ",", "axis", ",", "output", ",", "mode", ",", "cval", ",", "origin", ",", "extra_arguments", ",", "extra_keywords", ")", "return", "return_value" ]
calculate a one-dimensional filter along the given axis .
train
false
761
def linkcheck(): os.system('sphinx-build -b linkcheck -d build/doctrees . build/linkcheck')
[ "def", "linkcheck", "(", ")", ":", "os", ".", "system", "(", "'sphinx-build -b linkcheck -d build/doctrees . build/linkcheck'", ")" ]
check if all links are correct .
train
false
763
def format_slice(key_val, dim): if isinstance(key_val, slice): key_val = slice(to_int(key_val.start), to_int(key_val.stop), to_int(key_val.step)) return key_val else: key_val = to_int(key_val) key_val = wrap_neg_index(key_val, dim) if (0 <= key_val < dim): return slice(key_val, (key_val + 1), 1) else: raise IndexError('Index/slice out of bounds.')
[ "def", "format_slice", "(", "key_val", ",", "dim", ")", ":", "if", "isinstance", "(", "key_val", ",", "slice", ")", ":", "key_val", "=", "slice", "(", "to_int", "(", "key_val", ".", "start", ")", ",", "to_int", "(", "key_val", ".", "stop", ")", ",", "to_int", "(", "key_val", ".", "step", ")", ")", "return", "key_val", "else", ":", "key_val", "=", "to_int", "(", "key_val", ")", "key_val", "=", "wrap_neg_index", "(", "key_val", ",", "dim", ")", "if", "(", "0", "<=", "key_val", "<", "dim", ")", ":", "return", "slice", "(", "key_val", ",", "(", "key_val", "+", "1", ")", ",", "1", ")", "else", ":", "raise", "IndexError", "(", "'Index/slice out of bounds.'", ")" ]
converts part of a key into a slice with a start and step .
train
false
764
def delocalize(string): conv = localeconv() ts = conv['thousands_sep'] if ts: string = string.replace(ts, '') dd = conv['decimal_point'] if dd: string = string.replace(dd, '.') return string
[ "def", "delocalize", "(", "string", ")", ":", "conv", "=", "localeconv", "(", ")", "ts", "=", "conv", "[", "'thousands_sep'", "]", "if", "ts", ":", "string", "=", "string", ".", "replace", "(", "ts", ",", "''", ")", "dd", "=", "conv", "[", "'decimal_point'", "]", "if", "dd", ":", "string", "=", "string", ".", "replace", "(", "dd", ",", "'.'", ")", "return", "string" ]
parses a string as a normalized number according to the locale settings .
train
false
765
def local_git_clone(repo_url): with lcd(LOGDIR): local('if [ -d letsencrypt ]; then rm -rf letsencrypt; fi') local(('git clone %s letsencrypt' % repo_url)) local('tar czf le.tar.gz letsencrypt')
[ "def", "local_git_clone", "(", "repo_url", ")", ":", "with", "lcd", "(", "LOGDIR", ")", ":", "local", "(", "'if [ -d letsencrypt ]; then rm -rf letsencrypt; fi'", ")", "local", "(", "(", "'git clone %s letsencrypt'", "%", "repo_url", ")", ")", "local", "(", "'tar czf le.tar.gz letsencrypt'", ")" ]
clones master of repo_url .
train
false
766
def oo_collect(data, attribute=None, filters=None): if (not isinstance(data, list)): raise errors.AnsibleFilterError('|failed expects to filter on a List') if (not attribute): raise errors.AnsibleFilterError('|failed expects attribute to be set') if (filters is not None): if (not isinstance(filters, dict)): raise errors.AnsibleFilterError('|failed expects filter to be a dict') retval = [get_attr(d, attribute) for d in data if all([(d.get(key, None) == filters[key]) for key in filters])] else: retval = [get_attr(d, attribute) for d in data] retval = [val for val in retval if (val is not None)] return retval
[ "def", "oo_collect", "(", "data", ",", "attribute", "=", "None", ",", "filters", "=", "None", ")", ":", "if", "(", "not", "isinstance", "(", "data", ",", "list", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects to filter on a List'", ")", "if", "(", "not", "attribute", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects attribute to be set'", ")", "if", "(", "filters", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "filters", ",", "dict", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects filter to be a dict'", ")", "retval", "=", "[", "get_attr", "(", "d", ",", "attribute", ")", "for", "d", "in", "data", "if", "all", "(", "[", "(", "d", ".", "get", "(", "key", ",", "None", ")", "==", "filters", "[", "key", "]", ")", "for", "key", "in", "filters", "]", ")", "]", "else", ":", "retval", "=", "[", "get_attr", "(", "d", ",", "attribute", ")", "for", "d", "in", "data", "]", "retval", "=", "[", "val", "for", "val", "in", "retval", "if", "(", "val", "is", "not", "None", ")", "]", "return", "retval" ]
this takes a list of dicts and collects the specified attribute from each into a list .
train
false
767
def prefixed_userid(request): authn_type = getattr(request, 'authn_type', None) if (authn_type is not None): return ((authn_type + ':') + request.selected_userid)
[ "def", "prefixed_userid", "(", "request", ")", ":", "authn_type", "=", "getattr", "(", "request", ",", "'authn_type'", ",", "None", ")", "if", "(", "authn_type", "is", "not", "None", ")", ":", "return", "(", "(", "authn_type", "+", "':'", ")", "+", "request", ".", "selected_userid", ")" ]
in kinto , user ids are prefixed with the policy name that is contained in pyramid multiauth .
train
false
769
def function_simple(a, b, c): return (a, b, c)
[ "def", "function_simple", "(", "a", ",", "b", ",", "c", ")", ":", "return", "(", "a", ",", "b", ",", "c", ")" ]
a function which accepts several arguments .
train
false
770
def set_vm_state_and_notify(context, instance_uuid, service, method, updates, ex, request_spec): LOG.warning(_LW('Failed to %(service)s_%(method)s: %(ex)s'), {'service': service, 'method': method, 'ex': ex}) vm_state = updates['vm_state'] properties = request_spec.get('instance_properties', {}) notifier = rpc.get_notifier(service) state = vm_state.upper() LOG.warning(_LW('Setting instance to %s state.'), state, instance_uuid=instance_uuid) instance = objects.Instance(context=context, uuid=instance_uuid, **updates) instance.obj_reset_changes(['uuid']) instance.save() compute_utils.add_instance_fault_from_exc(context, instance, ex, sys.exc_info()) payload = dict(request_spec=request_spec, instance_properties=properties, instance_id=instance_uuid, state=vm_state, method=method, reason=ex) event_type = ('%s.%s' % (service, method)) notifier.error(context, event_type, payload)
[ "def", "set_vm_state_and_notify", "(", "context", ",", "instance_uuid", ",", "service", ",", "method", ",", "updates", ",", "ex", ",", "request_spec", ")", ":", "LOG", ".", "warning", "(", "_LW", "(", "'Failed to %(service)s_%(method)s: %(ex)s'", ")", ",", "{", "'service'", ":", "service", ",", "'method'", ":", "method", ",", "'ex'", ":", "ex", "}", ")", "vm_state", "=", "updates", "[", "'vm_state'", "]", "properties", "=", "request_spec", ".", "get", "(", "'instance_properties'", ",", "{", "}", ")", "notifier", "=", "rpc", ".", "get_notifier", "(", "service", ")", "state", "=", "vm_state", ".", "upper", "(", ")", "LOG", ".", "warning", "(", "_LW", "(", "'Setting instance to %s state.'", ")", ",", "state", ",", "instance_uuid", "=", "instance_uuid", ")", "instance", "=", "objects", ".", "Instance", "(", "context", "=", "context", ",", "uuid", "=", "instance_uuid", ",", "**", "updates", ")", "instance", ".", "obj_reset_changes", "(", "[", "'uuid'", "]", ")", "instance", ".", "save", "(", ")", "compute_utils", ".", "add_instance_fault_from_exc", "(", "context", ",", "instance", ",", "ex", ",", "sys", ".", "exc_info", "(", ")", ")", "payload", "=", "dict", "(", "request_spec", "=", "request_spec", ",", "instance_properties", "=", "properties", ",", "instance_id", "=", "instance_uuid", ",", "state", "=", "vm_state", ",", "method", "=", "method", ",", "reason", "=", "ex", ")", "event_type", "=", "(", "'%s.%s'", "%", "(", "service", ",", "method", ")", ")", "notifier", ".", "error", "(", "context", ",", "event_type", ",", "payload", ")" ]
changes vm state and notifies .
train
false
771
def register_deep_copy_op_c_code(typ, code, version=()): DeepCopyOp.c_code_and_version[typ] = (code, version)
[ "def", "register_deep_copy_op_c_code", "(", "typ", ",", "code", ",", "version", "=", "(", ")", ")", ":", "DeepCopyOp", ".", "c_code_and_version", "[", "typ", "]", "=", "(", "code", ",", "version", ")" ]
tell deepcopyop how to generate c code for a theano type .
train
false
772
def _length_hint(obj): try: return len(obj) except (AttributeError, TypeError): try: get_hint = type(obj).__length_hint__ except AttributeError: return None try: hint = get_hint(obj) except TypeError: return None if ((hint is NotImplemented) or (not isinstance(hint, int_types)) or (hint < 0)): return None return hint
[ "def", "_length_hint", "(", "obj", ")", ":", "try", ":", "return", "len", "(", "obj", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "try", ":", "get_hint", "=", "type", "(", "obj", ")", ".", "__length_hint__", "except", "AttributeError", ":", "return", "None", "try", ":", "hint", "=", "get_hint", "(", "obj", ")", "except", "TypeError", ":", "return", "None", "if", "(", "(", "hint", "is", "NotImplemented", ")", "or", "(", "not", "isinstance", "(", "hint", ",", "int_types", ")", ")", "or", "(", "hint", "<", "0", ")", ")", ":", "return", "None", "return", "hint" ]
returns the length hint of an object .
train
true
773
def policy_version_exists(policy_name, version_id, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) policy_arn = _get_policy_arn(policy_name, region, key, keyid, profile) try: conn.get_policy_version(policy_arn, version_id) return True except boto.exception.BotoServerError: return False
[ "def", "policy_version_exists", "(", "policy_name", ",", "version_id", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "policy_arn", "=", "_get_policy_arn", "(", "policy_name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "try", ":", "conn", ".", "get_policy_version", "(", "policy_arn", ",", "version_id", ")", "return", "True", "except", "boto", ".", "exception", ".", "BotoServerError", ":", "return", "False" ]
given a policy name and version id , check whether the policy version exists .
train
true
774
def test_cnot_commutators(): assert (Commutator(CNOT(0, 1), Z(0)).doit() == 0) assert (Commutator(CNOT(0, 1), T(0)).doit() == 0) assert (Commutator(CNOT(0, 1), S(0)).doit() == 0) assert (Commutator(CNOT(0, 1), X(1)).doit() == 0) assert (Commutator(CNOT(0, 1), CNOT(0, 1)).doit() == 0) assert (Commutator(CNOT(0, 1), CNOT(0, 2)).doit() == 0) assert (Commutator(CNOT(0, 2), CNOT(0, 1)).doit() == 0) assert (Commutator(CNOT(1, 2), CNOT(1, 0)).doit() == 0)
[ "def", "test_cnot_commutators", "(", ")", ":", "assert", "(", "Commutator", "(", "CNOT", "(", "0", ",", "1", ")", ",", "Z", "(", "0", ")", ")", ".", "doit", "(", ")", "==", "0", ")", "assert", "(", "Commutator", "(", "CNOT", "(", "0", ",", "1", ")", ",", "T", "(", "0", ")", ")", ".", "doit", "(", ")", "==", "0", ")", "assert", "(", "Commutator", "(", "CNOT", "(", "0", ",", "1", ")", ",", "S", "(", "0", ")", ")", ".", "doit", "(", ")", "==", "0", ")", "assert", "(", "Commutator", "(", "CNOT", "(", "0", ",", "1", ")", ",", "X", "(", "1", ")", ")", ".", "doit", "(", ")", "==", "0", ")", "assert", "(", "Commutator", "(", "CNOT", "(", "0", ",", "1", ")", ",", "CNOT", "(", "0", ",", "1", ")", ")", ".", "doit", "(", ")", "==", "0", ")", "assert", "(", "Commutator", "(", "CNOT", "(", "0", ",", "1", ")", ",", "CNOT", "(", "0", ",", "2", ")", ")", ".", "doit", "(", ")", "==", "0", ")", "assert", "(", "Commutator", "(", "CNOT", "(", "0", ",", "2", ")", ",", "CNOT", "(", "0", ",", "1", ")", ")", ".", "doit", "(", ")", "==", "0", ")", "assert", "(", "Commutator", "(", "CNOT", "(", "1", ",", "2", ")", ",", "CNOT", "(", "1", ",", "0", ")", ")", ".", "doit", "(", ")", "==", "0", ")" ]
test commutators involving cnot gates .
train
false
775
def check_header_match_180_or_later(header1, header2): header1 = header1.split(':') header2 = header2.split(':') for (e1, e2) in zip(header1, header2): if (e1.split(' ')[0] != e2.split(' ')[0]): return False return True
[ "def", "check_header_match_180_or_later", "(", "header1", ",", "header2", ")", ":", "header1", "=", "header1", ".", "split", "(", "':'", ")", "header2", "=", "header2", ".", "split", "(", "':'", ")", "for", "(", "e1", ",", "e2", ")", "in", "zip", "(", "header1", ",", "header2", ")", ":", "if", "(", "e1", ".", "split", "(", "' '", ")", "[", "0", "]", "!=", "e2", ".", "split", "(", "' '", ")", "[", "0", "]", ")", ":", "return", "False", "return", "True" ]
confirm headers are compatible in casava 1.8 or later .
train
false
777
@register.tag def get_unread_message_count_for(parser, token): try: (tag_name, arg) = token.contents.split(None, 1) except ValueError: raise template.TemplateSyntaxError(('%s tag requires arguments' % token.contents.split()[0])) m = re.search('(.*?) as (\\w+)', arg) if (not m): raise template.TemplateSyntaxError(('%s tag had invalid arguments' % tag_name)) (user, var_name) = m.groups() return MessageCount(user, var_name)
[ "@", "register", ".", "tag", "def", "get_unread_message_count_for", "(", "parser", ",", "token", ")", ":", "try", ":", "(", "tag_name", ",", "arg", ")", "=", "token", ".", "contents", ".", "split", "(", "None", ",", "1", ")", "except", "ValueError", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "'%s tag requires arguments'", "%", "token", ".", "contents", ".", "split", "(", ")", "[", "0", "]", ")", ")", "m", "=", "re", ".", "search", "(", "'(.*?) as (\\\\w+)'", ",", "arg", ")", "if", "(", "not", "m", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "'%s tag had invalid arguments'", "%", "tag_name", ")", ")", "(", "user", ",", "var_name", ")", "=", "m", ".", "groups", "(", ")", "return", "MessageCount", "(", "user", ",", "var_name", ")" ]
returns the unread message count for a user .
train
true
778
def grubbs(timeseries): series = scipy.array([x[1] for x in timeseries]) stdDev = scipy.std(series) mean = np.mean(series) tail_average = tail_avg(timeseries) z_score = ((tail_average - mean) / stdDev) len_series = len(series) threshold = scipy.stats.t.isf((0.05 / (2 * len_series)), (len_series - 2)) threshold_squared = (threshold * threshold) grubbs_score = (((len_series - 1) / np.sqrt(len_series)) * np.sqrt((threshold_squared / ((len_series - 2) + threshold_squared)))) return (z_score > grubbs_score)
[ "def", "grubbs", "(", "timeseries", ")", ":", "series", "=", "scipy", ".", "array", "(", "[", "x", "[", "1", "]", "for", "x", "in", "timeseries", "]", ")", "stdDev", "=", "scipy", ".", "std", "(", "series", ")", "mean", "=", "np", ".", "mean", "(", "series", ")", "tail_average", "=", "tail_avg", "(", "timeseries", ")", "z_score", "=", "(", "(", "tail_average", "-", "mean", ")", "/", "stdDev", ")", "len_series", "=", "len", "(", "series", ")", "threshold", "=", "scipy", ".", "stats", ".", "t", ".", "isf", "(", "(", "0.05", "/", "(", "2", "*", "len_series", ")", ")", ",", "(", "len_series", "-", "2", ")", ")", "threshold_squared", "=", "(", "threshold", "*", "threshold", ")", "grubbs_score", "=", "(", "(", "(", "len_series", "-", "1", ")", "/", "np", ".", "sqrt", "(", "len_series", ")", ")", "*", "np", ".", "sqrt", "(", "(", "threshold_squared", "/", "(", "(", "len_series", "-", "2", ")", "+", "threshold_squared", ")", ")", ")", ")", "return", "(", "z_score", ">", "grubbs_score", ")" ]
a timeseries is anomalous if the z score is greater than the grubbs score .
train
false
779
def headers_url_generator(resp, fuzzable_req): resp_headers = resp.get_headers() for (parser, header_names) in URL_HEADERS.iteritems(): for header_name in header_names: (header_value, _) = resp_headers.iget(header_name, None) if (header_value is not None): header_value = smart_unicode(header_value, encoding=resp.charset) for ref in parser(resp, header_name, header_value): (yield (ref, fuzzable_req, resp, False))
[ "def", "headers_url_generator", "(", "resp", ",", "fuzzable_req", ")", ":", "resp_headers", "=", "resp", ".", "get_headers", "(", ")", "for", "(", "parser", ",", "header_names", ")", "in", "URL_HEADERS", ".", "iteritems", "(", ")", ":", "for", "header_name", "in", "header_names", ":", "(", "header_value", ",", "_", ")", "=", "resp_headers", ".", "iget", "(", "header_name", ",", "None", ")", "if", "(", "header_value", "is", "not", "None", ")", ":", "header_value", "=", "smart_unicode", "(", "header_value", ",", "encoding", "=", "resp", ".", "charset", ")", "for", "ref", "in", "parser", "(", "resp", ",", "header_name", ",", "header_value", ")", ":", "(", "yield", "(", "ref", ",", "fuzzable_req", ",", "resp", ",", "False", ")", ")" ]
yields tuples containing: * newly found url * the fuzzablerequest instance passed as parameter * the httpresponse generated by the fuzzablerequest * boolean indicating if we trust this reference or not . the newly found urls are extracted from the http response headers such as "location" .
train
false
780
def ttest_ind_from_stats(mean1, std1, nobs1, mean2, std2, nobs2, equal_var=True): if equal_var: (df, denom) = _equal_var_ttest_denom((std1 ** 2), nobs1, (std2 ** 2), nobs2) else: (df, denom) = _unequal_var_ttest_denom((std1 ** 2), nobs1, (std2 ** 2), nobs2) res = _ttest_ind_from_stats(mean1, mean2, denom, df) return Ttest_indResult(*res)
[ "def", "ttest_ind_from_stats", "(", "mean1", ",", "std1", ",", "nobs1", ",", "mean2", ",", "std2", ",", "nobs2", ",", "equal_var", "=", "True", ")", ":", "if", "equal_var", ":", "(", "df", ",", "denom", ")", "=", "_equal_var_ttest_denom", "(", "(", "std1", "**", "2", ")", ",", "nobs1", ",", "(", "std2", "**", "2", ")", ",", "nobs2", ")", "else", ":", "(", "df", ",", "denom", ")", "=", "_unequal_var_ttest_denom", "(", "(", "std1", "**", "2", ")", ",", "nobs1", ",", "(", "std2", "**", "2", ")", ",", "nobs2", ")", "res", "=", "_ttest_ind_from_stats", "(", "mean1", ",", "mean2", ",", "denom", ",", "df", ")", "return", "Ttest_indResult", "(", "*", "res", ")" ]
t-test for means of two independent samples from descriptive statistics .
train
false
781
def dump_js_escaped_json(obj, cls=EdxJSONEncoder): json_string = json.dumps(obj, ensure_ascii=True, cls=cls) json_string = _escape_json_for_js(json_string) return json_string
[ "def", "dump_js_escaped_json", "(", "obj", ",", "cls", "=", "EdxJSONEncoder", ")", ":", "json_string", "=", "json", ".", "dumps", "(", "obj", ",", "ensure_ascii", "=", "True", ",", "cls", "=", "cls", ")", "json_string", "=", "_escape_json_for_js", "(", "json_string", ")", "return", "json_string" ]
json dumps and escapes objects that are safe to be embedded in javascript .
train
false
782
@receiver(post_migrate) def sync_create_groups(sender, **kwargs): if (sender.label == u'accounts'): create_groups(False)
[ "@", "receiver", "(", "post_migrate", ")", "def", "sync_create_groups", "(", "sender", ",", "**", "kwargs", ")", ":", "if", "(", "sender", ".", "label", "==", "u'accounts'", ")", ":", "create_groups", "(", "False", ")" ]
create groups on syncdb .
train
false
783
def _make_logpt(global_RVs, local_RVs, observed_RVs, potentials): factors = ((([(c * v.logpt) for (v, c) in observed_RVs.items()] + [(c * v.logpt) for (v, c) in global_RVs.items()]) + [(c * v.logpt) for (v, (_, c)) in local_RVs.items()]) + potentials) logpt = tt.add(*map(tt.sum, factors)) return logpt
[ "def", "_make_logpt", "(", "global_RVs", ",", "local_RVs", ",", "observed_RVs", ",", "potentials", ")", ":", "factors", "=", "(", "(", "(", "[", "(", "c", "*", "v", ".", "logpt", ")", "for", "(", "v", ",", "c", ")", "in", "observed_RVs", ".", "items", "(", ")", "]", "+", "[", "(", "c", "*", "v", ".", "logpt", ")", "for", "(", "v", ",", "c", ")", "in", "global_RVs", ".", "items", "(", ")", "]", ")", "+", "[", "(", "c", "*", "v", ".", "logpt", ")", "for", "(", "v", ",", "(", "_", ",", "c", ")", ")", "in", "local_RVs", ".", "items", "(", ")", "]", ")", "+", "potentials", ")", "logpt", "=", "tt", ".", "add", "(", "*", "map", "(", "tt", ".", "sum", ",", "factors", ")", ")", "return", "logpt" ]
return expression of log probability .
train
false
784
def generate_prototypes_from_src(src_text): src_text = collapse_braces(strip(src_text)) prototype_pattern = re.compile(prototype, (re.M | re.S)) function_pattern = re.compile(function, (re.M | re.S)) prototypes = prototype_pattern.findall(src_text) functions = function_pattern.findall(src_text) declared_prototypes = list(map(sanitize_prototype, prototypes)) all_prototypes = list(map(sanitize_prototype, functions)) for declared_prototype in declared_prototypes: if (declared_prototype in all_prototypes): all_prototypes.remove(declared_prototype) all_prototypes = sorted(set(all_prototypes), key=all_prototypes.index) return all_prototypes
[ "def", "generate_prototypes_from_src", "(", "src_text", ")", ":", "src_text", "=", "collapse_braces", "(", "strip", "(", "src_text", ")", ")", "prototype_pattern", "=", "re", ".", "compile", "(", "prototype", ",", "(", "re", ".", "M", "|", "re", ".", "S", ")", ")", "function_pattern", "=", "re", ".", "compile", "(", "function", ",", "(", "re", ".", "M", "|", "re", ".", "S", ")", ")", "prototypes", "=", "prototype_pattern", ".", "findall", "(", "src_text", ")", "functions", "=", "function_pattern", ".", "findall", "(", "src_text", ")", "declared_prototypes", "=", "list", "(", "map", "(", "sanitize_prototype", ",", "prototypes", ")", ")", "all_prototypes", "=", "list", "(", "map", "(", "sanitize_prototype", ",", "functions", ")", ")", "for", "declared_prototype", "in", "declared_prototypes", ":", "if", "(", "declared_prototype", "in", "all_prototypes", ")", ":", "all_prototypes", ".", "remove", "(", "declared_prototype", ")", "all_prototypes", "=", "sorted", "(", "set", "(", "all_prototypes", ")", ",", "key", "=", "all_prototypes", ".", "index", ")", "return", "all_prototypes" ]
generate prototypes for all functions of a given source code .
train
false
786
def intrinsic(func): name = getattr(func, '__name__', str(func)) llc = _Intrinsic(name, func) llc._register() return llc
[ "def", "intrinsic", "(", "func", ")", ":", "name", "=", "getattr", "(", "func", ",", "'__name__'", ",", "str", "(", "func", ")", ")", "llc", "=", "_Intrinsic", "(", "name", ",", "func", ")", "llc", ".", "_register", "(", ")", "return", "llc" ]
a decorator marking the decorated function as typing and implementing *func* in nopython mode using the llvmlite irbuilder api .
train
false
787
def __clean_tmp(sfn): if sfn.startswith(os.path.join(tempfile.gettempdir(), salt.utils.files.TEMPFILE_PREFIX)): all_roots = itertools.chain.from_iterable(six.itervalues(__opts__['file_roots'])) in_roots = any((sfn.startswith(root) for root in all_roots)) if (os.path.exists(sfn) and (not in_roots)): os.remove(sfn)
[ "def", "__clean_tmp", "(", "sfn", ")", ":", "if", "sfn", ".", "startswith", "(", "os", ".", "path", ".", "join", "(", "tempfile", ".", "gettempdir", "(", ")", ",", "salt", ".", "utils", ".", "files", ".", "TEMPFILE_PREFIX", ")", ")", ":", "all_roots", "=", "itertools", ".", "chain", ".", "from_iterable", "(", "six", ".", "itervalues", "(", "__opts__", "[", "'file_roots'", "]", ")", ")", "in_roots", "=", "any", "(", "(", "sfn", ".", "startswith", "(", "root", ")", "for", "root", "in", "all_roots", ")", ")", "if", "(", "os", ".", "path", ".", "exists", "(", "sfn", ")", "and", "(", "not", "in_roots", ")", ")", ":", "os", ".", "remove", "(", "sfn", ")" ]
clean out a template temp file .
train
true
788
def from_tree(expr, namespace=None): if isinstance(expr, dict): (op, args) = (expr[u'op'], expr[u'args']) if (op == u'slice'): return expr_utils._slice(*[from_tree(arg, namespace) for arg in args]) if hasattr(blaze.expr, op): cls = getattr(blaze.expr, op) else: cls = expression_from_name(op) if (cls is Symbol): cls = symbol children = [from_tree(arg, namespace) for arg in args] return cls(*children) elif isinstance(expr, (list, tuple)): return tuple((from_tree(arg, namespace) for arg in expr)) if (namespace and (expr in namespace)): return namespace[expr] else: return expr
[ "def", "from_tree", "(", "expr", ",", "namespace", "=", "None", ")", ":", "if", "isinstance", "(", "expr", ",", "dict", ")", ":", "(", "op", ",", "args", ")", "=", "(", "expr", "[", "u'op'", "]", ",", "expr", "[", "u'args'", "]", ")", "if", "(", "op", "==", "u'slice'", ")", ":", "return", "expr_utils", ".", "_slice", "(", "*", "[", "from_tree", "(", "arg", ",", "namespace", ")", "for", "arg", "in", "args", "]", ")", "if", "hasattr", "(", "blaze", ".", "expr", ",", "op", ")", ":", "cls", "=", "getattr", "(", "blaze", ".", "expr", ",", "op", ")", "else", ":", "cls", "=", "expression_from_name", "(", "op", ")", "if", "(", "cls", "is", "Symbol", ")", ":", "cls", "=", "symbol", "children", "=", "[", "from_tree", "(", "arg", ",", "namespace", ")", "for", "arg", "in", "args", "]", "return", "cls", "(", "*", "children", ")", "elif", "isinstance", "(", "expr", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "tuple", "(", "(", "from_tree", "(", "arg", ",", "namespace", ")", "for", "arg", "in", "expr", ")", ")", "if", "(", "namespace", "and", "(", "expr", "in", "namespace", ")", ")", ":", "return", "namespace", "[", "expr", "]", "else", ":", "return", "expr" ]
convert core data structures to blaze expression . core data structure representations created by to_tree are converted back into blaze expressions .
train
false
790
@datastore_rpc._positional(1) def NonTransactional(_func=None, allow_existing=True): if (_func is not None): return NonTransactional()(_func) def outer_wrapper(func): def inner_wrapper(*args, **kwds): if (not IsInTransaction()): return func(*args, **kwds) if (not allow_existing): raise datastore_errors.BadRequestError('Function cannot be called from within a transaction.') txn_connection = _GetConnection() _SetConnection(_thread_local.old_connection) try: return func(*args, **kwds) finally: _SetConnection(txn_connection) return inner_wrapper return outer_wrapper
[ "@", "datastore_rpc", ".", "_positional", "(", "1", ")", "def", "NonTransactional", "(", "_func", "=", "None", ",", "allow_existing", "=", "True", ")", ":", "if", "(", "_func", "is", "not", "None", ")", ":", "return", "NonTransactional", "(", ")", "(", "_func", ")", "def", "outer_wrapper", "(", "func", ")", ":", "def", "inner_wrapper", "(", "*", "args", ",", "**", "kwds", ")", ":", "if", "(", "not", "IsInTransaction", "(", ")", ")", ":", "return", "func", "(", "*", "args", ",", "**", "kwds", ")", "if", "(", "not", "allow_existing", ")", ":", "raise", "datastore_errors", ".", "BadRequestError", "(", "'Function cannot be called from within a transaction.'", ")", "txn_connection", "=", "_GetConnection", "(", ")", "_SetConnection", "(", "_thread_local", ".", "old_connection", ")", "try", ":", "return", "func", "(", "*", "args", ",", "**", "kwds", ")", "finally", ":", "_SetConnection", "(", "txn_connection", ")", "return", "inner_wrapper", "return", "outer_wrapper" ]
a decorator that ensures a function is run outside a transaction .
train
false
793
def getStreamLabel(sampleRate, channels, blockSize): return '{}_{}_{}'.format(sampleRate, channels, blockSize)
[ "def", "getStreamLabel", "(", "sampleRate", ",", "channels", ",", "blockSize", ")", ":", "return", "'{}_{}_{}'", ".", "format", "(", "sampleRate", ",", "channels", ",", "blockSize", ")" ]
returns the string repr of the stream label .
train
false
796
def mkdtemp(suffix='', prefix=template, dir=None): if (dir is None): dir = gettempdir() names = _get_candidate_names() for seq in xrange(TMP_MAX): name = names.next() file = _os.path.join(dir, ((prefix + name) + suffix)) try: _os.mkdir(file, 448) return file except OSError as e: if (e.errno == _errno.EEXIST): continue raise raise IOError, (_errno.EEXIST, 'No usable temporary directory name found')
[ "def", "mkdtemp", "(", "suffix", "=", "''", ",", "prefix", "=", "template", ",", "dir", "=", "None", ")", ":", "if", "(", "dir", "is", "None", ")", ":", "dir", "=", "gettempdir", "(", ")", "names", "=", "_get_candidate_names", "(", ")", "for", "seq", "in", "xrange", "(", "TMP_MAX", ")", ":", "name", "=", "names", ".", "next", "(", ")", "file", "=", "_os", ".", "path", ".", "join", "(", "dir", ",", "(", "(", "prefix", "+", "name", ")", "+", "suffix", ")", ")", "try", ":", "_os", ".", "mkdir", "(", "file", ",", "448", ")", "return", "file", "except", "OSError", "as", "e", ":", "if", "(", "e", ".", "errno", "==", "_errno", ".", "EEXIST", ")", ":", "continue", "raise", "raise", "IOError", ",", "(", "_errno", ".", "EEXIST", ",", "'No usable temporary directory name found'", ")" ]
user-callable function to create and return a unique temporary directory .
train
false
797
def test_es2(): try: from vispy.gloo.gl import es2 except Exception: import ctypes ctypes.TEST_DLL = _DummyObject() from vispy.gloo.gl import es2 _test_function_names(es2) _test_constant_names(es2)
[ "def", "test_es2", "(", ")", ":", "try", ":", "from", "vispy", ".", "gloo", ".", "gl", "import", "es2", "except", "Exception", ":", "import", "ctypes", "ctypes", ".", "TEST_DLL", "=", "_DummyObject", "(", ")", "from", "vispy", ".", "gloo", ".", "gl", "import", "es2", "_test_function_names", "(", "es2", ")", "_test_constant_names", "(", "es2", ")" ]
es2 backend should have all es 2.0 function and constant names .
train
false
798
def atoms(cls, instance_or_dict): field_getter = serializable_getter = instance_or_dict.get try: field_getter = instance_or_dict._data.get except AttributeError: pass sequences = ((cls._field_list, field_getter), (cls._serializables.items(), serializable_getter)) for (sequence, get) in sequences: for (field_name, field) in sequence: (yield (field_name, field, get(field_name, Undefined)))
[ "def", "atoms", "(", "cls", ",", "instance_or_dict", ")", ":", "field_getter", "=", "serializable_getter", "=", "instance_or_dict", ".", "get", "try", ":", "field_getter", "=", "instance_or_dict", ".", "_data", ".", "get", "except", "AttributeError", ":", "pass", "sequences", "=", "(", "(", "cls", ".", "_field_list", ",", "field_getter", ")", ",", "(", "cls", ".", "_serializables", ".", "items", "(", ")", ",", "serializable_getter", ")", ")", "for", "(", "sequence", ",", "get", ")", "in", "sequences", ":", "for", "(", "field_name", ",", "field", ")", "in", "sequence", ":", "(", "yield", "(", "field_name", ",", "field", ",", "get", "(", "field_name", ",", "Undefined", ")", ")", ")" ]
iterator for the atomic components of a model definition and relevant data that creates a 3-tuple of the field's name , the field instance , and its value .
train
false
799
def CDLEVENINGSTAR(barDs, count, penetration=(-4e+37)): return call_talib_with_ohlc(barDs, count, talib.CDLEVENINGSTAR, penetration)
[ "def", "CDLEVENINGSTAR", "(", "barDs", ",", "count", ",", "penetration", "=", "(", "-", "4e+37", ")", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLEVENINGSTAR", ",", "penetration", ")" ]
evening star .
train
false