id_within_dataset
int64
46
2.71M
snippet
stringlengths
63
481k
tokens
sequencelengths
20
15.6k
language
stringclasses
2 values
nl
stringlengths
1
32.4k
is_duplicated
bool
2 classes
2,686,388
def send_mail_template(subject, template, addr_from, addr_to, context=None, attachments=None, fail_silently=None, addr_bcc=None, headers=None):
    """
    Render text and HTML versions of the named template with the
    supplied context dict and send the result as a multipart email.
    """
    if context is None:
        context = {}
    if attachments is None:
        attachments = []
    if fail_silently is None:
        fail_silently = settings.EMAIL_FAIL_SILENTLY
    # Expose yacms's template-accessible settings, which a context
    # processor would normally add for HTTP requests.
    context.update(context_settings())
    # Normalise bare address strings to single-item lists.  Checking for
    # an __iter__ attribute doesn't work here because Python 3 strings
    # define it too.
    if isinstance(addr_to, (str, bytes)):
        addr_to = [addr_to]
    if addr_bcc is not None and isinstance(addr_bcc, (str, bytes)):
        addr_bcc = [addr_bcc]

    def render(ext):
        # Load "<template>.<ext>" and render it with the assembled context.
        return loader.get_template("%s.%s" % (template, ext)).render(Context(context))

    # Build the message with a plain-text body, an HTML alternative and
    # any attachments, then send it.
    msg = EmailMultiAlternatives(subject, render("txt"), addr_from, addr_to, addr_bcc, headers=headers)
    msg.attach_alternative(render("html"), "text/html")
    for attachment in attachments:
        msg.attach(*attachment)
    msg.send(fail_silently=fail_silently)
[ "def", "send_mail_template", "(", "subject", ",", "template", ",", "addr_from", ",", "addr_to", ",", "context", "=", "None", ",", "attachments", "=", "None", ",", "fail_silently", "=", "None", ",", "addr_bcc", "=", "None", ",", "headers", "=", "None", ")", ":", "if", "context", "is", "None", ":", "context", "=", "{", "}", "if", "attachments", "is", "None", ":", "attachments", "=", "[", "]", "if", "fail_silently", "is", "None", ":", "fail_silently", "=", "settings", ".", "EMAIL_FAIL_SILENTLY", "context", ".", "update", "(", "context_settings", "(", ")", ")", "if", "isinstance", "(", "addr_to", ",", "str", ")", "or", "isinstance", "(", "addr_to", ",", "bytes", ")", ":", "addr_to", "=", "[", "addr_to", "]", "if", "addr_bcc", "is", "not", "None", "and", "(", "isinstance", "(", "addr_bcc", ",", "str", ")", "or", "isinstance", "(", "addr_bcc", ",", "bytes", ")", ")", ":", "addr_bcc", "=", "[", "addr_bcc", "]", "render", "=", "lambda", "type", ":", "loader", ".", "get_template", "(", "\"%s.%s\"", "%", "(", "template", ",", "type", ")", ")", ".", "render", "(", "Context", "(", "context", ")", ")", "msg", "=", "EmailMultiAlternatives", "(", "subject", ",", "render", "(", "\"txt\"", ")", ",", "addr_from", ",", "addr_to", ",", "addr_bcc", ",", "headers", "=", "headers", ")", "msg", ".", "attach_alternative", "(", "render", "(", "\"html\"", ")", ",", "\"text/html\"", ")", "for", "attachment", "in", "attachments", ":", "msg", ".", "attach", "(", "*", "attachment", ")", "msg", ".", "send", "(", "fail_silently", "=", "fail_silently", ")" ]
python
Send email rendering text and html versions for the specified template name using the context dictionary passed in.
true
2,686,896
def deep_force_unicode(value):
    """
    Recursively apply force_text to ``value``: descend into lists,
    tuples, sets and dicts (preserving the container type), and convert
    any lazy ``Promise`` encountered.
    """
    if isinstance(value, dict):
        # Recurse via items() so both keys and values are converted.
        value = type(value)(map(deep_force_unicode, value.items()))
    elif isinstance(value, (list, tuple, set)):
        value = type(value)(map(deep_force_unicode, value))
    elif isinstance(value, Promise):
        value = force_text(value)
    return value
[ "def", "deep_force_unicode", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ",", "set", ")", ")", ":", "value", "=", "type", "(", "value", ")", "(", "map", "(", "deep_force_unicode", ",", "value", ")", ")", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "value", "=", "type", "(", "value", ")", "(", "map", "(", "deep_force_unicode", ",", "value", ".", "items", "(", ")", ")", ")", "elif", "isinstance", "(", "value", ",", "Promise", ")", ":", "value", "=", "force_text", "(", "value", ")", "return", "value" ]
python
Recursively call force_text on value.
true
2,686,990
def set_site(request):
    """
    Put the selected site ID into the session - posted to from the
    "Select site" drop-down in the header of the admin. The site ID is
    then used in favour of the current request's domain in
    ``yacms.core.managers.CurrentSiteManager``.
    """
    site_id = int(request.GET["site_id"])
    if not request.user.is_superuser:
        # Non-superusers must hold an explicit permission for the site.
        try:
            SitePermission.objects.get(user=request.user, sites=site_id)
        except SitePermission.DoesNotExist:
            raise PermissionDenied
    request.session["site_id"] = site_id
    admin_url = reverse("admin:index")
    destination = next_url(request) or admin_url
    # Don't redirect to a change view for an object that won't exist
    # on the selected site - go to its list view instead.
    if destination.startswith(admin_url):
        parts = destination.split("/")
        if len(parts) > 4 and parts[4].isdigit():
            destination = "/".join(parts[:4])
    return redirect(destination)
[ "def", "set_site", "(", "request", ")", ":", "site_id", "=", "int", "(", "request", ".", "GET", "[", "\"site_id\"", "]", ")", "if", "not", "request", ".", "user", ".", "is_superuser", ":", "try", ":", "SitePermission", ".", "objects", ".", "get", "(", "user", "=", "request", ".", "user", ",", "sites", "=", "site_id", ")", "except", "SitePermission", ".", "DoesNotExist", ":", "raise", "PermissionDenied", "request", ".", "session", "[", "\"site_id\"", "]", "=", "site_id", "admin_url", "=", "reverse", "(", "\"admin:index\"", ")", "next", "=", "next_url", "(", "request", ")", "or", "admin_url", "if", "next", ".", "startswith", "(", "admin_url", ")", ":", "parts", "=", "next", ".", "split", "(", "\"/\"", ")", "if", "len", "(", "parts", ")", ">", "4", "and", "parts", "[", "4", "]", ".", "isdigit", "(", ")", ":", "next", "=", "\"/\"", ".", "join", "(", "parts", "[", ":", "4", "]", ")", "return", "redirect", "(", "next", ")" ]
python
Put the selected site ID into the session - posted to from the "Select site" drop-down in the header of the admin. The site ID is then used in favour of the current request's domain in ``yacms.core.managers.CurrentSiteManager``.
true
2,687,127
def path_for_import(name):
    """
    Return the directory containing the given package or module.
    """
    module = import_module(name)
    module_path = os.path.abspath(module.__file__)
    return os.path.dirname(module_path)
[ "def", "path_for_import", "(", "name", ")", ":", "return", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "import_module", "(", "name", ")", ".", "__file__", ")", ")" ]
python
Returns the directory path for the given package or module.
true
2,687,454
def read_no_interrupt(p):
    """Read from a pipe, ignoring EINTR errors.

    This is necessary because when reading from pipes with GUI event
    loops running in the background, often interrupts are raised that
    stop the command from completing.

    Returns the data read, or None when the read was interrupted by
    EINTR.
    """
    import errno
    try:
        return p.read()
    except IOError as err:
        # Only swallow interrupted-system-call errors; anything else is
        # a real failure and must propagate.  Note: `except IOError as
        # err` replaces the Python-2-only `except IOError, err` syntax
        # and works on Python 2.6+ and 3.x alike.
        if err.errno != errno.EINTR:
            raise
[ "def", "read_no_interrupt", "(", "p", ")", ":", "import", "errno", "try", ":", "return", "p", ".", "read", "(", ")", "except", "IOError", ",", "err", ":", "if", "err", ".", "errno", "!=", "errno", ".", "EINTR", ":", "raise" ]
python
Read from a pipe ignoring EINTR errors. This is necessary because when reading from pipes with GUI event loops running in the background, often interrupts are raised that stop the command from completing.
true
2,687,567
def collector():
    """TestSuite replacement entry point.

    Use anywhere you might use a unittest.TestSuite. The collector
    loads options from all config files and executes
    loader.loadTestsFromNames() on the configured testNames, falling
    back to '.' when none are configured.
    """
    # Plugins implementing any of these hooks are disabled: we don't
    # control the test runner here, so they would never be able to run.
    # finalize() is also never called, but plugins that use it are left
    # enabled because capture needs it.
    incompatible = ('report', 'prepareTest', 'prepareTestLoader',
                    'prepareTestRunner', 'setOutputStream')
    plugins = RestrictedPluginManager(exclude=incompatible)
    conf = Config(files=all_config_files(), plugins=plugins)
    conf.configure(argv=['collector'])
    loader = defaultTestLoader(conf)
    names = conf.testNames or ('.',)
    suite = loader.loadTestsFromNames(names)
    return FinalizingSuiteWrapper(suite, plugins.finalize)
[ "def", "collector", "(", ")", ":", "setuptools_incompat", "=", "(", "'report'", ",", "'prepareTest'", ",", "'prepareTestLoader'", ",", "'prepareTestRunner'", ",", "'setOutputStream'", ")", "plugins", "=", "RestrictedPluginManager", "(", "exclude", "=", "setuptools_incompat", ")", "conf", "=", "Config", "(", "files", "=", "all_config_files", "(", ")", ",", "plugins", "=", "plugins", ")", "conf", ".", "configure", "(", "argv", "=", "[", "'collector'", "]", ")", "loader", "=", "defaultTestLoader", "(", "conf", ")", "if", "conf", ".", "testNames", ":", "suite", "=", "loader", ".", "loadTestsFromNames", "(", "conf", ".", "testNames", ")", "else", ":", "suite", "=", "loader", ".", "loadTestsFromNames", "(", "(", "'.'", ",", ")", ")", "return", "FinalizingSuiteWrapper", "(", "suite", ",", "plugins", ".", "finalize", ")" ]
python
TestSuite replacement entry point. Use anywhere you might use a unittest.TestSuite. The collector will, by default, load options from all config files and execute loader.loadTestsFromNames() on the configured testNames, or '.' if no testNames are configured.
true
2,687,793
def compress_dhist(dh):
    """Compress a directory history into a smaller one.

    Return a new list consisting of the elements before the final 10
    with duplicates removed (first occurrence wins), followed by the
    last 10 elements unchanged.
    """
    older, recent = dh[:-10], dh[-10:]
    seen = set()
    deduped = []
    for entry in older:
        if entry not in seen:
            seen.add(entry)
            deduped.append(entry)
    return deduped + recent
[ "def", "compress_dhist", "(", "dh", ")", ":", "head", ",", "tail", "=", "dh", "[", ":", "-", "10", "]", ",", "dh", "[", "-", "10", ":", "]", "newhead", "=", "[", "]", "done", "=", "set", "(", ")", "for", "h", "in", "head", ":", "if", "h", "in", "done", ":", "continue", "newhead", ".", "append", "(", "h", ")", "done", ".", "add", "(", "h", ")", "return", "newhead", "+", "tail" ]
python
Compress a directory history into a new one with at most 20 entries. Return a new list made from the first and last 10 elements of dhist after removal of duplicates.
true
2,687,794
def magics_class(cls):
    """Class decorator for all subclasses of the main Magics class.

    Any class that subclasses Magics *must* also apply this decorator,
    to ensure that all the methods that have been decorated as
    line/cell magics get correctly registered in the class instance.

    This is necessary because when method decorators run, the class
    does not exist yet, so they temporarily store their information
    into a module global. Application of this class decorator copies
    that global data to the class instance and clears the global.

    Obviously, this mechanism is not thread-safe, which means that the
    *creation* of subclasses of Magic should only be done in a
    single-thread context. Instantiation of the classes has no
    restrictions. Given that these classes are typically created at
    IPython startup time and before user application code becomes
    active, in practice this should not pose any problems.
    """
    cls.registered = True
    # Move the accumulated registrations from the module global onto
    # the class, then reset the global for the next subclass.
    cls.magics = {'line': magics['line'], 'cell': magics['cell']}
    magics['line'] = {}
    magics['cell'] = {}
    return cls
[ "def", "magics_class", "(", "cls", ")", ":", "cls", ".", "registered", "=", "True", "cls", ".", "magics", "=", "dict", "(", "line", "=", "magics", "[", "'line'", "]", ",", "cell", "=", "magics", "[", "'cell'", "]", ")", "magics", "[", "'line'", "]", "=", "{", "}", "magics", "[", "'cell'", "]", "=", "{", "}", "return", "cls" ]
python
Class decorator for all subclasses of the main Magics class. Any class that subclasses Magics *must* also apply this decorator, to ensure that all the methods that have been decorated as line/cell magics get correctly registered in the class instance. This is necessary because when method decorators run, the class does not exist yet, so they temporarily store their information into a module global. Application of this class decorator copies that global data to the class instance and clears the global. Obviously, this mechanism is not thread-safe, which means that the *creation* of subclasses of Magic should only be done in a single-thread context. Instantiation of the classes has no restrictions. Given that these classes are typically created at IPython startup time and before user application code becomes active, in practice this should not pose any problems.
true
2,687,795
def record_magic(dct, magic_kind, magic_name, func):
    """Utility function to store a function as a magic of a specific kind.

    Parameters
    ----------
    dct : dict
        A dictionary with 'line' and 'cell' subdicts.
    magic_kind : str
        Kind of magic to be stored; 'line_cell' registers the function
        under both the 'line' and 'cell' subdicts.
    magic_name : str
        Key to store the magic as.
    func : function
        Callable object to store.
    """
    kinds = ('line', 'cell') if magic_kind == 'line_cell' else (magic_kind,)
    for kind in kinds:
        dct[kind][magic_name] = func
[ "def", "record_magic", "(", "dct", ",", "magic_kind", ",", "magic_name", ",", "func", ")", ":", "if", "magic_kind", "==", "'line_cell'", ":", "dct", "[", "'line'", "]", "[", "magic_name", "]", "=", "dct", "[", "'cell'", "]", "[", "magic_name", "]", "=", "func", "else", ":", "dct", "[", "magic_kind", "]", "[", "magic_name", "]", "=", "func" ]
python
Utility function to store a function as a magic of a specific kind. Parameters ---------- dct : dict A dictionary with 'line' and 'cell' subdicts. magic_kind : str Kind of magic to be stored. magic_name : str Key to store the magic as. func : function Callable object to store.
true
2,687,796
def _method_magic_marker(magic_kind):
    """Decorator factory for methods in Magics subclasses.

    Returns a decorator that registers the decorated method as a magic
    of ``magic_kind``.  NOTE(review): Python 2 code (``func.func_name``,
    ``basestring``); under Python 3 these would be ``func.__name__`` and
    ``str``.
    """
    validate_type(magic_kind)

    # This is a closure to capture the magic_kind. We could also use a class,
    # but it's overkill for just that one bit of state.
    def magic_deco(arg):
        # Pass-through wrapper: the magic itself simply calls the
        # original function; the real work is the registration below.
        call = lambda f, *a, **k: f(*a, **k)

        if callable(arg):
            # "Naked" decorator call (just @foo, no args)
            func = arg
            name = func.func_name
            retval = decorator(call, func)
            # NOTE(review): the *name* string, not the function, is
            # recorded here — presumably resolved to the bound method
            # later on the instance; confirm against the Magics class
            # machinery.
            record_magic(magics, magic_kind, name, name)
        elif isinstance(arg, basestring):
            # Decorator called with arguments (@foo('bar')): register
            # under the given name when the decorated function arrives.
            name = arg
            def mark(func, *a, **kw):
                record_magic(magics, magic_kind, name, func.func_name)
                return decorator(call, func)
            retval = mark
        else:
            raise TypeError("Decorator can only be called with "
                            "string or function")
        return retval

    # Ensure the resulting decorator has a usable docstring
    magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
    return magic_deco
[ "def", "_method_magic_marker", "(", "magic_kind", ")", ":", "validate_type", "(", "magic_kind", ")", "def", "magic_deco", "(", "arg", ")", ":", "call", "=", "lambda", "f", ",", "*", "a", ",", "**", "k", ":", "f", "(", "*", "a", ",", "**", "k", ")", "if", "callable", "(", "arg", ")", ":", "func", "=", "arg", "name", "=", "func", ".", "func_name", "retval", "=", "decorator", "(", "call", ",", "func", ")", "record_magic", "(", "magics", ",", "magic_kind", ",", "name", ",", "name", ")", "elif", "isinstance", "(", "arg", ",", "basestring", ")", ":", "name", "=", "arg", "def", "mark", "(", "func", ",", "*", "a", ",", "**", "kw", ")", ":", "record_magic", "(", "magics", ",", "magic_kind", ",", "name", ",", "func", ".", "func_name", ")", "return", "decorator", "(", "call", ",", "func", ")", "retval", "=", "mark", "else", ":", "raise", "TypeError", "(", "\"Decorator can only be called with \"", "\"string or function\"", ")", "return", "retval", "magic_deco", ".", "__doc__", "=", "_docstring_template", ".", "format", "(", "'method'", ",", "magic_kind", ")", "return", "magic_deco" ]
python
Decorator factory for methods in Magics subclasses.
true
2,688,895
def create_typestr2type_dicts(dont_include_in_type2typestr=None):
    """Return dictionaries mapping lower case typename (e.g. 'tuple') to type
    objects from the types package, and vice versa.

    Parameters
    ----------
    dont_include_in_type2typestr : sequence of str, optional
        Lower-case type names (without the 'Type' suffix) to omit from
        the reverse (type -> name) mapping.  Defaults to ``["lambda"]``.
    """
    # Use a None sentinel instead of a mutable default argument; the
    # historical default of ["lambda"] is preserved.
    if dont_include_in_type2typestr is None:
        dont_include_in_type2typestr = ["lambda"]
    typenamelist = [tname for tname in dir(types) if tname.endswith("Type")]
    typestr2type, type2typestr = {}, {}
    for tname in typenamelist:
        name = tname[:-4].lower()  # Cut 'Type' off the end of the name
        obj = getattr(types, tname)
        typestr2type[name] = obj
        if name not in dont_include_in_type2typestr:
            type2typestr[obj] = name
    return typestr2type, type2typestr
[ "def", "create_typestr2type_dicts", "(", "dont_include_in_type2typestr", "=", "[", "\"lambda\"", "]", ")", ":", "typenamelist", "=", "[", "tname", "for", "tname", "in", "dir", "(", "types", ")", "if", "tname", ".", "endswith", "(", "\"Type\"", ")", "]", "typestr2type", ",", "type2typestr", "=", "{", "}", ",", "{", "}", "for", "tname", "in", "typenamelist", ":", "name", "=", "tname", "[", ":", "-", "4", "]", ".", "lower", "(", ")", "obj", "=", "getattr", "(", "types", ",", "tname", ")", "typestr2type", "[", "name", "]", "=", "obj", "if", "name", "not", "in", "dont_include_in_type2typestr", ":", "type2typestr", "[", "obj", "]", "=", "name", "return", "typestr2type", ",", "type2typestr" ]
python
Return dictionaries mapping lower case typename (e.g. 'tuple') to type objects from the types package, and vice versa.
true
2,688,896
def is_type(obj, typestr_or_type):
    """is_type(obj, typestr_or_type) verifies if obj is of a certain type. It
    can take strings or actual python types for the second argument, i.e.
    'tuple'<->TupleType. 'all' matches all types.

    NOTE(review): Python 2 code — ``types.TypeType`` no longer exists on
    Python 3 (it was an alias of ``type``).  Relies on a module-level
    ``typestr2type`` mapping, presumably built by
    ``create_typestr2type_dicts`` — confirm in the enclosing module.

    TODO: Should be extended for choosing more than one type."""
    # The wildcard matches without inspecting obj at all.
    if typestr_or_type == "all":
        return True
    if type(typestr_or_type) == types.TypeType:
        # Already a type object: use it directly.
        test_type = typestr_or_type
    else:
        # Otherwise treat it as a name; unknown names yield the falsy
        # default and fall through to the False return below.
        test_type = typestr2type.get(typestr_or_type, False)
    if test_type:
        return isinstance(obj, test_type)
    return False
[ "def", "is_type", "(", "obj", ",", "typestr_or_type", ")", ":", "if", "typestr_or_type", "==", "\"all\"", ":", "return", "True", "if", "type", "(", "typestr_or_type", ")", "==", "types", ".", "TypeType", ":", "test_type", "=", "typestr_or_type", "else", ":", "test_type", "=", "typestr2type", ".", "get", "(", "typestr_or_type", ",", "False", ")", "if", "test_type", ":", "return", "isinstance", "(", "obj", ",", "test_type", ")", "return", "False" ]
python
is_type(obj, typestr_or_type) verifies if obj is of a certain type. It can take strings or actual python types for the second argument, i.e. 'tuple'<->TupleType. 'all' matches all types. TODO: Should be extended for choosing more than one type.
true
2,688,897
def dict_dir(obj):
    """Produce a dictionary of an object's attributes.

    Builds on dir2 by checking that a getattr() call actually succeeds.
    """
    attrs = {}
    for name in dir2(obj):
        # This seemingly unnecessary try/except is genuinely needed:
        # metaclasses and properties (Traits in particular) can create
        # "write only" attributes, where getattr() fails even though the
        # name appears in the object's listing.
        try:
            value = getattr(obj, name)
        except AttributeError:
            continue
        attrs[name] = value
    return attrs
[ "def", "dict_dir", "(", "obj", ")", ":", "ns", "=", "{", "}", "for", "key", "in", "dir2", "(", "obj", ")", ":", "try", ":", "ns", "[", "key", "]", "=", "getattr", "(", "obj", ",", "key", ")", "except", "AttributeError", ":", "pass", "return", "ns" ]
python
Produce a dictionary of an object's attributes. Builds on dir2 by checking that a getattr() call actually succeeds.
true
2,688,898
def filter_ns(ns, name_pattern="*", type_pattern="all", ignore_case=True, show_all=True):
    """Filter a namespace dictionary by name pattern and item type.

    NOTE(review): Python 2 code (``dict.iteritems``); under Python 3
    this would be ``ns.items()``.  ``show_hidden`` is presumably a
    module-level helper that filters underscore-prefixed names — not
    visible here, confirm in the enclosing module.  ``is_type`` is the
    sibling type-matching helper.
    """
    # Translate the shell-style glob to an anchored regex:
    # '*' -> '.*' and '?' -> '.'.
    pattern = name_pattern.replace("*",".*").replace("?",".")
    if ignore_case:
        reg = re.compile(pattern+"$", re.I)
    else:
        reg = re.compile(pattern+"$")
    # Check each one matches regex; shouldn't be hidden; of correct type.
    return dict((key,obj) for key, obj in ns.iteritems()
                if reg.match(key)
                and show_hidden(key, show_all)
                and is_type(obj, type_pattern) )
[ "def", "filter_ns", "(", "ns", ",", "name_pattern", "=", "\"*\"", ",", "type_pattern", "=", "\"all\"", ",", "ignore_case", "=", "True", ",", "show_all", "=", "True", ")", ":", "pattern", "=", "name_pattern", ".", "replace", "(", "\"*\"", ",", "\".*\"", ")", ".", "replace", "(", "\"?\"", ",", "\".\"", ")", "if", "ignore_case", ":", "reg", "=", "re", ".", "compile", "(", "pattern", "+", "\"$\"", ",", "re", ".", "I", ")", "else", ":", "reg", "=", "re", ".", "compile", "(", "pattern", "+", "\"$\"", ")", "return", "dict", "(", "(", "key", ",", "obj", ")", "for", "key", ",", "obj", "in", "ns", ".", "iteritems", "(", ")", "if", "reg", ".", "match", "(", "key", ")", "and", "show_hidden", "(", "key", ",", "show_all", ")", "and", "is_type", "(", "obj", ",", "type_pattern", ")", ")" ]
python
Filter a namespace dictionary by name pattern and item type.
true
2,688,899
def list_namespace(namespace, type_pattern, filter, ignore_case=False, show_all=False):
    """Return dictionary of all objects in a namespace dictionary that match
    type_pattern and filter.

    ``filter`` is a dotted glob such as ``mod.*.name``: the first
    segment is matched against ``namespace`` itself, and each remaining
    segment is matched recursively against the attributes of every hit,
    with result keys qualified as ``name.inner_name``.

    NOTE(review): Python 2 code (``dict.iteritems``).
    """
    pattern_list=filter.split(".")
    if len(pattern_list) == 1:
        # Leaf segment: apply both the name pattern and the requested
        # type filter.
        return filter_ns(namespace, name_pattern=pattern_list[0],
                         type_pattern=type_pattern,
                         ignore_case=ignore_case, show_all=show_all)
    else:
        # This is where we can change if all objects should be searched or
        # only modules. Just change the type_pattern to module to search only
        # modules
        filtered = filter_ns(namespace, name_pattern=pattern_list[0],
                             type_pattern="all",
                             ignore_case=ignore_case, show_all=show_all)
        results = {}
        for name, obj in filtered.iteritems():
            # Recurse into each match's attribute dict with the rest of
            # the dotted pattern.
            ns = list_namespace(dict_dir(obj), type_pattern,
                                ".".join(pattern_list[1:]),
                                ignore_case=ignore_case, show_all=show_all)
            for inner_name, inner_obj in ns.iteritems():
                results["%s.%s"%(name,inner_name)] = inner_obj
        return results
[ "def", "list_namespace", "(", "namespace", ",", "type_pattern", ",", "filter", ",", "ignore_case", "=", "False", ",", "show_all", "=", "False", ")", ":", "pattern_list", "=", "filter", ".", "split", "(", "\".\"", ")", "if", "len", "(", "pattern_list", ")", "==", "1", ":", "return", "filter_ns", "(", "namespace", ",", "name_pattern", "=", "pattern_list", "[", "0", "]", ",", "type_pattern", "=", "type_pattern", ",", "ignore_case", "=", "ignore_case", ",", "show_all", "=", "show_all", ")", "else", ":", "filtered", "=", "filter_ns", "(", "namespace", ",", "name_pattern", "=", "pattern_list", "[", "0", "]", ",", "type_pattern", "=", "\"all\"", ",", "ignore_case", "=", "ignore_case", ",", "show_all", "=", "show_all", ")", "results", "=", "{", "}", "for", "name", ",", "obj", "in", "filtered", ".", "iteritems", "(", ")", ":", "ns", "=", "list_namespace", "(", "dict_dir", "(", "obj", ")", ",", "type_pattern", ",", "\".\"", ".", "join", "(", "pattern_list", "[", "1", ":", "]", ")", ",", "ignore_case", "=", "ignore_case", ",", "show_all", "=", "show_all", ")", "for", "inner_name", ",", "inner_obj", "in", "ns", ".", "iteritems", "(", ")", ":", "results", "[", "\"%s.%s\"", "%", "(", "name", ",", "inner_name", ")", "]", "=", "inner_obj", "return", "results" ]
python
Return dictionary of all objects in a namespace dictionary that match type_pattern and filter.
true
2,689,142
def externals_finder(dirname, filename):
    """Find any 'svn:externals' directories.

    Parses the given svn property file (K/V record format: a kind line
    like ``K 13`` or ``V 42`` giving a byte length, followed by that
    many bytes of data) and yields one path per entry listed under the
    'svn:externals' property.  Yields nothing when the property is
    absent.
    """
    found = False
    data = None
    # Use a context manager so the handle is closed on *every* exit
    # path; the original leaked it when the 'V' record was found, since
    # only the for-else (property absent) branch called f.close().
    with open(filename, 'rt') as f:
        for line in iter(f.readline, ''):  # can't use direct iter!
            parts = line.split()
            if len(parts) == 2:
                kind, length = parts
                data = f.read(int(length))
                if kind == 'K' and data == 'svn:externals':
                    found = True
                elif kind == 'V' and found:
                    # data now holds the externals value; stop scanning.
                    break
        else:
            # Property never found: nothing to yield.
            return
    for line in data.splitlines():
        parts = line.split()
        if parts:
            yield joinpath(dirname, parts[0])
[ "def", "externals_finder", "(", "dirname", ",", "filename", ")", ":", "found", "=", "False", "f", "=", "open", "(", "filename", ",", "'rt'", ")", "for", "line", "in", "iter", "(", "f", ".", "readline", ",", "''", ")", ":", "parts", "=", "line", ".", "split", "(", ")", "if", "len", "(", "parts", ")", "==", "2", ":", "kind", ",", "length", "=", "parts", "data", "=", "f", ".", "read", "(", "int", "(", "length", ")", ")", "if", "kind", "==", "'K'", "and", "data", "==", "'svn:externals'", ":", "found", "=", "True", "elif", "kind", "==", "'V'", "and", "found", ":", "f", ".", "close", "(", ")", "break", "else", ":", "f", ".", "close", "(", ")", "return", "for", "line", "in", "data", ".", "splitlines", "(", ")", ":", "parts", "=", "line", ".", "split", "(", ")", "if", "parts", ":", "yield", "joinpath", "(", "dirname", ",", "parts", "[", "0", "]", ")" ]
python
Find any 'svn:externals' directories
true
2,689,453
def extract_bugs(changelog):
    """Takes output from git log --oneline and extracts bug numbers.

    Returns the distinct bug numbers as a sorted list of strings.
    """
    bug_re = re.compile(r'\bbug (\d+)\b', re.IGNORECASE)
    found = set()
    for entry in changelog:
        found.update(bug_re.findall(entry))
    return sorted(found)
[ "def", "extract_bugs", "(", "changelog", ")", ":", "bug_regexp", "=", "re", ".", "compile", "(", "r'\\bbug (\\d+)\\b'", ",", "re", ".", "IGNORECASE", ")", "bugs", "=", "set", "(", ")", "for", "line", "in", "changelog", ":", "for", "bug", "in", "bug_regexp", ".", "findall", "(", "line", ")", ":", "bugs", ".", "add", "(", "bug", ")", "return", "sorted", "(", "list", "(", "bugs", ")", ")" ]
python
Takes output from git log --oneline and extracts bug numbers
true
2,689,542
def register_adapter(mod, func):
    """
    Registers a callable to be executed when a module is imported. If the
    module already exists then the callable will be executed immediately.
    You can register the same module multiple times, the callables will be
    executed in the order they were registered. The root module must exist
    (i.e. be importable) otherwise an `ImportError` will be thrown.

    @param mod: The fully qualified module string, as used in the imports
        statement. E.g. 'foo.bar.baz'. The string must map to a module
        otherwise the callable will not fire.
    @param func: The function to call when C{mod} is imported. This function
        must take one arg, the newly imported C{module} object.
    @type func: callable
    @raise TypeError: C{func} must be callable
    """
    # Use the callable() builtin instead of probing for __call__ by hand.
    if not callable(func):
        raise TypeError('func must be callable')

    imports.when_imported(mod, func)
[ "def", "register_adapter", "(", "mod", ",", "func", ")", ":", "if", "not", "hasattr", "(", "func", ",", "'__call__'", ")", ":", "raise", "TypeError", "(", "'func must be callable'", ")", "imports", ".", "when_imported", "(", "mod", ",", "func", ")" ]
python
Registers a callable to be executed when a module is imported. If the module already exists then the callable will be executed immediately. You can register the same module multiple times, the callables will be executed in the order they were registered. The root module must exist (i.e. be importable) otherwise an `ImportError` will be thrown. @param mod: The fully qualified module string, as used in the imports statement. E.g. 'foo.bar.baz'. The string must map to a module otherwise the callable will not fire. @param func: The function to call when C{mod} is imported. This function must take one arg, the newly imported C{module} object. @type func: callable @raise TypeError: C{func} must be callable
true
2,689,581
def getDjangoObjects(context):
    """
    Returns a reference to the C{django_objects} on the context. If it
    doesn't exist then it is created.

    @rtype: Instance of L{DjangoReferenceCollection}
    @since: 0.5
    """
    extra = context.extra
    key = 'django_objects'
    # Lazily create the collection on first access.
    if key not in extra:
        extra[key] = DjangoReferenceCollection()
    return extra[key]
[ "def", "getDjangoObjects", "(", "context", ")", ":", "c", "=", "context", ".", "extra", "k", "=", "'django_objects'", "try", ":", "return", "c", "[", "k", "]", "except", "KeyError", ":", "c", "[", "k", "]", "=", "DjangoReferenceCollection", "(", ")", "return", "c", "[", "k", "]" ]
python
Returns a reference to the C{django_objects} on the context. If it doesn't exist then it is created. @rtype: Instance of L{DjangoReferenceCollection} @since: 0.5
true
2,689,582
def writeDjangoObject(obj, encoder=None):
    """
    The Django ORM creates new instances of objects for each db request.
    This is a problem for PyAMF as it uses the C{id(obj)} of the object to do
    reference checking.

    We could just ignore the problem, but the objects are conceptually the
    same so the effort should be made to attempt to resolve references for a
    given object graph.

    We create a new map on the encoder context object which contains a dict of
    C{object.__class__: {key1: object1, key2: object2, .., keyn: objectn}}. We
    use the primary key to do the reference checking.

    @since: 0.5
    """
    pk = obj.pk
    if pk is None:
        # Unsaved instance: no primary key to dedupe on, encode as-is.
        encoder.writeObject(obj)
        return

    django_objects = getDjangoObjects(encoder.context)
    kls = obj.__class__
    try:
        target = django_objects.getClassKey(kls, pk)
    except KeyError:
        # First time we've seen this (class, pk): remember this instance
        # so later duplicates resolve to it.
        django_objects.addClassKey(kls, pk, obj)
        target = obj
    encoder.writeObject(target)
[ "def", "writeDjangoObject", "(", "obj", ",", "encoder", "=", "None", ")", ":", "s", "=", "obj", ".", "pk", "if", "s", "is", "None", ":", "encoder", ".", "writeObject", "(", "obj", ")", "return", "django_objects", "=", "getDjangoObjects", "(", "encoder", ".", "context", ")", "kls", "=", "obj", ".", "__class__", "try", ":", "referenced_object", "=", "django_objects", ".", "getClassKey", "(", "kls", ",", "s", ")", "except", "KeyError", ":", "referenced_object", "=", "obj", "django_objects", ".", "addClassKey", "(", "kls", ",", "s", ",", "obj", ")", "encoder", ".", "writeObject", "(", "referenced_object", ")" ]
python
The Django ORM creates new instances of objects for each db request. This is a problem for PyAMF as it uses the C{id(obj)} of the object to do reference checking. We could just ignore the problem, but the objects are conceptually the same so the effort should be made to attempt to resolve references for a given object graph. We create a new map on the encoder context object which contains a dict of C{object.__class__: {key1: object1, key2: object2, .., keyn: objectn}}. We use the primary key to do the reference checking. @since: 0.5
true
2,690,018
def get_profile_for_user(user):
    """
    Returns site-specific profile for this user, creating it on first
    access and caching it on the user object thereafter.

    Raises ``ProfileNotConfigured`` if ``settings.ACCOUNTS_PROFILE_MODEL``
    is not set, and ``ImproperlyConfigured`` if the corresponding model
    can't be found.
    """
    if not hasattr(user, '_yacms_profile'):
        # get_profile_model raises ProfileNotConfigured when
        # ACCOUNTS_PROFILE_MODEL isn't set.
        model = get_profile_model()
        manager = model._default_manager.using(user._state.db)
        field_name = get_profile_user_fieldname(model, user.__class__)
        lookup = {field_name: user}
        profile = manager.get_or_create(**lookup)[0]
        profile.user = user
        user._yacms_profile = profile
    return user._yacms_profile
[ "def", "get_profile_for_user", "(", "user", ")", ":", "if", "not", "hasattr", "(", "user", ",", "'_yacms_profile'", ")", ":", "profile_model", "=", "get_profile_model", "(", ")", "profile_manager", "=", "profile_model", ".", "_default_manager", ".", "using", "(", "user", ".", "_state", ".", "db", ")", "user_field", "=", "get_profile_user_fieldname", "(", "profile_model", ",", "user", ".", "__class__", ")", "profile", ",", "created", "=", "profile_manager", ".", "get_or_create", "(", "**", "{", "user_field", ":", "user", "}", ")", "profile", ".", "user", "=", "user", "user", ".", "_yacms_profile", "=", "profile", "return", "user", ".", "_yacms_profile" ]
python
Returns site-specific profile for this user. Raises ``ProfileNotConfigured`` if ``settings.ACCOUNTS_PROFILE_MODEL`` is not set, and ``ImproperlyConfigured`` if the corresponding model can't be found.
true
2,690,020
def get_profile_user_fieldname(profile_model=None, user_model=None): """ Returns the name of the first field on the profile model that points to the ``auth.User`` model. """ Profile = profile_model or get_profile_model() User = user_model or get_user_model() for field in Profile._meta.fields: if field.rel and field.rel.to == User: return field.name raise ImproperlyConfigured("Value for ACCOUNTS_PROFILE_MODEL does not " "contain a ForeignKey field for auth.User: %s" % Profile.__name__)
[ "def", "get_profile_user_fieldname", "(", "profile_model", "=", "None", ",", "user_model", "=", "None", ")", ":", "Profile", "=", "profile_model", "or", "get_profile_model", "(", ")", "User", "=", "user_model", "or", "get_user_model", "(", ")", "for", "field", "in", "Profile", ".", "_meta", ".", "fields", ":", "if", "field", ".", "rel", "and", "field", ".", "rel", ".", "to", "==", "User", ":", "return", "field", ".", "name", "raise", "ImproperlyConfigured", "(", "\"Value for ACCOUNTS_PROFILE_MODEL does not \"", "\"contain a ForeignKey field for auth.User: %s\"", "%", "Profile", ".", "__name__", ")" ]
python
Returns the name of the first field on the profile model that points to the ``auth.User`` model.
true
2,690,076
def extract_vars(*names,**kw): """Extract a set of variables by name from another frame. :Parameters: - `*names`: strings One or more variable names which will be extracted from the caller's frame. :Keywords: - `depth`: integer (0) How many frames in the stack to walk when looking for your variables. Examples: In [2]: def func(x): ...: y = 1 ...: print sorted(extract_vars('x','y').items()) ...: In [3]: func('hello') [('x', 'hello'), ('y', 1)] """ depth = kw.get('depth',0) callerNS = sys._getframe(depth+1).f_locals return dict((k,callerNS[k]) for k in names)
[ "def", "extract_vars", "(", "*", "names", ",", "**", "kw", ")", ":", "depth", "=", "kw", ".", "get", "(", "'depth'", ",", "0", ")", "callerNS", "=", "sys", ".", "_getframe", "(", "depth", "+", "1", ")", ".", "f_locals", "return", "dict", "(", "(", "k", ",", "callerNS", "[", "k", "]", ")", "for", "k", "in", "names", ")" ]
python
Extract a set of variables by name from another frame. :Parameters: - `*names`: strings One or more variable names which will be extracted from the caller's frame. :Keywords: - `depth`: integer (0) How many frames in the stack to walk when looking for your variables. Examples: In [2]: def func(x): ...: y = 1 ...: print sorted(extract_vars('x','y').items()) ...: In [3]: func('hello') [('x', 'hello'), ('y', 1)]
true
2,690,077
def extract_vars_above(*names): """Extract a set of variables by name from another frame. Similar to extractVars(), but with a specified depth of 1, so that names are exctracted exactly from above the caller. This is simply a convenience function so that the very common case (for us) of skipping exactly 1 frame doesn't have to construct a special dict for keyword passing.""" callerNS = sys._getframe(2).f_locals return dict((k,callerNS[k]) for k in names)
[ "def", "extract_vars_above", "(", "*", "names", ")", ":", "callerNS", "=", "sys", ".", "_getframe", "(", "2", ")", ".", "f_locals", "return", "dict", "(", "(", "k", ",", "callerNS", "[", "k", "]", ")", "for", "k", "in", "names", ")" ]
python
Extract a set of variables by name from another frame. Similar to extractVars(), but with a specified depth of 1, so that names are exctracted exactly from above the caller. This is simply a convenience function so that the very common case (for us) of skipping exactly 1 frame doesn't have to construct a special dict for keyword passing.
true
2,690,078
def debugx(expr,pre_msg=''): """Print the value of an expression from the caller's frame. Takes an expression, evaluates it in the caller's frame and prints both the given expression and the resulting value (as well as a debug mark indicating the name of the calling function. The input must be of a form suitable for eval(). An optional message can be passed, which will be prepended to the printed expr->value pair.""" cf = sys._getframe(1) print '[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr, eval(expr,cf.f_globals,cf.f_locals))
[ "def", "debugx", "(", "expr", ",", "pre_msg", "=", "''", ")", ":", "cf", "=", "sys", ".", "_getframe", "(", "1", ")", "print", "'[DBG:%s] %s%s -> %r'", "%", "(", "cf", ".", "f_code", ".", "co_name", ",", "pre_msg", ",", "expr", ",", "eval", "(", "expr", ",", "cf", ".", "f_globals", ",", "cf", ".", "f_locals", ")", ")" ]
python
Print the value of an expression from the caller's frame. Takes an expression, evaluates it in the caller's frame and prints both the given expression and the resulting value (as well as a debug mark indicating the name of the calling function. The input must be of a form suitable for eval(). An optional message can be passed, which will be prepended to the printed expr->value pair.
true
2,690,079
def extract_module_locals(depth=0): """Returns (module, locals) of the funciton `depth` frames away from the caller""" f = sys._getframe(depth + 1) global_ns = f.f_globals module = sys.modules[global_ns['__name__']] return (module, f.f_locals)
[ "def", "extract_module_locals", "(", "depth", "=", "0", ")", ":", "f", "=", "sys", ".", "_getframe", "(", "depth", "+", "1", ")", "global_ns", "=", "f", ".", "f_globals", "module", "=", "sys", ".", "modules", "[", "global_ns", "[", "'__name__'", "]", "]", "return", "(", "module", ",", "f", ".", "f_locals", ")" ]
python
Returns (module, locals) of the funciton `depth` frames away from the caller
true
2,690,135
def _extract_future_flags(globs): """ Return the compiler-flags associated with the future features that have been imported into the given namespace (globs). """ flags = 0 for fname in __future__.all_feature_names: feature = globs.get(fname, None) if feature is getattr(__future__, fname): flags |= feature.compiler_flag return flags
[ "def", "_extract_future_flags", "(", "globs", ")", ":", "flags", "=", "0", "for", "fname", "in", "__future__", ".", "all_feature_names", ":", "feature", "=", "globs", ".", "get", "(", "fname", ",", "None", ")", "if", "feature", "is", "getattr", "(", "__future__", ",", "fname", ")", ":", "flags", "|=", "feature", ".", "compiler_flag", "return", "flags" ]
python
Return the compiler-flags associated with the future features that have been imported into the given namespace (globs).
true
2,690,136
def _normalize_module(module, depth=2): """ Return the module specified by `module`. In particular: - If `module` is a module, then return module. - If `module` is a string, then import and return the module with that name. - If `module` is None, then return the calling module. The calling module is assumed to be the module of the stack frame at the given depth in the call stack. """ if inspect.ismodule(module): return module elif isinstance(module, (str, unicode)): return __import__(module, globals(), locals(), ["*"]) elif module is None: return sys.modules[sys._getframe(depth).f_globals['__name__']] else: raise TypeError("Expected a module, string, or None")
[ "def", "_normalize_module", "(", "module", ",", "depth", "=", "2", ")", ":", "if", "inspect", ".", "ismodule", "(", "module", ")", ":", "return", "module", "elif", "isinstance", "(", "module", ",", "(", "str", ",", "unicode", ")", ")", ":", "return", "__import__", "(", "module", ",", "globals", "(", ")", ",", "locals", "(", ")", ",", "[", "\"*\"", "]", ")", "elif", "module", "is", "None", ":", "return", "sys", ".", "modules", "[", "sys", ".", "_getframe", "(", "depth", ")", ".", "f_globals", "[", "'__name__'", "]", "]", "else", ":", "raise", "TypeError", "(", "\"Expected a module, string, or None\"", ")" ]
python
Return the module specified by `module`. In particular: - If `module` is a module, then return module. - If `module` is a string, then import and return the module with that name. - If `module` is None, then return the calling module. The calling module is assumed to be the module of the stack frame at the given depth in the call stack.
true
2,690,137
def _exception_traceback(exc_info): """ Return a string containing a traceback message for the given exc_info tuple (as returned by sys.exc_info()). """ # Get a traceback message. excout = StringIO() exc_type, exc_val, exc_tb = exc_info traceback.print_exception(exc_type, exc_val, exc_tb, file=excout) return excout.getvalue()
[ "def", "_exception_traceback", "(", "exc_info", ")", ":", "excout", "=", "StringIO", "(", ")", "exc_type", ",", "exc_val", ",", "exc_tb", "=", "exc_info", "traceback", ".", "print_exception", "(", "exc_type", ",", "exc_val", ",", "exc_tb", ",", "file", "=", "excout", ")", "return", "excout", ".", "getvalue", "(", ")" ]
python
Return a string containing a traceback message for the given exc_info tuple (as returned by sys.exc_info()).
true
2,690,142
def run_docstring_examples(f, globs, verbose=False, name="NoName", compileflags=None, optionflags=0): """ Test examples in the given object's docstring (`f`), using `globs` as globals. Optional argument `name` is used in failure messages. If the optional argument `verbose` is true, then generate output even if there are no failures. `compileflags` gives the set of flags that should be used by the Python compiler when running the examples. If not specified, then it will default to the set of future-import flags that apply to `globs`. Optional keyword arg `optionflags` specifies options for the testing and output. See the documentation for `testmod` for more information. """ # Find, parse, and run all tests in the given module. finder = DocTestFinder(verbose=verbose, recurse=False) runner = DocTestRunner(verbose=verbose, optionflags=optionflags) for test in finder.find(f, name, globs=globs): runner.run(test, compileflags=compileflags)
[ "def", "run_docstring_examples", "(", "f", ",", "globs", ",", "verbose", "=", "False", ",", "name", "=", "\"NoName\"", ",", "compileflags", "=", "None", ",", "optionflags", "=", "0", ")", ":", "finder", "=", "DocTestFinder", "(", "verbose", "=", "verbose", ",", "recurse", "=", "False", ")", "runner", "=", "DocTestRunner", "(", "verbose", "=", "verbose", ",", "optionflags", "=", "optionflags", ")", "for", "test", "in", "finder", ".", "find", "(", "f", ",", "name", ",", "globs", "=", "globs", ")", ":", "runner", ".", "run", "(", "test", ",", "compileflags", "=", "compileflags", ")" ]
python
Test examples in the given object's docstring (`f`), using `globs` as globals. Optional argument `name` is used in failure messages. If the optional argument `verbose` is true, then generate output even if there are no failures. `compileflags` gives the set of flags that should be used by the Python compiler when running the examples. If not specified, then it will default to the set of future-import flags that apply to `globs`. Optional keyword arg `optionflags` specifies options for the testing and output. See the documentation for `testmod` for more information.
true
2,690,144
def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, **options): """ Convert doctest tests for a module to a unittest test suite. This converts each documentation string in a module that contains doctest tests to a unittest test case. If any of the tests in a doc string fail, then the test case fails. An exception is raised showing the name of the file containing the test and a (sometimes approximate) line number. The `module` argument provides the module to be tested. The argument can be either a module or a module name. If no argument is given, the calling module is used. A number of options may be provided as keyword arguments: setUp A set-up function. This is called before running the tests in each file. The setUp function will be passed a DocTest object. The setUp function can access the test globals as the globs attribute of the test passed. tearDown A tear-down function. This is called after running the tests in each file. The tearDown function will be passed a DocTest object. The tearDown function can access the test globals as the globs attribute of the test passed. globs A dictionary containing initial global variables for the tests. optionflags A set of doctest option flags expressed as an integer. """ if test_finder is None: test_finder = DocTestFinder() module = _normalize_module(module) tests = test_finder.find(module, globs=globs, extraglobs=extraglobs) if globs is None: globs = module.__dict__ if not tests: # Why do we want to do this? Because it reveals a bug that might # otherwise be hidden. 
raise ValueError(module, "has no tests") tests.sort() suite = unittest.TestSuite() for test in tests: if len(test.examples) == 0: continue if not test.filename: filename = module.__file__ if filename[-4:] in (".pyc", ".pyo"): filename = filename[:-1] elif sys.platform.startswith('java') and \ filename.endswith('$py.class'): filename = '%s.py' % filename[:-9] test.filename = filename suite.addTest(DocTestCase(test, **options)) return suite
[ "def", "DocTestSuite", "(", "module", "=", "None", ",", "globs", "=", "None", ",", "extraglobs", "=", "None", ",", "test_finder", "=", "None", ",", "**", "options", ")", ":", "if", "test_finder", "is", "None", ":", "test_finder", "=", "DocTestFinder", "(", ")", "module", "=", "_normalize_module", "(", "module", ")", "tests", "=", "test_finder", ".", "find", "(", "module", ",", "globs", "=", "globs", ",", "extraglobs", "=", "extraglobs", ")", "if", "globs", "is", "None", ":", "globs", "=", "module", ".", "__dict__", "if", "not", "tests", ":", "raise", "ValueError", "(", "module", ",", "\"has no tests\"", ")", "tests", ".", "sort", "(", ")", "suite", "=", "unittest", ".", "TestSuite", "(", ")", "for", "test", "in", "tests", ":", "if", "len", "(", "test", ".", "examples", ")", "==", "0", ":", "continue", "if", "not", "test", ".", "filename", ":", "filename", "=", "module", ".", "__file__", "if", "filename", "[", "-", "4", ":", "]", "in", "(", "\".pyc\"", ",", "\".pyo\"", ")", ":", "filename", "=", "filename", "[", ":", "-", "1", "]", "elif", "sys", ".", "platform", ".", "startswith", "(", "'java'", ")", "and", "filename", ".", "endswith", "(", "'$py.class'", ")", ":", "filename", "=", "'%s.py'", "%", "filename", "[", ":", "-", "9", "]", "test", ".", "filename", "=", "filename", "suite", ".", "addTest", "(", "DocTestCase", "(", "test", ",", "**", "options", ")", ")", "return", "suite" ]
python
Convert doctest tests for a module to a unittest test suite. This converts each documentation string in a module that contains doctest tests to a unittest test case. If any of the tests in a doc string fail, then the test case fails. An exception is raised showing the name of the file containing the test and a (sometimes approximate) line number. The `module` argument provides the module to be tested. The argument can be either a module or a module name. If no argument is given, the calling module is used. A number of options may be provided as keyword arguments: setUp A set-up function. This is called before running the tests in each file. The setUp function will be passed a DocTest object. The setUp function can access the test globals as the globs attribute of the test passed. tearDown A tear-down function. This is called after running the tests in each file. The tearDown function will be passed a DocTest object. The tearDown function can access the test globals as the globs attribute of the test passed. globs A dictionary containing initial global variables for the tests. optionflags A set of doctest option flags expressed as an integer.
true
2,690,146
def DocFileSuite(*paths, **kw): """A unittest suite for one or more doctest files. The path to each doctest file is given as a string; the interpretation of that string depends on the keyword argument "module_relative". A number of options may be provided as keyword arguments: module_relative If "module_relative" is True, then the given file paths are interpreted as os-independent module-relative paths. By default, these paths are relative to the calling module's directory; but if the "package" argument is specified, then they are relative to that package. To ensure os-independence, "filename" should use "/" characters to separate path segments, and may not be an absolute path (i.e., it may not begin with "/"). If "module_relative" is False, then the given file paths are interpreted as os-specific paths. These paths may be absolute or relative (to the current working directory). package A Python package or the name of a Python package whose directory should be used as the base directory for module relative paths. If "package" is not specified, then the calling module's directory is used as the base directory for module relative filenames. It is an error to specify "package" if "module_relative" is False. setUp A set-up function. This is called before running the tests in each file. The setUp function will be passed a DocTest object. The setUp function can access the test globals as the globs attribute of the test passed. tearDown A tear-down function. This is called after running the tests in each file. The tearDown function will be passed a DocTest object. The tearDown function can access the test globals as the globs attribute of the test passed. globs A dictionary containing initial global variables for the tests. optionflags A set of doctest option flags expressed as an integer. parser A DocTestParser (or subclass) that should be used to extract tests from the files. 
""" suite = unittest.TestSuite() # We do this here so that _normalize_module is called at the right # level. If it were called in DocFileTest, then this function # would be the caller and we might guess the package incorrectly. if kw.get('module_relative', True): kw['package'] = _normalize_module(kw.get('package')) for path in paths: suite.addTest(DocFileTest(path, **kw)) return suite
[ "def", "DocFileSuite", "(", "*", "paths", ",", "**", "kw", ")", ":", "suite", "=", "unittest", ".", "TestSuite", "(", ")", "if", "kw", ".", "get", "(", "'module_relative'", ",", "True", ")", ":", "kw", "[", "'package'", "]", "=", "_normalize_module", "(", "kw", ".", "get", "(", "'package'", ")", ")", "for", "path", "in", "paths", ":", "suite", ".", "addTest", "(", "DocFileTest", "(", "path", ",", "**", "kw", ")", ")", "return", "suite" ]
python
A unittest suite for one or more doctest files. The path to each doctest file is given as a string; the interpretation of that string depends on the keyword argument "module_relative". A number of options may be provided as keyword arguments: module_relative If "module_relative" is True, then the given file paths are interpreted as os-independent module-relative paths. By default, these paths are relative to the calling module's directory; but if the "package" argument is specified, then they are relative to that package. To ensure os-independence, "filename" should use "/" characters to separate path segments, and may not be an absolute path (i.e., it may not begin with "/"). If "module_relative" is False, then the given file paths are interpreted as os-specific paths. These paths may be absolute or relative (to the current working directory). package A Python package or the name of a Python package whose directory should be used as the base directory for module relative paths. If "package" is not specified, then the calling module's directory is used as the base directory for module relative filenames. It is an error to specify "package" if "module_relative" is False. setUp A set-up function. This is called before running the tests in each file. The setUp function will be passed a DocTest object. The setUp function can access the test globals as the globs attribute of the test passed. tearDown A tear-down function. This is called after running the tests in each file. The tearDown function will be passed a DocTest object. The tearDown function can access the test globals as the globs attribute of the test passed. globs A dictionary containing initial global variables for the tests. optionflags A set of doctest option flags expressed as an integer. parser A DocTestParser (or subclass) that should be used to extract tests from the files.
true
2,690,148
def testsource(module, name): """Extract the test sources from a doctest docstring as a script. Provide the module (or dotted name of the module) containing the test to be debugged and the name (within the module) of the object with the doc string with tests to be debugged. """ module = _normalize_module(module) tests = DocTestFinder().find(module) test = [t for t in tests if t.name == name] if not test: raise ValueError(name, "not found in tests") test = test[0] testsrc = script_from_examples(test.docstring) return testsrc
[ "def", "testsource", "(", "module", ",", "name", ")", ":", "module", "=", "_normalize_module", "(", "module", ")", "tests", "=", "DocTestFinder", "(", ")", ".", "find", "(", "module", ")", "test", "=", "[", "t", "for", "t", "in", "tests", "if", "t", ".", "name", "==", "name", "]", "if", "not", "test", ":", "raise", "ValueError", "(", "name", ",", "\"not found in tests\"", ")", "test", "=", "test", "[", "0", "]", "testsrc", "=", "script_from_examples", "(", "test", ".", "docstring", ")", "return", "testsrc" ]
python
Extract the test sources from a doctest docstring as a script. Provide the module (or dotted name of the module) containing the test to be debugged and the name (within the module) of the object with the doc string with tests to be debugged.
true
2,690,149
def debug_src(src, pm=False, globs=None): """Debug a single doctest docstring, in argument `src`'""" testsrc = script_from_examples(src) debug_script(testsrc, pm, globs)
[ "def", "debug_src", "(", "src", ",", "pm", "=", "False", ",", "globs", "=", "None", ")", ":", "testsrc", "=", "script_from_examples", "(", "src", ")", "debug_script", "(", "testsrc", ",", "pm", ",", "globs", ")" ]
python
Debug a single doctest docstring, in argument `src`
true
2,690,150
def debug_script(src, pm=False, globs=None): "Debug a test script. `src` is the script, as a string." import pdb # Note that tempfile.NameTemporaryFile() cannot be used. As the # docs say, a file so created cannot be opened by name a second time # on modern Windows boxes, and execfile() needs to open it. srcfilename = tempfile.mktemp(".py", "doctestdebug") f = open(srcfilename, 'w') f.write(src) f.close() try: if globs: globs = globs.copy() else: globs = {} if pm: try: execfile(srcfilename, globs, globs) except: print sys.exc_info()[1] pdb.post_mortem(sys.exc_info()[2]) else: # Note that %r is vital here. '%s' instead can, e.g., cause # backslashes to get treated as metacharacters on Windows. pdb.run("execfile(%r)" % srcfilename, globs, globs) finally: os.remove(srcfilename)
[ "def", "debug_script", "(", "src", ",", "pm", "=", "False", ",", "globs", "=", "None", ")", ":", "import", "pdb", "srcfilename", "=", "tempfile", ".", "mktemp", "(", "\".py\"", ",", "\"doctestdebug\"", ")", "f", "=", "open", "(", "srcfilename", ",", "'w'", ")", "f", ".", "write", "(", "src", ")", "f", ".", "close", "(", ")", "try", ":", "if", "globs", ":", "globs", "=", "globs", ".", "copy", "(", ")", "else", ":", "globs", "=", "{", "}", "if", "pm", ":", "try", ":", "execfile", "(", "srcfilename", ",", "globs", ",", "globs", ")", "except", ":", "print", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "pdb", ".", "post_mortem", "(", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")", "else", ":", "pdb", ".", "run", "(", "\"execfile(%r)\"", "%", "srcfilename", ",", "globs", ",", "globs", ")", "finally", ":", "os", ".", "remove", "(", "srcfilename", ")" ]
python
Debug a test script. `src` is the script, as a string.
true
2,690,272
def get_auth_settings(): """ Returns all the key/secret settings for Twitter access, only if they're all defined. """ from yacms.conf import settings try: auth_settings = (settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET, settings.TWITTER_ACCESS_TOKEN_KEY, settings.TWITTER_ACCESS_TOKEN_SECRET) except AttributeError: return None else: return auth_settings if all(auth_settings) else None
[ "def", "get_auth_settings", "(", ")", ":", "from", "yacms", ".", "conf", "import", "settings", "try", ":", "auth_settings", "=", "(", "settings", ".", "TWITTER_CONSUMER_KEY", ",", "settings", ".", "TWITTER_CONSUMER_SECRET", ",", "settings", ".", "TWITTER_ACCESS_TOKEN_KEY", ",", "settings", ".", "TWITTER_ACCESS_TOKEN_SECRET", ")", "except", "AttributeError", ":", "return", "None", "else", ":", "return", "auth_settings", "if", "all", "(", "auth_settings", ")", "else", "None" ]
python
Returns all the key/secret settings for Twitter access, only if they're all defined.
true
2,691,116
def is_editable(obj, request): """ Returns ``True`` if the object is editable for the request. First check for a custom ``editable`` handler on the object, otherwise use the logged in user and check change permissions for the object's model. """ if hasattr(obj, "is_editable"): return obj.is_editable(request) else: codename = get_permission_codename("change", obj._meta) perm = "%s.%s" % (obj._meta.app_label, codename) return (request.user.is_authenticated() and has_site_permission(request.user) and request.user.has_perm(perm))
[ "def", "is_editable", "(", "obj", ",", "request", ")", ":", "if", "hasattr", "(", "obj", ",", "\"is_editable\"", ")", ":", "return", "obj", ".", "is_editable", "(", "request", ")", "else", ":", "codename", "=", "get_permission_codename", "(", "\"change\"", ",", "obj", ".", "_meta", ")", "perm", "=", "\"%s.%s\"", "%", "(", "obj", ".", "_meta", ".", "app_label", ",", "codename", ")", "return", "(", "request", ".", "user", ".", "is_authenticated", "(", ")", "and", "has_site_permission", "(", "request", ".", "user", ")", "and", "request", ".", "user", ".", "has_perm", "(", "perm", ")", ")" ]
python
Returns ``True`` if the object is editable for the request. First check for a custom ``editable`` handler on the object, otherwise use the logged in user and check change permissions for the object's model.
true
2,691,118
def is_spam(request, form, url): """ Main entry point for spam handling - called from the comment view and page processor for ``yacms.forms``, to check if posted content is spam. Spam filters are configured via the ``SPAM_FILTERS`` setting. """ for spam_filter_path in settings.SPAM_FILTERS: spam_filter = import_dotted_path(spam_filter_path) if spam_filter(request, form, url): return True
[ "def", "is_spam", "(", "request", ",", "form", ",", "url", ")", ":", "for", "spam_filter_path", "in", "settings", ".", "SPAM_FILTERS", ":", "spam_filter", "=", "import_dotted_path", "(", "spam_filter_path", ")", "if", "spam_filter", "(", "request", ",", "form", ",", "url", ")", ":", "return", "True" ]
python
Main entry point for spam handling - called from the comment view and page processor for ``yacms.forms``, to check if posted content is spam. Spam filters are configured via the ``SPAM_FILTERS`` setting.
true
2,691,121
def set_cookie(response, name, value, expiry_seconds=None, secure=False): """ Set cookie wrapper that allows number of seconds to be given as the expiry time, and ensures values are correctly encoded. """ if expiry_seconds is None: expiry_seconds = 90 * 24 * 60 * 60 # Default to 90 days. expires = datetime.strftime(datetime.utcnow() + timedelta(seconds=expiry_seconds), "%a, %d-%b-%Y %H:%M:%S GMT") # Django doesn't seem to support unicode cookie keys correctly on # Python 2. Work around by encoding it. See # https://code.djangoproject.com/ticket/19802 try: response.set_cookie(name, value, expires=expires, secure=secure) except (KeyError, TypeError): response.set_cookie(name.encode('utf-8'), value, expires=expires, secure=secure)
[ "def", "set_cookie", "(", "response", ",", "name", ",", "value", ",", "expiry_seconds", "=", "None", ",", "secure", "=", "False", ")", ":", "if", "expiry_seconds", "is", "None", ":", "expiry_seconds", "=", "90", "*", "24", "*", "60", "*", "60", "expires", "=", "datetime", ".", "strftime", "(", "datetime", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "expiry_seconds", ")", ",", "\"%a, %d-%b-%Y %H:%M:%S GMT\"", ")", "try", ":", "response", ".", "set_cookie", "(", "name", ",", "value", ",", "expires", "=", "expires", ",", "secure", "=", "secure", ")", "except", "(", "KeyError", ",", "TypeError", ")", ":", "response", ".", "set_cookie", "(", "name", ".", "encode", "(", "'utf-8'", ")", ",", "value", ",", "expires", "=", "expires", ",", "secure", "=", "secure", ")" ]
python
Set cookie wrapper that allows number of seconds to be given as the expiry time, and ensures values are correctly encoded.
true
2,691,481
def get_init(dirname): """Get __init__ file path for module directory Parameters ---------- dirname : str Find the __init__ file in directory `dirname` Returns ------- init_path : str Path to __init__ file """ fbase = os.path.join(dirname, "__init__") for ext in [".py", ".pyw"]: fname = fbase + ext if os.path.isfile(fname): return fname
[ "def", "get_init", "(", "dirname", ")", ":", "fbase", "=", "os", ".", "path", ".", "join", "(", "dirname", ",", "\"__init__\"", ")", "for", "ext", "in", "[", "\".py\"", ",", "\".pyw\"", "]", ":", "fname", "=", "fbase", "+", "ext", "if", "os", ".", "path", ".", "isfile", "(", "fname", ")", ":", "return", "fname" ]
python
Get __init__ file path for module directory Parameters ---------- dirname : str Find the __init__ file in directory `dirname` Returns ------- init_path : str Path to __init__ file
true
2,691,482
def find_mod(module_name): """Find module `module_name` on sys.path Return the path to module `module_name`. If `module_name` refers to a module directory then return path to __init__ file. Return full path of module or None if module is missing or does not have .py or .pyw extension. We are not interested in running bytecode. Parameters ---------- module_name : str Returns ------- modulepath : str Path to module `module_name`. """ parts = module_name.split(".") basepath = find_module(parts[0]) for submodname in parts[1:]: basepath = find_module(submodname, [basepath]) if basepath and os.path.isdir(basepath): basepath = get_init(basepath) return basepath
[ "def", "find_mod", "(", "module_name", ")", ":", "parts", "=", "module_name", ".", "split", "(", "\".\"", ")", "basepath", "=", "find_module", "(", "parts", "[", "0", "]", ")", "for", "submodname", "in", "parts", "[", "1", ":", "]", ":", "basepath", "=", "find_module", "(", "submodname", ",", "[", "basepath", "]", ")", "if", "basepath", "and", "os", ".", "path", ".", "isdir", "(", "basepath", ")", ":", "basepath", "=", "get_init", "(", "basepath", ")", "return", "basepath" ]
python
Find module `module_name` on sys.path Return the path to module `module_name`. If `module_name` refers to a module directory then return path to __init__ file. Return full path of module or None if module is missing or does not have .py or .pyw extension. We are not interested in running bytecode. Parameters ---------- module_name : str Returns ------- modulepath : str Path to module `module_name`.
true
2,691,524
def set_default_interface(etree): """ Sets the default interface that PyAMF will use to deal with XML entities (both objects and blobs). """ global types, ET, modules t = _get_etree_type(etree) _types = set(types or []) _types.update([t]) types = tuple(_types) modules[t] = etree old, ET = ET, etree return old
[ "def", "set_default_interface", "(", "etree", ")", ":", "global", "types", ",", "ET", ",", "modules", "t", "=", "_get_etree_type", "(", "etree", ")", "_types", "=", "set", "(", "types", "or", "[", "]", ")", "_types", ".", "update", "(", "[", "t", "]", ")", "types", "=", "tuple", "(", "_types", ")", "modules", "[", "t", "]", "=", "etree", "old", ",", "ET", "=", "ET", ",", "etree", "return", "old" ]
python
Sets the default interface that PyAMF will use to deal with XML entities (both objects and blobs).
true
2,691,525
def find_libs():
    """
    Run through L{ETREE_MODULES} and find C{ElementTree} implementations so
    that any type can be encoded.

    We work through the C implementations first, then the pure Python
    versions. The downside to this is that B{all} libraries will be imported
    but I{only} one is ever used. The libs are small (relatively) and the
    flexibility that this gives seems to outweigh the cost. Time will tell.
    """
    from pyamf.util import get_module

    found_types = []
    found_modules = {}

    for mod_name in ETREE_MODULES:
        try:
            etree = get_module(mod_name)
        except ImportError:
            # Implementation not installed - try the next candidate.
            continue

        etree_type = _get_etree_type(etree)
        found_types.append(etree_type)
        found_modules[etree_type] = etree

    return tuple(found_types), found_modules
[ "def", "find_libs", "(", ")", ":", "from", "pyamf", ".", "util", "import", "get_module", "types", "=", "[", "]", "mapping", "=", "{", "}", "for", "mod", "in", "ETREE_MODULES", ":", "try", ":", "etree", "=", "get_module", "(", "mod", ")", "except", "ImportError", ":", "continue", "t", "=", "_get_etree_type", "(", "etree", ")", "types", ".", "append", "(", "t", ")", "mapping", "[", "t", "]", "=", "etree", "return", "tuple", "(", "types", ")", ",", "mapping" ]
python
Run through L{ETREE_MODULES} and find C{ElementTree} implementations so that any type can be encoded. We work through the C implementations first, then the pure Python versions. The downside to this is that B{all} libraries will be imported but I{only} one is ever used. The libs are small (relatively) and the flexibility that this gives seems to outweigh the cost. Time will tell.
true
2,691,702
def unregister_class(alias):
    """
    Opposite of L{register_class}.

    @raise UnknownClassAlias: Unknown alias.
    """
    try:
        found = CLASS_CACHE[alias]
    except KeyError:
        raise UnknownClassAlias('Unknown alias %r' % (alias,))

    # Named aliases are cached under both the alias string and the class;
    # anonymous ones only under the class itself.
    if not found.anonymous:
        del CLASS_CACHE[found.alias]

    del CLASS_CACHE[found.klass]

    return found
[ "def", "unregister_class", "(", "alias", ")", ":", "try", ":", "x", "=", "CLASS_CACHE", "[", "alias", "]", "except", "KeyError", ":", "raise", "UnknownClassAlias", "(", "'Unknown alias %r'", "%", "(", "alias", ",", ")", ")", "if", "not", "x", ".", "anonymous", ":", "del", "CLASS_CACHE", "[", "x", ".", "alias", "]", "del", "CLASS_CACHE", "[", "x", ".", "klass", "]", "return", "x" ]
python
Opposite of L{register_class}. @raise UnknownClassAlias: Unknown alias.
true
2,691,703
def get_class_alias(klass_or_alias):
    """
    Finds the L{ClassAlias} that is registered to C{klass_or_alias}.

    If a string is supplied and no related L{ClassAlias} is found, the alias
    is loaded via L{load_class}.

    @raise UnknownClassAlias: Unknown alias
    """
    is_string = isinstance(klass_or_alias, python.str_types)

    try:
        return CLASS_CACHE[klass_or_alias]
    except KeyError:
        pass

    if is_string:
        # String lookups fall back to resolving the class dynamically.
        return load_class(klass_or_alias)

    raise UnknownClassAlias('Unknown alias for %r' % (klass_or_alias,))
[ "def", "get_class_alias", "(", "klass_or_alias", ")", ":", "if", "isinstance", "(", "klass_or_alias", ",", "python", ".", "str_types", ")", ":", "try", ":", "return", "CLASS_CACHE", "[", "klass_or_alias", "]", "except", "KeyError", ":", "return", "load_class", "(", "klass_or_alias", ")", "try", ":", "return", "CLASS_CACHE", "[", "klass_or_alias", "]", "except", "KeyError", ":", "raise", "UnknownClassAlias", "(", "'Unknown alias for %r'", "%", "(", "klass_or_alias", ",", ")", ")" ]
python
Finds the L{ClassAlias} that is registered to C{klass_or_alias}. If a string is supplied and no related L{ClassAlias} is found, the alias is loaded via L{load_class}. @raise UnknownClassAlias: Unknown alias
true
2,691,707
def get_decoder(encoding, *args, **kwargs):
    """
    Returns a L{codec.Decoder} capable of decoding AMF[C{encoding}] streams.

    @raise ValueError: Unknown C{encoding}.
    """
    # Prefer the C accelerated codecs, falling back to pure Python.
    if encoding == AMF0:
        try:
            from cpyamf import amf0
        except ImportError:
            from pyamf import amf0

        return amf0.Decoder(*args, **kwargs)

    if encoding == AMF3:
        try:
            from cpyamf import amf3
        except ImportError:
            from pyamf import amf3

        return amf3.Decoder(*args, **kwargs)

    raise ValueError("Unknown encoding %r" % (encoding,))
[ "def", "get_decoder", "(", "encoding", ",", "*", "args", ",", "**", "kwargs", ")", ":", "def", "_get_decoder_class", "(", ")", ":", "if", "encoding", "==", "AMF0", ":", "try", ":", "from", "cpyamf", "import", "amf0", "except", "ImportError", ":", "from", "pyamf", "import", "amf0", "return", "amf0", ".", "Decoder", "elif", "encoding", "==", "AMF3", ":", "try", ":", "from", "cpyamf", "import", "amf3", "except", "ImportError", ":", "from", "pyamf", "import", "amf3", "return", "amf3", ".", "Decoder", "raise", "ValueError", "(", "\"Unknown encoding %r\"", "%", "(", "encoding", ",", ")", ")", "return", "_get_decoder_class", "(", ")", "(", "*", "args", ",", "**", "kwargs", ")" ]
python
Returns a L{codec.Decoder} capable of decoding AMF[C{encoding}] streams. @raise ValueError: Unknown C{encoding}.
true
2,691,708
def get_encoder(encoding, *args, **kwargs):
    """
    Returns a L{codec.Encoder} capable of encoding AMF[C{encoding}] streams.

    @raise ValueError: Unknown C{encoding}.
    """
    # Prefer the C accelerated codecs, falling back to pure Python.
    if encoding == AMF0:
        try:
            from cpyamf import amf0
        except ImportError:
            from pyamf import amf0

        return amf0.Encoder(*args, **kwargs)

    if encoding == AMF3:
        try:
            from cpyamf import amf3
        except ImportError:
            from pyamf import amf3

        return amf3.Encoder(*args, **kwargs)

    raise ValueError("Unknown encoding %r" % (encoding,))
[ "def", "get_encoder", "(", "encoding", ",", "*", "args", ",", "**", "kwargs", ")", ":", "def", "_get_encoder_class", "(", ")", ":", "if", "encoding", "==", "AMF0", ":", "try", ":", "from", "cpyamf", "import", "amf0", "except", "ImportError", ":", "from", "pyamf", "import", "amf0", "return", "amf0", ".", "Encoder", "elif", "encoding", "==", "AMF3", ":", "try", ":", "from", "cpyamf", "import", "amf3", "except", "ImportError", ":", "from", "pyamf", "import", "amf3", "return", "amf3", ".", "Encoder", "raise", "ValueError", "(", "\"Unknown encoding %r\"", "%", "(", "encoding", ",", ")", ")", "return", "_get_encoder_class", "(", ")", "(", "*", "args", ",", "**", "kwargs", ")" ]
python
Returns a L{codec.Encoder} capable of encoding AMF[C{encoding}] streams. @raise ValueError: Unknown C{encoding}.
true
2,691,709
def flex_loader(alias):
    """
    Loader for L{Flex<pyamf.flex>} framework compatibility classes.

    @raise UnknownClassAlias: Trying to load an unknown Flex compatibility
        class.
    """
    if not alias.startswith('flex.'):
        return

    # Importing the matching module registers its compatibility classes.
    prefix_modules = (
        ('flex.messaging.messages', 'pyamf.flex.messaging'),
        ('flex.messaging.io', 'pyamf.flex'),
        ('flex.data.messages', 'pyamf.flex.data'),
    )

    try:
        for prefix, mod_name in prefix_modules:
            if alias.startswith(prefix):
                __import__(mod_name)
                break

        return CLASS_CACHE[alias]
    except KeyError:
        raise UnknownClassAlias(alias)
[ "def", "flex_loader", "(", "alias", ")", ":", "if", "not", "alias", ".", "startswith", "(", "'flex.'", ")", ":", "return", "try", ":", "if", "alias", ".", "startswith", "(", "'flex.messaging.messages'", ")", ":", "import", "pyamf", ".", "flex", ".", "messaging", "elif", "alias", ".", "startswith", "(", "'flex.messaging.io'", ")", ":", "import", "pyamf", ".", "flex", "elif", "alias", ".", "startswith", "(", "'flex.data.messages'", ")", ":", "import", "pyamf", ".", "flex", ".", "data", "return", "CLASS_CACHE", "[", "alias", "]", "except", "KeyError", ":", "raise", "UnknownClassAlias", "(", "alias", ")" ]
python
Loader for L{Flex<pyamf.flex>} framework compatibility classes. @raise UnknownClassAlias: Trying to load an unknown Flex compatibility class.
true
2,691,710
def add_type(type_, func=None):
    """
    Adds a custom type to L{TYPE_MAP}. A custom type allows fine grain control
    of what to encode to an AMF data stream.

    @param type_: A class or callable (or a list/tuple of them) to register.
    @param func: Optional callable used when encoding instances of C{type_}.
    @raise TypeError: Unable to add as a custom type (expected a class or
        callable).
    @raise KeyError: Type already exists.
    @see: L{get_type} and L{remove_type}
    """
    def _check_type(type_):
        if not (isinstance(type_, python.class_types) or
                hasattr(type_, '__call__')):
            # Bug fix: the original message was quoted as
            # r'Unable to add '%r' ...' which parses as str % str and raised
            # "not all arguments converted" instead of the intended message.
            raise TypeError(
                'Unable to add %r as a custom type (expected a class or '
                'callable)' % (type_,))

    # Lists are normalised to tuples so they are hashable as dict keys.
    if isinstance(type_, list):
        type_ = tuple(type_)

    if type_ in TYPE_MAP:
        raise KeyError('Type %r already exists' % (type_,))

    if isinstance(type_, tuple):
        # Validate each member of a compound registration.
        for x in type_:
            _check_type(x)
    else:
        _check_type(type_)

    TYPE_MAP[type_] = func
[ "def", "add_type", "(", "type_", ",", "func", "=", "None", ")", ":", "def", "_check_type", "(", "type_", ")", ":", "if", "not", "(", "isinstance", "(", "type_", ",", "python", ".", "class_types", ")", "or", "hasattr", "(", "type_", ",", "'__call__'", ")", ")", ":", "raise", "TypeError", "(", "r'Unable to add '", "%", "r' as a custom type (expected a '", "'class or callable)'", "%", "(", "type_", ",", ")", ")", "if", "isinstance", "(", "type_", ",", "list", ")", ":", "type_", "=", "tuple", "(", "type_", ")", "if", "type_", "in", "TYPE_MAP", ":", "raise", "KeyError", "(", "'Type %r already exists'", "%", "(", "type_", ",", ")", ")", "if", "isinstance", "(", "type_", ",", "types", ".", "TupleType", ")", ":", "for", "x", "in", "type_", ":", "_check_type", "(", "x", ")", "else", ":", "_check_type", "(", "type_", ")", "TYPE_MAP", "[", "type_", "]", "=", "func" ]
python
Adds a custom type to L{TYPE_MAP}. A custom type allows fine grain control of what to encode to an AMF data stream. @raise TypeError: Unable to add as a custom type (expected a class or callable). @raise KeyError: Type already exists. @see: L{get_type} and L{remove_type}
true
2,691,712
def add_error_class(klass, code):
    """
    Maps an exception class to a string code. Used to map remoting
    C{onStatus} objects to an exception class so that an exception can be
    built to represent that error.

    An example::

        >>> class AuthenticationError(Exception):
        ...     pass
        ...
        >>> pyamf.add_error_class(AuthenticationError, 'Auth.Failed')
        >>> print pyamf.ERROR_CLASS_MAP
        {'TypeError': <type 'exceptions.TypeError'>, 'IndexError': <type 'exceptions.IndexError'>,
        'Auth.Failed': <class '__main__.AuthenticationError'>, 'KeyError': <type 'exceptions.KeyError'>,
        'NameError': <type 'exceptions.NameError'>, 'LookupError': <type 'exceptions.LookupError'>}

    @param klass: Exception class
    @param code: Exception code
    @type code: C{str}
    @see: L{remove_error_class}
    """
    # Normalise byte codes to text.
    if not isinstance(code, python.str_types):
        code = code.decode('utf-8')

    if not isinstance(klass, python.class_types):
        raise TypeError("klass must be a class type")

    # Only Exception subclasses can be raised to represent remoting errors.
    if Exception not in inspect.getmro(klass):
        raise TypeError(
            'Error classes must subclass the __builtin__.Exception class')

    if code in ERROR_CLASS_MAP:
        raise ValueError('Code %s is already registered' % (code,))

    ERROR_CLASS_MAP[code] = klass
[ "def", "add_error_class", "(", "klass", ",", "code", ")", ":", "if", "not", "isinstance", "(", "code", ",", "python", ".", "str_types", ")", ":", "code", "=", "code", ".", "decode", "(", "'utf-8'", ")", "if", "not", "isinstance", "(", "klass", ",", "python", ".", "class_types", ")", ":", "raise", "TypeError", "(", "\"klass must be a class type\"", ")", "mro", "=", "inspect", ".", "getmro", "(", "klass", ")", "if", "not", "Exception", "in", "mro", ":", "raise", "TypeError", "(", "'Error classes must subclass the __builtin__.Exception class'", ")", "if", "code", "in", "ERROR_CLASS_MAP", ":", "raise", "ValueError", "(", "'Code %s is already registered'", "%", "(", "code", ",", ")", ")", "ERROR_CLASS_MAP", "[", "code", "]", "=", "klass" ]
python
Maps an exception class to a string code. Used to map remoting C{onStatus} objects to an exception class so that an exception can be built to represent that error. An example:: >>> class AuthenticationError(Exception): ... pass ... >>> pyamf.add_error_class(AuthenticationError, 'Auth.Failed') >>> print pyamf.ERROR_CLASS_MAP {'TypeError': <type 'exceptions.TypeError'>, 'IndexError': <type 'exceptions.IndexError'>, 'Auth.Failed': <class '__main__.AuthenticationError'>, 'KeyError': <type 'exceptions.KeyError'>, 'NameError': <type 'exceptions.NameError'>, 'LookupError': <type 'exceptions.LookupError'>} @param klass: Exception class @param code: Exception code @type code: C{str} @see: L{remove_error_class}
true
2,691,713
def remove_error_class(klass):
    """
    Removes a class from the L{ERROR_CLASS_MAP}. An example::

       >>> class AuthenticationError(Exception):
       ...     pass
       ...
       >>> pyamf.add_error_class(AuthenticationError, 'Auth.Failed')
       >>> pyamf.remove_error_class(AuthenticationError)

    @param klass: Exception class or registered string code to remove.
    @raise ValueError: Code/class is not registered.
    @raise TypeError: Invalid type, expected class or string.
    @see: L{add_error_class}
    """
    if isinstance(klass, python.str_types):
        if klass not in ERROR_CLASS_MAP:
            raise ValueError('Code %s is not registered' % (klass,))
    elif isinstance(klass, python.class_types):
        # Reverse lookup: find the code registered for this class.  Iterating
        # items avoids indexing dict views (`.keys()[i]`), which fails on
        # Python 3 where views are not sequences.
        for code, registered in list(ERROR_CLASS_MAP.items()):
            if registered == klass:
                klass = code
                break
        else:
            raise ValueError('Class %s is not registered' % (klass,))
    else:
        raise TypeError("Invalid type, expected class or string")

    del ERROR_CLASS_MAP[klass]
[ "def", "remove_error_class", "(", "klass", ")", ":", "if", "isinstance", "(", "klass", ",", "python", ".", "str_types", ")", ":", "if", "klass", "not", "in", "ERROR_CLASS_MAP", ":", "raise", "ValueError", "(", "'Code %s is not registered'", "%", "(", "klass", ",", ")", ")", "elif", "isinstance", "(", "klass", ",", "python", ".", "class_types", ")", ":", "classes", "=", "ERROR_CLASS_MAP", ".", "values", "(", ")", "if", "klass", "not", "in", "classes", ":", "raise", "ValueError", "(", "'Class %s is not registered'", "%", "(", "klass", ",", ")", ")", "klass", "=", "ERROR_CLASS_MAP", ".", "keys", "(", ")", "[", "classes", ".", "index", "(", "klass", ")", "]", "else", ":", "raise", "TypeError", "(", "\"Invalid type, expected class or string\"", ")", "del", "ERROR_CLASS_MAP", "[", "klass", "]" ]
python
Removes a class from the L{ERROR_CLASS_MAP}. An example:: >>> class AuthenticationError(Exception): ... pass ... >>> pyamf.add_error_class(AuthenticationError, 'Auth.Failed') >>> pyamf.remove_error_class(AuthenticationError) @see: L{add_error_class}
true
2,692,115
def expand_user(path):
    """Expand '~'-style usernames in strings.

    This is similar to :func:`os.path.expanduser`, but it computes and returns
    extra information that will be useful if the input was being used in
    computing completions, and you wish to return the completions with the
    original '~' instead of its expanded value.

    Parameters
    ----------
    path : str
      String to be expanded.  If no ~ is present, the output is the same as
      the input.

    Returns
    -------
    newpath : str
      Result of ~ expansion in the input path.
    tilde_expand : bool
      Whether any expansion was performed or not.
    tilde_val : str
      The value that ~ was replaced with.
    """
    if not path.startswith('~'):
        return path, False, ''

    expanded = os.path.expanduser(path)

    # Strip off what followed the '~' to recover the expansion of the tilde
    # itself.
    suffix_len = len(path) - 1
    tilde_val = expanded[:-suffix_len] if suffix_len else expanded

    return expanded, True, tilde_val
[ "def", "expand_user", "(", "path", ")", ":", "tilde_expand", "=", "False", "tilde_val", "=", "''", "newpath", "=", "path", "if", "path", ".", "startswith", "(", "'~'", ")", ":", "tilde_expand", "=", "True", "rest", "=", "len", "(", "path", ")", "-", "1", "newpath", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "if", "rest", ":", "tilde_val", "=", "newpath", "[", ":", "-", "rest", "]", "else", ":", "tilde_val", "=", "newpath", "return", "newpath", ",", "tilde_expand", ",", "tilde_val" ]
python
Expand '~'-style usernames in strings. This is similar to :func:`os.path.expanduser`, but it computes and returns extra information that will be useful if the input was being used in computing completions, and you wish to return the completions with the original '~' instead of its expanded value. Parameters ---------- path : str String to be expanded. If no ~ is present, the output is the same as the input. Returns ------- newpath : str Result of ~ expansion in the input path. tilde_expand : bool Whether any expansion was performed or not. tilde_val : str The value that ~ was replaced with.
true
2,692,784
def mysql_timestamp_converter(s):
    """Convert a MySQL TIMESTAMP to a Timestamp object."""
    # MySQL>4.1 returns TIMESTAMP in the same format as DATETIME.
    if s[4] == '-':
        return DateTime_or_None(s)

    # Old-style 14-digit timestamp; right-pad with zeros to full width.
    s = s + "0" * (14 - len(s))

    chunks = (s[:4], s[4:6], s[6:8], s[8:10], s[10:12], s[12:14])
    parts = [int(chunk) for chunk in chunks if chunk]

    try:
        return Timestamp(*parts)
    except (SystemExit, KeyboardInterrupt):
        raise
    except:
        # Malformed values deliberately convert to None rather than raising.
        return None
[ "def", "mysql_timestamp_converter", "(", "s", ")", ":", "if", "s", "[", "4", "]", "==", "'-'", ":", "return", "DateTime_or_None", "(", "s", ")", "s", "=", "s", "+", "\"0\"", "*", "(", "14", "-", "len", "(", "s", ")", ")", "parts", "=", "map", "(", "int", ",", "filter", "(", "None", ",", "(", "s", "[", ":", "4", "]", ",", "s", "[", "4", ":", "6", "]", ",", "s", "[", "6", ":", "8", "]", ",", "s", "[", "8", ":", "10", "]", ",", "s", "[", "10", ":", "12", "]", ",", "s", "[", "12", ":", "14", "]", ")", ")", ")", "try", ":", "return", "Timestamp", "(", "*", "parts", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "return", "None" ]
python
Convert a MySQL TIMESTAMP to a Timestamp object.
true
2,692,970
def _check_for_int(x): """ This is a compatibility function that takes a C{float} and converts it to an C{int} if the values are equal. """ try: y = int(x) except (OverflowError, ValueError): pass else: # There is no way in AMF0 to distinguish between integers and floats if x == x and y == x: return y return x
[ "def", "_check_for_int", "(", "x", ")", ":", "try", ":", "y", "=", "int", "(", "x", ")", "except", "(", "OverflowError", ",", "ValueError", ")", ":", "pass", "else", ":", "if", "x", "==", "x", "and", "y", "==", "x", ":", "return", "y", "return", "x" ]
python
This is a compatibility function that takes a C{float} and converts it to an C{int} if the values are equal.
true
2,693,106
def cache_set(key, value, timeout=None, refreshed=False):
    """
    Wrapper for ``cache.set``. Stores the cache entry packed with the desired
    cache expiry time. When the entry is retrieved from cache, the packed
    expiry time is also checked, and if past, the stale cache entry is stored
    again with an expiry that has ``CACHE_SET_DELAY_SECONDS`` added to it. In
    this case the entry is not returned, so that a cache miss occurs and the
    entry should be set by the caller, but all other callers will still get
    the stale entry, so no real cache misses ever occur.
    """
    if timeout is None:
        timeout = settings.CACHE_MIDDLEWARE_SECONDS

    # Pack the value with the moment it goes stale; the real cache expiry is
    # pushed out so stale entries survive long enough to be refreshed.
    refresh_time = time() + timeout
    packed = (value, refresh_time, refreshed)

    return cache.set(_hashed_key(key), packed,
                     timeout + settings.CACHE_SET_DELAY_SECONDS)
[ "def", "cache_set", "(", "key", ",", "value", ",", "timeout", "=", "None", ",", "refreshed", "=", "False", ")", ":", "if", "timeout", "is", "None", ":", "timeout", "=", "settings", ".", "CACHE_MIDDLEWARE_SECONDS", "refresh_time", "=", "timeout", "+", "time", "(", ")", "real_timeout", "=", "timeout", "+", "settings", ".", "CACHE_SET_DELAY_SECONDS", "packed", "=", "(", "value", ",", "refresh_time", ",", "refreshed", ")", "return", "cache", ".", "set", "(", "_hashed_key", "(", "key", ")", ",", "packed", ",", "real_timeout", ")" ]
python
Wrapper for ``cache.set``. Stores the cache entry packed with the desired cache expiry time. When the entry is retrieved from cache, the packed expiry time is also checked, and if past, the stale cache entry is stored again with an expiry that has ``CACHE_SET_DELAY_SECONDS`` added to it. In this case the entry is not returned, so that a cache miss occurs and the entry should be set by the caller, but all other callers will still get the stale entry, so no real cache misses ever occur.
true
2,693,107
def cache_get(key):
    """
    Wrapper for ``cache.get``. The expiry time for the cache entry is stored
    with the entry. If the expiry time has past, put the stale entry back
    into cache, and don't return it to trigger a fake cache miss.
    """
    packed = cache.get(_hashed_key(key))

    if packed is None:
        return None

    value, refresh_time, refreshed = packed

    if time() > refresh_time and not refreshed:
        # Re-store the stale entry flagged as refreshed and report a miss so
        # that only the current caller recomputes the value.
        cache_set(key, value, settings.CACHE_SET_DELAY_SECONDS, True)
        return None

    return value
[ "def", "cache_get", "(", "key", ")", ":", "packed", "=", "cache", ".", "get", "(", "_hashed_key", "(", "key", ")", ")", "if", "packed", "is", "None", ":", "return", "None", "value", ",", "refresh_time", ",", "refreshed", "=", "packed", "if", "(", "time", "(", ")", ">", "refresh_time", ")", "and", "not", "refreshed", ":", "cache_set", "(", "key", ",", "value", ",", "settings", ".", "CACHE_SET_DELAY_SECONDS", ",", "True", ")", "return", "None", "return", "value" ]
python
Wrapper for ``cache.get``. The expiry time for the cache entry is stored with the entry. If the expiry time has past, put the stale entry back into cache, and don't return it to trigger a fake cache miss.
true
2,693,109
def cache_key_prefix(request):
    """
    Cache key for yacms's cache middleware. Adds the current device and site
    ID.
    """
    device = device_from_request(request) or "default"

    cache_key = "%s.%s.%s." % (
        settings.CACHE_MIDDLEWARE_KEY_PREFIX,
        current_site_id(),
        device,
    )

    return _i18n_cache_key_suffix(request, cache_key)
[ "def", "cache_key_prefix", "(", "request", ")", ":", "cache_key", "=", "\"%s.%s.%s.\"", "%", "(", "settings", ".", "CACHE_MIDDLEWARE_KEY_PREFIX", ",", "current_site_id", "(", ")", ",", "device_from_request", "(", "request", ")", "or", "\"default\"", ",", ")", "return", "_i18n_cache_key_suffix", "(", "request", ",", "cache_key", ")" ]
python
Cache key for yacms's cache middleware. Adds the current device and site ID.
true
2,693,512
def absdir(path):
    """Return absolute, normalized path to directory, if it exists; None
    otherwise.
    """
    if not os.path.isabs(path):
        # Anchor relative paths at the current working directory.
        anchored = os.path.abspath(os.path.join(os.getcwd(), path))
        path = os.path.normpath(anchored)

    if path is None or not os.path.isdir(path):
        return None

    return path
[ "def", "absdir", "(", "path", ")", ":", "if", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "path", ")", ")", ")", "if", "path", "is", "None", "or", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "return", "None", "return", "path" ]
python
Return absolute, normalized path to directory, if it exists; None otherwise.
true
2,693,514
def file_like(name):
    """A name is file-like if it is a path that exists, or it has a
    directory part, or it ends in .py, or it isn't a legal python
    identifier.
    """
    exists = os.path.exists(name)
    dir_part = os.path.dirname(name)
    stem = os.path.splitext(name)[0]

    # `or` short-circuits, so the identifier regex is only consulted when
    # the cheaper checks all come up falsy.
    return exists or dir_part or name.endswith('.py') or not ident_re.match(stem)
[ "def", "file_like", "(", "name", ")", ":", "return", "(", "os", ".", "path", ".", "exists", "(", "name", ")", "or", "os", ".", "path", ".", "dirname", "(", "name", ")", "or", "name", ".", "endswith", "(", "'.py'", ")", "or", "not", "ident_re", ".", "match", "(", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "0", "]", ")", ")" ]
python
A name is file-like if it is a path that exists, or it has a directory part, or it ends in .py, or it isn't a legal python identifier.
true
2,693,517
def getfilename(package, relativeTo=None):
    """Find the python source file for a package, relative to a particular
    directory (defaults to current working directory if not given).
    """
    if relativeTo is None:
        relativeTo = os.getcwd()

    base = os.path.join(relativeTo, os.sep.join(package.split('.')))

    # A package resolves to its __init__.py, a plain module to its .py file.
    for candidate in (base + '/__init__.py', base + '.py'):
        if os.path.exists(candidate):
            return candidate

    return None
[ "def", "getfilename", "(", "package", ",", "relativeTo", "=", "None", ")", ":", "if", "relativeTo", "is", "None", ":", "relativeTo", "=", "os", ".", "getcwd", "(", ")", "path", "=", "os", ".", "path", ".", "join", "(", "relativeTo", ",", "os", ".", "sep", ".", "join", "(", "package", ".", "split", "(", "'.'", ")", ")", ")", "suffixes", "=", "(", "'/__init__.py'", ",", "'.py'", ")", "for", "suffix", "in", "suffixes", ":", "filename", "=", "path", "+", "suffix", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "return", "filename", "return", "None" ]
python
Find the python source file for a package, relative to a particular directory (defaults to current working directory if not given).
true
2,693,518
def getpackage(filename):
    """
    Find the full dotted package name for a given python source file
    name. Returns None if the file is not a python source file.

    >>> getpackage('foo.py')
    'foo'
    >>> getpackage('biff/baf.py')
    'baf'
    >>> getpackage('nose/util.py')
    'nose.util'

    Works for directories too.

    >>> getpackage('nose')
    'nose'
    >>> getpackage('nose/plugins')
    'nose.plugins'

    And __init__ files stuck onto directories

    >>> getpackage('nose/plugins/__init__.py')
    'nose.plugins'

    Absolute paths also work.

    >>> path = os.path.abspath(os.path.join('nose', 'plugins'))
    >>> getpackage(path)
    'nose.plugins'
    """
    src_file = src(filename)

    if not src_file.endswith('.py') and not ispackage(src_file):
        return None

    base, ext = os.path.splitext(os.path.basename(src_file))

    # __init__ contributes nothing beyond its enclosing package name.
    mod_parts = [] if base == '__init__' else [base]

    # Walk up the directory tree, collecting enclosing package names until a
    # non-package directory is hit.
    path, part = os.path.split(os.path.split(src_file)[0])
    while part:
        if not ispackage(os.path.join(path, part)):
            break
        mod_parts.append(part)
        path, part = os.path.split(path)

    return '.'.join(reversed(mod_parts))
[ "def", "getpackage", "(", "filename", ")", ":", "src_file", "=", "src", "(", "filename", ")", "if", "not", "src_file", ".", "endswith", "(", "'.py'", ")", "and", "not", "ispackage", "(", "src_file", ")", ":", "return", "None", "base", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "src_file", ")", ")", "if", "base", "==", "'__init__'", ":", "mod_parts", "=", "[", "]", "else", ":", "mod_parts", "=", "[", "base", "]", "path", ",", "part", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "split", "(", "src_file", ")", "[", "0", "]", ")", "while", "part", ":", "if", "ispackage", "(", "os", ".", "path", ".", "join", "(", "path", ",", "part", ")", ")", ":", "mod_parts", ".", "append", "(", "part", ")", "else", ":", "break", "path", ",", "part", "=", "os", ".", "path", ".", "split", "(", "path", ")", "mod_parts", ".", "reverse", "(", ")", "return", "'.'", ".", "join", "(", "mod_parts", ")" ]
python
Find the full dotted package name for a given python source file name. Returns None if the file is not a python source file. >>> getpackage('foo.py') 'foo' >>> getpackage('biff/baf.py') 'baf' >>> getpackage('nose/util.py') 'nose.util' Works for directories too. >>> getpackage('nose') 'nose' >>> getpackage('nose/plugins') 'nose.plugins' And __init__ files stuck onto directories >>> getpackage('nose/plugins/__init__.py') 'nose.plugins' Absolute paths also work. >>> path = os.path.abspath(os.path.join('nose', 'plugins')) >>> getpackage(path) 'nose.plugins'
true
2,693,519
def ln(label):
    """Draw a 70-char-wide divider, with label in the middle.

    >>> ln('hello there')
    '---------------------------- hello there -----------------------------'
    """
    # Dashes on each side of the label (which gains two surrounding spaces).
    side = (70 - (len(label) + 2)) // 2
    out = '%s %s %s' % ('-' * side, label, '-' * side)

    # Integer division can leave us one short; top up on the right.
    shortfall = 70 - len(out)
    if shortfall > 0:
        out += '-' * shortfall

    return out
[ "def", "ln", "(", "label", ")", ":", "label_len", "=", "len", "(", "label", ")", "+", "2", "chunk", "=", "(", "70", "-", "label_len", ")", "//", "2", "out", "=", "'%s %s %s'", "%", "(", "'-'", "*", "chunk", ",", "label", ",", "'-'", "*", "chunk", ")", "pad", "=", "70", "-", "len", "(", "out", ")", "if", "pad", ">", "0", ":", "out", "=", "out", "+", "(", "'-'", "*", "pad", ")", "return", "out" ]
python
Draw a 70-char-wide divider, with label in the middle. >>> ln('hello there') '---------------------------- hello there -----------------------------'
true
2,693,523
def src(filename):
    """Find the python source file for a .pyc, .pyo or $py.class file on
    jython. Returns the filename provided if it is not a python source
    file.
    """
    if filename is None:
        return None

    # Jython compiles foo.py to foo$py.class.
    if sys.platform.startswith('java') and filename.endswith('$py.class'):
        return filename[:-9] + '.py'

    base, ext = os.path.splitext(filename)
    if ext in ('.pyc', '.pyo', '.py'):
        return base + '.py'

    return filename
[ "def", "src", "(", "filename", ")", ":", "if", "filename", "is", "None", ":", "return", "filename", "if", "sys", ".", "platform", ".", "startswith", "(", "'java'", ")", "and", "filename", ".", "endswith", "(", "'$py.class'", ")", ":", "return", "'.'", ".", "join", "(", "(", "filename", "[", ":", "-", "9", "]", ",", "'py'", ")", ")", "base", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "if", "ext", "in", "(", "'.pyc'", ",", "'.pyo'", ",", "'.py'", ")", ":", "return", "'.'", ".", "join", "(", "(", "base", ",", "'py'", ")", ")", "return", "filename" ]
python
Find the python source file for a .pyc, .pyo or $py.class file on jython. Returns the filename provided if it is not a python source file.
true
2,693,524
def regex_last_key(regex):
    """Sort key function factory that puts items that match a regular
    expression last.

    >>> from nose.config import Config
    >>> from nose.pyversion import sort_list
    >>> c = Config()
    >>> regex = c.testMatch
    >>> entries = ['.', '..', 'a_test', 'src', 'lib', 'test', 'foo.py']
    >>> sort_list(entries, regex_last_key(regex))
    >>> entries
    ['.', '..', 'foo.py', 'lib', 'src', 'a_test', 'test']
    """
    def key(obj):
        # Matching items get a leading 1 so they sort after non-matches (0).
        matched = 1 if regex.search(obj) else 0
        return (matched, obj)

    return key
[ "def", "regex_last_key", "(", "regex", ")", ":", "def", "k", "(", "obj", ")", ":", "if", "regex", ".", "search", "(", "obj", ")", ":", "return", "(", "1", ",", "obj", ")", "return", "(", "0", ",", "obj", ")", "return", "k" ]
python
Sort key function factory that puts items that match a regular expression last. >>> from nose.config import Config >>> from nose.pyversion import sort_list >>> c = Config() >>> regex = c.testMatch >>> entries = ['.', '..', 'a_test', 'src', 'lib', 'test', 'foo.py'] >>> sort_list(entries, regex_last_key(regex)) >>> entries ['.', '..', 'foo.py', 'lib', 'src', 'a_test', 'test']
true
2,693,525
def tolist(val):
    """Convert a value that may be a list or a (possibly comma-separated)
    string into a list. The exception: None is returned as None, not [None].

    >>> tolist(["one", "two"])
    ['one', 'two']

    >>> tolist("hello")
    ['hello']

    >>> tolist("separate,values, with, commas, spaces , are ,ok")
    ['separate', 'values', 'with', 'commas', 'spaces', 'are', 'ok']
    """
    if val is None:
        return None

    # Duck-type check: anything extendable is already list-like.
    try:
        val.extend([])
    except AttributeError:
        pass
    else:
        return val

    try:
        # Strings split on commas, trimming surrounding whitespace.
        return re.split(r'\s*,\s*', val)
    except TypeError:
        # Some other iterable - who knows...
        return list(val)
[ "def", "tolist", "(", "val", ")", ":", "if", "val", "is", "None", ":", "return", "None", "try", ":", "val", ".", "extend", "(", "[", "]", ")", "return", "val", "except", "AttributeError", ":", "pass", "try", ":", "return", "re", ".", "split", "(", "r'\\s*,\\s*'", ",", "val", ")", "except", "TypeError", ":", "return", "list", "(", "val", ")" ]
python
Convert a value that may be a list or a (possibly comma-separated) string into a list. The exception: None is returned as None, not [None]. >>> tolist(["one", "two"]) ['one', 'two'] >>> tolist("hello") ['hello'] >>> tolist("separate,values, with, commas, spaces , are ,ok") ['separate', 'values', 'with', 'commas', 'spaces', 'are', 'ok']
true
2,693,527
def transplant_class(cls, module):
    """
    Make a class appear to reside in `module`, rather than the module in
    which it is actually defined.

    >>> from nose.failure import Failure
    >>> Failure.__module__
    'nose.failure'
    >>> Nf = transplant_class(Failure, __name__)
    >>> Nf.__module__
    'nose.util'
    >>> Nf.__name__
    'Failure'
    """
    # A trivial subclass lets us override the module/name without mutating
    # the original class.
    class Transplanted(cls):
        pass

    Transplanted.__module__ = module
    Transplanted.__name__ = cls.__name__

    return Transplanted
[ "def", "transplant_class", "(", "cls", ",", "module", ")", ":", "class", "C", "(", "cls", ")", ":", "pass", "C", ".", "__module__", "=", "module", "C", ".", "__name__", "=", "cls", ".", "__name__", "return", "C" ]
python
Make a class appear to reside in `module`, rather than the module in which it is actually defined. >>> from nose.failure import Failure >>> Failure.__module__ 'nose.failure' >>> Nf = transplant_class(Failure, __name__) >>> Nf.__module__ 'nose.util' >>> Nf.__name__ 'Failure'
true
2,693,793
def isfile_strict(path): """Same as os.path.isfile() but does not swallow EACCES / EPERM exceptions, see: http://mail.python.org/pipermail/python-dev/2012-June/120787.html """ try: st = os.stat(path) except OSError: err = sys.exc_info()[1] if err.errno in (errno.EPERM, errno.EACCES): raise return False else: return stat.S_ISREG(st.st_mode)
[ "def", "isfile_strict", "(", "path", ")", ":", "try", ":", "st", "=", "os", ".", "stat", "(", "path", ")", "except", "OSError", ":", "err", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "if", "err", ".", "errno", "in", "(", "errno", ".", "EPERM", ",", "errno", ".", "EACCES", ")", ":", "raise", "return", "False", "else", ":", "return", "stat", ".", "S_ISREG", "(", "st", ".", "st_mode", ")" ]
python
Same as os.path.isfile() but does not swallow EACCES / EPERM exceptions, see: http://mail.python.org/pipermail/python-dev/2012-June/120787.html
true
2,693,999
def signup_verify(request, uidb36=None, token=None): """ View for the link in the verification email sent to a new user when they create an account and ``ACCOUNTS_VERIFICATION_REQUIRED`` is set to ``True``. Activates the user and logs them in, redirecting to the URL they tried to access when signing up. """ user = authenticate(uidb36=uidb36, token=token, is_active=False) if user is not None: user.is_active = True user.save() auth_login(request, user) info(request, _("Successfully signed up")) return login_redirect(request) else: error(request, _("The link you clicked is no longer valid.")) return redirect("/")
[ "def", "signup_verify", "(", "request", ",", "uidb36", "=", "None", ",", "token", "=", "None", ")", ":", "user", "=", "authenticate", "(", "uidb36", "=", "uidb36", ",", "token", "=", "token", ",", "is_active", "=", "False", ")", "if", "user", "is", "not", "None", ":", "user", ".", "is_active", "=", "True", "user", ".", "save", "(", ")", "auth_login", "(", "request", ",", "user", ")", "info", "(", "request", ",", "_", "(", "\"Successfully signed up\"", ")", ")", "return", "login_redirect", "(", "request", ")", "else", ":", "error", "(", "request", ",", "_", "(", "\"The link you clicked is no longer valid.\"", ")", ")", "return", "redirect", "(", "\"/\"", ")" ]
python
View for the link in the verification email sent to a new user when they create an account and ``ACCOUNTS_VERIFICATION_REQUIRED`` is set to ``True``. Activates the user and logs them in, redirecting to the URL they tried to access when signing up.
true
2,694,192
def abbrev_cwd(): """ Return abbreviated version of cwd, e.g. d:mydir """ cwd = os.getcwdu().replace('\\','/') drivepart = '' tail = cwd if sys.platform == 'win32': if len(cwd) < 4: return cwd drivepart,tail = os.path.splitdrive(cwd) parts = tail.split('/') if len(parts) > 2: tail = '/'.join(parts[-2:]) return (drivepart + ( cwd == '/' and '/' or tail))
[ "def", "abbrev_cwd", "(", ")", ":", "cwd", "=", "os", ".", "getcwdu", "(", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "drivepart", "=", "''", "tail", "=", "cwd", "if", "sys", ".", "platform", "==", "'win32'", ":", "if", "len", "(", "cwd", ")", "<", "4", ":", "return", "cwd", "drivepart", ",", "tail", "=", "os", ".", "path", ".", "splitdrive", "(", "cwd", ")", "parts", "=", "tail", ".", "split", "(", "'/'", ")", "if", "len", "(", "parts", ")", ">", "2", ":", "tail", "=", "'/'", ".", "join", "(", "parts", "[", "-", "2", ":", "]", ")", "return", "(", "drivepart", "+", "(", "cwd", "==", "'/'", "and", "'/'", "or", "tail", ")", ")" ]
python
Return abbreviated version of cwd, e.g. d:mydir
true
2,694,251
def perform_flag(request, comment): """ Actually perform the flagging of a comment from a request. """ flag, created = comments.models.CommentFlag.objects.get_or_create( comment = comment, user = request.user, flag = comments.models.CommentFlag.SUGGEST_REMOVAL ) signals.comment_was_flagged.send( sender = comment.__class__, comment = comment, flag = flag, created = created, request = request, )
[ "def", "perform_flag", "(", "request", ",", "comment", ")", ":", "flag", ",", "created", "=", "comments", ".", "models", ".", "CommentFlag", ".", "objects", ".", "get_or_create", "(", "comment", "=", "comment", ",", "user", "=", "request", ".", "user", ",", "flag", "=", "comments", ".", "models", ".", "CommentFlag", ".", "SUGGEST_REMOVAL", ")", "signals", ".", "comment_was_flagged", ".", "send", "(", "sender", "=", "comment", ".", "__class__", ",", "comment", "=", "comment", ",", "flag", "=", "flag", ",", "created", "=", "created", ",", "request", "=", "request", ",", ")" ]
python
Actually perform the flagging of a comment from a request.
true
2,694,731
def get_app_submodules(submodule_name): """ From wagtail.utils Searches each app module for the specified submodule yields tuples of (app_name, module) """ for name, module in get_app_modules(): if module_has_submodule(module, submodule_name): yield name, import_module('%s.%s' % (name, submodule_name))
[ "def", "get_app_submodules", "(", "submodule_name", ")", ":", "for", "name", ",", "module", "in", "get_app_modules", "(", ")", ":", "if", "module_has_submodule", "(", "module", ",", "submodule_name", ")", ":", "yield", "name", ",", "import_module", "(", "'%s.%s'", "%", "(", "name", ",", "submodule_name", ")", ")" ]
python
From wagtail.utils Searches each app module for the specified submodule yields tuples of (app_name, module)
true
2,695,606
def fix_frame_records_filenames(records): """Try to fix the filenames in each record from inspect.getinnerframes(). Particularly, modules loaded from within zip files have useless filenames attached to their code object, and inspect.getinnerframes() just uses it. """ fixed_records = [] for frame, filename, line_no, func_name, lines, index in records: # Look inside the frame's globals dictionary for __file__, which should # be better. better_fn = frame.f_globals.get('__file__', None) if isinstance(better_fn, str): # Check the type just in case someone did something weird with # __file__. It might also be None if the error occurred during # import. filename = better_fn fixed_records.append((frame, filename, line_no, func_name, lines, index)) return fixed_records
[ "def", "fix_frame_records_filenames", "(", "records", ")", ":", "fixed_records", "=", "[", "]", "for", "frame", ",", "filename", ",", "line_no", ",", "func_name", ",", "lines", ",", "index", "in", "records", ":", "better_fn", "=", "frame", ".", "f_globals", ".", "get", "(", "'__file__'", ",", "None", ")", "if", "isinstance", "(", "better_fn", ",", "str", ")", ":", "filename", "=", "better_fn", "fixed_records", ".", "append", "(", "(", "frame", ",", "filename", ",", "line_no", ",", "func_name", ",", "lines", ",", "index", ")", ")", "return", "fixed_records" ]
python
Try to fix the filenames in each record from inspect.getinnerframes(). Particularly, modules loaded from within zip files have useless filenames attached to their code object, and inspect.getinnerframes() just uses it.
true
2,695,837
def _write_header(name, header, required, stream, encoder, strict=False): """ Write AMF message header. @param name: Name of the header. @param header: Header value. @param required: Whether understanding this header is required (?). @param stream: L{BufferedByteStream<pyamf.util.BufferedByteStream>} that will receive the encoded header. @param encoder: An encoder capable of encoding C{AMF0}. @param strict: Use strict encoding policy. Default is C{False}. Will write the correct header length after writing the header. """ stream.write_ushort(len(name)) stream.write_utf8_string(name) stream.write_uchar(required) write_pos = stream.tell() stream.write_ulong(0) old_pos = stream.tell() encoder.writeElement(header) new_pos = stream.tell() if strict: stream.seek(write_pos) stream.write_ulong(new_pos - old_pos) stream.seek(new_pos)
[ "def", "_write_header", "(", "name", ",", "header", ",", "required", ",", "stream", ",", "encoder", ",", "strict", "=", "False", ")", ":", "stream", ".", "write_ushort", "(", "len", "(", "name", ")", ")", "stream", ".", "write_utf8_string", "(", "name", ")", "stream", ".", "write_uchar", "(", "required", ")", "write_pos", "=", "stream", ".", "tell", "(", ")", "stream", ".", "write_ulong", "(", "0", ")", "old_pos", "=", "stream", ".", "tell", "(", ")", "encoder", ".", "writeElement", "(", "header", ")", "new_pos", "=", "stream", ".", "tell", "(", ")", "if", "strict", ":", "stream", ".", "seek", "(", "write_pos", ")", "stream", ".", "write_ulong", "(", "new_pos", "-", "old_pos", ")", "stream", ".", "seek", "(", "new_pos", ")" ]
python
Write AMF message header. @param name: Name of the header. @param header: Header value. @param required: Whether understanding this header is required (?). @param stream: L{BufferedByteStream<pyamf.util.BufferedByteStream>} that will receive the encoded header. @param encoder: An encoder capable of encoding C{AMF0}. @param strict: Use strict encoding policy. Default is C{False}. Will write the correct header length after writing the header.
true
2,695,864
def get_parent(globals, level): """ parent, name = get_parent(globals, level) Return the package that an import is being performed in. If globals comes from the module foo.bar.bat (not itself a package), this returns the sys.modules entry for foo.bar. If globals is from a package's __init__.py, the package's entry in sys.modules is returned. If globals doesn't come from a package or a module in a package, or a corresponding entry is not found in sys.modules, None is returned. """ orig_level = level if not level or not isinstance(globals, dict): return None, '' pkgname = globals.get('__package__', None) if pkgname is not None: # __package__ is set, so use it if not hasattr(pkgname, 'rindex'): raise ValueError('__package__ set to non-string') if len(pkgname) == 0: if level > 0: raise ValueError('Attempted relative import in non-package') return None, '' name = pkgname else: # __package__ not set, so figure it out and set it if '__name__' not in globals: return None, '' modname = globals['__name__'] if '__path__' in globals: # __path__ is set, so modname is already the package name globals['__package__'] = name = modname else: # Normal module, so work out the package name if any lastdot = modname.rfind('.') if lastdot < 0 and level > 0: raise ValueError("Attempted relative import in non-package") if lastdot < 0: globals['__package__'] = None return None, '' globals['__package__'] = name = modname[:lastdot] dot = len(name) for x in xrange(level, 1, -1): try: dot = name.rindex('.', 0, dot) except ValueError: raise ValueError("attempted relative import beyond top-level " "package") name = name[:dot] try: parent = sys.modules[name] except: if orig_level < 1: warn("Parent module '%.200s' not found while handling absolute " "import" % name) parent = None else: raise SystemError("Parent module '%.200s' not loaded, cannot " "perform relative import" % name) # We expect, but can't guarantee, if parent != None, that: # - parent.__name__ == name # - parent.__dict__ is globals # 
If this is violated... Who cares? return parent, name
[ "def", "get_parent", "(", "globals", ",", "level", ")", ":", "orig_level", "=", "level", "if", "not", "level", "or", "not", "isinstance", "(", "globals", ",", "dict", ")", ":", "return", "None", ",", "''", "pkgname", "=", "globals", ".", "get", "(", "'__package__'", ",", "None", ")", "if", "pkgname", "is", "not", "None", ":", "if", "not", "hasattr", "(", "pkgname", ",", "'rindex'", ")", ":", "raise", "ValueError", "(", "'__package__ set to non-string'", ")", "if", "len", "(", "pkgname", ")", "==", "0", ":", "if", "level", ">", "0", ":", "raise", "ValueError", "(", "'Attempted relative import in non-package'", ")", "return", "None", ",", "''", "name", "=", "pkgname", "else", ":", "if", "'__name__'", "not", "in", "globals", ":", "return", "None", ",", "''", "modname", "=", "globals", "[", "'__name__'", "]", "if", "'__path__'", "in", "globals", ":", "globals", "[", "'__package__'", "]", "=", "name", "=", "modname", "else", ":", "lastdot", "=", "modname", ".", "rfind", "(", "'.'", ")", "if", "lastdot", "<", "0", "and", "level", ">", "0", ":", "raise", "ValueError", "(", "\"Attempted relative import in non-package\"", ")", "if", "lastdot", "<", "0", ":", "globals", "[", "'__package__'", "]", "=", "None", "return", "None", ",", "''", "globals", "[", "'__package__'", "]", "=", "name", "=", "modname", "[", ":", "lastdot", "]", "dot", "=", "len", "(", "name", ")", "for", "x", "in", "xrange", "(", "level", ",", "1", ",", "-", "1", ")", ":", "try", ":", "dot", "=", "name", ".", "rindex", "(", "'.'", ",", "0", ",", "dot", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"attempted relative import beyond top-level \"", "\"package\"", ")", "name", "=", "name", "[", ":", "dot", "]", "try", ":", "parent", "=", "sys", ".", "modules", "[", "name", "]", "except", ":", "if", "orig_level", "<", "1", ":", "warn", "(", "\"Parent module '%.200s' not found while handling absolute \"", "\"import\"", "%", "name", ")", "parent", "=", "None", "else", ":", "raise", 
"SystemError", "(", "\"Parent module '%.200s' not loaded, cannot \"", "\"perform relative import\"", "%", "name", ")", "return", "parent", ",", "name" ]
python
parent, name = get_parent(globals, level) Return the package that an import is being performed in. If globals comes from the module foo.bar.bat (not itself a package), this returns the sys.modules entry for foo.bar. If globals is from a package's __init__.py, the package's entry in sys.modules is returned. If globals doesn't come from a package or a module in a package, or a corresponding entry is not found in sys.modules, None is returned.
true
2,695,866
def import_submodule(mod, subname, fullname): """m = import_submodule(mod, subname, fullname)""" # Require: # if mod == None: subname == fullname # else: mod.__name__ + "." + subname == fullname global found_now if fullname in found_now and fullname in sys.modules: m = sys.modules[fullname] else: print 'Reloading', fullname found_now[fullname] = 1 oldm = sys.modules.get(fullname, None) if mod is None: path = None elif hasattr(mod, '__path__'): path = mod.__path__ else: return None try: # This appears to be necessary on Python 3, because imp.find_module() # tries to import standard libraries (like io) itself, and we don't # want them to be processed by our deep_import_hook. with replace_import_hook(original_import): fp, filename, stuff = imp.find_module(subname, path) except ImportError: return None try: m = imp.load_module(fullname, fp, filename, stuff) except: # load_module probably removed name from modules because of # the error. Put back the original module object. if oldm: sys.modules[fullname] = oldm raise finally: if fp: fp.close() add_submodule(mod, m, fullname, subname) return m
[ "def", "import_submodule", "(", "mod", ",", "subname", ",", "fullname", ")", ":", "global", "found_now", "if", "fullname", "in", "found_now", "and", "fullname", "in", "sys", ".", "modules", ":", "m", "=", "sys", ".", "modules", "[", "fullname", "]", "else", ":", "print", "'Reloading'", ",", "fullname", "found_now", "[", "fullname", "]", "=", "1", "oldm", "=", "sys", ".", "modules", ".", "get", "(", "fullname", ",", "None", ")", "if", "mod", "is", "None", ":", "path", "=", "None", "elif", "hasattr", "(", "mod", ",", "'__path__'", ")", ":", "path", "=", "mod", ".", "__path__", "else", ":", "return", "None", "try", ":", "with", "replace_import_hook", "(", "original_import", ")", ":", "fp", ",", "filename", ",", "stuff", "=", "imp", ".", "find_module", "(", "subname", ",", "path", ")", "except", "ImportError", ":", "return", "None", "try", ":", "m", "=", "imp", ".", "load_module", "(", "fullname", ",", "fp", ",", "filename", ",", "stuff", ")", "except", ":", "if", "oldm", ":", "sys", ".", "modules", "[", "fullname", "]", "=", "oldm", "raise", "finally", ":", "if", "fp", ":", "fp", ".", "close", "(", ")", "add_submodule", "(", "mod", ",", "m", ",", "fullname", ",", "subname", ")", "return", "m" ]
python
m = import_submodule(mod, subname, fullname)
true
2,695,868
def ensure_fromlist(mod, fromlist, buf, recursive): """Handle 'from module import a, b, c' imports.""" if not hasattr(mod, '__path__'): return for item in fromlist: if not hasattr(item, 'rindex'): raise TypeError("Item in ``from list'' not a string") if item == '*': if recursive: continue # avoid endless recursion try: all = mod.__all__ except AttributeError: pass else: ret = ensure_fromlist(mod, all, buf, 1) if not ret: return 0 elif not hasattr(mod, item): import_submodule(mod, item, buf + '.' + item)
[ "def", "ensure_fromlist", "(", "mod", ",", "fromlist", ",", "buf", ",", "recursive", ")", ":", "if", "not", "hasattr", "(", "mod", ",", "'__path__'", ")", ":", "return", "for", "item", "in", "fromlist", ":", "if", "not", "hasattr", "(", "item", ",", "'rindex'", ")", ":", "raise", "TypeError", "(", "\"Item in ``from list'' not a string\"", ")", "if", "item", "==", "'*'", ":", "if", "recursive", ":", "continue", "try", ":", "all", "=", "mod", ".", "__all__", "except", "AttributeError", ":", "pass", "else", ":", "ret", "=", "ensure_fromlist", "(", "mod", ",", "all", ",", "buf", ",", "1", ")", "if", "not", "ret", ":", "return", "0", "elif", "not", "hasattr", "(", "mod", ",", "item", ")", ":", "import_submodule", "(", "mod", ",", "item", ",", "buf", "+", "'.'", "+", "item", ")" ]
python
Handle 'from module import a, b, c' imports.
true
2,695,869
def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1): """Replacement for __import__()""" parent, buf = get_parent(globals, level) head, name, buf = load_next(parent, None if level < 0 else parent, name, buf) tail = head while name: tail, name, buf = load_next(tail, tail, name, buf) # If tail is None, both get_parent and load_next found # an empty module name: someone called __import__("") or # doctored faulty bytecode if tail is None: raise ValueError('Empty module name') if not fromlist: return head ensure_fromlist(tail, fromlist, buf, 0) return tail
[ "def", "deep_import_hook", "(", "name", ",", "globals", "=", "None", ",", "locals", "=", "None", ",", "fromlist", "=", "None", ",", "level", "=", "-", "1", ")", ":", "parent", ",", "buf", "=", "get_parent", "(", "globals", ",", "level", ")", "head", ",", "name", ",", "buf", "=", "load_next", "(", "parent", ",", "None", "if", "level", "<", "0", "else", "parent", ",", "name", ",", "buf", ")", "tail", "=", "head", "while", "name", ":", "tail", ",", "name", ",", "buf", "=", "load_next", "(", "tail", ",", "tail", ",", "name", ",", "buf", ")", "if", "tail", "is", "None", ":", "raise", "ValueError", "(", "'Empty module name'", ")", "if", "not", "fromlist", ":", "return", "head", "ensure_fromlist", "(", "tail", ",", "fromlist", ",", "buf", ",", "0", ")", "return", "tail" ]
python
Replacement for __import__()
true
2,696,048
def code_name(code, number=0): """ Compute a (probably) unique name for code for caching. This now expects code to be unicode. """ hash_digest = hashlib.md5(code.encode("utf-8")).hexdigest() # Include the number and 12 characters of the hash in the name. It's # pretty much impossible that in a single session we'll have collisions # even with truncated hashes, and the full one makes tracebacks too long return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12])
[ "def", "code_name", "(", "code", ",", "number", "=", "0", ")", ":", "hash_digest", "=", "hashlib", ".", "md5", "(", "code", ".", "encode", "(", "\"utf-8\"", ")", ")", ".", "hexdigest", "(", ")", "return", "'<ipython-input-{0}-{1}>'", ".", "format", "(", "number", ",", "hash_digest", "[", ":", "12", "]", ")" ]
python
Compute a (probably) unique name for code for caching. This now expects code to be unicode.
true
2,696,330
def user_config_files(): """Return path to any existing user config files """ return filter(os.path.exists, map(os.path.expanduser, config_files))
[ "def", "user_config_files", "(", ")", ":", "return", "filter", "(", "os", ".", "path", ".", "exists", ",", "map", "(", "os", ".", "path", ".", "expanduser", ",", "config_files", ")", ")" ]
python
Return path to any existing user config files
true
2,696,643
def timings_out(reps,func,*args,**kw): """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output) Execute a function reps times, return a tuple with the elapsed total CPU time in seconds, the time per call and the function's output. Under Unix, the return value is the sum of user+system time consumed by the process, computed via the resource module. This prevents problems related to the wraparound effect which the time.clock() function has. Under Windows the return value is in wall clock seconds. See the documentation for the time module for more details.""" reps = int(reps) assert reps >=1, 'reps must be >= 1' if reps==1: start = clock() out = func(*args,**kw) tot_time = clock()-start else: rng = xrange(reps-1) # the last time is executed separately to store output start = clock() for dummy in rng: func(*args,**kw) out = func(*args,**kw) # one last time tot_time = clock()-start av_time = tot_time / reps return tot_time,av_time,out
[ "def", "timings_out", "(", "reps", ",", "func", ",", "*", "args", ",", "**", "kw", ")", ":", "reps", "=", "int", "(", "reps", ")", "assert", "reps", ">=", "1", ",", "'reps must be >= 1'", "if", "reps", "==", "1", ":", "start", "=", "clock", "(", ")", "out", "=", "func", "(", "*", "args", ",", "**", "kw", ")", "tot_time", "=", "clock", "(", ")", "-", "start", "else", ":", "rng", "=", "xrange", "(", "reps", "-", "1", ")", "start", "=", "clock", "(", ")", "for", "dummy", "in", "rng", ":", "func", "(", "*", "args", ",", "**", "kw", ")", "out", "=", "func", "(", "*", "args", ",", "**", "kw", ")", "tot_time", "=", "clock", "(", ")", "-", "start", "av_time", "=", "tot_time", "/", "reps", "return", "tot_time", ",", "av_time", ",", "out" ]
python
timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output) Execute a function reps times, return a tuple with the elapsed total CPU time in seconds, the time per call and the function's output. Under Unix, the return value is the sum of user+system time consumed by the process, computed via the resource module. This prevents problems related to the wraparound effect which the time.clock() function has. Under Windows the return value is in wall clock seconds. See the documentation for the time module for more details.
true
2,696,644
def timings(reps,func,*args,**kw): """timings(reps,func,*args,**kw) -> (t_total,t_per_call) Execute a function reps times, return a tuple with the elapsed total CPU time in seconds and the time per call. These are just the first two values in timings_out().""" return timings_out(reps,func,*args,**kw)[0:2]
[ "def", "timings", "(", "reps", ",", "func", ",", "*", "args", ",", "**", "kw", ")", ":", "return", "timings_out", "(", "reps", ",", "func", ",", "*", "args", ",", "**", "kw", ")", "[", "0", ":", "2", "]" ]
python
timings(reps,func,*args,**kw) -> (t_total,t_per_call) Execute a function reps times, return a tuple with the elapsed total CPU time in seconds and the time per call. These are just the first two values in timings_out().
true
2,696,988
def encode_int(n): """ Encodes an int as a variable length signed 29-bit integer as defined by the spec. @param n: The integer to be encoded @return: The encoded string @rtype: C{str} @raise OverflowError: Out of range. """ global ENCODED_INT_CACHE try: return ENCODED_INT_CACHE[n] except KeyError: pass if n < MIN_29B_INT or n > MAX_29B_INT: raise OverflowError("Out of range") if n < 0: n += 0x20000000 bytes = '' real_value = None if n > 0x1fffff: real_value = n n >>= 1 bytes += chr(0x80 | ((n >> 21) & 0xff)) if n > 0x3fff: bytes += chr(0x80 | ((n >> 14) & 0xff)) if n > 0x7f: bytes += chr(0x80 | ((n >> 7) & 0xff)) if real_value is not None: n = real_value if n > 0x1fffff: bytes += chr(n & 0xff) else: bytes += chr(n & 0x7f) ENCODED_INT_CACHE[n] = bytes return bytes
[ "def", "encode_int", "(", "n", ")", ":", "global", "ENCODED_INT_CACHE", "try", ":", "return", "ENCODED_INT_CACHE", "[", "n", "]", "except", "KeyError", ":", "pass", "if", "n", "<", "MIN_29B_INT", "or", "n", ">", "MAX_29B_INT", ":", "raise", "OverflowError", "(", "\"Out of range\"", ")", "if", "n", "<", "0", ":", "n", "+=", "0x20000000", "bytes", "=", "''", "real_value", "=", "None", "if", "n", ">", "0x1fffff", ":", "real_value", "=", "n", "n", ">>=", "1", "bytes", "+=", "chr", "(", "0x80", "|", "(", "(", "n", ">>", "21", ")", "&", "0xff", ")", ")", "if", "n", ">", "0x3fff", ":", "bytes", "+=", "chr", "(", "0x80", "|", "(", "(", "n", ">>", "14", ")", "&", "0xff", ")", ")", "if", "n", ">", "0x7f", ":", "bytes", "+=", "chr", "(", "0x80", "|", "(", "(", "n", ">>", "7", ")", "&", "0xff", ")", ")", "if", "real_value", "is", "not", "None", ":", "n", "=", "real_value", "if", "n", ">", "0x1fffff", ":", "bytes", "+=", "chr", "(", "n", "&", "0xff", ")", "else", ":", "bytes", "+=", "chr", "(", "n", "&", "0x7f", ")", "ENCODED_INT_CACHE", "[", "n", "]", "=", "bytes", "return", "bytes" ]
python
Encodes an int as a variable length signed 29-bit integer as defined by the spec. @param n: The integer to be encoded @return: The encoded string @rtype: C{str} @raise OverflowError: Out of range.
true
2,697,291
def walk_egg(egg_dir): """Walk an unpacked egg's contents, skipping the metadata directory""" walker = os.walk(egg_dir) base,dirs,files = walker.next() if 'EGG-INFO' in dirs: dirs.remove('EGG-INFO') yield base,dirs,files for bdf in walker: yield bdf
[ "def", "walk_egg", "(", "egg_dir", ")", ":", "walker", "=", "os", ".", "walk", "(", "egg_dir", ")", "base", ",", "dirs", ",", "files", "=", "walker", ".", "next", "(", ")", "if", "'EGG-INFO'", "in", "dirs", ":", "dirs", ".", "remove", "(", "'EGG-INFO'", ")", "yield", "base", ",", "dirs", ",", "files", "for", "bdf", "in", "walker", ":", "yield", "bdf" ]
python
Walk an unpacked egg's contents, skipping the metadata directory
true
2,697,496
def get_tm_session(session_factory, transaction_manager): """ Get a ``sqlalchemy.orm.Session`` instance backed by a transaction. This function will hook the session to the transaction manager which will take care of committing any changes. - When using pyramid_tm it will automatically be committed or aborted depending on whether an exception is raised. - When using scripts you should wrap the session in a manager yourself. For example:: import transaction engine = get_engine(settings) session_factory = get_session_factory(engine) with transaction.manager: dbsession = get_tm_session(session_factory, transaction.manager) """ dbsession = session_factory() zope.sqlalchemy.register( dbsession, transaction_manager=transaction_manager) return dbsession
[ "def", "get_tm_session", "(", "session_factory", ",", "transaction_manager", ")", ":", "dbsession", "=", "session_factory", "(", ")", "zope", ".", "sqlalchemy", ".", "register", "(", "dbsession", ",", "transaction_manager", "=", "transaction_manager", ")", "return", "dbsession" ]
python
Get a ``sqlalchemy.orm.Session`` instance backed by a transaction. This function will hook the session to the transaction manager which will take care of committing any changes. - When using pyramid_tm it will automatically be committed or aborted depending on whether an exception is raised. - When using scripts you should wrap the session in a manager yourself. For example:: import transaction engine = get_engine(settings) session_factory = get_session_factory(engine) with transaction.manager: dbsession = get_tm_session(session_factory, transaction.manager)
true
2,697,554
def script_args(f): """single decorator for adding script args""" args = [ magic_arguments.argument( '--out', type=str, help="""The variable in which to store stdout from the script. If the script is backgrounded, this will be the stdout *pipe*, instead of the stderr text itself. """ ), magic_arguments.argument( '--err', type=str, help="""The variable in which to store stderr from the script. If the script is backgrounded, this will be the stderr *pipe*, instead of the stderr text itself. """ ), magic_arguments.argument( '--bg', action="store_true", help="""Whether to run the script in the background. If given, the only way to see the output of the command is with --out/err. """ ), magic_arguments.argument( '--proc', type=str, help="""The variable in which to store Popen instance. This is used only when --bg option is given. """ ), ] for arg in args: f = arg(f) return f
[ "def", "script_args", "(", "f", ")", ":", "args", "=", "[", "magic_arguments", ".", "argument", "(", "'--out'", ",", "type", "=", "str", ",", "help", "=", "\"\"\"The variable in which to store stdout from the script.\n If the script is backgrounded, this will be the stdout *pipe*,\n instead of the stderr text itself.\n \"\"\"", ")", ",", "magic_arguments", ".", "argument", "(", "'--err'", ",", "type", "=", "str", ",", "help", "=", "\"\"\"The variable in which to store stderr from the script.\n If the script is backgrounded, this will be the stderr *pipe*,\n instead of the stderr text itself.\n \"\"\"", ")", ",", "magic_arguments", ".", "argument", "(", "'--bg'", ",", "action", "=", "\"store_true\"", ",", "help", "=", "\"\"\"Whether to run the script in the background.\n If given, the only way to see the output of the command is\n with --out/err.\n \"\"\"", ")", ",", "magic_arguments", ".", "argument", "(", "'--proc'", ",", "type", "=", "str", ",", "help", "=", "\"\"\"The variable in which to store Popen instance.\n This is used only when --bg option is given.\n \"\"\"", ")", ",", "]", "for", "arg", "in", "args", ":", "f", "=", "arg", "(", "f", ")", "return", "f" ]
python
single decorator for adding script args
true
2,698,016
def fix_error_editor(self,filename,linenum,column,msg): """Open the editor at the given filename, linenumber, column and show an error message. This is used for correcting syntax errors. The current implementation only has special support for the VIM editor, and falls back on the 'editor' hook if VIM is not used. Call ip.set_hook('fix_error_editor',youfunc) to use your own function, """ def vim_quickfix_file(): t = tempfile.NamedTemporaryFile() t.write('%s:%d:%d:%s\n' % (filename,linenum,column,msg)) t.flush() return t if os.path.basename(self.editor) != 'vim': self.hooks.editor(filename,linenum) return t = vim_quickfix_file() try: if os.system('vim --cmd "set errorformat=%f:%l:%c:%m" -q ' + t.name): raise TryNext() finally: t.close()
[ "def", "fix_error_editor", "(", "self", ",", "filename", ",", "linenum", ",", "column", ",", "msg", ")", ":", "def", "vim_quickfix_file", "(", ")", ":", "t", "=", "tempfile", ".", "NamedTemporaryFile", "(", ")", "t", ".", "write", "(", "'%s:%d:%d:%s\\n'", "%", "(", "filename", ",", "linenum", ",", "column", ",", "msg", ")", ")", "t", ".", "flush", "(", ")", "return", "t", "if", "os", ".", "path", ".", "basename", "(", "self", ".", "editor", ")", "!=", "'vim'", ":", "self", ".", "hooks", ".", "editor", "(", "filename", ",", "linenum", ")", "return", "t", "=", "vim_quickfix_file", "(", ")", "try", ":", "if", "os", ".", "system", "(", "'vim --cmd \"set errorformat=%f:%l:%c:%m\" -q '", "+", "t", ".", "name", ")", ":", "raise", "TryNext", "(", ")", "finally", ":", "t", ".", "close", "(", ")" ]
python
Open the editor at the given filename, linenumber, column and show an error message. This is used for correcting syntax errors. The current implementation only has special support for the VIM editor, and falls back on the 'editor' hook if VIM is not used. Call ip.set_hook('fix_error_editor',youfunc) to use your own function,
true
2,698,017
def clipboard_get(self): """ Get text from the clipboard. """ from IPython.lib.clipboard import ( osx_clipboard_get, tkinter_clipboard_get, win32_clipboard_get ) if sys.platform == 'win32': chain = [win32_clipboard_get, tkinter_clipboard_get] elif sys.platform == 'darwin': chain = [osx_clipboard_get, tkinter_clipboard_get] else: chain = [tkinter_clipboard_get] dispatcher = CommandChainDispatcher() for func in chain: dispatcher.add(func) text = dispatcher() return text
[ "def", "clipboard_get", "(", "self", ")", ":", "from", "IPython", ".", "lib", ".", "clipboard", "import", "(", "osx_clipboard_get", ",", "tkinter_clipboard_get", ",", "win32_clipboard_get", ")", "if", "sys", ".", "platform", "==", "'win32'", ":", "chain", "=", "[", "win32_clipboard_get", ",", "tkinter_clipboard_get", "]", "elif", "sys", ".", "platform", "==", "'darwin'", ":", "chain", "=", "[", "osx_clipboard_get", ",", "tkinter_clipboard_get", "]", "else", ":", "chain", "=", "[", "tkinter_clipboard_get", "]", "dispatcher", "=", "CommandChainDispatcher", "(", ")", "for", "func", "in", "chain", ":", "dispatcher", ".", "add", "(", "func", ")", "text", "=", "dispatcher", "(", ")", "return", "text" ]
python
Get text from the clipboard.
true
2,698,372
def try_passwordless_ssh(server, keyfile, paramiko=None): """Attempt to make an ssh connection without a password. This is mainly used for requiring password input only once when many tunnels may be connected to the same server. If paramiko is None, the default for the platform is chosen. """ if paramiko is None: paramiko = sys.platform == 'win32' if not paramiko: f = _try_passwordless_openssh else: f = _try_passwordless_paramiko return f(server, keyfile)
[ "def", "try_passwordless_ssh", "(", "server", ",", "keyfile", ",", "paramiko", "=", "None", ")", ":", "if", "paramiko", "is", "None", ":", "paramiko", "=", "sys", ".", "platform", "==", "'win32'", "if", "not", "paramiko", ":", "f", "=", "_try_passwordless_openssh", "else", ":", "f", "=", "_try_passwordless_paramiko", "return", "f", "(", "server", ",", "keyfile", ")" ]
python
Attempt to make an ssh connection without a password. This is mainly used for requiring password input only once when many tunnels may be connected to the same server. If paramiko is None, the default for the platform is chosen.
true
2,698,374
def _try_passwordless_paramiko(server, keyfile): """Try passwordless login with paramiko.""" if paramiko is None: msg = "Paramiko unavaliable, " if sys.platform == 'win32': msg += "Paramiko is required for ssh tunneled connections on Windows." else: msg += "use OpenSSH." raise ImportError(msg) username, server, port = _split_server(server) client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.WarningPolicy()) try: client.connect(server, port, username=username, key_filename=keyfile, look_for_keys=True) except paramiko.AuthenticationException: return False else: client.close() return True
[ "def", "_try_passwordless_paramiko", "(", "server", ",", "keyfile", ")", ":", "if", "paramiko", "is", "None", ":", "msg", "=", "\"Paramiko unavaliable, \"", "if", "sys", ".", "platform", "==", "'win32'", ":", "msg", "+=", "\"Paramiko is required for ssh tunneled connections on Windows.\"", "else", ":", "msg", "+=", "\"use OpenSSH.\"", "raise", "ImportError", "(", "msg", ")", "username", ",", "server", ",", "port", "=", "_split_server", "(", "server", ")", "client", "=", "paramiko", ".", "SSHClient", "(", ")", "client", ".", "load_system_host_keys", "(", ")", "client", ".", "set_missing_host_key_policy", "(", "paramiko", ".", "WarningPolicy", "(", ")", ")", "try", ":", "client", ".", "connect", "(", "server", ",", "port", ",", "username", "=", "username", ",", "key_filename", "=", "keyfile", ",", "look_for_keys", "=", "True", ")", "except", "paramiko", ".", "AuthenticationException", ":", "return", "False", "else", ":", "client", ".", "close", "(", ")", "return", "True" ]
python
Try passwordless login with paramiko.
true
2,698,375
def tunnel_connection(socket, addr, server, keyfile=None, password=None, paramiko=None, timeout=60): """Connect a socket to an address via an ssh tunnel. This is a wrapper for socket.connect(addr), when addr is not accessible from the local machine. It simply creates an ssh tunnel using the remaining args, and calls socket.connect('tcp://localhost:lport') where lport is the randomly selected local port of the tunnel. """ new_url, tunnel = open_tunnel(addr, server, keyfile=keyfile, password=password, paramiko=paramiko, timeout=timeout) socket.connect(new_url) return tunnel
[ "def", "tunnel_connection", "(", "socket", ",", "addr", ",", "server", ",", "keyfile", "=", "None", ",", "password", "=", "None", ",", "paramiko", "=", "None", ",", "timeout", "=", "60", ")", ":", "new_url", ",", "tunnel", "=", "open_tunnel", "(", "addr", ",", "server", ",", "keyfile", "=", "keyfile", ",", "password", "=", "password", ",", "paramiko", "=", "paramiko", ",", "timeout", "=", "timeout", ")", "socket", ".", "connect", "(", "new_url", ")", "return", "tunnel" ]
python
Connect a socket to an address via an ssh tunnel. This is a wrapper for socket.connect(addr), when addr is not accessible from the local machine. It simply creates an ssh tunnel using the remaining args, and calls socket.connect('tcp://localhost:lport') where lport is the randomly selected local port of the tunnel.
true
2,698,376
def open_tunnel(addr, server, keyfile=None, password=None, paramiko=None, timeout=60): """Open a tunneled connection from a 0MQ url. For use inside tunnel_connection. Returns ------- (url, tunnel): The 0MQ url that has been forwarded, and the tunnel object """ lport = select_random_ports(1)[0] transport, addr = addr.split('://') ip,rport = addr.split(':') rport = int(rport) if paramiko is None: paramiko = sys.platform == 'win32' if paramiko: tunnelf = paramiko_tunnel else: tunnelf = openssh_tunnel tunnel = tunnelf(lport, rport, server, remoteip=ip, keyfile=keyfile, password=password, timeout=timeout) return 'tcp://127.0.0.1:%i'%lport, tunnel
[ "def", "open_tunnel", "(", "addr", ",", "server", ",", "keyfile", "=", "None", ",", "password", "=", "None", ",", "paramiko", "=", "None", ",", "timeout", "=", "60", ")", ":", "lport", "=", "select_random_ports", "(", "1", ")", "[", "0", "]", "transport", ",", "addr", "=", "addr", ".", "split", "(", "'://'", ")", "ip", ",", "rport", "=", "addr", ".", "split", "(", "':'", ")", "rport", "=", "int", "(", "rport", ")", "if", "paramiko", "is", "None", ":", "paramiko", "=", "sys", ".", "platform", "==", "'win32'", "if", "paramiko", ":", "tunnelf", "=", "paramiko_tunnel", "else", ":", "tunnelf", "=", "openssh_tunnel", "tunnel", "=", "tunnelf", "(", "lport", ",", "rport", ",", "server", ",", "remoteip", "=", "ip", ",", "keyfile", "=", "keyfile", ",", "password", "=", "password", ",", "timeout", "=", "timeout", ")", "return", "'tcp://127.0.0.1:%i'", "%", "lport", ",", "tunnel" ]
python
Open a tunneled connection from a 0MQ url. For use inside tunnel_connection. Returns ------- (url, tunnel): The 0MQ url that has been forwarded, and the tunnel object
true
2,698,379
def paramiko_tunnel(lport, rport, server, remoteip='127.0.0.1', keyfile=None, password=None, timeout=60): """launch a tunner with paramiko in a subprocess. This should only be used when shell ssh is unavailable (e.g. Windows). This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`, as seen from `server`. If you are familiar with ssh tunnels, this creates the tunnel: ssh server -L localhost:lport:remoteip:rport keyfile and password may be specified, but ssh config is checked for defaults. Parameters ---------- lport : int local port for connecting to the tunnel from this machine. rport : int port on the remote machine to connect to. server : str The ssh server to connect to. The full ssh server string will be parsed. user@server:port remoteip : str [Default: 127.0.0.1] The remote ip, specifying the destination of the tunnel. Default is localhost, which means that the tunnel would redirect localhost:lport on this machine to localhost:rport on the *server*. keyfile : str; path to public key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str; Your ssh password to the ssh server. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. timeout : int [default: 60] The time (in seconds) after which no activity will result in the tunnel closing. This prevents orphaned tunnels from running forever. """ if paramiko is None: raise ImportError("Paramiko not available") if password is None: if not _try_passwordless_paramiko(server, keyfile): password = getpass("%s's password: "%(server)) p = Process(target=_paramiko_tunnel, args=(lport, rport, server, remoteip), kwargs=dict(keyfile=keyfile, password=password)) p.daemon=False p.start() atexit.register(_shutdown_process, p) return p
[ "def", "paramiko_tunnel", "(", "lport", ",", "rport", ",", "server", ",", "remoteip", "=", "'127.0.0.1'", ",", "keyfile", "=", "None", ",", "password", "=", "None", ",", "timeout", "=", "60", ")", ":", "if", "paramiko", "is", "None", ":", "raise", "ImportError", "(", "\"Paramiko not available\"", ")", "if", "password", "is", "None", ":", "if", "not", "_try_passwordless_paramiko", "(", "server", ",", "keyfile", ")", ":", "password", "=", "getpass", "(", "\"%s's password: \"", "%", "(", "server", ")", ")", "p", "=", "Process", "(", "target", "=", "_paramiko_tunnel", ",", "args", "=", "(", "lport", ",", "rport", ",", "server", ",", "remoteip", ")", ",", "kwargs", "=", "dict", "(", "keyfile", "=", "keyfile", ",", "password", "=", "password", ")", ")", "p", ".", "daemon", "=", "False", "p", ".", "start", "(", ")", "atexit", ".", "register", "(", "_shutdown_process", ",", "p", ")", "return", "p" ]
python
launch a tunner with paramiko in a subprocess. This should only be used when shell ssh is unavailable (e.g. Windows). This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`, as seen from `server`. If you are familiar with ssh tunnels, this creates the tunnel: ssh server -L localhost:lport:remoteip:rport keyfile and password may be specified, but ssh config is checked for defaults. Parameters ---------- lport : int local port for connecting to the tunnel from this machine. rport : int port on the remote machine to connect to. server : str The ssh server to connect to. The full ssh server string will be parsed. user@server:port remoteip : str [Default: 127.0.0.1] The remote ip, specifying the destination of the tunnel. Default is localhost, which means that the tunnel would redirect localhost:lport on this machine to localhost:rport on the *server*. keyfile : str; path to public key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str; Your ssh password to the ssh server. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. timeout : int [default: 60] The time (in seconds) after which no activity will result in the tunnel closing. This prevents orphaned tunnels from running forever.
true
2,698,380
def _paramiko_tunnel(lport, rport, server, remoteip, keyfile=None, password=None): """Function for actually starting a paramiko tunnel, to be passed to multiprocessing.Process(target=this), and not called directly. """ username, server, port = _split_server(server) client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.WarningPolicy()) try: client.connect(server, port, username=username, key_filename=keyfile, look_for_keys=True, password=password) # except paramiko.AuthenticationException: # if password is None: # password = getpass("%s@%s's password: "%(username, server)) # client.connect(server, port, username=username, password=password) # else: # raise except Exception as e: print ('*** Failed to connect to %s:%d: %r' % (server, port, e)) sys.exit(1) # print ('Now forwarding port %d to %s:%d ...' % (lport, server, rport)) try: forward_tunnel(lport, remoteip, rport, client.get_transport()) except KeyboardInterrupt: print ('SIGINT: Port forwarding stopped cleanly') sys.exit(0) except Exception as e: print ("Port forwarding stopped uncleanly: %s"%e) sys.exit(255)
[ "def", "_paramiko_tunnel", "(", "lport", ",", "rport", ",", "server", ",", "remoteip", ",", "keyfile", "=", "None", ",", "password", "=", "None", ")", ":", "username", ",", "server", ",", "port", "=", "_split_server", "(", "server", ")", "client", "=", "paramiko", ".", "SSHClient", "(", ")", "client", ".", "load_system_host_keys", "(", ")", "client", ".", "set_missing_host_key_policy", "(", "paramiko", ".", "WarningPolicy", "(", ")", ")", "try", ":", "client", ".", "connect", "(", "server", ",", "port", ",", "username", "=", "username", ",", "key_filename", "=", "keyfile", ",", "look_for_keys", "=", "True", ",", "password", "=", "password", ")", "except", "Exception", "as", "e", ":", "print", "(", "'*** Failed to connect to %s:%d: %r'", "%", "(", "server", ",", "port", ",", "e", ")", ")", "sys", ".", "exit", "(", "1", ")", "try", ":", "forward_tunnel", "(", "lport", ",", "remoteip", ",", "rport", ",", "client", ".", "get_transport", "(", ")", ")", "except", "KeyboardInterrupt", ":", "print", "(", "'SIGINT: Port forwarding stopped cleanly'", ")", "sys", ".", "exit", "(", "0", ")", "except", "Exception", "as", "e", ":", "print", "(", "\"Port forwarding stopped uncleanly: %s\"", "%", "e", ")", "sys", ".", "exit", "(", "255", ")" ]
python
Function for actually starting a paramiko tunnel, to be passed to multiprocessing.Process(target=this), and not called directly.
true
2,698,473
def parse_media_range(range): """Carves up a media range and returns a tuple of the (type, subtype, params) where 'params' is a dictionary of all the parameters for the media range. For example, the media range 'application/*;q=0.5' would get parsed into: ('application', '*', {'q', '0.5'}) In addition this function also guarantees that there is a value for 'q' in the params dictionary, filling it in with a proper default if necessary. """ (type, subtype, params) = parse_mime_type(range) if 'q' not in params or not params['q'] or \ float(params['q']) > 1 or float(params['q']) < 0: params['q'] = '1' return (type, subtype, params)
[ "def", "parse_media_range", "(", "range", ")", ":", "(", "type", ",", "subtype", ",", "params", ")", "=", "parse_mime_type", "(", "range", ")", "if", "'q'", "not", "in", "params", "or", "not", "params", "[", "'q'", "]", "or", "float", "(", "params", "[", "'q'", "]", ")", ">", "1", "or", "float", "(", "params", "[", "'q'", "]", ")", "<", "0", ":", "params", "[", "'q'", "]", "=", "'1'", "return", "(", "type", ",", "subtype", ",", "params", ")" ]
python
Carves up a media range and returns a tuple of the (type, subtype, params) where 'params' is a dictionary of all the parameters for the media range. For example, the media range 'application/*;q=0.5' would get parsed into: ('application', '*', {'q', '0.5'}) In addition this function also guarantees that there is a value for 'q' in the params dictionary, filling it in with a proper default if necessary.
true
2,698,773
def interpret_distro_name(location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None ): """Generate alternative interpretations of a source distro name Note: if `location` is a filesystem filename, you should call ``pkg_resources.normalize_path()`` on it before passing it to this routine! """ # Generate alternative interpretations of a source distro name # Because some packages are ambiguous as to name/versions split # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, # the spurious interpretations should be ignored, because in the event # there's also an "adns" package, the spurious "python-1.1.0" version will # compare lower than any numeric version number, and is therefore unlikely # to match a request for it. It's still a potential problem, though, and # in the long run PyPI and the distutils should go for "safe" names and # versions in distribution archive names (sdist and bdist). parts = basename.split('-') if not py_version: for i,p in enumerate(parts[2:]): if len(p)==5 and p.startswith('py2.'): return # It's a bdist_dumb, not an sdist -- bail out for p in range(1,len(parts)+1): yield Distribution( location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), py_version=py_version, precedence = precedence, platform = platform )
[ "def", "interpret_distro_name", "(", "location", ",", "basename", ",", "metadata", ",", "py_version", "=", "None", ",", "precedence", "=", "SOURCE_DIST", ",", "platform", "=", "None", ")", ":", "parts", "=", "basename", ".", "split", "(", "'-'", ")", "if", "not", "py_version", ":", "for", "i", ",", "p", "in", "enumerate", "(", "parts", "[", "2", ":", "]", ")", ":", "if", "len", "(", "p", ")", "==", "5", "and", "p", ".", "startswith", "(", "'py2.'", ")", ":", "return", "for", "p", "in", "range", "(", "1", ",", "len", "(", "parts", ")", "+", "1", ")", ":", "yield", "Distribution", "(", "location", ",", "metadata", ",", "'-'", ".", "join", "(", "parts", "[", ":", "p", "]", ")", ",", "'-'", ".", "join", "(", "parts", "[", "p", ":", "]", ")", ",", "py_version", "=", "py_version", ",", "precedence", "=", "precedence", ",", "platform", "=", "platform", ")" ]
python
Generate alternative interpretations of a source distro name Note: if `location` is a filesystem filename, you should call ``pkg_resources.normalize_path()`` on it before passing it to this routine!
true
2,699,336
def get_best_local_timezone(): """ Compares local timezone offset to pytz's timezone db, to determine a matching timezone name to use when TIME_ZONE is not set. """ zone_name = tzlocal.get_localzone().zone if zone_name in pytz.all_timezones: return zone_name if time.daylight: local_offset = time.altzone localtz = time.tzname[1] else: local_offset = time.timezone localtz = time.tzname[0] local_offset = datetime.timedelta(seconds=-local_offset) for zone_name in pytz.all_timezones: timezone = pytz.timezone(zone_name) if not hasattr(timezone, '_tzinfos'): continue for utcoffset, daylight, tzname in timezone._tzinfos: if utcoffset == local_offset and tzname == localtz: return zone_name
[ "def", "get_best_local_timezone", "(", ")", ":", "zone_name", "=", "tzlocal", ".", "get_localzone", "(", ")", ".", "zone", "if", "zone_name", "in", "pytz", ".", "all_timezones", ":", "return", "zone_name", "if", "time", ".", "daylight", ":", "local_offset", "=", "time", ".", "altzone", "localtz", "=", "time", ".", "tzname", "[", "1", "]", "else", ":", "local_offset", "=", "time", ".", "timezone", "localtz", "=", "time", ".", "tzname", "[", "0", "]", "local_offset", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "-", "local_offset", ")", "for", "zone_name", "in", "pytz", ".", "all_timezones", ":", "timezone", "=", "pytz", ".", "timezone", "(", "zone_name", ")", "if", "not", "hasattr", "(", "timezone", ",", "'_tzinfos'", ")", ":", "continue", "for", "utcoffset", ",", "daylight", ",", "tzname", "in", "timezone", ".", "_tzinfos", ":", "if", "utcoffset", "==", "local_offset", "and", "tzname", "==", "localtz", ":", "return", "zone_name" ]
python
Compares local timezone offset to pytz's timezone db, to determine a matching timezone name to use when TIME_ZONE is not set.
true
2,700,061
def pkg_info(pkg_path): """Return dict describing the context of this package Parameters ---------- pkg_path : str path containing __init__.py for package Returns ------- context : dict with named parameters of interest """ src, hsh = pkg_commit_hash(pkg_path) return dict( ipython_version=release.version, ipython_path=pkg_path, commit_source=src, commit_hash=hsh, sys_version=sys.version, sys_executable=sys.executable, sys_platform=sys.platform, platform=platform.platform(), os_name=os.name, default_encoding=encoding.DEFAULT_ENCODING, )
[ "def", "pkg_info", "(", "pkg_path", ")", ":", "src", ",", "hsh", "=", "pkg_commit_hash", "(", "pkg_path", ")", "return", "dict", "(", "ipython_version", "=", "release", ".", "version", ",", "ipython_path", "=", "pkg_path", ",", "commit_source", "=", "src", ",", "commit_hash", "=", "hsh", ",", "sys_version", "=", "sys", ".", "version", ",", "sys_executable", "=", "sys", ".", "executable", ",", "sys_platform", "=", "sys", ".", "platform", ",", "platform", "=", "platform", ".", "platform", "(", ")", ",", "os_name", "=", "os", ".", "name", ",", "default_encoding", "=", "encoding", ".", "DEFAULT_ENCODING", ",", ")" ]
python
Return dict describing the context of this package Parameters ---------- pkg_path : str path containing __init__.py for package Returns ------- context : dict with named parameters of interest
true
2,700,063
def _num_cpus_darwin(): """Return the number of active CPUs on a Darwin system.""" p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE) return p.stdout.read()
[ "def", "_num_cpus_darwin", "(", ")", ":", "p", "=", "subprocess", ".", "Popen", "(", "[", "'sysctl'", ",", "'-n'", ",", "'hw.ncpu'", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "return", "p", ".", "stdout", ".", "read", "(", ")" ]
python
Return the number of active CPUs on a Darwin system.
true
2,700,262
def inputhook_pyglet(): """Run the pyglet event loop by processing pending events only. This keeps processing pending events until stdin is ready. After processing all pending events, a call to time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. This sleep time should be tuned though for best performance. """ # We need to protect against a user pressing Control-C when IPython is # idle and this is running. We trap KeyboardInterrupt and pass. try: t = clock() while not stdin_ready(): pyglet.clock.tick() for window in pyglet.app.windows: window.switch_to() window.dispatch_events() window.dispatch_event('on_draw') flip(window) # We need to sleep at this point to keep the idle CPU load # low. However, if sleep to long, GUI response is poor. As # a compromise, we watch how often GUI events are being processed # and switch between a short and long sleep time. Here are some # stats useful in helping to tune this. # time CPU load # 0.001 13% # 0.005 3% # 0.01 1.5% # 0.05 0.5% used_time = clock() - t if used_time > 5*60.0: # print 'Sleep for 5 s' # dbg time.sleep(5.0) elif used_time > 10.0: # print 'Sleep for 1 s' # dbg time.sleep(1.0) elif used_time > 0.1: # Few GUI events coming in, so we can sleep longer # print 'Sleep for 0.05 s' # dbg time.sleep(0.05) else: # Many GUI events coming in, so sleep only very little time.sleep(0.001) except KeyboardInterrupt: pass return 0
[ "def", "inputhook_pyglet", "(", ")", ":", "try", ":", "t", "=", "clock", "(", ")", "while", "not", "stdin_ready", "(", ")", ":", "pyglet", ".", "clock", ".", "tick", "(", ")", "for", "window", "in", "pyglet", ".", "app", ".", "windows", ":", "window", ".", "switch_to", "(", ")", "window", ".", "dispatch_events", "(", ")", "window", ".", "dispatch_event", "(", "'on_draw'", ")", "flip", "(", "window", ")", "used_time", "=", "clock", "(", ")", "-", "t", "if", "used_time", ">", "5", "*", "60.0", ":", "time", ".", "sleep", "(", "5.0", ")", "elif", "used_time", ">", "10.0", ":", "time", ".", "sleep", "(", "1.0", ")", "elif", "used_time", ">", "0.1", ":", "time", ".", "sleep", "(", "0.05", ")", "else", ":", "time", ".", "sleep", "(", "0.001", ")", "except", "KeyboardInterrupt", ":", "pass", "return", "0" ]
python
Run the pyglet event loop by processing pending events only. This keeps processing pending events until stdin is ready. After processing all pending events, a call to time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. This sleep time should be tuned though for best performance.
true
2,700,503
def decorate_fn_with_doc(new_fn, old_fn, additional_text=""): """Make new_fn have old_fn's doc string. This is particularly useful for the do_... commands that hook into the help system. Adapted from from a comp.lang.python posting by Duncan Booth.""" def wrapper(*args, **kw): return new_fn(*args, **kw) if old_fn.__doc__: wrapper.__doc__ = old_fn.__doc__ + additional_text return wrapper
[ "def", "decorate_fn_with_doc", "(", "new_fn", ",", "old_fn", ",", "additional_text", "=", "\"\"", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kw", ")", ":", "return", "new_fn", "(", "*", "args", ",", "**", "kw", ")", "if", "old_fn", ".", "__doc__", ":", "wrapper", ".", "__doc__", "=", "old_fn", ".", "__doc__", "+", "additional_text", "return", "wrapper" ]
python
Make new_fn have old_fn's doc string. This is particularly useful for the do_... commands that hook into the help system. Adapted from from a comp.lang.python posting by Duncan Booth.
true