Dataset schema (one record per row, fields in the order shown):

    repo              stringclasses    679 values
    path              stringlengths    6 to 122
    func_name         stringlengths    2 to 76
    original_string   stringlengths    87 to 70.9k
    language          stringclasses    1 value
    code              stringlengths    87 to 70.9k
    code_tokens       sequencelengths  20 to 6.91k
    docstring         stringlengths    1 to 21.7k
    docstring_tokens  sequencelengths  1 to 1.6k
    sha               stringclasses    679 values
    url               stringlengths    92 to 213
    partition         stringclasses    1 value
repo: crazy-canux/arguspy
path: arguspy/wmi_subprocess.py
func_name: Wmi.query
language: python
code:

    def query(self, wql):
        """Connect by wmi and run wql."""
        try:
            self.__wql = ['wmic', '-U',
                          self.args.domain + '\\' + self.args.user + '%' + self.args.password,
                          '//' + self.args.host,
                          '--namespace', self.args.namespace,
                          '--delimiter', self.args.delimiter,
                          wql]
            self.logger.debug("wql: {}".format(self.__wql))
            self.__output = subprocess.check_output(self.__wql)
            self.logger.debug("output: {}".format(self.__output))
            self.logger.debug("wmi connect succeed.")
            self.__wmi_output = self.__output.splitlines()[1:]
            self.logger.debug("wmi_output: {}".format(self.__wmi_output))
            self.__csv_header = csv.DictReader(self.__wmi_output, delimiter='|')
            self.logger.debug("csv_header: {}".format(self.__csv_header))
            return list(self.__csv_header)
        except subprocess.CalledProcessError as e:
            self.unknown("Connect by wmi and run wql error: %s" % e)
docstring: Connect by wmi and run wql.
sha: e9486b5df61978a990d56bf43de35f3a4cdefcc3
url: https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/arguspy/wmi_subprocess.py#L30-L49
partition: valid
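The parsing step in query() above is easy to check in isolation: wmic prints a header row followed by pipe-delimited records, and csv.DictReader keyed on the '|' delimiter maps each record to a dict. A minimal, runnable sketch with made-up output (note that as written the method targets Python 2; on Python 3 subprocess.check_output returns bytes that would need decoding before this step):

    import csv

    # Made-up stand-in for decoded wmic output with the first line
    # already stripped, as in query() above: header row, then rows.
    sample_lines = [
        "Name|FreeSpace|Size",
        "C:|1024|4096",
        "D:|2048|8192",
    ]

    reader = csv.DictReader(sample_lines, delimiter='|')
    print(list(reader))
    # [{'Name': 'C:', 'FreeSpace': '1024', 'Size': '4096'},
    #  {'Name': 'D:', 'FreeSpace': '2048', 'Size': '8192'}]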
repo: pip-services/pip-services-commons-python
path: pip_services_commons/cache/CacheEntry.py
func_name: CacheEntry.set_value
language: python
code:

    def set_value(self, value, timeout):
        """
        Changes the cached value and updates creation time.

        Args:
            value: the new cached value.
            timeout: time to live for the object in milliseconds

        Returns: None
        """
        self.value = value
        self.expiration = time.clock() * 1000 + timeout
docstring: Changes the cached value and updates creation time. Args: value: the new cached value. timeout: time to live for the object in milliseconds Returns: None
sha: 2205b18c45c60372966c62c1f23ac4fbc31e11b3
url: https://github.com/pip-services/pip-services-commons-python/blob/2205b18c45c60372966c62c1f23ac4fbc31e11b3/pip_services_commons/cache/CacheEntry.py#L36-L47
partition: valid
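time.clock() was removed in Python 3.8, so on current interpreters this method raises AttributeError. A minimal sketch of the same millisecond expiry bookkeeping using time.perf_counter() instead; the constructor and is_expired helper here are illustrative assumptions, not part of the library:

    import time

    class CacheEntry:
        # hypothetical minimal entry; mirrors the set_value logic above
        def __init__(self, value, timeout):
            self.set_value(value, timeout)

        def set_value(self, value, timeout):
            self.value = value
            # timeout is in milliseconds, so scale the clock to match
            self.expiration = time.perf_counter() * 1000 + timeout

        def is_expired(self):
            return time.perf_counter() * 1000 > self.expiration

    entry = CacheEntry("cached", timeout=500)   # 500 ms to live
    print(entry.is_expired())                   # False right after creation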
repo: zwischenloesung/ardu-report-lib
path: libardurep/datareporter.py
func_name: DataReporter.log
language: python
code:

    def log(self, url=None, credentials=None, do_verify_certificate=True):
        """
        Wrapper for the other log methods, decide which one based on the
        URL parameter.
        """
        if url is None:
            url = self.url
        if re.match("file://", url):
            self.log_file(url)
        elif re.match("https://", url) or re.match("http://", url):
            self.log_post(url, credentials, do_verify_certificate)
        else:
            self.log_stdout()
docstring: Wrapper for the other log methods, decide which one based on the URL parameter.
sha: 51bd4a07e036065aafcb1273b151bea3fdfa50fa
url: https://github.com/zwischenloesung/ardu-report-lib/blob/51bd4a07e036065aafcb1273b151bea3fdfa50fa/libardurep/datareporter.py#L34-L46
partition: valid
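The method dispatches on the URL scheme with re.match, which anchors at the start of the string. A runnable sketch of the same dispatch written with urllib.parse instead; the returned handler names are just labels for illustration, not the library's API:

    from urllib.parse import urlparse

    def pick_handler(url):
        scheme = urlparse(url).scheme
        if scheme == "file":
            return "log_file"
        elif scheme in ("http", "https"):
            return "log_post"
        return "log_stdout"

    print(pick_handler("file:///tmp/report.json"))  # log_file
    print(pick_handler("https://example.com/api"))  # log_post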
repo: zwischenloesung/ardu-report-lib
path: libardurep/datareporter.py
func_name: DataReporter.log_file
language: python
code:

    def log_file(self, url=None):
        """
        Write to a local log file
        """
        if url is None:
            url = self.url
        f = re.sub("file://", "", url)
        try:
            with open(f, "a") as of:
                of.write(str(self.store.get_json_tuples(True)))
        except IOError as e:
            print(e)
            print("Could not write the content to the file..")
docstring: Write to a local log file
sha: 51bd4a07e036065aafcb1273b151bea3fdfa50fa
url: https://github.com/zwischenloesung/ardu-report-lib/blob/51bd4a07e036065aafcb1273b151bea3fdfa50fa/libardurep/datareporter.py#L54-L66
partition: valid
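The file:// prefix is stripped with re.sub, so a file URL maps straight onto a filesystem path; a one-line runnable check:

    import re
    print(re.sub("file://", "", "file:///tmp/report.json"))  # /tmp/report.json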
repo: zwischenloesung/ardu-report-lib
path: libardurep/datareporter.py
func_name: DataReporter.log_post
language: python
code:

    def log_post(self, url=None, credentials=None, do_verify_certificate=True):
        """
        Write to a remote host via HTTP POST
        """
        if url is None:
            url = self.url
        if credentials is None:
            credentials = self.credentials
        if do_verify_certificate is None:
            do_verify_certificate = self.do_verify_certificate
        if credentials and "base64" in credentials:
            headers = {"Content-Type": "application/json",
                       'Authorization': 'Basic %s' % credentials["base64"]}
        else:
            headers = {"Content-Type": "application/json"}
        try:
            request = requests.post(url, headers=headers,
                                    data=self.store.get_json(),
                                    verify=do_verify_certificate)
        except httplib.IncompleteRead as e:
            request = e.partial
docstring: Write to a remote host via HTTP POST
sha: 51bd4a07e036065aafcb1273b151bea3fdfa50fa
url: https://github.com/zwischenloesung/ardu-report-lib/blob/51bd4a07e036065aafcb1273b151bea3fdfa50fa/libardurep/datareporter.py#L68-L87
partition: valid
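Two things worth noting: httplib is the Python 2 module name (http.client on Python 3), and since do_verify_certificate defaults to True, the `is None` fallback to self.do_verify_certificate only triggers when a caller passes None explicitly. The Authorization header construction is easy to verify on its own; a runnable sketch with made-up credentials:

    from base64 import b64encode

    credentials = {"base64": b64encode(b"user:secret").decode("ascii")}
    headers = {"Content-Type": "application/json",
               "Authorization": "Basic %s" % credentials["base64"]}
    print(headers["Authorization"])  # Basic dXNlcjpzZWNyZXQ=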
repo: zwischenloesung/ardu-report-lib
path: libardurep/datareporter.py
func_name: DataReporter.register_credentials
language: python
code:

    def register_credentials(self, credentials=None, user=None, user_file=None,
                             password=None, password_file=None):
        """
        Helper method to store username and password
        """
        # lets store all kind of credential data into this dict
        if credentials is not None:
            self.credentials = credentials
        else:
            self.credentials = {}
        # set the user from CLI or file
        if user:
            self.credentials["user"] = user
        elif user_file:
            with open(user_file, "r") as of:
                # what would the file entry look like?
                pattern = re.compile("^user: ")
                for l in of:
                    if re.match(pattern, l):
                        # strip away the newline
                        l = l[0:-1]
                        self.credentials["user"] = re.sub(pattern, "", l)
                        # remove any surrounding quotes
                        if self.credentials["user"][0:1] == '"' and \
                                self.credentials["user"][-1:] == '"':
                            self.credentials["user"] = self.credentials["user"][1:-1]
        # set the password from CLI or file
        if password:
            self.credentials["password"] = password
        elif password_file:
            with open(password_file, "r") as of:
                # what would the file entry look like?
                pattern = re.compile("^password: ")
                for l in of:
                    if re.match(pattern, l):
                        # strip away the newline
                        l = l[0:-1]
                        self.credentials["password"] = \
                            re.sub(pattern, "", l)
                        # remove any surrounding quotes
                        if self.credentials["password"][0:1] == '"' and \
                                self.credentials["password"][-1:] == '"':
                            self.credentials["password"] = \
                                self.credentials["password"][1:-1]
        # if both user and password is set,
        # 1. encode to base 64 for basic auth
        if "user" in self.credentials and "password" in self.credentials:
            c = self.credentials["user"] + ":" + self.credentials["password"]
            self.credentials["base64"] = b64encode(c.encode()).decode("ascii")
docstring: Helper method to store username and password
sha: 51bd4a07e036065aafcb1273b151bea3fdfa50fa
url: https://github.com/zwischenloesung/ardu-report-lib/blob/51bd4a07e036065aafcb1273b151bea3fdfa50fa/libardurep/datareporter.py#L95-L143
partition: valid
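The file-parsing branch looks for lines of the form `user: "<name>"`, strips the trailing newline, removes the prefix, then peels surrounding quotes. That logic in isolation, runnable with a made-up line:

    import re

    line = 'user: "alice"\n'
    pattern = re.compile("^user: ")
    if re.match(pattern, line):
        value = re.sub(pattern, "", line[0:-1])   # drop the newline first
        if value[0:1] == '"' and value[-1:] == '"':
            value = value[1:-1]                   # peel surrounding quotes
        print(value)  # alice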
repo: zvoase/django-relax
path: relax/utils.py
func_name: generator_to_list
language: python
code:

    def generator_to_list(function):
        """
        Wrap a generator function so that it returns a list when called.
        For example:

        # Define a generator
        >>> def mygen(n):
        ...     i = 0
        ...     while i < n:
        ...         yield i
        ...         i += 1

        # This is how it might work
        >>> generator = mygen(5)
        >>> generator.next()
        0
        >>> generator.next()
        1

        # Wrap it in generator_to_list, and it will behave differently.
        >>> mygen = generator_to_list(mygen)
        >>> mygen(5)
        [0, 1, 2, 3, 4]
        """
        def wrapper(*args, **kwargs):
            return list(function(*args, **kwargs))
        wrapper.__name__ = function.__name__
        wrapper.__doc__ = function.__doc__
        return wrapper
docstring: Wrap a generator function so that it returns a list when called. For example: # Define a generator >>> def mygen(n): ... i = 0 ... while i < n: ... yield i ... i += 1 # This is how it might work >>> generator = mygen(5) >>> generator.next() 0 >>> generator.next() 1 # Wrap it in generator_to_list, and it will behave differently. >>> mygen = generator_to_list(mygen) >>> mygen(5) [0, 1, 2, 3, 4]
sha: 10bb37bf3a512b290816856a6877c17fa37e930f
url: https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/utils.py#L7-L35
partition: valid
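The doctest uses the Python 2 generator.next() spelling; on Python 3 that is next(generator). Copying __name__ and __doc__ by hand is exactly what functools.wraps automates; an equivalent, runnable version of the decorator written that way:

    import functools

    def generator_to_list(function):
        @functools.wraps(function)   # copies __name__, __doc__, and more
        def wrapper(*args, **kwargs):
            return list(function(*args, **kwargs))
        return wrapper

    @generator_to_list
    def mygen(n):
        i = 0
        while i < n:
            yield i
            i += 1

    print(mygen(5))  # [0, 1, 2, 3, 4]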
repo: zvoase/django-relax
path: relax/utils.py
func_name: logrotate
language: python
code:

    def logrotate(filename):
        """
        Return the next available filename for a particular filename prefix.
        For example:

        >>> import os
        # Make three (empty) files in a directory
        >>> fp0 = open('file.0', 'w')
        >>> fp1 = open('file.1', 'w')
        >>> fp2 = open('file.2', 'w')
        >>> fp0.close(), fp1.close(), fp2.close()
        (None, None, None)
        # Use logrotate to get the next available filename.
        >>> logrotate('file')
        'file.3'
        >>> logrotate('file.2')
        'file.3'
        >>> logrotate('file.1')
        'file.3'

        This can be used to get the next available filename for logging,
        allowing you to rotate log files, without using Python's ``logging``
        module.
        """
        match = re.match(r'(.*)' + re.escape(os.path.extsep) + r'(\d+)',
                         filename)
        if os.path.exists(filename):
            if match:
                prefix, number = match.groups()
                number = int(number)
                while os.path.exists(os.path.extsep.join((prefix, str(number)))):
                    number += 1
                return os.path.extsep.join((prefix, str(number)))
        elif match:
            return filename
        return logrotate(os.path.extsep.join((filename, '0')))
docstring: Return the next available filename for a particular filename prefix. For example: >>> import os # Make three (empty) files in a directory >>> fp0 = open('file.0', 'w') >>> fp1 = open('file.1', 'w') >>> fp2 = open('file.2', 'w') >>> fp0.close(), fp1.close(), fp2.close() (None, None, None) # Use logrotate to get the next available filename. >>> logrotate('file') 'file.3' >>> logrotate('file.2') 'file.3' >>> logrotate('file.1') 'file.3' This can be used to get the next available filename for logging, allowing you to rotate log files, without using Python's ``logging`` module.
sha: 10bb37bf3a512b290816856a6877c17fa37e930f
url: https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/utils.py#L38-L74
partition: valid
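A runnable demonstration of the rotation behaviour from the doctest, run in a throwaway directory (this assumes the logrotate function above, and the re/os modules it relies on, are already in scope):

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as d:
        base = os.path.join(d, "file")
        for n in range(3):                      # create file.0, file.1, file.2
            open("%s.%d" % (base, n), "w").close()
        print(logrotate(base))                  # .../file.3
        print(logrotate(base + ".1"))           # .../file.3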
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: set_connection
language: python
code:

    def set_connection(host=None, database=None, user=None, password=None):
        """Set connection parameters. Call set_connection with no arguments to clear."""
        c.CONNECTION['HOST'] = host
        c.CONNECTION['DATABASE'] = database
        c.CONNECTION['USER'] = user
        c.CONNECTION['PASSWORD'] = password
docstring: Set connection parameters. Call set_connection with no arguments to clear.
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L49-L54
partition: valid
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: set_delegate
language: python
code:

    def set_delegate(address=None, pubkey=None, secret=None):
        """Set delegate parameters. Call set_delegate with no arguments to clear."""
        c.DELEGATE['ADDRESS'] = address
        c.DELEGATE['PUBKEY'] = pubkey
        c.DELEGATE['PASSPHRASE'] = secret
docstring: Set delegate parameters. Call set_delegate with no arguments to clear.
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L57-L61
partition: valid
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: get_transactionlist
language: python
code:

    def get_transactionlist(delegate_pubkey):
        """returns a list of named tuples of all transactions relevant to a
        specific delegates voters. Flow: finds all voters and unvoters,
        SELECTs all transactions of those voters, names all transactions
        according to the scheme: 'transaction', 'id amount timestamp
        recipientId senderId rawasset type fee blockId'"""
        res = DbCursor().execute_and_fetchall("""
            SELECT transactions."id", transactions."amount",
                   blocks."timestamp", transactions."recipientId",
                   transactions."senderId", transactions."rawasset",
                   transactions."type", transactions."fee", transactions."blockId"
            FROM transactions
            INNER JOIN blocks
              ON transactions."blockId" = blocks.id
            WHERE transactions."senderId" IN
              (SELECT transactions."recipientId"
               FROM transactions, votes
               WHERE transactions."id" = votes."transactionId"
               AND votes."votes" = '+{0}')
            OR transactions."recipientId" IN
              (SELECT transactions."recipientId"
               FROM transactions, votes
               WHERE transactions."id" = votes."transactionId"
               AND votes."votes" = '+{0}')
            ORDER BY blocks."timestamp" ASC;""".format(delegate_pubkey))

        Transaction = namedtuple(
            'transaction',
            'id amount timestamp recipientId senderId rawasset type fee')
        named_transactions = []

        for i in res:
            tx_id = Transaction(
                id=i[0],
                amount=i[1],
                timestamp=i[2],
                recipientId=i[3],
                senderId=i[4],
                rawasset=i[5],
                type=i[6],
                fee=i[7],
            )
            named_transactions.append(tx_id)
        return named_transactions
docstring: returns a list of named tuples of all transactions relevant to a specific delegates voters. Flow: finds all voters and unvoters, SELECTs all transactions of those voters, names all transactions according to the scheme: 'transaction', 'id amount timestamp recipientId senderId rawasset type fee blockId'
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L989-L1032
partition: valid
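Note that the SELECT returns nine columns (blockId included) while the namedtuple keeps only the first eight, and that the pubkey is spliced into the SQL with str.format rather than a bound parameter. The row-to-namedtuple mapping in isolation, runnable with a made-up row:

    from collections import namedtuple

    Transaction = namedtuple(
        'transaction',
        'id amount timestamp recipientId senderId rawasset type fee')

    rows = [("tx1", 1000, 1234, "AdrA", "AdrB", "{}", 0, 10, "block9")]
    named = [Transaction(*row[:8]) for row in rows]   # blockId is dropped
    print(named[0].amount, named[0].fee)              # 1000 10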
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: get_events
language: python
code:

    def get_events(delegate_pubkey):
        """returns a list of named tuples of all transactions relevant to a
        specific delegates voters. Flow: finds all voters and unvoters,
        SELECTs all transactions of those voters, names all transactions
        according to the scheme: 'transaction', 'id amount timestamp
        recipientId senderId rawasset type fee blockId'"""
        res = DbCursor().execute_and_fetchall("""
            SELECT *
            FROM(
              SELECT transactions."id",
                     transactions."amount",
                     transactions."fee",
                     blocks."timestamp",
                     transactions."recipientId",
                     transactions."senderId",
                     transactions."type",
                     transactions."rawasset"
              FROM transactions
              INNER JOIN blocks
                ON transactions."blockId" = blocks.id
              WHERE transactions."senderId" IN
                (SELECT transactions."recipientId"
                 FROM transactions, votes
                 WHERE transactions."id" = votes."transactionId"
                 AND votes."votes" = '+{0}')
              OR transactions."recipientId" IN
                (SELECT transactions."recipientId"
                 FROM transactions, votes
                 WHERE transactions."id" = votes."transactionId"
                 AND votes."votes" = '+{0}')
              UNION
              SELECT blocks."id",
                     blocks."reward",
                     blocks."totalFee",
                     blocks."timestamp",
                     mem_accounts."address",
                     NULL,
                     100,
                     blocks."rawtxs"
              FROM blocks
              INNER JOIN mem_accounts
                ON mem_accounts."publicKey" = blocks."generatorPublicKey"
              WHERE mem_accounts."address" IN
                (SELECT transactions."recipientId"
                 FROM transactions, votes
                 WHERE transactions."id" = votes."transactionId"
                 AND votes."votes" = '+{0}')) AS events
            ORDER BY events."timestamp";""".format(delegate_pubkey))

        Event = namedtuple(
            'Event',
            'id amount fee timestamp recipientId senderId type raw')
        named_events = []

        for i in res:
            tx_id = Event(
                id=i[0],
                amount=i[1],
                fee=i[2],
                timestamp=i[3],
                recipientId=i[4],
                senderId=i[5],
                type=i[6],
                raw=i[7]
            )
            named_events.append(tx_id)
        return named_events
docstring: returns a list of named tuples of all transactions relevant to a specific delegates voters. Flow: finds all voters and unvoters, SELECTs all transactions of those voters, names all transactions according to the scheme: 'transaction', 'id amount timestamp recipientId senderId rawasset type fee blockId'
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L1035-L1100
partition: valid
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: Address.payout
language: python
code:

    def payout(address):
        """returns all received transactions between the address and
        registered delegate accounts ORDER by timestamp ASC."""
        qry = DbCursor().execute_and_fetchall("""
            SELECT DISTINCT transactions."id", transactions."amount",
                   transactions."timestamp", transactions."recipientId",
                   transactions."senderId", transactions."rawasset",
                   transactions."type", transactions."fee"
            FROM transactions, delegates
            WHERE transactions."senderId" IN (
              SELECT transactions."senderId"
              FROM transactions, delegates
              WHERE transactions."id" = delegates."transactionId"
            )
            AND transactions."recipientId" = '{}'
            ORDER BY transactions."timestamp" ASC""".format(address))

        Transaction = namedtuple(
            'transaction',
            'id amount timestamp recipientId senderId rawasset type fee')
        named_transactions = []

        for i in qry:
            tx_id = Transaction(
                id=i[0],
                amount=i[1],
                timestamp=i[2],
                recipientId=i[3],
                senderId=i[4],
                rawasset=i[5],
                type=i[6],
                fee=i[7],
            )
            named_transactions.append(tx_id)
        return named_transactions
docstring: returns all received transactions between the address and registered delegate accounts ORDER by timestamp ASC.
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L177-L212
partition: valid
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: Address.votes
language: python
code:

    def votes(address):
        """Returns a list of namedtuples all votes made by an address,
        {(+/-)pubkeydelegate:timestamp}, timestamp DESC"""
        qry = DbCursor().execute_and_fetchall("""
            SELECT votes."votes", transactions."timestamp"
            FROM votes, transactions
            WHERE transactions."id" = votes."transactionId"
            AND transactions."senderId" = '{}'
            ORDER BY transactions."timestamp" DESC
            """.format(address))

        Vote = namedtuple(
            'vote',
            'direction delegate timestamp')
        res = []
        for i in qry:
            if i[0][0] == '+':
                direction = True
            elif i[0][0] == '-':
                direction = False
            else:
                logger.fatal('failed to interpret direction for: {}'.format(i))
                raise ParseError('failed to interpret direction of vote for: {}'.format(i))
            vote = Vote(
                direction=direction,
                delegate=i[0][1:],
                timestamp=i[1],
            )
            res.append(vote)
        return res
docstring: Returns a list of namedtuples all votes made by an address, {(+/-)pubkeydelegate:timestamp}, timestamp DESC
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L251-L279
partition: valid
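Vote rows arrive as '+<pubkey>' or '-<pubkey>' strings, so the first character encodes the direction and the remainder is the delegate pubkey. The parsing rule in isolation, runnable with made-up values:

    raw_votes = ["+03f25a", "-03f25a"]
    for v in raw_votes:
        direction = v[0] == '+'   # True = vote cast, False = vote removed
        delegate = v[1:]
        print(direction, delegate)
    # True 03f25a
    # False 03f25a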
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: Address.balance
language: python
code:

    def balance(address):
        """
        Takes a single address and returns the current balance.
        """
        txhistory = Address.transactions(address)
        balance = 0
        for i in txhistory:
            if i.recipientId == address:
                balance += i.amount
            if i.senderId == address:
                balance -= (i.amount + i.fee)

        delegates = Delegate.delegates()
        for i in delegates:
            if address == i.address:
                forged_blocks = Delegate.blocks(i.pubkey)
                for block in forged_blocks:
                    balance += (block.reward + block.totalFee)
        if balance < 0:
            height = Node.height()
            # the original format strings here read '{1)' instead of '{1}',
            # which str.format rejects with a ValueError
            logger.fatal('Negative balance for address {0}, Nodeheight: {1}'.format(address, height))
            raise NegativeBalanceError('Negative balance for address {0}, Nodeheight: {1}'.format(address, height))
        return balance
docstring: Takes a single address and returns the current balance.
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L282-L305
partition: valid
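The credit/debit rule is: incoming transactions add amount, outgoing ones subtract amount plus fee, and forged blocks add reward plus totalFee. A runnable worked example of the transaction part, with made-up data:

    from collections import namedtuple

    Tx = namedtuple("Tx", "senderId recipientId amount fee")
    address = "AdrA"
    txhistory = [
        Tx("AdrB", "AdrA", 100, 1),   # received: +100
        Tx("AdrA", "AdrC", 40, 1),    # sent: -(40 + 1)
    ]

    balance = 0
    for t in txhistory:
        if t.recipientId == address:
            balance += t.amount
        if t.senderId == address:
            balance -= (t.amount + t.fee)
    print(balance)  # 59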
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: Address.balance_over_time
language: python
code:

    def balance_over_time(address):
        """returns a list of named tuples, x.timestamp, x.amount
        including block rewards"""
        forged_blocks = None
        txhistory = Address.transactions(address)
        delegates = Delegate.delegates()
        for i in delegates:
            if address == i.address:
                forged_blocks = Delegate.blocks(i.pubkey)

        balance_over_time = []
        balance = 0
        block = 0

        Balance = namedtuple(
            'balance',
            'timestamp amount')

        for tx in txhistory:
            if forged_blocks:
                while forged_blocks[block].timestamp <= tx.timestamp:
                    balance += (forged_blocks[block].reward + forged_blocks[block].totalFee)
                    balance_over_time.append(Balance(timestamp=forged_blocks[block].timestamp, amount=balance))
                    block += 1

            if tx.senderId == address:
                balance -= (tx.amount + tx.fee)
                res = Balance(timestamp=tx.timestamp, amount=balance)
                balance_over_time.append(res)
            if tx.recipientId == address:
                balance += tx.amount
                res = Balance(timestamp=tx.timestamp, amount=balance)
                balance_over_time.append(res)

        if forged_blocks and block <= len(forged_blocks) - 1:
            if forged_blocks[block].timestamp > txhistory[-1].timestamp:
                for i in forged_blocks[block:]:
                    balance += (i.reward + i.totalFee)
                    res = Balance(timestamp=i.timestamp, amount=balance)
                    balance_over_time.append(res)
        return balance_over_time
docstring: returns a list of named tuples, x.timestamp, x.amount including block rewards
sha: 27712cd97cd3658ee54a4330ff3135b51a01d7d1
url: https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L308-L348
partition: valid
repo: BlockHub/blockhubdpostools
path: dpostools/legacy.py
func_name: Delegate.delegates
language: python
code:

    def delegates():
        """returns a list of named tuples of all delegates.
        {username: {'pubkey':pubkey, 'timestamp':timestamp, 'address':address}}"""
        qry = DbCursor().execute_and_fetchall("""
            SELECT delegates."username", delegates."transactionId",
                   transactions."timestamp", transactions."senderId",
                   transactions."senderPublicKey"
            FROM transactions
            JOIN delegates ON transactions."id" = delegates."transactionId"
            """)

        Delegate = namedtuple(
            'delegate',
            'username pubkey timestamp address transactionId')
        res = []
        for i in qry:
            registration = Delegate(
                username=i[0],
                pubkey=binascii.hexlify(i[4]).decode("utf-8"),
                timestamp=i[2],
                address=i[3],
                transactionId=i[1]
            )
            res.append(registration)
        return res
returns a list of named tuples of all delegates: (username, pubkey, timestamp, address, transactionId)
[ "returns", "a", "list", "of", "named", "tuples", "of", "all", "delegates", ".", "{", "username", ":", "{", "pubkey", ":", "pubkey", "timestamp", ":", "timestamp", "address", ":", "address", "}}" ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L354-L377
valid
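A quick sketch of building a lookup table from delegates(); the import path and the 'exampledelegate' username are assumptions for illustration:

from dpostools import legacy

delegates = legacy.Delegate.delegates()
by_username = {d.username: d for d in delegates}
# 'exampledelegate' is a hypothetical username
print(by_username['exampledelegate'].pubkey)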
BlockHub/blockhubdpostools
dpostools/legacy.py
Delegate.lastpayout
def lastpayout(delegate_address, blacklist=None):
    '''
    Assumes that all send transactions from a delegate are payouts.
    Use blacklist to remove rewardwallet and other transactions if
    the address is not a voter. blacklist can contain both addresses
    and transactionIds'''
    if blacklist and len(blacklist) > 1:
        command_blacklist = 'NOT IN ' + str(tuple(blacklist))
    elif blacklist and len(blacklist) == 1:
        command_blacklist = '!= ' + "'" + blacklist[0] + "'"
    else:
        command_blacklist = "!= 'nothing'"
    qry = DbCursor().execute_and_fetchall("""
        SELECT ts."recipientId", ts."id", ts."timestamp"
        FROM transactions ts,
          (SELECT MAX(transactions."timestamp") AS max_timestamp, transactions."recipientId"
           FROM transactions
           WHERE transactions."senderId" = '{0}'
           AND transactions."id" {1}
           GROUP BY transactions."recipientId") maxresults
        WHERE ts."recipientId" = maxresults."recipientId"
        AND ts."recipientId" {1}
        AND ts."timestamp"= maxresults.max_timestamp;
        """.format(delegate_address, command_blacklist))

    result = []
    Payout = namedtuple(
        'payout',
        'address id timestamp')
    for i in qry:
        payout = Payout(
            address=i[0],
            id=i[1],
            timestamp=i[2]
        )
        result.append(payout)
    return result
python
def lastpayout(delegate_address, blacklist=None):
    '''
    Assumes that all send transactions from a delegate are payouts.
    Use blacklist to remove rewardwallet and other transactions if
    the address is not a voter. blacklist can contain both addresses
    and transactionIds'''
    if blacklist and len(blacklist) > 1:
        command_blacklist = 'NOT IN ' + str(tuple(blacklist))
    elif blacklist and len(blacklist) == 1:
        command_blacklist = '!= ' + "'" + blacklist[0] + "'"
    else:
        command_blacklist = "!= 'nothing'"
    qry = DbCursor().execute_and_fetchall("""
        SELECT ts."recipientId", ts."id", ts."timestamp"
        FROM transactions ts,
          (SELECT MAX(transactions."timestamp") AS max_timestamp, transactions."recipientId"
           FROM transactions
           WHERE transactions."senderId" = '{0}'
           AND transactions."id" {1}
           GROUP BY transactions."recipientId") maxresults
        WHERE ts."recipientId" = maxresults."recipientId"
        AND ts."recipientId" {1}
        AND ts."timestamp"= maxresults.max_timestamp;
        """.format(delegate_address, command_blacklist))

    result = []
    Payout = namedtuple(
        'payout',
        'address id timestamp')
    for i in qry:
        payout = Payout(
            address=i[0],
            id=i[1],
            timestamp=i[2]
        )
        result.append(payout)
    return result
[ "def", "lastpayout", "(", "delegate_address", ",", "blacklist", "=", "None", ")", ":", "if", "blacklist", "and", "len", "(", "blacklist", ")", ">", "1", ":", "command_blacklist", "=", "'NOT IN '", "+", "str", "(", "tuple", "(", "blacklist", ")", ")", "elif", "blacklist", "and", "len", "(", "blacklist", ")", "==", "1", ":", "command_blacklist", "=", "'!= '", "+", "\"'\"", "+", "blacklist", "[", "0", "]", "+", "\"'\"", "else", ":", "command_blacklist", "=", "\"!= 'nothing'\"", "qry", "=", "DbCursor", "(", ")", ".", "execute_and_fetchall", "(", "\"\"\"\n SELECT ts.\"recipientId\", ts.\"id\", ts.\"timestamp\"\n FROM transactions ts,\n (SELECT MAX(transactions.\"timestamp\") AS max_timestamp, transactions.\"recipientId\"\n FROM transactions\n WHERE transactions.\"senderId\" = '{0}'\n AND transactions.\"id\" {1}\n GROUP BY transactions.\"recipientId\") maxresults\n WHERE ts.\"recipientId\" = maxresults.\"recipientId\"\n AND ts.\"recipientId\" {1}\n AND ts.\"timestamp\"= maxresults.max_timestamp;\n\n \"\"\"", ".", "format", "(", "delegate_address", ",", "command_blacklist", ")", ")", "result", "=", "[", "]", "Payout", "=", "namedtuple", "(", "'payout'", ",", "'address id timestamp'", ")", "for", "i", "in", "qry", ":", "payout", "=", "Payout", "(", "address", "=", "i", "[", "0", "]", ",", "id", "=", "i", "[", "1", "]", ",", "timestamp", "=", "i", "[", "2", "]", ")", "result", ".", "append", "(", "payout", ")", "return", "result" ]
Assumes that all send transactions from a delegate are payouts. Use blacklist to remove rewardwallet and other transactions if the address is not a voter. blacklist can contain both addresses and transactionIds
[ "Assumes", "that", "all", "send", "transactions", "from", "a", "delegate", "are", "payouts", ".", "Use", "blacklist", "to", "remove", "rewardwallet", "and", "other", "transactions", "if", "the", "address", "is", "not", "a", "voter", ".", "blacklist", "can", "contain", "both", "addresses", "and", "transactionIds" ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L380-L417
valid
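A hedged example of lastpayout with a blacklist; both addresses below are placeholders, and the database setup is assumed as before:

from dpostools import legacy

payouts = legacy.Delegate.lastpayout(
    'AExampleDelegateAddress',
    blacklist=['AExampleRewardWallet'],  # excluded when the query is built
)
for p in payouts:
    # one entry per recipient: their most recent payout transaction
    print(p.address, p.id, p.timestamp)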
BlockHub/blockhubdpostools
dpostools/legacy.py
Delegate.votes
def votes(delegate_pubkey): """returns every address that has voted for a delegate. Current voters can be obtained using voters. ORDER BY timestamp ASC""" qry = DbCursor().execute_and_fetchall(""" SELECT transactions."recipientId", transactions."timestamp" FROM transactions, votes WHERE transactions."id" = votes."transactionId" AND votes."votes" = '+{}' ORDER BY transactions."timestamp" ASC; """.format(delegate_pubkey)) Voter = namedtuple( 'voter', 'address timestamp') voters = [] for i in qry: voter = Voter( address=i[0], timestamp=i[1] ) voters.append(voter) return voters
python
def votes(delegate_pubkey): """returns every address that has voted for a delegate. Current voters can be obtained using voters. ORDER BY timestamp ASC""" qry = DbCursor().execute_and_fetchall(""" SELECT transactions."recipientId", transactions."timestamp" FROM transactions, votes WHERE transactions."id" = votes."transactionId" AND votes."votes" = '+{}' ORDER BY transactions."timestamp" ASC; """.format(delegate_pubkey)) Voter = namedtuple( 'voter', 'address timestamp') voters = [] for i in qry: voter = Voter( address=i[0], timestamp=i[1] ) voters.append(voter) return voters
[ "def", "votes", "(", "delegate_pubkey", ")", ":", "qry", "=", "DbCursor", "(", ")", ".", "execute_and_fetchall", "(", "\"\"\"\n SELECT transactions.\"recipientId\", transactions.\"timestamp\"\n FROM transactions, votes\n WHERE transactions.\"id\" = votes.\"transactionId\"\n AND votes.\"votes\" = '+{}'\n ORDER BY transactions.\"timestamp\" ASC;\n \"\"\"", ".", "format", "(", "delegate_pubkey", ")", ")", "Voter", "=", "namedtuple", "(", "'voter'", ",", "'address timestamp'", ")", "voters", "=", "[", "]", "for", "i", "in", "qry", ":", "voter", "=", "Voter", "(", "address", "=", "i", "[", "0", "]", ",", "timestamp", "=", "i", "[", "1", "]", ")", "voters", ".", "append", "(", "voter", ")", "return", "voters" ]
returns every address that has voted for a delegate. Current voters can be obtained using voters. ORDER BY timestamp ASC
[ "returns", "every", "address", "that", "has", "voted", "for", "a", "delegate", ".", "Current", "voters", "can", "be", "obtained", "using", "voters", ".", "ORDER", "BY", "timestamp", "ASC" ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L420-L441
valid
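A small sketch of counting historical voters with votes(); the hex public key is a stand-in value:

from dpostools import legacy

voters = legacy.Delegate.votes('03abc...exampledelegatepubkey')  # hypothetical pubkey
print('{} addresses have ever voted for this delegate'.format(len(voters)))
if voters:
    # the result is ordered by timestamp ASC, so index 0 is the earliest vote
    print('first vote at timestamp', voters[0].timestamp)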
BlockHub/blockhubdpostools
dpostools/legacy.py
Delegate.blocks
def blocks(delegate_pubkey=None, max_timestamp=None):
    """returns a list of named tuples of all blocks forged by a delegate.
    if delegate_pubkey is not specified, set_delegate needs to be called in advance.
    max_timestamp can be configured to retrieve blocks up to a certain timestamp."""
    if not delegate_pubkey:
        delegate_pubkey = c.DELEGATE['PUBKEY']

    if max_timestamp:
        max_timestamp_sql = """ blocks."timestamp" <= {} AND""".format(max_timestamp)
    else:
        max_timestamp_sql = ''

    qry = DbCursor().execute_and_fetchall("""
        SELECT blocks."timestamp", blocks."height", blocks."id", blocks."totalFee", blocks."reward"
        FROM blocks
        WHERE {0} blocks."generatorPublicKey" = '\\x{1}'
        ORDER BY blocks."timestamp"
        ASC""".format(
            max_timestamp_sql,
            delegate_pubkey))

    Block = namedtuple('block',
                       'timestamp height id totalFee reward')
    block_list = []
    for block in qry:
        block_value = Block(timestamp=block[0],
                            height=block[1],
                            id=block[2],
                            totalFee=block[3],
                            reward=block[4])
        block_list.append(block_value)
    return block_list
python
def blocks(delegate_pubkey=None, max_timestamp=None):
    """returns a list of named tuples of all blocks forged by a delegate.
    if delegate_pubkey is not specified, set_delegate needs to be called in advance.
    max_timestamp can be configured to retrieve blocks up to a certain timestamp."""
    if not delegate_pubkey:
        delegate_pubkey = c.DELEGATE['PUBKEY']

    if max_timestamp:
        max_timestamp_sql = """ blocks."timestamp" <= {} AND""".format(max_timestamp)
    else:
        max_timestamp_sql = ''

    qry = DbCursor().execute_and_fetchall("""
        SELECT blocks."timestamp", blocks."height", blocks."id", blocks."totalFee", blocks."reward"
        FROM blocks
        WHERE {0} blocks."generatorPublicKey" = '\\x{1}'
        ORDER BY blocks."timestamp"
        ASC""".format(
            max_timestamp_sql,
            delegate_pubkey))

    Block = namedtuple('block',
                       'timestamp height id totalFee reward')
    block_list = []
    for block in qry:
        block_value = Block(timestamp=block[0],
                            height=block[1],
                            id=block[2],
                            totalFee=block[3],
                            reward=block[4])
        block_list.append(block_value)
    return block_list
[ "def", "blocks", "(", "delegate_pubkey", "=", "None", ",", "max_timestamp", "=", "None", ")", ":", "if", "not", "delegate_pubkey", ":", "delegate_pubkey", "=", "c", ".", "DELEGATE", "[", "'PUBKEY'", "]", "if", "max_timestamp", ":", "max_timestamp_sql", "=", "\"\"\" blocks.\"timestamp\" <= {} AND\"\"\"", ".", "format", "(", "max_timestamp", ")", "else", ":", "max_timestamp_sql", "=", "''", "qry", "=", "DbCursor", "(", ")", ".", "execute_and_fetchall", "(", "\"\"\"\n SELECT blocks.\"timestamp\", blocks.\"height\", blocks.\"id\", blocks.\"totalFee\", blocks.\"reward\"\n FROM blocks\n WHERE {0} blocks.\"generatorPublicKey\" = '\\\\x{1}'\n ORDER BY blocks.\"timestamp\" \n ASC\"\"\"", ".", "format", "(", "max_timestamp_sql", ",", "delegate_pubkey", ")", ")", "Block", "=", "namedtuple", "(", "'block'", ",", "'timestamp height id totalFee reward'", ")", "block_list", "=", "[", "]", "for", "block", "in", "qry", ":", "block_value", "=", "Block", "(", "timestamp", "=", "block", "[", "0", "]", ",", "height", "=", "block", "[", "1", "]", ",", "id", "=", "block", "[", "2", "]", ",", "totalFee", "=", "block", "[", "3", "]", ",", "reward", "=", "block", "[", "4", "]", ")", "block_list", ".", "append", "(", "block_value", ")", "return", "block_list" ]
returns a list of named tuples of all blocks forged by a delegate. if delegate_pubkey is not specified, set_delegate needs to be called in advance. max_timestamp can be configured to retrieve blocks up to a certain timestamp.
[ "returns", "a", "list", "of", "named", "tuples", "of", "all", "blocks", "forged", "by", "a", "delegate", ".", "if", "delegate_pubkey", "is", "not", "specified", "set_delegate", "needs", "to", "be", "called", "in", "advance", ".", "max_timestamp", "can", "be", "configured", "to", "retrieve", "blocks", "up", "to", "a", "certain", "timestamp", "." ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L480-L512
valid
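A sketch of totalling a delegate's forging income with blocks(); the pubkey is again a placeholder, and the unit of the sum follows whatever the chain stores for rewards and fees:

from dpostools import legacy

forged = legacy.Delegate.blocks('03abc...exampledelegatepubkey')  # hypothetical pubkey
earned = sum(b.reward + b.totalFee for b in forged)
print(len(forged), 'blocks forged; total rewards + fees:', earned)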
BlockHub/blockhubdpostools
dpostools/legacy.py
Delegate.share
def share(passphrase=None, last_payout=None, start_block=0, del_pubkey=None, del_address=None): """Calculate the true blockweight payout share for a given delegate, assuming no exceptions were made for a voter. last_payout is a map of addresses and timestamps: {address: timestamp}. If no argument are given, it will start the calculation at the first forged block by the delegate, generate a last_payout from transaction history, and use the set_delegate info. If a passphrase is provided, it is only used to generate the adddress and keys, no transactions are sent. (Still not recommended unless you know what you are doing, version control could store your passphrase for example; very risky) """ logger.info('starting share calculation using settings: {0} {1}'.format(c.DELEGATE, c.CALCULATION_SETTINGS)) delegate_pubkey = c.DELEGATE['PUBKEY'] delegate_address = c.DELEGATE['ADDRESS'] if del_pubkey and del_address: delegate_address = del_address delegate_pubkey = del_pubkey logger.info('Starting share calculation, using address:{0}, pubkey:{1}'.format(delegate_address, delegate_pubkey)) max_timestamp = Node.max_timestamp() logger.info('Share calculation max_timestamp = {}'.format(max_timestamp)) # utils function transactions = get_transactionlist( delegate_pubkey=delegate_pubkey ) votes = Delegate.votes(delegate_pubkey) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter.address: { 'balance': 0.0, 'status': False, 'last_payout': voter.timestamp, 'share': 0.0, 'vote_timestamp': voter.timestamp, 'blocks_forged': []} }) # check if a voter is/used to be a forging delegate delegates = Delegate.delegates() for i in delegates: if i.address in voter_dict: logger.info('A registered delegate is a voter: {0}, {1}, {2}'.format(i.username, i.address, i.pubkey)) try: blocks_by_voter = Delegate.blocks(i.pubkey) voter_dict[i.address]['blocks_forged'].extend(Delegate.blocks(i.pubkey)) logger.info('delegate {0} has forged {1} blocks'.format(i.username, len(blocks_by_voter))) except Exception: logger.info('delegate {} has not forged any blocks'.format(i)) pass try: for i in c.CALCULATION_SETTINGS['BLACKLIST']: voter_dict.pop(i) logger.debug('popped {} from calculations'.format(i)) except Exception: pass if not last_payout: last_payout = Delegate.lastpayout(delegate_address) for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass elif type(last_payout) is int: for address in voter_dict: if address['vote_timestamp'] < last_payout: address['last_payout'] = last_payout elif type(last_payout) is dict: for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass else: logger.fatal('last_payout object not recognised: {}'.format(type(last_payout))) raise InputError('last_payout object not recognised: {}'.format(type(last_payout))) # get all forged blocks of delegate: blocks = Delegate.blocks(max_timestamp=max_timestamp, delegate_pubkey=delegate_pubkey) block_nr = start_block chunk_dict = {} reuse = False try: for tx in transactions: while tx.timestamp > blocks[block_nr].timestamp: if reuse: block_nr += 1 for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] continue block_nr += 1 poolbalance = 0 chunk_dict = {} for i in voter_dict: balance = voter_dict[i]['balance'] try: if voter_dict[i]['balance'] > c.CALCULATION_SETTINGS['MAX']: balance = c.CALCULATION_SETTINGS['MAX'] except Exception: pass try: if balance > c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE']: balance = 
c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE'] except Exception: pass try: for x in voter_dict[i]['blocks_forged']: if x.timestamp < blocks[block_nr].timestamp: voter_dict[i]['balance'] += (x.reward + x.totalFee) voter_dict[i]['blocks_forged'].remove(x) balance = voter_dict[i]['balance'] except Exception: pass if voter_dict[i]['status']: if not voter_dict[i]['balance'] < -20 * c.ARK: poolbalance += balance else: logger.fatal('balance lower than zero for: {0}'.format(i)) raise NegativeBalanceError('balance lower than zero for: {0}'.format(i)) for i in voter_dict: balance = voter_dict[i]['balance'] if voter_dict[i]['balance'] > c.CALCULATION_SETTINGS['MAX']: balance = c.CALCULATION_SETTINGS['MAX'] try: if balance > c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE']: balance = c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE'] except Exception: pass if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_nr].timestamp: if c.CALCULATION_SETTINGS['SHARE_FEES']: share = (balance/poolbalance) * (blocks[block_nr].reward + blocks[block_nr].totalFee) else: share = (balance/poolbalance) * blocks[block_nr].reward voter_dict[i]['share'] += share chunk_dict.update({i: share}) reuse = True # parsing a transaction minvote = '{{"votes":["-{0}"]}}'.format(delegate_pubkey) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_pubkey) reuse = False if tx.recipientId in voter_dict: voter_dict[tx.recipientId]['balance'] += tx.amount if tx.senderId in voter_dict: voter_dict[tx.senderId]['balance'] -= (tx.amount + tx.fee) if tx.senderId in voter_dict and tx.type == 3 and plusvote in tx.rawasset: voter_dict[tx.senderId]['status'] = True if tx.senderId in voter_dict and tx.type == 3 and minvote in tx.rawasset: voter_dict[tx.senderId]['status'] = False remaining_blocks = len(blocks) - block_nr - 1 for i in range(remaining_blocks): for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] # an IndexError occurs if max(transactions.timestamp) > max(blocks.timestamp) This means we parsed every block except IndexError: pass for i in voter_dict: logger.info("{0} {1} {2} {3} {4}".format(i, voter_dict[i]['share'], voter_dict[i]['status'], voter_dict[i]['last_payout'], voter_dict[i]['vote_timestamp'])) return voter_dict, max_timestamp
python
def share(passphrase=None, last_payout=None, start_block=0, del_pubkey=None, del_address=None): """Calculate the true blockweight payout share for a given delegate, assuming no exceptions were made for a voter. last_payout is a map of addresses and timestamps: {address: timestamp}. If no argument are given, it will start the calculation at the first forged block by the delegate, generate a last_payout from transaction history, and use the set_delegate info. If a passphrase is provided, it is only used to generate the adddress and keys, no transactions are sent. (Still not recommended unless you know what you are doing, version control could store your passphrase for example; very risky) """ logger.info('starting share calculation using settings: {0} {1}'.format(c.DELEGATE, c.CALCULATION_SETTINGS)) delegate_pubkey = c.DELEGATE['PUBKEY'] delegate_address = c.DELEGATE['ADDRESS'] if del_pubkey and del_address: delegate_address = del_address delegate_pubkey = del_pubkey logger.info('Starting share calculation, using address:{0}, pubkey:{1}'.format(delegate_address, delegate_pubkey)) max_timestamp = Node.max_timestamp() logger.info('Share calculation max_timestamp = {}'.format(max_timestamp)) # utils function transactions = get_transactionlist( delegate_pubkey=delegate_pubkey ) votes = Delegate.votes(delegate_pubkey) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter.address: { 'balance': 0.0, 'status': False, 'last_payout': voter.timestamp, 'share': 0.0, 'vote_timestamp': voter.timestamp, 'blocks_forged': []} }) # check if a voter is/used to be a forging delegate delegates = Delegate.delegates() for i in delegates: if i.address in voter_dict: logger.info('A registered delegate is a voter: {0}, {1}, {2}'.format(i.username, i.address, i.pubkey)) try: blocks_by_voter = Delegate.blocks(i.pubkey) voter_dict[i.address]['blocks_forged'].extend(Delegate.blocks(i.pubkey)) logger.info('delegate {0} has forged {1} blocks'.format(i.username, len(blocks_by_voter))) except Exception: logger.info('delegate {} has not forged any blocks'.format(i)) pass try: for i in c.CALCULATION_SETTINGS['BLACKLIST']: voter_dict.pop(i) logger.debug('popped {} from calculations'.format(i)) except Exception: pass if not last_payout: last_payout = Delegate.lastpayout(delegate_address) for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass elif type(last_payout) is int: for address in voter_dict: if address['vote_timestamp'] < last_payout: address['last_payout'] = last_payout elif type(last_payout) is dict: for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass else: logger.fatal('last_payout object not recognised: {}'.format(type(last_payout))) raise InputError('last_payout object not recognised: {}'.format(type(last_payout))) # get all forged blocks of delegate: blocks = Delegate.blocks(max_timestamp=max_timestamp, delegate_pubkey=delegate_pubkey) block_nr = start_block chunk_dict = {} reuse = False try: for tx in transactions: while tx.timestamp > blocks[block_nr].timestamp: if reuse: block_nr += 1 for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] continue block_nr += 1 poolbalance = 0 chunk_dict = {} for i in voter_dict: balance = voter_dict[i]['balance'] try: if voter_dict[i]['balance'] > c.CALCULATION_SETTINGS['MAX']: balance = c.CALCULATION_SETTINGS['MAX'] except Exception: pass try: if balance > c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE']: balance = 
c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE'] except Exception: pass try: for x in voter_dict[i]['blocks_forged']: if x.timestamp < blocks[block_nr].timestamp: voter_dict[i]['balance'] += (x.reward + x.totalFee) voter_dict[i]['blocks_forged'].remove(x) balance = voter_dict[i]['balance'] except Exception: pass if voter_dict[i]['status']: if not voter_dict[i]['balance'] < -20 * c.ARK: poolbalance += balance else: logger.fatal('balance lower than zero for: {0}'.format(i)) raise NegativeBalanceError('balance lower than zero for: {0}'.format(i)) for i in voter_dict: balance = voter_dict[i]['balance'] if voter_dict[i]['balance'] > c.CALCULATION_SETTINGS['MAX']: balance = c.CALCULATION_SETTINGS['MAX'] try: if balance > c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE']: balance = c.CALCULATION_SETTINGS['EXCEPTIONS'][i]['REPLACE'] except Exception: pass if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_nr].timestamp: if c.CALCULATION_SETTINGS['SHARE_FEES']: share = (balance/poolbalance) * (blocks[block_nr].reward + blocks[block_nr].totalFee) else: share = (balance/poolbalance) * blocks[block_nr].reward voter_dict[i]['share'] += share chunk_dict.update({i: share}) reuse = True # parsing a transaction minvote = '{{"votes":["-{0}"]}}'.format(delegate_pubkey) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_pubkey) reuse = False if tx.recipientId in voter_dict: voter_dict[tx.recipientId]['balance'] += tx.amount if tx.senderId in voter_dict: voter_dict[tx.senderId]['balance'] -= (tx.amount + tx.fee) if tx.senderId in voter_dict and tx.type == 3 and plusvote in tx.rawasset: voter_dict[tx.senderId]['status'] = True if tx.senderId in voter_dict and tx.type == 3 and minvote in tx.rawasset: voter_dict[tx.senderId]['status'] = False remaining_blocks = len(blocks) - block_nr - 1 for i in range(remaining_blocks): for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] # an IndexError occurs if max(transactions.timestamp) > max(blocks.timestamp) This means we parsed every block except IndexError: pass for i in voter_dict: logger.info("{0} {1} {2} {3} {4}".format(i, voter_dict[i]['share'], voter_dict[i]['status'], voter_dict[i]['last_payout'], voter_dict[i]['vote_timestamp'])) return voter_dict, max_timestamp
[ "def", "share", "(", "passphrase", "=", "None", ",", "last_payout", "=", "None", ",", "start_block", "=", "0", ",", "del_pubkey", "=", "None", ",", "del_address", "=", "None", ")", ":", "logger", ".", "info", "(", "'starting share calculation using settings: {0} {1}'", ".", "format", "(", "c", ".", "DELEGATE", ",", "c", ".", "CALCULATION_SETTINGS", ")", ")", "delegate_pubkey", "=", "c", ".", "DELEGATE", "[", "'PUBKEY'", "]", "delegate_address", "=", "c", ".", "DELEGATE", "[", "'ADDRESS'", "]", "if", "del_pubkey", "and", "del_address", ":", "delegate_address", "=", "del_address", "delegate_pubkey", "=", "del_pubkey", "logger", ".", "info", "(", "'Starting share calculation, using address:{0}, pubkey:{1}'", ".", "format", "(", "delegate_address", ",", "delegate_pubkey", ")", ")", "max_timestamp", "=", "Node", ".", "max_timestamp", "(", ")", "logger", ".", "info", "(", "'Share calculation max_timestamp = {}'", ".", "format", "(", "max_timestamp", ")", ")", "# utils function", "transactions", "=", "get_transactionlist", "(", "delegate_pubkey", "=", "delegate_pubkey", ")", "votes", "=", "Delegate", ".", "votes", "(", "delegate_pubkey", ")", "# create a map of voters", "voter_dict", "=", "{", "}", "for", "voter", "in", "votes", ":", "voter_dict", ".", "update", "(", "{", "voter", ".", "address", ":", "{", "'balance'", ":", "0.0", ",", "'status'", ":", "False", ",", "'last_payout'", ":", "voter", ".", "timestamp", ",", "'share'", ":", "0.0", ",", "'vote_timestamp'", ":", "voter", ".", "timestamp", ",", "'blocks_forged'", ":", "[", "]", "}", "}", ")", "# check if a voter is/used to be a forging delegate", "delegates", "=", "Delegate", ".", "delegates", "(", ")", "for", "i", "in", "delegates", ":", "if", "i", ".", "address", "in", "voter_dict", ":", "logger", ".", "info", "(", "'A registered delegate is a voter: {0}, {1}, {2}'", ".", "format", "(", "i", ".", "username", ",", "i", ".", "address", ",", "i", ".", "pubkey", ")", ")", "try", ":", "blocks_by_voter", "=", "Delegate", ".", "blocks", "(", "i", ".", "pubkey", ")", "voter_dict", "[", "i", ".", "address", "]", "[", "'blocks_forged'", "]", ".", "extend", "(", "Delegate", ".", "blocks", "(", "i", ".", "pubkey", ")", ")", "logger", ".", "info", "(", "'delegate {0} has forged {1} blocks'", ".", "format", "(", "i", ".", "username", ",", "len", "(", "blocks_by_voter", ")", ")", ")", "except", "Exception", ":", "logger", ".", "info", "(", "'delegate {} has not forged any blocks'", ".", "format", "(", "i", ")", ")", "pass", "try", ":", "for", "i", "in", "c", ".", "CALCULATION_SETTINGS", "[", "'BLACKLIST'", "]", ":", "voter_dict", ".", "pop", "(", "i", ")", "logger", ".", "debug", "(", "'popped {} from calculations'", ".", "format", "(", "i", ")", ")", "except", "Exception", ":", "pass", "if", "not", "last_payout", ":", "last_payout", "=", "Delegate", ".", "lastpayout", "(", "delegate_address", ")", "for", "payout", "in", "last_payout", ":", "try", ":", "voter_dict", "[", "payout", ".", "address", "]", "[", "'last_payout'", "]", "=", "payout", ".", "timestamp", "except", "Exception", ":", "pass", "elif", "type", "(", "last_payout", ")", "is", "int", ":", "for", "address", "in", "voter_dict", ":", "if", "address", "[", "'vote_timestamp'", "]", "<", "last_payout", ":", "address", "[", "'last_payout'", "]", "=", "last_payout", "elif", "type", "(", "last_payout", ")", "is", "dict", ":", "for", "payout", "in", "last_payout", ":", "try", ":", "voter_dict", "[", "payout", ".", "address", "]", "[", "'last_payout'", "]", "=", "payout", ".", "timestamp", 
"except", "Exception", ":", "pass", "else", ":", "logger", ".", "fatal", "(", "'last_payout object not recognised: {}'", ".", "format", "(", "type", "(", "last_payout", ")", ")", ")", "raise", "InputError", "(", "'last_payout object not recognised: {}'", ".", "format", "(", "type", "(", "last_payout", ")", ")", ")", "# get all forged blocks of delegate:", "blocks", "=", "Delegate", ".", "blocks", "(", "max_timestamp", "=", "max_timestamp", ",", "delegate_pubkey", "=", "delegate_pubkey", ")", "block_nr", "=", "start_block", "chunk_dict", "=", "{", "}", "reuse", "=", "False", "try", ":", "for", "tx", "in", "transactions", ":", "while", "tx", ".", "timestamp", ">", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "if", "reuse", ":", "block_nr", "+=", "1", "for", "x", "in", "chunk_dict", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "chunk_dict", "[", "x", "]", "continue", "block_nr", "+=", "1", "poolbalance", "=", "0", "chunk_dict", "=", "{", "}", "for", "i", "in", "voter_dict", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "try", ":", "if", "voter_dict", "[", "i", "]", "[", "'balance'", "]", ">", "c", ".", "CALCULATION_SETTINGS", "[", "'MAX'", "]", ":", "balance", "=", "c", ".", "CALCULATION_SETTINGS", "[", "'MAX'", "]", "except", "Exception", ":", "pass", "try", ":", "if", "balance", ">", "c", ".", "CALCULATION_SETTINGS", "[", "'EXCEPTIONS'", "]", "[", "i", "]", "[", "'REPLACE'", "]", ":", "balance", "=", "c", ".", "CALCULATION_SETTINGS", "[", "'EXCEPTIONS'", "]", "[", "i", "]", "[", "'REPLACE'", "]", "except", "Exception", ":", "pass", "try", ":", "for", "x", "in", "voter_dict", "[", "i", "]", "[", "'blocks_forged'", "]", ":", "if", "x", ".", "timestamp", "<", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "+=", "(", "x", ".", "reward", "+", "x", ".", "totalFee", ")", "voter_dict", "[", "i", "]", "[", "'blocks_forged'", "]", ".", "remove", "(", "x", ")", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "except", "Exception", ":", "pass", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", ":", "if", "not", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "<", "-", "20", "*", "c", ".", "ARK", ":", "poolbalance", "+=", "balance", "else", ":", "logger", ".", "fatal", "(", "'balance lower than zero for: {0}'", ".", "format", "(", "i", ")", ")", "raise", "NegativeBalanceError", "(", "'balance lower than zero for: {0}'", ".", "format", "(", "i", ")", ")", "for", "i", "in", "voter_dict", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "if", "voter_dict", "[", "i", "]", "[", "'balance'", "]", ">", "c", ".", "CALCULATION_SETTINGS", "[", "'MAX'", "]", ":", "balance", "=", "c", ".", "CALCULATION_SETTINGS", "[", "'MAX'", "]", "try", ":", "if", "balance", ">", "c", ".", "CALCULATION_SETTINGS", "[", "'EXCEPTIONS'", "]", "[", "i", "]", "[", "'REPLACE'", "]", ":", "balance", "=", "c", ".", "CALCULATION_SETTINGS", "[", "'EXCEPTIONS'", "]", "[", "i", "]", "[", "'REPLACE'", "]", "except", "Exception", ":", "pass", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", "and", "voter_dict", "[", "i", "]", "[", "'last_payout'", "]", "<", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "if", "c", ".", "CALCULATION_SETTINGS", "[", "'SHARE_FEES'", "]", ":", "share", "=", "(", "balance", "/", "poolbalance", ")", "*", "(", "blocks", "[", "block_nr", "]", ".", "reward", "+", "blocks", "[", "block_nr", "]", ".", "totalFee", ")", "else", ":", "share", 
"=", "(", "balance", "/", "poolbalance", ")", "*", "blocks", "[", "block_nr", "]", ".", "reward", "voter_dict", "[", "i", "]", "[", "'share'", "]", "+=", "share", "chunk_dict", ".", "update", "(", "{", "i", ":", "share", "}", ")", "reuse", "=", "True", "# parsing a transaction", "minvote", "=", "'{{\"votes\":[\"-{0}\"]}}'", ".", "format", "(", "delegate_pubkey", ")", "plusvote", "=", "'{{\"votes\":[\"+{0}\"]}}'", ".", "format", "(", "delegate_pubkey", ")", "reuse", "=", "False", "if", "tx", ".", "recipientId", "in", "voter_dict", ":", "voter_dict", "[", "tx", ".", "recipientId", "]", "[", "'balance'", "]", "+=", "tx", ".", "amount", "if", "tx", ".", "senderId", "in", "voter_dict", ":", "voter_dict", "[", "tx", ".", "senderId", "]", "[", "'balance'", "]", "-=", "(", "tx", ".", "amount", "+", "tx", ".", "fee", ")", "if", "tx", ".", "senderId", "in", "voter_dict", "and", "tx", ".", "type", "==", "3", "and", "plusvote", "in", "tx", ".", "rawasset", ":", "voter_dict", "[", "tx", ".", "senderId", "]", "[", "'status'", "]", "=", "True", "if", "tx", ".", "senderId", "in", "voter_dict", "and", "tx", ".", "type", "==", "3", "and", "minvote", "in", "tx", ".", "rawasset", ":", "voter_dict", "[", "tx", ".", "senderId", "]", "[", "'status'", "]", "=", "False", "remaining_blocks", "=", "len", "(", "blocks", ")", "-", "block_nr", "-", "1", "for", "i", "in", "range", "(", "remaining_blocks", ")", ":", "for", "x", "in", "chunk_dict", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "chunk_dict", "[", "x", "]", "# an IndexError occurs if max(transactions.timestamp) > max(blocks.timestamp) This means we parsed every block", "except", "IndexError", ":", "pass", "for", "i", "in", "voter_dict", ":", "logger", ".", "info", "(", "\"{0} {1} {2} {3} {4}\"", ".", "format", "(", "i", ",", "voter_dict", "[", "i", "]", "[", "'share'", "]", ",", "voter_dict", "[", "i", "]", "[", "'status'", "]", ",", "voter_dict", "[", "i", "]", "[", "'last_payout'", "]", ",", "voter_dict", "[", "i", "]", "[", "'vote_timestamp'", "]", ")", ")", "return", "voter_dict", ",", "max_timestamp" ]
Calculate the true blockweight payout share for a given delegate, assuming no exceptions were made for a voter. last_payout is a map of addresses and timestamps: {address: timestamp}. If no arguments are given, it will start the calculation at the first block forged by the delegate, generate a last_payout from transaction history, and use the set_delegate info. If a passphrase is provided, it is only used to generate the address and keys; no transactions are sent. (Still not recommended unless you know what you are doing: version control could store your passphrase, for example; very risky.)
[ "Calculate", "the", "true", "blockweight", "payout", "share", "for", "a", "given", "delegate", "assuming", "no", "exceptions", "were", "made", "for", "a", "voter", ".", "last_payout", "is", "a", "map", "of", "addresses", "and", "timestamps", ":", "{", "address", ":", "timestamp", "}", ".", "If", "no", "argument", "are", "given", "it", "will", "start", "the", "calculation", "at", "the", "first", "forged", "block", "by", "the", "delegate", "generate", "a", "last_payout", "from", "transaction", "history", "and", "use", "the", "set_delegate", "info", "." ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L515-L701
valid
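A sketch of consuming share()'s return value to preview payouts. Calling it with no arguments assumes c.DELEGATE is already configured (per the docstring); SHARE_RATIO is an illustrative constant, not a library setting:

from dpostools import legacy

voter_dict, max_timestamp = legacy.Delegate.share()
SHARE_RATIO = 0.9  # illustrative payout ratio, not part of the library
for address, info in voter_dict.items():
    if info['status']:  # only addresses currently voting for the delegate
        print(address, info['share'] * SHARE_RATIO)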
BlockHub/blockhubdpostools
dpostools/legacy.py
Delegate.dep_trueshare
def dep_trueshare(start_block=0, del_pubkey=None, del_address=None, blacklist=None, share_fees=False, max_weight=float('inf'), raiseError=True): ''' Legacy TBW script (still pretty performant, but has some quirky behavior when forging delegates are amongst your voters) :param int start_block: block from which we start adding to the share (we calculate balances from block 0 anyways) :param str del_pubkey: delegate public key as is presented in the ark wallet :param str del_address: delegate address :param list blacklist: blacklist for addresses to be removed BEFORE calculation. Their share is removed from the pool balance :param bool share_fees: if tx fees should be shared as well. :param float max_weight: max_balance of a voter ''' delegate_pubkey = c.DELEGATE['PUBKEY'] delegate_address = c.DELEGATE['ADDRESS'] if del_pubkey and del_address: delegate_address = del_address delegate_pubkey = del_pubkey max_timestamp = Node.max_timestamp() # utils function transactions = get_transactionlist( delegate_pubkey=delegate_pubkey ) votes = Delegate.votes(delegate_pubkey) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter.address: { 'balance': 0.0, 'status': False, 'last_payout': voter.timestamp, 'share': 0.0, 'vote_timestamp': voter.timestamp, 'blocks_forged': []} }) try: for i in blacklist: voter_dict.pop(i) except Exception: pass # check if a voter is/used to be a forging delegate delegates = Delegate.delegates() for i in delegates: if i.address in voter_dict: try: blocks_by_voter = Delegate.blocks(i.pubkey) voter_dict[i.address]['blocks_forged'].extend(Delegate.blocks(i.pubkey)) except Exception: pass last_payout = Delegate.lastpayout(delegate_address) for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass blocks = Delegate.blocks(delegate_pubkey) block_nr = start_block chunk_dict = {} reuse = False try: for tx in transactions: while tx.timestamp > blocks[block_nr].timestamp: if reuse: block_nr += 1 for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] continue block_nr += 1 poolbalance = 0 chunk_dict = {} for i in voter_dict: balance = voter_dict[i]['balance'] if balance > max_weight: balance = max_weight #checks if a delegate that votes for us is has forged blocks in the mean time try: for x in voter_dict[i]['blocks_forged']: if x.timestamp < blocks[block_nr].timestamp: voter_dict[i]['balance'] += (x.reward + x.totalFee) voter_dict[i]['blocks_forged'].remove(x) balance = voter_dict[i]['balance'] except Exception: pass if voter_dict[i]['status']: if not voter_dict[i]['balance'] < -20 * c.ARK: poolbalance += balance else: if raiseError: raise NegativeBalanceError('balance lower than zero for: {0}'.format(i)) pass for i in voter_dict: balance = voter_dict[i]['balance'] if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_nr].timestamp: if share_fees: share = (balance / poolbalance) * (blocks[block_nr].reward + blocks[block_nr].totalFee) else: share = (balance / poolbalance) * blocks[block_nr].reward voter_dict[i]['share'] += share chunk_dict.update({i: share}) reuse = True # parsing a transaction minvote = '{{"votes":["-{0}"]}}'.format(delegate_pubkey) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_pubkey) reuse = False if tx.recipientId in voter_dict: voter_dict[tx.recipientId]['balance'] += tx.amount if tx.senderId in voter_dict: voter_dict[tx.senderId]['balance'] -= (tx.amount + tx.fee) if tx.senderId in voter_dict and tx.type == 3 and plusvote in tx.rawasset: 
voter_dict[tx.senderId]['status'] = True if tx.senderId in voter_dict and tx.type == 3 and minvote in tx.rawasset: voter_dict[tx.senderId]['status'] = False remaining_blocks = len(blocks) - block_nr - 1 for i in range(remaining_blocks): for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] except IndexError: pass for i in voter_dict: logger.info("{0} {1} {2} {3} {4}".format( i, voter_dict[i]['share'], voter_dict[i]['status'], voter_dict[i]['last_payout'], voter_dict[i]['vote_timestamp'])) return voter_dict, max_timestamp
python
def dep_trueshare(start_block=0, del_pubkey=None, del_address=None, blacklist=None, share_fees=False, max_weight=float('inf'), raiseError=True): ''' Legacy TBW script (still pretty performant, but has some quirky behavior when forging delegates are amongst your voters) :param int start_block: block from which we start adding to the share (we calculate balances from block 0 anyways) :param str del_pubkey: delegate public key as is presented in the ark wallet :param str del_address: delegate address :param list blacklist: blacklist for addresses to be removed BEFORE calculation. Their share is removed from the pool balance :param bool share_fees: if tx fees should be shared as well. :param float max_weight: max_balance of a voter ''' delegate_pubkey = c.DELEGATE['PUBKEY'] delegate_address = c.DELEGATE['ADDRESS'] if del_pubkey and del_address: delegate_address = del_address delegate_pubkey = del_pubkey max_timestamp = Node.max_timestamp() # utils function transactions = get_transactionlist( delegate_pubkey=delegate_pubkey ) votes = Delegate.votes(delegate_pubkey) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter.address: { 'balance': 0.0, 'status': False, 'last_payout': voter.timestamp, 'share': 0.0, 'vote_timestamp': voter.timestamp, 'blocks_forged': []} }) try: for i in blacklist: voter_dict.pop(i) except Exception: pass # check if a voter is/used to be a forging delegate delegates = Delegate.delegates() for i in delegates: if i.address in voter_dict: try: blocks_by_voter = Delegate.blocks(i.pubkey) voter_dict[i.address]['blocks_forged'].extend(Delegate.blocks(i.pubkey)) except Exception: pass last_payout = Delegate.lastpayout(delegate_address) for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass blocks = Delegate.blocks(delegate_pubkey) block_nr = start_block chunk_dict = {} reuse = False try: for tx in transactions: while tx.timestamp > blocks[block_nr].timestamp: if reuse: block_nr += 1 for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] continue block_nr += 1 poolbalance = 0 chunk_dict = {} for i in voter_dict: balance = voter_dict[i]['balance'] if balance > max_weight: balance = max_weight #checks if a delegate that votes for us is has forged blocks in the mean time try: for x in voter_dict[i]['blocks_forged']: if x.timestamp < blocks[block_nr].timestamp: voter_dict[i]['balance'] += (x.reward + x.totalFee) voter_dict[i]['blocks_forged'].remove(x) balance = voter_dict[i]['balance'] except Exception: pass if voter_dict[i]['status']: if not voter_dict[i]['balance'] < -20 * c.ARK: poolbalance += balance else: if raiseError: raise NegativeBalanceError('balance lower than zero for: {0}'.format(i)) pass for i in voter_dict: balance = voter_dict[i]['balance'] if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_nr].timestamp: if share_fees: share = (balance / poolbalance) * (blocks[block_nr].reward + blocks[block_nr].totalFee) else: share = (balance / poolbalance) * blocks[block_nr].reward voter_dict[i]['share'] += share chunk_dict.update({i: share}) reuse = True # parsing a transaction minvote = '{{"votes":["-{0}"]}}'.format(delegate_pubkey) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_pubkey) reuse = False if tx.recipientId in voter_dict: voter_dict[tx.recipientId]['balance'] += tx.amount if tx.senderId in voter_dict: voter_dict[tx.senderId]['balance'] -= (tx.amount + tx.fee) if tx.senderId in voter_dict and tx.type == 3 and plusvote in tx.rawasset: 
voter_dict[tx.senderId]['status'] = True if tx.senderId in voter_dict and tx.type == 3 and minvote in tx.rawasset: voter_dict[tx.senderId]['status'] = False remaining_blocks = len(blocks) - block_nr - 1 for i in range(remaining_blocks): for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] except IndexError: pass for i in voter_dict: logger.info("{0} {1} {2} {3} {4}".format( i, voter_dict[i]['share'], voter_dict[i]['status'], voter_dict[i]['last_payout'], voter_dict[i]['vote_timestamp'])) return voter_dict, max_timestamp
[ "def", "dep_trueshare", "(", "start_block", "=", "0", ",", "del_pubkey", "=", "None", ",", "del_address", "=", "None", ",", "blacklist", "=", "None", ",", "share_fees", "=", "False", ",", "max_weight", "=", "float", "(", "'inf'", ")", ",", "raiseError", "=", "True", ")", ":", "delegate_pubkey", "=", "c", ".", "DELEGATE", "[", "'PUBKEY'", "]", "delegate_address", "=", "c", ".", "DELEGATE", "[", "'ADDRESS'", "]", "if", "del_pubkey", "and", "del_address", ":", "delegate_address", "=", "del_address", "delegate_pubkey", "=", "del_pubkey", "max_timestamp", "=", "Node", ".", "max_timestamp", "(", ")", "# utils function", "transactions", "=", "get_transactionlist", "(", "delegate_pubkey", "=", "delegate_pubkey", ")", "votes", "=", "Delegate", ".", "votes", "(", "delegate_pubkey", ")", "# create a map of voters", "voter_dict", "=", "{", "}", "for", "voter", "in", "votes", ":", "voter_dict", ".", "update", "(", "{", "voter", ".", "address", ":", "{", "'balance'", ":", "0.0", ",", "'status'", ":", "False", ",", "'last_payout'", ":", "voter", ".", "timestamp", ",", "'share'", ":", "0.0", ",", "'vote_timestamp'", ":", "voter", ".", "timestamp", ",", "'blocks_forged'", ":", "[", "]", "}", "}", ")", "try", ":", "for", "i", "in", "blacklist", ":", "voter_dict", ".", "pop", "(", "i", ")", "except", "Exception", ":", "pass", "# check if a voter is/used to be a forging delegate", "delegates", "=", "Delegate", ".", "delegates", "(", ")", "for", "i", "in", "delegates", ":", "if", "i", ".", "address", "in", "voter_dict", ":", "try", ":", "blocks_by_voter", "=", "Delegate", ".", "blocks", "(", "i", ".", "pubkey", ")", "voter_dict", "[", "i", ".", "address", "]", "[", "'blocks_forged'", "]", ".", "extend", "(", "Delegate", ".", "blocks", "(", "i", ".", "pubkey", ")", ")", "except", "Exception", ":", "pass", "last_payout", "=", "Delegate", ".", "lastpayout", "(", "delegate_address", ")", "for", "payout", "in", "last_payout", ":", "try", ":", "voter_dict", "[", "payout", ".", "address", "]", "[", "'last_payout'", "]", "=", "payout", ".", "timestamp", "except", "Exception", ":", "pass", "blocks", "=", "Delegate", ".", "blocks", "(", "delegate_pubkey", ")", "block_nr", "=", "start_block", "chunk_dict", "=", "{", "}", "reuse", "=", "False", "try", ":", "for", "tx", "in", "transactions", ":", "while", "tx", ".", "timestamp", ">", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "if", "reuse", ":", "block_nr", "+=", "1", "for", "x", "in", "chunk_dict", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "chunk_dict", "[", "x", "]", "continue", "block_nr", "+=", "1", "poolbalance", "=", "0", "chunk_dict", "=", "{", "}", "for", "i", "in", "voter_dict", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "if", "balance", ">", "max_weight", ":", "balance", "=", "max_weight", "#checks if a delegate that votes for us is has forged blocks in the mean time", "try", ":", "for", "x", "in", "voter_dict", "[", "i", "]", "[", "'blocks_forged'", "]", ":", "if", "x", ".", "timestamp", "<", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "+=", "(", "x", ".", "reward", "+", "x", ".", "totalFee", ")", "voter_dict", "[", "i", "]", "[", "'blocks_forged'", "]", ".", "remove", "(", "x", ")", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "except", "Exception", ":", "pass", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", ":", "if", "not", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "<", "-", "20", "*", "c", 
".", "ARK", ":", "poolbalance", "+=", "balance", "else", ":", "if", "raiseError", ":", "raise", "NegativeBalanceError", "(", "'balance lower than zero for: {0}'", ".", "format", "(", "i", ")", ")", "pass", "for", "i", "in", "voter_dict", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", "and", "voter_dict", "[", "i", "]", "[", "'last_payout'", "]", "<", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "if", "share_fees", ":", "share", "=", "(", "balance", "/", "poolbalance", ")", "*", "(", "blocks", "[", "block_nr", "]", ".", "reward", "+", "blocks", "[", "block_nr", "]", ".", "totalFee", ")", "else", ":", "share", "=", "(", "balance", "/", "poolbalance", ")", "*", "blocks", "[", "block_nr", "]", ".", "reward", "voter_dict", "[", "i", "]", "[", "'share'", "]", "+=", "share", "chunk_dict", ".", "update", "(", "{", "i", ":", "share", "}", ")", "reuse", "=", "True", "# parsing a transaction", "minvote", "=", "'{{\"votes\":[\"-{0}\"]}}'", ".", "format", "(", "delegate_pubkey", ")", "plusvote", "=", "'{{\"votes\":[\"+{0}\"]}}'", ".", "format", "(", "delegate_pubkey", ")", "reuse", "=", "False", "if", "tx", ".", "recipientId", "in", "voter_dict", ":", "voter_dict", "[", "tx", ".", "recipientId", "]", "[", "'balance'", "]", "+=", "tx", ".", "amount", "if", "tx", ".", "senderId", "in", "voter_dict", ":", "voter_dict", "[", "tx", ".", "senderId", "]", "[", "'balance'", "]", "-=", "(", "tx", ".", "amount", "+", "tx", ".", "fee", ")", "if", "tx", ".", "senderId", "in", "voter_dict", "and", "tx", ".", "type", "==", "3", "and", "plusvote", "in", "tx", ".", "rawasset", ":", "voter_dict", "[", "tx", ".", "senderId", "]", "[", "'status'", "]", "=", "True", "if", "tx", ".", "senderId", "in", "voter_dict", "and", "tx", ".", "type", "==", "3", "and", "minvote", "in", "tx", ".", "rawasset", ":", "voter_dict", "[", "tx", ".", "senderId", "]", "[", "'status'", "]", "=", "False", "remaining_blocks", "=", "len", "(", "blocks", ")", "-", "block_nr", "-", "1", "for", "i", "in", "range", "(", "remaining_blocks", ")", ":", "for", "x", "in", "chunk_dict", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "chunk_dict", "[", "x", "]", "except", "IndexError", ":", "pass", "for", "i", "in", "voter_dict", ":", "logger", ".", "info", "(", "\"{0} {1} {2} {3} {4}\"", ".", "format", "(", "i", ",", "voter_dict", "[", "i", "]", "[", "'share'", "]", ",", "voter_dict", "[", "i", "]", "[", "'status'", "]", ",", "voter_dict", "[", "i", "]", "[", "'last_payout'", "]", ",", "voter_dict", "[", "i", "]", "[", "'vote_timestamp'", "]", ")", ")", "return", "voter_dict", ",", "max_timestamp" ]
Legacy TBW script (still pretty performant, but has some quirky behavior when forging delegates are amongst your voters) :param int start_block: block from which we start adding to the share (we calculate balances from block 0 anyways) :param str del_pubkey: delegate public key as is presented in the ark wallet :param str del_address: delegate address :param list blacklist: blacklist for addresses to be removed BEFORE calculation. Their share is removed from the pool balance :param bool share_fees: if tx fees should be shared as well. :param float max_weight: max_balance of a voter
[ "Legacy", "TBW", "script", "(", "still", "pretty", "performant", "but", "has", "some", "quirky", "behavior", "when", "forging", "delegates", "are", "amongst", "your", "voters", ")" ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L705-L852
valid
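A hedged call into dep_trueshare showing the weight cap and fee sharing; the cap value is illustrative and its unit follows whatever the chain stores for balances:

from dpostools import legacy

voter_dict, max_timestamp = legacy.Delegate.dep_trueshare(
    share_fees=True,       # include block fees in the distributed amount
    max_weight=10000.0,    # example cap on a single voter's effective weight
    raiseError=False,      # tolerate slightly negative balances instead of raising
)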
BlockHub/blockhubdpostools
dpostools/legacy.py
Delegate.trueshare
def trueshare(start_block=0, del_pubkey=None, del_address=None, blacklist=None, share_fees=False, max_weight=float('inf')): ''' Legacy TBW script (still pretty performant, but has some quirky behavior when forging delegates are amongst your voters) :param int start_block: block from which we start adding to the share (we calculate balances from block 0 anyways) :param str del_pubkey: delegate public key as is presented in the ark wallet :param str del_address: delegate address :param list blacklist: blacklist for addresses to be removed BEFORE calculation. Their share is removed from the pool balance :param bool share_fees: if tx fees should be shared as well. :param float max_weight: max_balance of a voter ''' delegate_pubkey = c.DELEGATE['PUBKEY'] delegate_address = c.DELEGATE['ADDRESS'] if del_pubkey and del_address: delegate_address = del_address delegate_pubkey = del_pubkey max_timestamp = Node.max_timestamp() # utils function events = get_events( delegate_pubkey=delegate_pubkey ) votes = Delegate.votes(delegate_pubkey) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter.address: { 'balance': 0.0, 'status': False, 'last_payout': voter.timestamp, 'share': 0.0, 'vote_timestamp': voter.timestamp, 'blocks_forged': []} }) try: for i in blacklist: voter_dict.pop(i) except Exception: pass last_payout = Delegate.lastpayout(delegate_address) for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass blocks = Delegate.blocks(delegate_pubkey) block_nr = start_block chunk_dict = {} reuse = False try: for e in events: while e.timestamp > blocks[block_nr].timestamp: if reuse: block_nr += 1 for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] continue block_nr += 1 poolbalance = 0 chunk_dict = {} for i in voter_dict: balance = voter_dict[i]['balance'] if balance > max_weight: balance = max_weight if voter_dict[i]['status']: if voter_dict[i]['balance'] >= 0: poolbalance += balance else: print(voter_dict[i]) raise NegativeBalanceError('balance lower than zero for: {0}'.format(i)) for i in voter_dict: balance = voter_dict[i]['balance'] if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_nr].timestamp: if share_fees: share = (balance / poolbalance) * (blocks[block_nr].reward + blocks[block_nr].totalFee) else: share = (balance / poolbalance) * blocks[block_nr].reward voter_dict[i]['share'] += share chunk_dict.update({i: share}) reuse = True # parsing a transaction minvote = '{{"votes":["-{0}"]}}'.format(delegate_pubkey) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_pubkey) reuse = False # type 100 is a forged block if e.type != 100: if e.recipientId in voter_dict: voter_dict[e.recipientId]['balance'] += e.amount if e.senderId in voter_dict: voter_dict[e.senderId]['balance'] -= (e.amount + e.fee) if e.senderId in voter_dict and e.type == 3 and plusvote in e.raw: voter_dict[e.senderId]['status'] = True if e.senderId in voter_dict and e.type == 3 and minvote in e.raw: voter_dict[e.senderId]['status'] = False elif e.type == 100: if e.recipientId in voter_dict: voter_dict[e.recipientId]['balance'] += e.amount + e.fee remaining_blocks = len(blocks) - block_nr - 1 for i in range(remaining_blocks): for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] except IndexError: pass for i in voter_dict: logger.info("{0} {1} {2} {3} {4}".format( i, voter_dict[i]['share'], voter_dict[i]['status'], voter_dict[i]['last_payout'], voter_dict[i]['vote_timestamp'])) return voter_dict, max_timestamp
python
def trueshare(start_block=0, del_pubkey=None, del_address=None, blacklist=None, share_fees=False, max_weight=float('inf')): ''' Legacy TBW script (still pretty performant, but has some quirky behavior when forging delegates are amongst your voters) :param int start_block: block from which we start adding to the share (we calculate balances from block 0 anyways) :param str del_pubkey: delegate public key as is presented in the ark wallet :param str del_address: delegate address :param list blacklist: blacklist for addresses to be removed BEFORE calculation. Their share is removed from the pool balance :param bool share_fees: if tx fees should be shared as well. :param float max_weight: max_balance of a voter ''' delegate_pubkey = c.DELEGATE['PUBKEY'] delegate_address = c.DELEGATE['ADDRESS'] if del_pubkey and del_address: delegate_address = del_address delegate_pubkey = del_pubkey max_timestamp = Node.max_timestamp() # utils function events = get_events( delegate_pubkey=delegate_pubkey ) votes = Delegate.votes(delegate_pubkey) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter.address: { 'balance': 0.0, 'status': False, 'last_payout': voter.timestamp, 'share': 0.0, 'vote_timestamp': voter.timestamp, 'blocks_forged': []} }) try: for i in blacklist: voter_dict.pop(i) except Exception: pass last_payout = Delegate.lastpayout(delegate_address) for payout in last_payout: try: voter_dict[payout.address]['last_payout'] = payout.timestamp except Exception: pass blocks = Delegate.blocks(delegate_pubkey) block_nr = start_block chunk_dict = {} reuse = False try: for e in events: while e.timestamp > blocks[block_nr].timestamp: if reuse: block_nr += 1 for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] continue block_nr += 1 poolbalance = 0 chunk_dict = {} for i in voter_dict: balance = voter_dict[i]['balance'] if balance > max_weight: balance = max_weight if voter_dict[i]['status']: if voter_dict[i]['balance'] >= 0: poolbalance += balance else: print(voter_dict[i]) raise NegativeBalanceError('balance lower than zero for: {0}'.format(i)) for i in voter_dict: balance = voter_dict[i]['balance'] if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_nr].timestamp: if share_fees: share = (balance / poolbalance) * (blocks[block_nr].reward + blocks[block_nr].totalFee) else: share = (balance / poolbalance) * blocks[block_nr].reward voter_dict[i]['share'] += share chunk_dict.update({i: share}) reuse = True # parsing a transaction minvote = '{{"votes":["-{0}"]}}'.format(delegate_pubkey) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_pubkey) reuse = False # type 100 is a forged block if e.type != 100: if e.recipientId in voter_dict: voter_dict[e.recipientId]['balance'] += e.amount if e.senderId in voter_dict: voter_dict[e.senderId]['balance'] -= (e.amount + e.fee) if e.senderId in voter_dict and e.type == 3 and plusvote in e.raw: voter_dict[e.senderId]['status'] = True if e.senderId in voter_dict and e.type == 3 and minvote in e.raw: voter_dict[e.senderId]['status'] = False elif e.type == 100: if e.recipientId in voter_dict: voter_dict[e.recipientId]['balance'] += e.amount + e.fee remaining_blocks = len(blocks) - block_nr - 1 for i in range(remaining_blocks): for x in chunk_dict: voter_dict[x]['share'] += chunk_dict[x] except IndexError: pass for i in voter_dict: logger.info("{0} {1} {2} {3} {4}".format( i, voter_dict[i]['share'], voter_dict[i]['status'], voter_dict[i]['last_payout'], voter_dict[i]['vote_timestamp'])) return voter_dict, max_timestamp
[ "def", "trueshare", "(", "start_block", "=", "0", ",", "del_pubkey", "=", "None", ",", "del_address", "=", "None", ",", "blacklist", "=", "None", ",", "share_fees", "=", "False", ",", "max_weight", "=", "float", "(", "'inf'", ")", ")", ":", "delegate_pubkey", "=", "c", ".", "DELEGATE", "[", "'PUBKEY'", "]", "delegate_address", "=", "c", ".", "DELEGATE", "[", "'ADDRESS'", "]", "if", "del_pubkey", "and", "del_address", ":", "delegate_address", "=", "del_address", "delegate_pubkey", "=", "del_pubkey", "max_timestamp", "=", "Node", ".", "max_timestamp", "(", ")", "# utils function", "events", "=", "get_events", "(", "delegate_pubkey", "=", "delegate_pubkey", ")", "votes", "=", "Delegate", ".", "votes", "(", "delegate_pubkey", ")", "# create a map of voters", "voter_dict", "=", "{", "}", "for", "voter", "in", "votes", ":", "voter_dict", ".", "update", "(", "{", "voter", ".", "address", ":", "{", "'balance'", ":", "0.0", ",", "'status'", ":", "False", ",", "'last_payout'", ":", "voter", ".", "timestamp", ",", "'share'", ":", "0.0", ",", "'vote_timestamp'", ":", "voter", ".", "timestamp", ",", "'blocks_forged'", ":", "[", "]", "}", "}", ")", "try", ":", "for", "i", "in", "blacklist", ":", "voter_dict", ".", "pop", "(", "i", ")", "except", "Exception", ":", "pass", "last_payout", "=", "Delegate", ".", "lastpayout", "(", "delegate_address", ")", "for", "payout", "in", "last_payout", ":", "try", ":", "voter_dict", "[", "payout", ".", "address", "]", "[", "'last_payout'", "]", "=", "payout", ".", "timestamp", "except", "Exception", ":", "pass", "blocks", "=", "Delegate", ".", "blocks", "(", "delegate_pubkey", ")", "block_nr", "=", "start_block", "chunk_dict", "=", "{", "}", "reuse", "=", "False", "try", ":", "for", "e", "in", "events", ":", "while", "e", ".", "timestamp", ">", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "if", "reuse", ":", "block_nr", "+=", "1", "for", "x", "in", "chunk_dict", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "chunk_dict", "[", "x", "]", "continue", "block_nr", "+=", "1", "poolbalance", "=", "0", "chunk_dict", "=", "{", "}", "for", "i", "in", "voter_dict", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "if", "balance", ">", "max_weight", ":", "balance", "=", "max_weight", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", ":", "if", "voter_dict", "[", "i", "]", "[", "'balance'", "]", ">=", "0", ":", "poolbalance", "+=", "balance", "else", ":", "print", "(", "voter_dict", "[", "i", "]", ")", "raise", "NegativeBalanceError", "(", "'balance lower than zero for: {0}'", ".", "format", "(", "i", ")", ")", "for", "i", "in", "voter_dict", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", "and", "voter_dict", "[", "i", "]", "[", "'last_payout'", "]", "<", "blocks", "[", "block_nr", "]", ".", "timestamp", ":", "if", "share_fees", ":", "share", "=", "(", "balance", "/", "poolbalance", ")", "*", "(", "blocks", "[", "block_nr", "]", ".", "reward", "+", "blocks", "[", "block_nr", "]", ".", "totalFee", ")", "else", ":", "share", "=", "(", "balance", "/", "poolbalance", ")", "*", "blocks", "[", "block_nr", "]", ".", "reward", "voter_dict", "[", "i", "]", "[", "'share'", "]", "+=", "share", "chunk_dict", ".", "update", "(", "{", "i", ":", "share", "}", ")", "reuse", "=", "True", "# parsing a transaction", "minvote", "=", "'{{\"votes\":[\"-{0}\"]}}'", ".", "format", "(", "delegate_pubkey", ")", "plusvote", "=", "'{{\"votes\":[\"+{0}\"]}}'", ".", 
"format", "(", "delegate_pubkey", ")", "reuse", "=", "False", "# type 100 is a forged block", "if", "e", ".", "type", "!=", "100", ":", "if", "e", ".", "recipientId", "in", "voter_dict", ":", "voter_dict", "[", "e", ".", "recipientId", "]", "[", "'balance'", "]", "+=", "e", ".", "amount", "if", "e", ".", "senderId", "in", "voter_dict", ":", "voter_dict", "[", "e", ".", "senderId", "]", "[", "'balance'", "]", "-=", "(", "e", ".", "amount", "+", "e", ".", "fee", ")", "if", "e", ".", "senderId", "in", "voter_dict", "and", "e", ".", "type", "==", "3", "and", "plusvote", "in", "e", ".", "raw", ":", "voter_dict", "[", "e", ".", "senderId", "]", "[", "'status'", "]", "=", "True", "if", "e", ".", "senderId", "in", "voter_dict", "and", "e", ".", "type", "==", "3", "and", "minvote", "in", "e", ".", "raw", ":", "voter_dict", "[", "e", ".", "senderId", "]", "[", "'status'", "]", "=", "False", "elif", "e", ".", "type", "==", "100", ":", "if", "e", ".", "recipientId", "in", "voter_dict", ":", "voter_dict", "[", "e", ".", "recipientId", "]", "[", "'balance'", "]", "+=", "e", ".", "amount", "+", "e", ".", "fee", "remaining_blocks", "=", "len", "(", "blocks", ")", "-", "block_nr", "-", "1", "for", "i", "in", "range", "(", "remaining_blocks", ")", ":", "for", "x", "in", "chunk_dict", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "chunk_dict", "[", "x", "]", "except", "IndexError", ":", "pass", "for", "i", "in", "voter_dict", ":", "logger", ".", "info", "(", "\"{0} {1} {2} {3} {4}\"", ".", "format", "(", "i", ",", "voter_dict", "[", "i", "]", "[", "'share'", "]", ",", "voter_dict", "[", "i", "]", "[", "'status'", "]", ",", "voter_dict", "[", "i", "]", "[", "'last_payout'", "]", ",", "voter_dict", "[", "i", "]", "[", "'vote_timestamp'", "]", ")", ")", "return", "voter_dict", ",", "max_timestamp" ]
Legacy TBW script (still pretty performant, but has some quirky behavior when forging delegates are amongst your voters) :param int start_block: block from which we start adding to the share (we calculate balances from block 0 anyways) :param str del_pubkey: delegate public key as is presented in the ark wallet :param str del_address: delegate address :param list blacklist: blacklist for addresses to be removed BEFORE calculation. Their share is removed from the pool balance :param bool share_fees: if tx fees should be shared as well. :param float max_weight: max_balance of a voter
[ "Legacy", "TBW", "script", "(", "still", "pretty", "performant", "but", "has", "some", "quirky", "behavior", "when", "forging", "delegates", "are", "amongst", "your", "voters", ")" ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/legacy.py#L855-L986
valid
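A minimal invocation sketch for the trueshare record above, hedged: the delegate key, address, and blacklist entries are placeholders, and the call assumes the module-level config (c.DELEGATE) and the chain-access helpers used inside the function are importable and configured.

payouts, max_ts = trueshare(
    del_pubkey='02aabb...',            # placeholder delegate public key
    del_address='AXYZplaceholder',     # placeholder delegate address
    blacklist=['AExcludedVoter'],      # removed from the pool before sharing
    share_fees=True,                   # include block fees in each share
    max_weight=10000 * 10**8,          # cap a single voter's weight
)
for address, info in payouts.items():
    print(address, info['share'], info['last_payout'])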
eflee/pyechoip
src/echoip/sources.py
IPSourceFactory.add_source
def add_source(self, source_class, *constructor_args): """ Adds a source to the factory provided its type and constructor arguments :param source_class: The class used to instantiate the source :type source_class: type :param constructor_args: Arguments to be passed into the constructor :type constructor_args: Iterable """ if not IIPSource.implementedBy(source_class): raise TypeError("source_class {} must implement IIPSource".format(source_class)) else: self._sources.add((source_class, constructor_args))
python
def add_source(self, source_class, *constructor_args): """ Adds a source to the factory provided its type and constructor arguments :param source_class: The class used to instantiate the source :type source_class: type :param constructor_args: Arguments to be passed into the constructor :type constructor_args: Iterable """ if not IIPSource.implementedBy(source_class): raise TypeError("source_class {} must implement IIPSource".format(source_class)) else: self._sources.add((source_class, constructor_args))
[ "def", "add_source", "(", "self", ",", "source_class", ",", "*", "constructor_args", ")", ":", "if", "not", "IIPSource", ".", "implementedBy", "(", "source_class", ")", ":", "raise", "TypeError", "(", "\"source_class {} must implement IIPSource\"", ".", "format", "(", "source_class", ")", ")", "else", ":", "self", ".", "_sources", ".", "add", "(", "(", "source_class", ",", "constructor_args", ")", ")" ]
Adds a source to the factory provided its type and constructor arguments :param source_class: The class used to instantiate the source :type source_class: type :param constructor_args: Arguments to be passed into the constructor :type constructor_args: Iterable
[ "Adds", "a", "source", "to", "the", "factory", "provided", "it", "s", "type", "and", "constructor", "arguments", ":", "param", "source_class", ":", "The", "class", "used", "to", "instantiate", "the", "source", ":", "type", "source_class", ":", "type", ":", "param", "constructor_args", ":", "Arguments", "to", "be", "passed", "into", "the", "constructor", ":", "type", "constructor_args", ":", "Iterable" ]
226e5eab21dbfdfb59b9af312a56a8ddc3675419
https://github.com/eflee/pyechoip/blob/226e5eab21dbfdfb59b9af312a56a8ddc3675419/src/echoip/sources.py#L158-L169
valid
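A short registration sketch for add_source; HttpIPSource is a hypothetical IIPSource implementer (the package's real source classes are not shown in this record), and a no-argument IPSourceFactory constructor is assumed.

from zope.interface import implementer

@implementer(IIPSource)
class HttpIPSource(object):            # hypothetical source class
    def __init__(self, url):
        self._url = url

factory = IPSourceFactory()
factory.add_source(HttpIPSource, 'https://example.com/ip')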
eflee/pyechoip
src/echoip/sources.py
IPSourceFactory.get_sources
def get_sources(self, limit=sys.maxsize, types_list=None): """ Generates instantiated sources from the factory :param limit: the max number of sources to yield :type limit: int :param types_list: filter by types so the constructor can be used to accommodate many types :type types_list: class or list of classes :return: Yields types added by add_source :rtype: generator """ if types_list and not isinstance(types_list, (tuple, list)): types_list = [types_list] sources = list(self._sources) random.shuffle(sources) for source in sources: if not types_list or source[0] in types_list: limit -= 1 yield source[0](*source[1]) if limit <= 0: break
python
def get_sources(self, limit=sys.maxsize, types_list=None): """ Generates instantiated sources from the factory :param limit: the max number of sources to yield :type limit: int :param types_list: filter by types so the constructor can be used to accommodate many types :type types_list: class or list of classes :return: Yields types added by add_source :rtype: generator """ if types_list and not isinstance(types_list, (tuple, list)): types_list = [types_list] sources = list(self._sources) random.shuffle(sources) for source in sources: if not types_list or source[0] in types_list: limit -= 1 yield source[0](*source[1]) if limit <= 0: break
[ "def", "get_sources", "(", "self", ",", "limit", "=", "sys", ".", "maxsize", ",", "types_list", "=", "None", ")", ":", "if", "types_list", "and", "not", "isinstance", "(", "types_list", ",", "(", "tuple", ",", "list", ")", ")", ":", "types_list", "=", "[", "types_list", "]", "sources", "=", "list", "(", "self", ".", "_sources", ")", "random", ".", "shuffle", "(", "sources", ")", "for", "source", "in", "sources", ":", "if", "not", "types_list", "or", "source", "[", "0", "]", "in", "types_list", ":", "limit", "-=", "1", "yield", "source", "[", "0", "]", "(", "*", "source", "[", "1", "]", ")", "if", "limit", "<=", "0", ":", "break" ]
Generates instantiated sources from the factory :param limit: the max number of sources to yield :type limit: int :param types_list: filter by types so the constructor can be used to accommodate many types :type types_list: class or list of classes :return: Yields types added by add_source :rtype: generator
[ "Generates", "instantiated", "sources", "from", "the", "factory", ":", "param", "limit", ":", "the", "max", "number", "of", "sources", "to", "yield", ":", "type", "limit", ":", "int", ":", "param", "types_list", ":", "filter", "by", "types", "so", "the", "constructor", "can", "be", "used", "to", "accomidate", "many", "types", ":", "type", "types_list", ":", "class", "or", "list", "of", "classes", ":", "return", ":", "Yields", "types", "added", "by", "add_source", ":", "rtype", ":", "generator" ]
226e5eab21dbfdfb59b9af312a56a8ddc3675419
https://github.com/eflee/pyechoip/blob/226e5eab21dbfdfb59b9af312a56a8ddc3675419/src/echoip/sources.py#L171-L193
valid
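Continuing the hypothetical HttpIPSource sketch from the previous record: pulling shuffled, instantiated sources back out of the factory, filtered by type and capped by limit.

for source in factory.get_sources(limit=2, types_list=HttpIPSource):
    print(source)                      # at most two HttpIPSource instances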
gtaylor/evarify
evarify/filters/python_basics.py
value_to_bool
def value_to_bool(config_val, evar): """ Massages the 'true' and 'false' strings to bool equivalents. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :rtype: bool :return: True or False, depending on the value. """ if not config_val: return False if config_val.strip().lower() == 'true': return True else: return False
python
def value_to_bool(config_val, evar): """ Massages the 'true' and 'false' strings to bool equivalents. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :rtype: bool :return: True or False, depending on the value. """ if not config_val: return False if config_val.strip().lower() == 'true': return True else: return False
[ "def", "value_to_bool", "(", "config_val", ",", "evar", ")", ":", "if", "not", "config_val", ":", "return", "False", "if", "config_val", ".", "strip", "(", ")", ".", "lower", "(", ")", "==", "'true'", ":", "return", "True", "else", ":", "return", "False" ]
Massages the 'true' and 'false' strings to bool equivalents. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :rtype: bool :return: True or False, depending on the value.
[ "Massages", "the", "true", "and", "false", "strings", "to", "bool", "equivalents", "." ]
37cec29373c820eda96939633e2067d55598915b
https://github.com/gtaylor/evarify/blob/37cec29373c820eda96939633e2067d55598915b/evarify/filters/python_basics.py#L64-L79
valid
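The filter never touches its evar argument, so passing None is enough to exercise the documented behavior directly:

assert value_to_bool(' TRUE ', None) is True    # case- and whitespace-tolerant
assert value_to_bool('false', None) is False
assert value_to_bool(None, None) is False       # falsy input short-circuits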
gtaylor/evarify
evarify/filters/python_basics.py
validate_is_not_none
def validate_is_not_none(config_val, evar): """ If the value is ``None``, fail validation. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :raises: ValueError if the config value is None. """ if config_val is None: raise ValueError( "Value for environment variable '{evar_name}' can't " "be empty.".format(evar_name=evar.name)) return config_val
python
def validate_is_not_none(config_val, evar): """ If the value is ``None``, fail validation. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :raises: ValueError if the config value is None. """ if config_val is None: raise ValueError( "Value for environment variable '{evar_name}' can't " "be empty.".format(evar_name=evar.name)) return config_val
[ "def", "validate_is_not_none", "(", "config_val", ",", "evar", ")", ":", "if", "config_val", "is", "None", ":", "raise", "ValueError", "(", "\"Value for environment variable '{evar_name}' can't \"", "\"be empty.\"", ".", "format", "(", "evar_name", "=", "evar", ".", "name", ")", ")", "return", "config_val" ]
If the value is ``None``, fail validation. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :raises: ValueError if the config value is None.
[ "If", "the", "value", "is", "None", "fail", "validation", "." ]
37cec29373c820eda96939633e2067d55598915b
https://github.com/gtaylor/evarify/blob/37cec29373c820eda96939633e2067d55598915b/evarify/filters/python_basics.py#L83-L96
valid
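Only evar.name is read (for the error message), so a minimal stand-in object is enough to demonstrate both paths:

class FakeEvar(object):                 # stand-in for EnvironmentVariable
    name = 'API_KEY'

assert validate_is_not_none('secret', FakeEvar()) == 'secret'
try:
    validate_is_not_none(None, FakeEvar())
except ValueError as exc:
    print(exc)    # Value for environment variable 'API_KEY' can't be empty.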
gtaylor/evarify
evarify/filters/python_basics.py
validate_is_boolean_true
def validate_is_boolean_true(config_val, evar): """ Make sure the value evaluates to boolean True. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :raises: ValueError if the config value evaluates to boolean False. """ if config_val is None: raise ValueError( "Value for environment variable '{evar_name}' can't " "be empty.".format(evar_name=evar.name)) return config_val
python
def validate_is_boolean_true(config_val, evar): """ Make sure the value evaluates to boolean True. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :raises: ValueError if the config value evaluates to boolean False. """ if config_val is None: raise ValueError( "Value for environment variable '{evar_name}' can't " "be empty.".format(evar_name=evar.name)) return config_val
[ "def", "validate_is_boolean_true", "(", "config_val", ",", "evar", ")", ":", "if", "config_val", "is", "None", ":", "raise", "ValueError", "(", "\"Value for environment variable '{evar_name}' can't \"", "\"be empty.\"", ".", "format", "(", "evar_name", "=", "evar", ".", "name", ")", ")", "return", "config_val" ]
Make sure the value evaluates to boolean True. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :raises: ValueError if the config value evaluates to boolean False.
[ "Make", "sure", "the", "value", "evaluates", "to", "boolean", "True", "." ]
37cec29373c820eda96939633e2067d55598915b
https://github.com/gtaylor/evarify/blob/37cec29373c820eda96939633e2067d55598915b/evarify/filters/python_basics.py#L100-L113
valid
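Note that, as written, the body only rejects None (it mirrors validate_is_not_none rather than coercing to a boolean), so a sketch of the observable behavior:

class FakeEvar(object):                 # stand-in; only .name is read
    name = 'FEATURE_FLAG'

assert validate_is_boolean_true('true', FakeEvar()) == 'true'
try:
    validate_is_boolean_true(None, FakeEvar())
except ValueError as exc:
    print(exc)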
gtaylor/evarify
evarify/filters/python_basics.py
value_to_python_log_level
def value_to_python_log_level(config_val, evar): """ Convert an evar value into a Python logging level constant. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :return: A validated string. :raises: ValueError if the log level is invalid. """ if not config_val: config_val = evar.default_val config_val = config_val.upper() # noinspection PyProtectedMember return logging._checkLevel(config_val)
python
def value_to_python_log_level(config_val, evar): """ Convert an evar value into a Python logging level constant. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :return: A validated string. :raises: ValueError if the log level is invalid. """ if not config_val: config_val = evar.default_val config_val = config_val.upper() # noinspection PyProtectedMember return logging._checkLevel(config_val)
[ "def", "value_to_python_log_level", "(", "config_val", ",", "evar", ")", ":", "if", "not", "config_val", ":", "config_val", "=", "evar", ".", "default_val", "config_val", "=", "config_val", ".", "upper", "(", ")", "# noinspection PyProtectedMember", "return", "logging", ".", "_checkLevel", "(", "config_val", ")" ]
Convert an evar value into a Python logging level constant. :param str config_val: The env var value. :param EnvironmentVariable evar: The EVar object we are validating a value for. :return: A validated string. :raises: ValueError if the log level is invalid.
[ "Convert", "an", "evar", "value", "into", "a", "Python", "logging", "level", "constant", "." ]
37cec29373c820eda96939633e2067d55598915b
https://github.com/gtaylor/evarify/blob/37cec29373c820eda96939633e2067d55598915b/evarify/filters/python_basics.py#L117-L131
valid
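A quick check of the conversion, relying on the same private logging._checkLevel helper the filter itself uses; the stand-in evar only needs a default_val:

import logging

class FakeEvar(object):                 # stand-in for EnvironmentVariable
    default_val = 'INFO'

assert value_to_python_log_level('debug', FakeEvar()) == logging.DEBUG
assert value_to_python_log_level('', FakeEvar()) == logging.INFO   # falls back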
runfalk/psycospans
psycospans/__init__.py
register_range_type
def register_range_type(pgrange, pyrange, conn): """ Register a new range type as a PostgreSQL range. >>> register_range_type("int4range", intrange, conn) The above will make sure intrange is regarded as an int4range for queries and that int4ranges will be cast into intrange when fetching rows. pgrange should be the full name including schema for the custom range type. Note that adaption is global, meaning if a range type is passed to a regular psycopg2 connection it will adapt it to its proper range type. Parsing of rows from the database however is not global and just set on a per connection basis. """ register_adapter(pyrange, partial(adapt_range, pgrange)) register_range_caster( pgrange, pyrange, *query_range_oids(pgrange, conn), scope=conn)
python
def register_range_type(pgrange, pyrange, conn): """ Register a new range type as a PostgreSQL range. >>> register_range_type("int4range", intrange, conn) The above will make sure intrange is regarded as an int4range for queries and that int4ranges will be cast into intrange when fetching rows. pgrange should be the full name including schema for the custom range type. Note that adaption is global, meaning if a range type is passed to a regular psycopg2 connection it will adapt it to its proper range type. Parsing of rows from the database however is not global and just set on a per connection basis. """ register_adapter(pyrange, partial(adapt_range, pgrange)) register_range_caster( pgrange, pyrange, *query_range_oids(pgrange, conn), scope=conn)
[ "def", "register_range_type", "(", "pgrange", ",", "pyrange", ",", "conn", ")", ":", "register_adapter", "(", "pyrange", ",", "partial", "(", "adapt_range", ",", "pgrange", ")", ")", "register_range_caster", "(", "pgrange", ",", "pyrange", ",", "*", "query_range_oids", "(", "pgrange", ",", "conn", ")", ",", "scope", "=", "conn", ")" ]
Register a new range type as a PostgreSQL range. >>> register_range_type("int4range", intrange, conn) The above will make sure intrange is regarded as an int4range for queries and that int4ranges will be cast into intrange when fetching rows. pgrange should be the full name including schema for the custom range type. Note that adaption is global, meaning if a range type is passed to a regular psycopg2 connection it will adapt it to its proper range type. Parsing of rows from the database however is not global and just set on a per connection basis.
[ "Register", "a", "new", "range", "type", "as", "a", "PostgreSQL", "range", "." ]
77a2d33cb280e7ff78252e173d702dd800f03133
https://github.com/runfalk/psycospans/blob/77a2d33cb280e7ff78252e173d702dd800f03133/psycospans/__init__.py#L36-L55
valid
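A hedged wiring sketch: it assumes psycopg2 and the spans package (which provides intrange, as the docstring suggests) are installed, and the DSN is a placeholder for a reachable database.

import psycopg2
from spans import intrange

conn = psycopg2.connect('dbname=test')          # placeholder DSN
register_range_type('int4range', intrange, conn)

cur = conn.cursor()
cur.execute('SELECT %s::int4range', (intrange(1, 5),))
print(cur.fetchone()[0])                        # fetched back as an intrange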
agsimeonov/cbexchange
cbexchange/error.py
get_api_error
def get_api_error(response): """Acquires the correct error for a given response. :param requests.Response response: HTTP error response :returns: the appropriate error for a given response :rtype: APIError """ error_class = _status_code_to_class.get(response.status_code, APIError) return error_class(response)
python
def get_api_error(response): """Acquires the correct error for a given response. :param requests.Response response: HTTP error response :returns: the appropriate error for a given response :rtype: APIError """ error_class = _status_code_to_class.get(response.status_code, APIError) return error_class(response)
[ "def", "get_api_error", "(", "response", ")", ":", "error_class", "=", "_status_code_to_class", ".", "get", "(", "response", ".", "status_code", ",", "APIError", ")", "return", "error_class", "(", "response", ")" ]
Acquires the correct error for a given response. :param requests.Response response: HTTP error response :returns: the appropriate error for a given response :rtype: APIError
[ "Acquires", "the", "correct", "error", "for", "a", "given", "response", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/error.py#L42-L51
valid
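Any object with a status_code attribute will do for a demonstration; the concrete subclass returned depends on the module's _status_code_to_class mapping:

class FakeResponse(object):             # stand-in for requests.Response
    status_code = 404

error = get_api_error(FakeResponse())
print(type(error).__name__)             # the APIError subclass mapped to 404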
orb-framework/pyramid_orb
pyramid_orb/utils.py
get_param_values
def get_param_values(request, model=None): """ Converts the request parameters to Python. :param request: <pyramid.request.Request> || <dict> :return: <dict> """ if type(request) == dict: return request params = get_payload(request) # support in-place editing formatted request try: del params['pk'] params[params.pop('name')] = params.pop('value') except KeyError: pass return { k.rstrip('[]'): safe_eval(v) if not type(v) == list else [safe_eval(sv) for sv in v] for k, v in params.items() }
python
def get_param_values(request, model=None): """ Converts the request parameters to Python. :param request: <pyramid.request.Request> || <dict> :return: <dict> """ if type(request) == dict: return request params = get_payload(request) # support in-place editing formatted request try: del params['pk'] params[params.pop('name')] = params.pop('value') except KeyError: pass return { k.rstrip('[]'): safe_eval(v) if not type(v) == list else [safe_eval(sv) for sv in v] for k, v in params.items() }
[ "def", "get_param_values", "(", "request", ",", "model", "=", "None", ")", ":", "if", "type", "(", "request", ")", "==", "dict", ":", "return", "request", "params", "=", "get_payload", "(", "request", ")", "# support in-place editing formatted request", "try", ":", "del", "params", "[", "'pk'", "]", "params", "[", "params", ".", "pop", "(", "'name'", ")", "]", "=", "params", ".", "pop", "(", "'value'", ")", "except", "KeyError", ":", "pass", "return", "{", "k", ".", "rstrip", "(", "'[]'", ")", ":", "safe_eval", "(", "v", ")", "if", "not", "type", "(", "v", ")", "==", "list", "else", "[", "safe_eval", "(", "sv", ")", "for", "sv", "in", "v", "]", "for", "k", ",", "v", "in", "params", ".", "items", "(", ")", "}" ]
Converts the request parameters to Python. :param request: <pyramid.request.Request> || <dict> :return: <dict>
[ "Converts", "the", "request", "parameters", "to", "Python", "." ]
e5c716fc75626e1cd966f7bd87b470a8b71126bf
https://github.com/orb-framework/pyramid_orb/blob/e5c716fc75626e1cd966f7bd87b470a8b71126bf/pyramid_orb/utils.py#L10-L33
valid
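One behavior worth showing: plain dicts short-circuit untouched, while real request payloads go through the rename and safe_eval path.

assert get_param_values({'limit': 10}) == {'limit': 10}   # dict passes through
# Via a real request, a payload like {'pk': 1, 'name': 'title', 'value': 'New'}
# would instead collapse to {'title': safe_eval('New')}.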
orb-framework/pyramid_orb
pyramid_orb/utils.py
get_context
def get_context(request, model=None): """ Extracts ORB context information from the request. :param request: <pyramid.request.Request> :param model: <orb.Model> || None :return: {<str> key: <variant> value} values, <orb.Context> """ # convert request parameters to python param_values = get_param_values(request, model=model) # extract the full orb context if provided context = param_values.pop('orb_context', {}) if isinstance(context, (unicode, str)): context = projex.rest.unjsonify(context) # otherwise, extract the limit information has_limit = 'limit' in context or 'limit' in param_values # create the new orb context orb_context = orb.Context(**context) # build up context information from the request params used = set() query_context = {} for key in orb.Context.Defaults: if key in param_values: used.add(key) query_context[key] = param_values.get(key) # generate a simple query object schema_values = {} if model: # extract match dict items for key, value in request.matchdict.items(): if model.schema().column(key, raise_=False): schema_values[key] = value # extract payload items for key, value in param_values.items(): root_key = key.split('.')[0] schema_object = model.schema().column(root_key, raise_=False) or model.schema().collector(root_key) if schema_object: value = param_values.pop(key) if isinstance(schema_object, orb.Collector) and type(value) not in (tuple, list): value = [value] schema_values[key] = value # generate the base context information query_context['scope'] = { 'request': request } # include any request specific scoping or information from the request # first, look for default ORB context for all requests try: default_context = request.orb_default_context # then, look for scope specific information for all requests except AttributeError: try: query_context['scope'].update(request.orb_scope) except AttributeError: pass # if request specific context defaults exist, then # merge them with the rest of the query context else: if 'scope' in default_context: query_context['scope'].update(default_context.pop('scope')) # setup defaults based on the request for k, v in default_context.items(): query_context.setdefault(k, v) orb_context.update(query_context) return schema_values, orb_context
python
def get_context(request, model=None): """ Extracts ORB context information from the request. :param request: <pyramid.request.Request> :param model: <orb.Model> || None :return: {<str> key: <variant> value} values, <orb.Context> """ # convert request parameters to python param_values = get_param_values(request, model=model) # extract the full orb context if provided context = param_values.pop('orb_context', {}) if isinstance(context, (unicode, str)): context = projex.rest.unjsonify(context) # otherwise, extract the limit information has_limit = 'limit' in context or 'limit' in param_values # create the new orb context orb_context = orb.Context(**context) # build up context information from the request params used = set() query_context = {} for key in orb.Context.Defaults: if key in param_values: used.add(key) query_context[key] = param_values.get(key) # generate a simple query object schema_values = {} if model: # extract match dict items for key, value in request.matchdict.items(): if model.schema().column(key, raise_=False): schema_values[key] = value # extract payload items for key, value in param_values.items(): root_key = key.split('.')[0] schema_object = model.schema().column(root_key, raise_=False) or model.schema().collector(root_key) if schema_object: value = param_values.pop(key) if isinstance(schema_object, orb.Collector) and type(value) not in (tuple, list): value = [value] schema_values[key] = value # generate the base context information query_context['scope'] = { 'request': request } # include any request specific scoping or information from the request # first, look for default ORB context for all requests try: default_context = request.orb_default_context # then, look for scope specific information for all requests except AttributeError: try: query_context['scope'].update(request.orb_scope) except AttributeError: pass # if request specific context defaults exist, then # merge them with the rest of the query context else: if 'scope' in default_context: query_context['scope'].update(default_context.pop('scope')) # setup defaults based on the request for k, v in default_context.items(): query_context.setdefault(k, v) orb_context.update(query_context) return schema_values, orb_context
[ "def", "get_context", "(", "request", ",", "model", "=", "None", ")", ":", "# convert request parameters to python", "param_values", "=", "get_param_values", "(", "request", ",", "model", "=", "model", ")", "# extract the full orb context if provided", "context", "=", "param_values", ".", "pop", "(", "'orb_context'", ",", "{", "}", ")", "if", "isinstance", "(", "context", ",", "(", "unicode", ",", "str", ")", ")", ":", "context", "=", "projex", ".", "rest", ".", "unjsonify", "(", "context", ")", "# otherwise, extract the limit information", "has_limit", "=", "'limit'", "in", "context", "or", "'limit'", "in", "param_values", "# create the new orb context", "orb_context", "=", "orb", ".", "Context", "(", "*", "*", "context", ")", "# build up context information from the request params", "used", "=", "set", "(", ")", "query_context", "=", "{", "}", "for", "key", "in", "orb", ".", "Context", ".", "Defaults", ":", "if", "key", "in", "param_values", ":", "used", ".", "add", "(", "key", ")", "query_context", "[", "key", "]", "=", "param_values", ".", "get", "(", "key", ")", "# generate a simple query object", "schema_values", "=", "{", "}", "if", "model", ":", "# extract match dict items", "for", "key", ",", "value", "in", "request", ".", "matchdict", ".", "items", "(", ")", ":", "if", "model", ".", "schema", "(", ")", ".", "column", "(", "key", ",", "raise_", "=", "False", ")", ":", "schema_values", "[", "key", "]", "=", "value", "# extract payload items", "for", "key", ",", "value", "in", "param_values", ".", "items", "(", ")", ":", "root_key", "=", "key", ".", "split", "(", "'.'", ")", "[", "0", "]", "schema_object", "=", "model", ".", "schema", "(", ")", ".", "column", "(", "root_key", ",", "raise_", "=", "False", ")", "or", "model", ".", "schema", "(", ")", ".", "collector", "(", "root_key", ")", "if", "schema_object", ":", "value", "=", "param_values", ".", "pop", "(", "key", ")", "if", "isinstance", "(", "schema_object", ",", "orb", ".", "Collector", ")", "and", "type", "(", "value", ")", "not", "in", "(", "tuple", ",", "list", ")", ":", "value", "=", "[", "value", "]", "schema_values", "[", "key", "]", "=", "value", "# generate the base context information", "query_context", "[", "'scope'", "]", "=", "{", "'request'", ":", "request", "}", "# include any request specific scoping or information from the request", "# first, look for default ORB context for all requests", "try", ":", "default_context", "=", "request", ".", "orb_default_context", "# then, look for scope specific information for all requests", "except", "AttributeError", ":", "try", ":", "query_context", "[", "'scope'", "]", ".", "update", "(", "request", ".", "orb_scope", ")", "except", "AttributeError", ":", "pass", "# if request specific context defaults exist, then", "# merge them with the rest of the query context", "else", ":", "if", "'scope'", "in", "default_context", ":", "query_context", "[", "'scope'", "]", ".", "update", "(", "default_context", ".", "pop", "(", "'scope'", ")", ")", "# setup defaults based on the request", "for", "k", ",", "v", "in", "default_context", ".", "items", "(", ")", ":", "query_context", ".", "setdefault", "(", "k", ",", "v", ")", "orb_context", ".", "update", "(", "query_context", ")", "return", "schema_values", ",", "orb_context" ]
Extracts ORB context information from the request. :param request: <pyramid.request.Request> :param model: <orb.Model> || None :return: {<str> key: <variant> value} values, <orb.Context>
[ "Extracts", "ORB", "context", "information", "from", "the", "request", "." ]
e5c716fc75626e1cd966f7bd87b470a8b71126bf
https://github.com/orb-framework/pyramid_orb/blob/e5c716fc75626e1cd966f7bd87b470a8b71126bf/pyramid_orb/utils.py#L36-L113
valid
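A sketch of typical use inside a Pyramid view; User is a hypothetical orb.Model subclass, and the two return values feed the eventual ORM query.

def users_view(request):
    values, orb_context = get_context(request, model=User)
    # `values` holds column/collector filters parsed from the request;
    # `orb_context` carries paging, scoping and other query options.
    return values, orb_context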
agsimeonov/cbexchange
cbexchange/orderbook.py
OrderBook._real_time_thread
def _real_time_thread(self): """Handles real-time updates to the order book.""" while self.ws_client.connected(): if self.die: break if self.pause: sleep(5) continue message = self.ws_client.receive() if message is None: break message_type = message['type'] if message_type == 'error': continue if message['sequence'] <= self.sequence: continue if message_type == 'open': self._handle_open(message) elif message_type == 'match': self._handle_match(message) elif message_type == 'done': self._handle_done(message) elif message_type == 'change': self._handle_change(message) else: continue self.ws_client.disconnect()
python
def _real_time_thread(self): """Handles real-time updates to the order book.""" while self.ws_client.connected(): if self.die: break if self.pause: sleep(5) continue message = self.ws_client.receive() if message is None: break message_type = message['type'] if message_type == 'error': continue if message['sequence'] <= self.sequence: continue if message_type == 'open': self._handle_open(message) elif message_type == 'match': self._handle_match(message) elif message_type == 'done': self._handle_done(message) elif message_type == 'change': self._handle_change(message) else: continue self.ws_client.disconnect()
[ "def", "_real_time_thread", "(", "self", ")", ":", "while", "self", ".", "ws_client", ".", "connected", "(", ")", ":", "if", "self", ".", "die", ":", "break", "if", "self", ".", "pause", ":", "sleep", "(", "5", ")", "continue", "message", "=", "self", ".", "ws_client", ".", "receive", "(", ")", "if", "message", "is", "None", ":", "break", "message_type", "=", "message", "[", "'type'", "]", "if", "message_type", "==", "'error'", ":", "continue", "if", "message", "[", "'sequence'", "]", "<=", "self", ".", "sequence", ":", "continue", "if", "message_type", "==", "'open'", ":", "self", ".", "_handle_open", "(", "message", ")", "elif", "message_type", "==", "'match'", ":", "self", ".", "_handle_match", "(", "message", ")", "elif", "message_type", "==", "'done'", ":", "self", ".", "_handle_done", "(", "message", ")", "elif", "message_type", "==", "'change'", ":", "self", ".", "_handle_change", "(", "message", ")", "else", ":", "continue", "self", ".", "ws_client", ".", "disconnect", "(", ")" ]
Handles real-time updates to the order book.
[ "Handles", "real", "-", "time", "updates", "to", "the", "order", "book", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/orderbook.py#L120-L153
valid
agsimeonov/cbexchange
cbexchange/websock.py
WSClient._keep_alive_thread
def _keep_alive_thread(self): """Used exclusively as a thread which keeps the WebSocket alive.""" while True: with self._lock: if self.connected(): self._ws.ping() else: self.disconnect() self._thread = None return sleep(30)
python
def _keep_alive_thread(self): """Used exclusively as a thread which keeps the WebSocket alive.""" while True: with self._lock: if self.connected(): self._ws.ping() else: self.disconnect() self._thread = None return sleep(30)
[ "def", "_keep_alive_thread", "(", "self", ")", ":", "while", "True", ":", "with", "self", ".", "_lock", ":", "if", "self", ".", "connected", "(", ")", ":", "self", ".", "_ws", ".", "ping", "(", ")", "else", ":", "self", ".", "disconnect", "(", ")", "self", ".", "_thread", "=", "None", "return", "sleep", "(", "30", ")" ]
Used exclusively as a thread which keeps the WebSocket alive.
[ "Used", "exclusively", "as", "a", "thread", "which", "keeps", "the", "WebSocket", "alive", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/websock.py#L104-L114
valid
agsimeonov/cbexchange
cbexchange/websock.py
WSClient.connect
def connect(self): """Connects and subscribes to the WebSocket Feed.""" if not self.connected(): self._ws = create_connection(self.WS_URI) message = { 'type':self.WS_TYPE, 'product_id':self.WS_PRODUCT_ID } self._ws.send(dumps(message)) # There will be only one keep alive thread per client instance with self._lock: if not self._thread: thread = Thread(target=self._keep_alive_thread, args=[]) thread.start()
python
def connect(self): """Connects and subscribes to the WebSocket Feed.""" if not self.connected(): self._ws = create_connection(self.WS_URI) message = { 'type':self.WS_TYPE, 'product_id':self.WS_PRODUCT_ID } self._ws.send(dumps(message)) # There will be only one keep alive thread per client instance with self._lock: if not self._thread: thread = Thread(target=self._keep_alive_thread, args=[]) thread.start()
[ "def", "connect", "(", "self", ")", ":", "if", "not", "self", ".", "connected", "(", ")", ":", "self", ".", "_ws", "=", "create_connection", "(", "self", ".", "WS_URI", ")", "message", "=", "{", "'type'", ":", "self", ".", "WS_TYPE", ",", "'product_id'", ":", "self", ".", "WS_PRODUCT_ID", "}", "self", ".", "_ws", ".", "send", "(", "dumps", "(", "message", ")", ")", "# There will be only one keep alive thread per client instance", "with", "self", ".", "_lock", ":", "if", "not", "self", ".", "_thread", ":", "thread", "=", "Thread", "(", "target", "=", "self", ".", "_keep_alive_thread", ",", "args", "=", "[", "]", ")", "thread", ".", "start", "(", ")" ]
Connects and subscribes to the WebSocket Feed.
[ "Connects", "and", "subscribes", "to", "the", "WebSocket", "Feed", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/websock.py#L116-L130
valid
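A minimal connection sketch, assuming the three WS_* class attributes are meant to be overridden in a subclass and that the constructor takes no required arguments (neither is confirmed by this record alone).

class TickerClient(WSClient):
    WS_URI = 'wss://example.com/feed'   # placeholder endpoint
    WS_TYPE = 'subscribe'
    WS_PRODUCT_ID = 'BTC-USD'

client = TickerClient()
client.connect()           # opens the socket, subscribes, starts keep-alive
message = client.receive()
client.disconnect()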
dlancer/django-cached-httpbl
cached_httpbl/decorators.py
cached_httpbl_exempt
def cached_httpbl_exempt(view_func): """ Marks a view function as being exempt from the cached httpbl view protection. """ # We could just do view_func.cached_httpbl_exempt = True, but decorators # are nicer if they don't have side-effects, so we return a new # function. def wrapped_view(*args, **kwargs): return view_func(*args, **kwargs) wrapped_view.cached_httpbl_exempt = True return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
python
def cached_httpbl_exempt(view_func): """ Marks a view function as being exempt from the cached httpbl view protection. """ # We could just do view_func.cached_httpbl_exempt = True, but decorators # are nicer if they don't have side-effects, so we return a new # function. def wrapped_view(*args, **kwargs): return view_func(*args, **kwargs) wrapped_view.cached_httpbl_exempt = True return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
[ "def", "cached_httpbl_exempt", "(", "view_func", ")", ":", "# We could just do view_func.cached_httpbl_exempt = True, but decorators", "# are nicer if they don't have side-effects, so we return a new", "# function.", "def", "wrapped_view", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "view_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "wrapped_view", ".", "cached_httpbl_exempt", "=", "True", "return", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "(", "wrapped_view", ")" ]
Marks a view function as being exempt from the cached httpbl view protection.
[ "Marks", "a", "view", "function", "as", "being", "exempt", "from", "the", "cached", "httpbl", "view", "protection", "." ]
b32106f4283f9605122255f2c9bfbd3bff465fa5
https://github.com/dlancer/django-cached-httpbl/blob/b32106f4283f9605122255f2c9bfbd3bff465fa5/cached_httpbl/decorators.py#L16-L26
valid
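Typical use on a Django view, plus the flag the middleware is expected to check:

from django.http import HttpResponse

@cached_httpbl_exempt
def healthcheck(request):               # a view the httpbl check should skip
    return HttpResponse('ok')

assert healthcheck.cached_httpbl_exempt is True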
FocusLab/Albertson
albertson/base.py
CounterPool.get_conn
def get_conn(self, aws_access_key=None, aws_secret_key=None): ''' Hook point for overriding how the CounterPool gets its connection to AWS. ''' return boto.connect_dynamodb( aws_access_key_id=aws_access_key, aws_secret_access_key=aws_secret_key, )
python
def get_conn(self, aws_access_key=None, aws_secret_key=None): ''' Hook point for overriding how the CounterPool gets its connection to AWS. ''' return boto.connect_dynamodb( aws_access_key_id=aws_access_key, aws_secret_access_key=aws_secret_key, )
[ "def", "get_conn", "(", "self", ",", "aws_access_key", "=", "None", ",", "aws_secret_key", "=", "None", ")", ":", "return", "boto", ".", "connect_dynamodb", "(", "aws_access_key_id", "=", "aws_access_key", ",", "aws_secret_access_key", "=", "aws_secret_key", ",", ")" ]
Hook point for overriding how the CounterPool gets its connection to AWS.
[ "Hook", "point", "for", "overriding", "how", "the", "CounterPool", "gets", "its", "connection", "to", "AWS", "." ]
a42f9873559df9188c40c34fdffb079d78eaa3fe
https://github.com/FocusLab/Albertson/blob/a42f9873559df9188c40c34fdffb079d78eaa3fe/albertson/base.py#L55-L63
valid
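Because connection creation is a hook, swapping credential handling is a small subclass; this sketch pulls keys from the environment (the variable names are conventional AWS ones, not something this record defines):

import os

class EnvCounterPool(CounterPool):
    def get_conn(self, aws_access_key=None, aws_secret_key=None):
        # Ignore the arguments and read credentials from the environment.
        return super(EnvCounterPool, self).get_conn(
            aws_access_key=os.environ.get('AWS_ACCESS_KEY_ID'),
            aws_secret_key=os.environ.get('AWS_SECRET_ACCESS_KEY'),
        )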
FocusLab/Albertson
albertson/base.py
CounterPool.get_schema
def get_schema(self): ''' Hook point for overriding how the CounterPool determines the schema to be used when creating a missing table. ''' if not self.schema: raise NotImplementedError( 'You must provide a schema value or override the get_schema method' ) return self.conn.create_schema(**self.schema)
python
def get_schema(self): ''' Hook point for overriding how the CounterPool determines the schema to be used when creating a missing table. ''' if not self.schema: raise NotImplementedError( 'You must provide a schema value or override the get_schema method' ) return self.conn.create_schema(**self.schema)
[ "def", "get_schema", "(", "self", ")", ":", "if", "not", "self", ".", "schema", ":", "raise", "NotImplementedError", "(", "'You must provide a schema value or override the get_schema method'", ")", "return", "self", ".", "conn", ".", "create_schema", "(", "*", "*", "self", ".", "schema", ")" ]
Hook point for overriding how the CounterPool determines the schema to be used when creating a missing table.
[ "Hook", "point", "for", "overriding", "how", "the", "CounterPool", "determines", "the", "schema", "to", "be", "used", "when", "creating", "a", "missing", "table", "." ]
a42f9873559df9188c40c34fdffb079d78eaa3fe
https://github.com/FocusLab/Albertson/blob/a42f9873559df9188c40c34fdffb079d78eaa3fe/albertson/base.py#L76-L86
valid
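A sketch of providing the schema declaratively: since the hook forwards **self.schema to boto's create_schema, the dict keys follow classic boto's keyword names (treated here as an assumption, as is the table_name attribute):

class VisitorPool(CounterPool):
    table_name = 'visitor-counters'     # assumed attribute read by get_table_name
    schema = {
        'hash_key_name': 'counter_name',
        'hash_key_proto_value': str,
    }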
FocusLab/Albertson
albertson/base.py
CounterPool.create_table
def create_table(self): ''' Hook point for overriding how the CounterPool creates a new table in DynamoDB ''' table = self.conn.create_table( name=self.get_table_name(), schema=self.get_schema(), read_units=self.get_read_units(), write_units=self.get_write_units(), ) if table.status != 'ACTIVE': table.refresh(wait_for_active=True, retry_seconds=1) return table
python
def create_table(self): ''' Hook point for overriding how the CounterPool creates a new table in DynamoDB ''' table = self.conn.create_table( name=self.get_table_name(), schema=self.get_schema(), read_units=self.get_read_units(), write_units=self.get_write_units(), ) if table.status != 'ACTIVE': table.refresh(wait_for_active=True, retry_seconds=1) return table
[ "def", "create_table", "(", "self", ")", ":", "table", "=", "self", ".", "conn", ".", "create_table", "(", "name", "=", "self", ".", "get_table_name", "(", ")", ",", "schema", "=", "self", ".", "get_schema", "(", ")", ",", "read_units", "=", "self", ".", "get_read_units", "(", ")", ",", "write_units", "=", "self", ".", "get_write_units", "(", ")", ",", ")", "if", "table", ".", "status", "!=", "'ACTIVE'", ":", "table", ".", "refresh", "(", "wait_for_active", "=", "True", ",", "retry_seconds", "=", "1", ")", "return", "table" ]
Hook point for overriding how the CounterPool creates a new table in DynamoDB
[ "Hook", "point", "for", "overriding", "how", "the", "CounterPool", "creates", "a", "new", "table", "in", "DynamooDB" ]
a42f9873559df9188c40c34fdffb079d78eaa3fe
https://github.com/FocusLab/Albertson/blob/a42f9873559df9188c40c34fdffb079d78eaa3fe/albertson/base.py#L102-L117
valid
FocusLab/Albertson
albertson/base.py
CounterPool.get_table
def get_table(self): ''' Hook point for overriding how the CounterPool transforms table_name into a boto DynamoDB Table object. ''' if hasattr(self, '_table'): table = self._table else: try: table = self.conn.get_table(self.get_table_name()) except boto.exception.DynamoDBResponseError: if self.auto_create_table: table = self.create_table() else: raise self._table = table return table
python
def get_table(self): ''' Hook point for overriding how the CounterPool transforms table_name into a boto DynamoDB Table object. ''' if hasattr(self, '_table'): table = self._table else: try: table = self.conn.get_table(self.get_table_name()) except boto.exception.DynamoDBResponseError: if self.auto_create_table: table = self.create_table() else: raise self._table = table return table
[ "def", "get_table", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'_table'", ")", ":", "table", "=", "self", ".", "_table", "else", ":", "try", ":", "table", "=", "self", ".", "conn", ".", "get_table", "(", "self", ".", "get_table_name", "(", ")", ")", "except", "boto", ".", "exception", ".", "DynamoDBResponseError", ":", "if", "self", ".", "auto_create_table", ":", "table", "=", "self", ".", "create_table", "(", ")", "else", ":", "raise", "self", ".", "_table", "=", "table", "return", "table" ]
Hook point for overriding how the CounterPool transforms table_name into a boto DynamoDB Table object.
[ "Hook", "point", "for", "overriding", "how", "the", "CounterPool", "transforms", "table_name", "into", "a", "boto", "DynamoDB", "Table", "object", "." ]
a42f9873559df9188c40c34fdffb079d78eaa3fe
https://github.com/FocusLab/Albertson/blob/a42f9873559df9188c40c34fdffb079d78eaa3fe/albertson/base.py#L119-L137
valid
FocusLab/Albertson
albertson/base.py
CounterPool.create_item
def create_item(self, hash_key, start=0, extra_attrs=None): ''' Hook point for overriding how the CounterPool creates a DynamoDB item for a given counter when an existing item can't be found. ''' table = self.get_table() now = datetime.utcnow().replace(microsecond=0).isoformat() attrs = { 'created_on': now, 'modified_on': now, 'count': start, } if extra_attrs: attrs.update(extra_attrs) item = table.new_item( hash_key=hash_key, attrs=attrs, ) return item
python
def create_item(self, hash_key, start=0, extra_attrs=None): ''' Hook point for overriding how the CounterPool creates a DynamoDB item for a given counter when an existing item can't be found. ''' table = self.get_table() now = datetime.utcnow().replace(microsecond=0).isoformat() attrs = { 'created_on': now, 'modified_on': now, 'count': start, } if extra_attrs: attrs.update(extra_attrs) item = table.new_item( hash_key=hash_key, attrs=attrs, ) return item
[ "def", "create_item", "(", "self", ",", "hash_key", ",", "start", "=", "0", ",", "extra_attrs", "=", "None", ")", ":", "table", "=", "self", ".", "get_table", "(", ")", "now", "=", "datetime", ".", "utcnow", "(", ")", ".", "replace", "(", "microsecond", "=", "0", ")", ".", "isoformat", "(", ")", "attrs", "=", "{", "'created_on'", ":", "now", ",", "'modified_on'", ":", "now", ",", "'count'", ":", "start", ",", "}", "if", "extra_attrs", ":", "attrs", ".", "update", "(", "extra_attrs", ")", "item", "=", "table", ".", "new_item", "(", "hash_key", "=", "hash_key", ",", "attrs", "=", "attrs", ",", ")", "return", "item" ]
Hook point for overriding how the CounterPool creates a DynamoDB item for a given counter when an existing item can't be found.
[ "Hook", "point", "for", "overriding", "how", "the", "CouterPool", "creates", "a", "DynamoDB", "item", "for", "a", "given", "counter", "when", "an", "existing", "item", "can", "t", "be", "found", "." ]
a42f9873559df9188c40c34fdffb079d78eaa3fe
https://github.com/FocusLab/Albertson/blob/a42f9873559df9188c40c34fdffb079d78eaa3fe/albertson/base.py#L139-L160
valid
FocusLab/Albertson
albertson/base.py
CounterPool.get_item
def get_item(self, hash_key, start=0, extra_attrs=None): ''' Hook point for overriding how the CounterPool fetches a DynamoDB item for a given counter. ''' table = self.get_table() try: item = table.get_item(hash_key=hash_key) except DynamoDBKeyNotFoundError: item = None if item is None: item = self.create_item( hash_key=hash_key, start=start, extra_attrs=extra_attrs, ) return item
python
def get_item(self, hash_key, start=0, extra_attrs=None): ''' Hook point for overriding how the CounterPool fetches a DynamoDB item for a given counter. ''' table = self.get_table() try: item = table.get_item(hash_key=hash_key) except DynamoDBKeyNotFoundError: item = None if item is None: item = self.create_item( hash_key=hash_key, start=start, extra_attrs=extra_attrs, ) return item
[ "def", "get_item", "(", "self", ",", "hash_key", ",", "start", "=", "0", ",", "extra_attrs", "=", "None", ")", ":", "table", "=", "self", ".", "get_table", "(", ")", "try", ":", "item", "=", "table", ".", "get_item", "(", "hash_key", "=", "hash_key", ")", "except", "DynamoDBKeyNotFoundError", ":", "item", "=", "None", "if", "item", "is", "None", ":", "item", "=", "self", ".", "create_item", "(", "hash_key", "=", "hash_key", ",", "start", "=", "start", ",", "extra_attrs", "=", "extra_attrs", ",", ")", "return", "item" ]
Hook point for overriding how the CounterPool fetches a DynamoDB item for a given counter.
[ "Hook", "point", "for", "overriding", "how", "the", "CouterPool", "fetches", "a", "DynamoDB", "item", "for", "a", "given", "counter", "." ]
a42f9873559df9188c40c34fdffb079d78eaa3fe
https://github.com/FocusLab/Albertson/blob/a42f9873559df9188c40c34fdffb079d78eaa3fe/albertson/base.py#L162-L181
valid
FocusLab/Albertson
albertson/base.py
CounterPool.get_counter
def get_counter(self, name, start=0): ''' Gets the DynamoDB item behind a counter and ties it to a Counter instance. ''' item = self.get_item(hash_key=name, start=start) counter = Counter(dynamo_item=item, pool=self) return counter
python
def get_counter(self, name, start=0): ''' Gets the DynamoDB item behind a counter and ties it to a Counter instance. ''' item = self.get_item(hash_key=name, start=start) counter = Counter(dynamo_item=item, pool=self) return counter
[ "def", "get_counter", "(", "self", ",", "name", ",", "start", "=", "0", ")", ":", "item", "=", "self", ".", "get_item", "(", "hash_key", "=", "name", ",", "start", "=", "start", ")", "counter", "=", "Counter", "(", "dynamo_item", "=", "item", ",", "pool", "=", "self", ")", "return", "counter" ]
Gets the DynamoDB item behind a counter and ties it to a Counter instance.
[ "Gets", "the", "DynamoDB", "item", "behind", "a", "counter", "and", "ties", "it", "to", "a", "Counter", "instace", "." ]
a42f9873559df9188c40c34fdffb079d78eaa3fe
https://github.com/FocusLab/Albertson/blob/a42f9873559df9188c40c34fdffb079d78eaa3fe/albertson/base.py#L183-L191
valid
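An end-to-end sketch tying the hooks together; the constructor arguments are assumptions based on the get_conn signature above, and the credentials are placeholders:

pool = CounterPool(aws_access_key='AKIA...', aws_secret_key='...')
counter = pool.get_counter('page-views', start=0)   # fetches or lazily creates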
pip-services/pip-services-commons-python
pip_services_commons/refer/Descriptor.py
Descriptor.exact_match
def exact_match(self, descriptor): """ Matches this descriptor to another descriptor exactly. Args: descriptor: another descriptor to match this one. Returns: True if descriptors match or False otherwise. """ return self._exact_match_field(self._group, descriptor.get_group()) \ and self._exact_match_field(self._type, descriptor.get_type()) \ and self._exact_match_field(self._kind, descriptor.get_kind()) \ and self._exact_match_field(self._name, descriptor.get_name()) \ and self._exact_match_field(self._version, descriptor.get_version())
python
def exact_match(self, descriptor): """ Matches this descriptor to another descriptor exactly. Args: descriptor: another descriptor to match this one. Returns: True if descriptors match or False otherwise. """ return self._exact_match_field(self._group, descriptor.get_group()) \ and self._exact_match_field(self._type, descriptor.get_type()) \ and self._exact_match_field(self._kind, descriptor.get_kind()) \ and self._exact_match_field(self._name, descriptor.get_name()) \ and self._exact_match_field(self._version, descriptor.get_version())
[ "def", "exact_match", "(", "self", ",", "descriptor", ")", ":", "return", "self", ".", "_exact_match_field", "(", "self", ".", "_group", ",", "descriptor", ".", "get_group", "(", ")", ")", "and", "self", ".", "_exact_atch_field", "(", "self", ".", "_type", ",", "descriptor", ".", "get_type", "(", ")", ")", "and", "self", ".", "_exact_match_field", "(", "self", ".", "_kind", ",", "descriptor", ".", "get_kind", "(", ")", ")", "and", "self", ".", "_exact_match_field", "(", "self", ".", "_name", ",", "descriptor", ".", "get_name", "(", ")", ")", "and", "self", ".", "_exact_match_field", "(", "self", ".", "_version", ",", "descriptor", ".", "get_version", "(", ")", ")" ]
Matches this descriptor to another descriptor exactly. Args: descriptor: another descriptor to match this one. Returns: True if descriptors match or False otherwise.
[ "Matches", "this", "descriptor", "to", "another", "descriptor", "exactly", ".", "Args", ":", "descriptor", ":", "another", "descriptor", "to", "match", "this", "one", ".", "Returns", ":", "True", "if", "descriptors", "match", "or", "False", "otherwise", "." ]
2205b18c45c60372966c62c1f23ac4fbc31e11b3
https://github.com/pip-services/pip-services-commons-python/blob/2205b18c45c60372966c62c1f23ac4fbc31e11b3/pip_services_commons/refer/Descriptor.py#L117-L130
valid
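Hypothetical descriptors for a quick check; the positional constructor order is assumed to mirror the five fields compared above (group, type, kind, name, version):

d1 = Descriptor('pip-services', 'logger', 'console', 'default', '1.0')
d2 = Descriptor('pip-services', 'logger', 'console', 'default', '1.0')
assert d1.exact_match(d2)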
suryakencana007/baka_model
baka_model/model/meta/orm.py
many_to_one
def many_to_one(clsname, **kw): """Use an event to build a many-to-one relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship to the remote table. """ @declared_attr def m2o(cls): cls._references((cls.__name__, clsname)) return relationship(clsname, **kw) return m2o
python
def many_to_one(clsname, **kw): """Use an event to build a many-to-one relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship to the remote table. """ @declared_attr def m2o(cls): cls._references((cls.__name__, clsname)) return relationship(clsname, **kw) return m2o
[ "def", "many_to_one", "(", "clsname", ",", "*", "*", "kw", ")", ":", "@", "declared_attr", "def", "m2o", "(", "cls", ")", ":", "cls", ".", "_references", "(", "(", "cls", ".", "__name__", ",", "clsname", ")", ")", "return", "relationship", "(", "clsname", ",", "*", "*", "kw", ")", "return", "m2o" ]
Use an event to build a many-to-one relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship to the remote table.
[ "Use", "an", "event", "to", "build", "a", "many", "-", "to", "-", "one", "relationship", "on", "a", "class", "." ]
915c2da9920e973302f5764ae63799acd5ecf0b7
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/orm.py#L23-L34
valid
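A declarative sketch; Base is assumed to be a declarative base that mixes in the package's References helper, so cls._references is available when the event fires:

import sqlalchemy as sa

class Order(Base):                      # Base is an assumption, not shown here
    __tablename__ = 'order'
    id = sa.Column(sa.Integer, primary_key=True)
    customer = many_to_one('Customer')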
suryakencana007/baka_model
baka_model/model/meta/orm.py
one_to_many
def one_to_many(clsname, **kw): """Use an event to build a one-to-many relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship from the remote table. """ @declared_attr def o2m(cls): cls._references((clsname, cls.__name__)) return relationship(clsname, **kw) return o2m
python
def one_to_many(clsname, **kw): """Use an event to build a one-to-many relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship from the remote table. """ @declared_attr def o2m(cls): cls._references((clsname, cls.__name__)) return relationship(clsname, **kw) return o2m
[ "def", "one_to_many", "(", "clsname", ",", "*", "*", "kw", ")", ":", "@", "declared_attr", "def", "o2m", "(", "cls", ")", ":", "cls", ".", "_references", "(", "(", "clsname", ",", "cls", ".", "__name__", ")", ")", "return", "relationship", "(", "clsname", ",", "*", "*", "kw", ")", "return", "o2m" ]
Use an event to build a one-to-many relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship from the remote table.
[ "Use", "an", "event", "to", "build", "a", "one", "-", "to", "-", "many", "relationship", "on", "a", "class", "." ]
915c2da9920e973302f5764ae63799acd5ecf0b7
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/orm.py#L37-L48
valid
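The inverse wiring, under the same Base/References assumptions as the many_to_one sketch above:

class Customer(Base):
    __tablename__ = 'customer'
    id = sa.Column(sa.Integer, primary_key=True)
    orders = one_to_many('Order')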
lightstrike/djeff
djeff/djeff.py
djeffify_string
def djeffify_string(string_to_djeff): """ Djeffifies string_to_djeff """ string_to_djeff = re.sub(r'^(?=[jg])', 'd', string_to_djeff, flags=re.IGNORECASE) # first string_to_djeff = re.sub(r'[ ](?=[jg])', ' d', string_to_djeff, flags=re.IGNORECASE) # spaces string_to_djeff = re.sub(r'[\n](?=[jg])', '\nd', string_to_djeff, flags=re.IGNORECASE) # \n return string_to_djeff
python
def djeffify_string(string_to_djeff): """ Djeffifies string_to_djeff """ string_to_djeff = re.sub(r'^(?=[jg])', 'd', string_to_djeff, flags=re.IGNORECASE) # first string_to_djeff = re.sub(r'[ ](?=[jg])', ' d', string_to_djeff, flags=re.IGNORECASE) # spaces string_to_djeff = re.sub(r'[\n](?=[jg])', '\nd', string_to_djeff, flags=re.IGNORECASE) # \n return string_to_djeff
[ "def", "djeffify_string", "(", "string_to_djeff", ")", ":", "string_to_djeff", "=", "re", ".", "sub", "(", "r'^(?=[jg])'", ",", "'d'", ",", "string_to_djeff", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "# first", "string_to_djeff", "=", "re", ".", "sub", "(", "r'[ ](?=[jg])'", ",", "' d'", ",", "string_to_djeff", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "# spaces", "string_to_djeff", "=", "re", ".", "sub", "(", "r'[\\n](?=[jg])'", ",", "'\\nd'", ",", "string_to_djeff", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "# \\n", "return", "string_to_djeff" ]
Djeffifies string_to_djeff
[ "Djeffifies", "string_to_djeff" ]
806a7fe1c9ebbe144bc8afcff55deb5616e372b4
https://github.com/lightstrike/djeff/blob/806a7fe1c9ebbe144bc8afcff55deb5616e372b4/djeff/djeff.py#L22-L29
valid
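The three substitutions in action: start of string, after a space, and after a newline.

assert djeffify_string('jeff goes jogging') == 'djeff dgoes djogging'
assert djeffify_string('hi\njeff') == 'hi\ndjeff'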
lightstrike/djeff
djeff/djeff.py
DjeffParser.handle_data
def handle_data(self, data): """ Djeffify data between tags """ if data.strip(): data = djeffify_string(data) self.djhtml += data
python
def handle_data(self, data): """ Djeffify data between tags """ if data.strip(): data = djeffify_string(data) self.djhtml += data
[ "def", "handle_data", "(", "self", ",", "data", ")", ":", "if", "data", ".", "strip", "(", ")", ":", "data", "=", "djeffify_string", "(", "data", ")", "self", ".", "djhtml", "+=", "data" ]
Djeffify data between tags
[ "Djeffify", "data", "between", "tags" ]
806a7fe1c9ebbe144bc8afcff55deb5616e372b4
https://github.com/lightstrike/djeff/blob/806a7fe1c9ebbe144bc8afcff55deb5616e372b4/djeff/djeff.py#L71-L77
valid
suryakencana007/baka_model
baka_model/model/meta/schema.py
References._reference_table
def _reference_table(cls, ref_table): """Create a foreign key reference from the local class to the given remote table. Adds column references to the declarative class and adds a ForeignKeyConstraint. """ # create pairs of (Foreign key column, primary key column) cols = [(sa.Column(), refcol) for refcol in ref_table.primary_key] # set "tablename_colname = Foreign key Column" on the local class for col, refcol in cols: setattr(cls, "%s_%s" % (ref_table.name, refcol.name), col) # add a ForeignKeyConstraint([local columns], [remote columns]) cls.__table__.append_constraint(sa.ForeignKeyConstraint(*zip(*cols)))
python
def _reference_table(cls, ref_table): """Create a foreign key reference from the local class to the given remote table. Adds column references to the declarative class and adds a ForeignKeyConstraint. """ # create pairs of (Foreign key column, primary key column) cols = [(sa.Column(), refcol) for refcol in ref_table.primary_key] # set "tablename_colname = Foreign key Column" on the local class for col, refcol in cols: setattr(cls, "%s_%s" % (ref_table.name, refcol.name), col) # add a ForeignKeyConstraint([local columns], [remote columns]) cls.__table__.append_constraint(sa.ForeignKeyConstraint(*zip(*cols)))
[ "def", "_reference_table", "(", "cls", ",", "ref_table", ")", ":", "# create pairs of (Foreign key column, primary key column)", "cols", "=", "[", "(", "sa", ".", "Column", "(", ")", ",", "refcol", ")", "for", "refcol", "in", "ref_table", ".", "primary_key", "]", "# set \"tablename_colname = Foreign key Column\" on the local class", "for", "col", ",", "refcol", "in", "cols", ":", "setattr", "(", "cls", ",", "\"%s_%s\"", "%", "(", "ref_table", ".", "name", ",", "refcol", ".", "name", ")", ",", "col", ")", "# add a ForeignKeyConstraint([local columns], [remote columns])", "cls", ".", "__table__", ".", "append_constraint", "(", "sa", ".", "ForeignKeyConstraint", "(", "*", "zip", "(", "*", "cols", ")", ")", ")" ]
Create a foreign key reference from the local class to the given remote table. Adds column references to the declarative class and adds a ForeignKeyConstraint.
[ "Create", "a", "foreign", "key", "reference", "from", "the", "local", "class", "to", "the", "given", "remote", "table", "." ]
915c2da9920e973302f5764ae63799acd5ecf0b7
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/schema.py#L42-L58
valid
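A hedged sketch of the tables this helper wires together; the declarative models below are hypothetical, not from the source repo:

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Parent(Base):
    __tablename__ = 'parent'
    id = sa.Column(sa.Integer, primary_key=True)

class Child(Base):
    __tablename__ = 'child'
    id = sa.Column(sa.Integer, primary_key=True)

# Calling the helper as Child._reference_table(Parent.__table__) (assuming Child
# mixes in References) would attach a 'parent_id' attribute to Child and append
# ForeignKeyConstraint(['parent_id'], ['parent.id']) to its table; the typeless
# sa.Column() should pick up its type from the referenced primary key column
# when the foreign key is resolved.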
suryakencana007/baka_model
baka_model/model/meta/schema.py
JsonSerializableMixin.__try_to_json
def __try_to_json(self, request, attr):
        """
        Try to run __json__ on the given object.
        Raise TypeError if __json__ is missing

        :param request: Pyramid Request object
        :type request: <Request>
        :param obj: Object to JSONify
        :type obj: any object that has __json__ method
        :exception: TypeError
        """
        # check for __json__ method and try to JSONify
        if hasattr(attr, '__json__'):
            return attr.__json__(request)

        # raise error otherwise
        raise TypeError('__json__ method missing on %s' % str(attr))
python
def __try_to_json(self, request, attr):
        """
        Try to run __json__ on the given object.
        Raise TypeError if __json__ is missing

        :param request: Pyramid Request object
        :type request: <Request>
        :param obj: Object to JSONify
        :type obj: any object that has __json__ method
        :exception: TypeError
        """
        # check for __json__ method and try to JSONify
        if hasattr(attr, '__json__'):
            return attr.__json__(request)

        # raise error otherwise
        raise TypeError('__json__ method missing on %s' % str(attr))
[ "def", "__try_to_json", "(", "self", ",", "request", ",", "attr", ")", ":", "# check for __json__ method and try to JSONify", "if", "hasattr", "(", "attr", ",", "'__json__'", ")", ":", "return", "attr", ".", "__json__", "(", "request", ")", "# raise error otherwise", "raise", "TypeError", "(", "'__json__ method missing on %s'", "%", "str", "(", "attr", ")", ")" ]
Try to run __json__ on the given object. Raise TypeError if __json__ is missing :param request: Pyramid Request object :type request: <Request> :param obj: Object to JSONify :type obj: any object that has __json__ method :exception: TypeError
[ "Try", "to", "run", "__json__", "on", "the", "given", "object", ".", "Raise", "TypeError", "is", "__json__", "is", "missing" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/schema.py#L147-L164
valid
Rikanishu/static-bundle
static_bundle/utils.py
prepare_path
def prepare_path(path): """ Path join helper method Join paths if list passed :type path: str|unicode|list :rtype: str|unicode """ if type(path) == list: return os.path.join(*path) return path
python
def prepare_path(path): """ Path join helper method Join paths if list passed :type path: str|unicode|list :rtype: str|unicode """ if type(path) == list: return os.path.join(*path) return path
[ "def", "prepare_path", "(", "path", ")", ":", "if", "type", "(", "path", ")", "==", "list", ":", "return", "os", ".", "path", ".", "join", "(", "*", "path", ")", "return", "path" ]
Path join helper method Join paths if list passed :type path: str|unicode|list :rtype: str|unicode
[ "Path", "join", "helper", "method", "Join", "paths", "if", "list", "passed" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L8-L18
valid
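A minimal usage sketch, assuming the module above is importable:

from static_bundle.utils import prepare_path

print(prepare_path(['static', 'js', 'app.js']))  # -> 'static/js/app.js' on POSIX
print(prepare_path('static/js/app.js'))          # non-list input is returned unchanged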
Rikanishu/static-bundle
static_bundle/utils.py
read_from_file
def read_from_file(file_path, encoding="utf-8"): """ Read helper method :type file_path: str|unicode :type encoding: str|unicode :rtype: str|unicode """ with codecs.open(file_path, "r", encoding) as f: return f.read()
python
def read_from_file(file_path, encoding="utf-8"): """ Read helper method :type file_path: str|unicode :type encoding: str|unicode :rtype: str|unicode """ with codecs.open(file_path, "r", encoding) as f: return f.read()
[ "def", "read_from_file", "(", "file_path", ",", "encoding", "=", "\"utf-8\"", ")", ":", "with", "codecs", ".", "open", "(", "file_path", ",", "\"r\"", ",", "encoding", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
Read helper method :type file_path: str|unicode :type encoding: str|unicode :rtype: str|unicode
[ "Read", "helper", "method" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L21-L30
valid
Rikanishu/static-bundle
static_bundle/utils.py
write_to_file
def write_to_file(file_path, contents, encoding="utf-8"): """ Write helper method :type file_path: str|unicode :type contents: str|unicode :type encoding: str|unicode """ with codecs.open(file_path, "w", encoding) as f: f.write(contents)
python
def write_to_file(file_path, contents, encoding="utf-8"): """ Write helper method :type file_path: str|unicode :type contents: str|unicode :type encoding: str|unicode """ with codecs.open(file_path, "w", encoding) as f: f.write(contents)
[ "def", "write_to_file", "(", "file_path", ",", "contents", ",", "encoding", "=", "\"utf-8\"", ")", ":", "with", "codecs", ".", "open", "(", "file_path", ",", "\"w\"", ",", "encoding", ")", "as", "f", ":", "f", ".", "write", "(", "contents", ")" ]
Write helper method :type file_path: str|unicode :type contents: str|unicode :type encoding: str|unicode
[ "Write", "helper", "method" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L33-L42
valid
Rikanishu/static-bundle
static_bundle/utils.py
copy_file
def copy_file(src, dest): """ Copy file helper method :type src: str|unicode :type dest: str|unicode """ dir_path = os.path.dirname(dest) if not os.path.exists(dir_path): os.makedirs(dir_path) shutil.copy2(src, dest)
python
def copy_file(src, dest): """ Copy file helper method :type src: str|unicode :type dest: str|unicode """ dir_path = os.path.dirname(dest) if not os.path.exists(dir_path): os.makedirs(dir_path) shutil.copy2(src, dest)
[ "def", "copy_file", "(", "src", ",", "dest", ")", ":", "dir_path", "=", "os", ".", "path", ".", "dirname", "(", "dest", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dir_path", ")", ":", "os", ".", "makedirs", "(", "dir_path", ")", "shutil", ".", "copy2", "(", "src", ",", "dest", ")" ]
Copy file helper method :type src: str|unicode :type dest: str|unicode
[ "Copy", "file", "helper", "method" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L45-L55
valid
Rikanishu/static-bundle
static_bundle/utils.py
get_path_extension
def get_path_extension(path):
    """
    Split file name and extension

    :type path: str|unicode
    :rtype: str|unicode
    """
    file_path, file_ext = os.path.splitext(path)
    return file_ext.lstrip('.')
python
def get_path_extension(path):
    """
    Split file name and extension

    :type path: str|unicode
    :rtype: str|unicode
    """
    file_path, file_ext = os.path.splitext(path)
    return file_ext.lstrip('.')
[ "def", "get_path_extension", "(", "path", ")", ":", "file_path", ",", "file_ext", "=", "os", ".", "path", ".", "splitext", "(", "path", ")", "return", "file_ext", ".", "lstrip", "(", "'.'", ")" ]
Split file name and extension :type path: str|unicode :rtype: str|unicode
[ "Split", "file", "name", "and", "extension" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L58-L66
valid
Rikanishu/static-bundle
static_bundle/utils.py
split_path
def split_path(path): """ Helper method for absolute and relative paths resolution Split passed path and return each directory parts example: "/usr/share/dir" return: ["usr", "share", "dir"] @type path: one of (unicode, str) @rtype: list """ result_parts = [] #todo: check loops while path != "/": parts = os.path.split(path) if parts[1] == path: result_parts.insert(0, parts[1]) break elif parts[0] == path: result_parts.insert(0, parts[0]) break else: path = parts[0] result_parts.insert(0, parts[1]) return result_parts
python
def split_path(path): """ Helper method for absolute and relative paths resolution Split passed path and return each directory parts example: "/usr/share/dir" return: ["usr", "share", "dir"] @type path: one of (unicode, str) @rtype: list """ result_parts = [] #todo: check loops while path != "/": parts = os.path.split(path) if parts[1] == path: result_parts.insert(0, parts[1]) break elif parts[0] == path: result_parts.insert(0, parts[0]) break else: path = parts[0] result_parts.insert(0, parts[1]) return result_parts
[ "def", "split_path", "(", "path", ")", ":", "result_parts", "=", "[", "]", "#todo: check loops", "while", "path", "!=", "\"/\"", ":", "parts", "=", "os", ".", "path", ".", "split", "(", "path", ")", "if", "parts", "[", "1", "]", "==", "path", ":", "result_parts", ".", "insert", "(", "0", ",", "parts", "[", "1", "]", ")", "break", "elif", "parts", "[", "0", "]", "==", "path", ":", "result_parts", ".", "insert", "(", "0", ",", "parts", "[", "0", "]", ")", "break", "else", ":", "path", "=", "parts", "[", "0", "]", "result_parts", ".", "insert", "(", "0", ",", "parts", "[", "1", "]", ")", "return", "result_parts" ]
Helper method for absolute and relative paths resolution Split passed path and return each directory parts example: "/usr/share/dir" return: ["usr", "share", "dir"] @type path: one of (unicode, str) @rtype: list
[ "Helper", "method", "for", "absolute", "and", "relative", "paths", "resolution", "Split", "passed", "path", "and", "return", "each", "directory", "parts" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L69-L93
valid
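A standalone sketch of the decomposition split_path performs, assuming the import path shown in the record:

from static_bundle.utils import split_path

print(split_path('/usr/share/dir'))  # -> ['usr', 'share', 'dir']
print(split_path('relative/sub'))    # -> ['relative', 'sub']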
agsimeonov/cbexchange
cbexchange/client.py
RESTClient._create_api_uri
def _create_api_uri(self, *parts): """Creates fully qualified endpoint URIs. :param parts: the string parts that form the request URI """ return urljoin(self.API_URI, '/'.join(map(quote, parts)))
python
def _create_api_uri(self, *parts): """Creates fully qualified endpoint URIs. :param parts: the string parts that form the request URI """ return urljoin(self.API_URI, '/'.join(map(quote, parts)))
[ "def", "_create_api_uri", "(", "self", ",", "*", "parts", ")", ":", "return", "urljoin", "(", "self", ".", "API_URI", ",", "'/'", ".", "join", "(", "map", "(", "quote", ",", "parts", ")", ")", ")" ]
Creates fully qualified endpoint URIs. :param parts: the string parts that form the request URI
[ "Creates", "fully", "qualified", "endpoint", "URIs", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L31-L37
valid
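The construction reduces to stdlib urljoin plus per-part quoting. A standalone equivalent in Python 3 spelling; the base URI here is only an example, not the client's actual API_URI:

from urllib.parse import quote, urljoin

API_URI = 'https://api.example.com/'  # hypothetical base
parts = ('products', 'BTC-USD', 'book')
print(urljoin(API_URI, '/'.join(map(quote, parts))))
# -> https://api.example.com/products/BTC-USD/book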
agsimeonov/cbexchange
cbexchange/client.py
RESTClient._format_iso_time
def _format_iso_time(self, time):
        """Makes sure we have proper ISO 8601 time.

        :param time: either a string already in ISO 8601 format or datetime.datetime
        :returns: ISO 8601 time
        :rtype: str

        """
        if isinstance(time, str):
            return time
        elif isinstance(time, datetime):
            return time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        else:
            return None
python
def _format_iso_time(self, time):
        """Makes sure we have proper ISO 8601 time.

        :param time: either a string already in ISO 8601 format or datetime.datetime
        :returns: ISO 8601 time
        :rtype: str

        """
        if isinstance(time, str):
            return time
        elif isinstance(time, datetime):
            return time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        else:
            return None
[ "def", "_format_iso_time", "(", "self", ",", "time", ")", ":", "if", "isinstance", "(", "time", ",", "str", ")", ":", "return", "time", "elif", "isinstance", "(", "time", ",", "datetime", ")", ":", "return", "time", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%S.%fZ'", ")", "else", ":", "return", "None" ]
Makes sure we have proper ISO 8601 time. :param time: either a string already in ISO 8601 format or datetime.datetime :returns: ISO 8601 time :rtype: str
[ "Makes", "sure", "we", "have", "proper", "ISO", "8601", "time", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L39-L52
valid
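A standalone check of the formatting rule used above:

from datetime import datetime

fmt = '%Y-%m-%dT%H:%M:%S.%fZ'
print(datetime(2015, 3, 1, 12, 30, 15, 250000).strftime(fmt))
# -> 2015-03-01T12:30:15.250000Z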
agsimeonov/cbexchange
cbexchange/client.py
RESTClient._handle_response
def _handle_response(self, response): """Returns the given response or raises an APIError for non-2xx responses. :param requests.Response response: HTTP response :returns: requested data :rtype: requests.Response :raises APIError: for non-2xx responses """ if not str(response.status_code).startswith('2'): raise get_api_error(response) return response
python
def _handle_response(self, response): """Returns the given response or raises an APIError for non-2xx responses. :param requests.Response response: HTTP response :returns: requested data :rtype: requests.Response :raises APIError: for non-2xx responses """ if not str(response.status_code).startswith('2'): raise get_api_error(response) return response
[ "def", "_handle_response", "(", "self", ",", "response", ")", ":", "if", "not", "str", "(", "response", ".", "status_code", ")", ".", "startswith", "(", "'2'", ")", ":", "raise", "get_api_error", "(", "response", ")", "return", "response" ]
Returns the given response or raises an APIError for non-2xx responses. :param requests.Response response: HTTP response :returns: requested data :rtype: requests.Response :raises APIError: for non-2xx responses
[ "Returns", "the", "given", "response", "or", "raises", "an", "APIError", "for", "non", "-", "2xx", "responses", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L54-L65
valid
agsimeonov/cbexchange
cbexchange/client.py
PaginationClient._check_next
def _check_next(self): """Checks if a next message is possible. :returns: True if a next message is possible, otherwise False :rtype: bool """ if self.is_initial: return True if self.before: if self.before_cursor: return True else: return False else: if self.after_cursor: return True else: return False
python
def _check_next(self): """Checks if a next message is possible. :returns: True if a next message is possible, otherwise False :rtype: bool """ if self.is_initial: return True if self.before: if self.before_cursor: return True else: return False else: if self.after_cursor: return True else: return False
[ "def", "_check_next", "(", "self", ")", ":", "if", "self", ".", "is_initial", ":", "return", "True", "if", "self", ".", "before", ":", "if", "self", ".", "before_cursor", ":", "return", "True", "else", ":", "return", "False", "else", ":", "if", "self", ".", "after_cursor", ":", "return", "True", "else", ":", "return", "False" ]
Checks if a next message is possible. :returns: True if a next message is possible, otherwise False :rtype: bool
[ "Checks", "if", "a", "next", "message", "is", "possible", "." ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L158-L176
valid
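The branch ladder collapses to a single boolean expression; an equivalent condensed form, shown only for illustration with the instance attributes passed as parameters:

def check_next(is_initial, before, before_cursor, after_cursor):
    # True on the first call, then gated by whichever cursor direction is active
    return bool(is_initial or (before_cursor if before else after_cursor))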
chrisgilmerproj/pycolors2
colors.py
Colors._wrap_color
def _wrap_color(self, code, text, format=None, style=None):
        """ Colors text with code and given format """
        color = None
        if code[:3] == self.bg.PREFIX:
            color = self.bg.COLORS.get(code, None)
        if not color:
            color = self.fg.COLORS.get(code, None)
        if not color:
            raise Exception('Color code not found')

        if format and format not in self.formats:
            raise Exception('Color format not found')

        fmt = "0;"
        if format == 'bold':
            fmt = "1;"
        elif format == 'underline':
            fmt = "4;"

        # Manage the format
        parts = color.split('[')
        color = '{0}[{1}{2}'.format(parts[0], fmt, parts[1])

        if self.has_colors and self.colors_enabled:
            # Set brightness
            st = ''
            if style:
                st = self.st.COLORS.get(style, '')
            return "{0}{1}{2}{3}".format(st, color, text, self.st.COLORS['reset_all'])
        else:
            return text
python
def _wrap_color(self, code, text, format=None, style=None):
        """ Colors text with code and given format """
        color = None
        if code[:3] == self.bg.PREFIX:
            color = self.bg.COLORS.get(code, None)
        if not color:
            color = self.fg.COLORS.get(code, None)
        if not color:
            raise Exception('Color code not found')

        if format and format not in self.formats:
            raise Exception('Color format not found')

        fmt = "0;"
        if format == 'bold':
            fmt = "1;"
        elif format == 'underline':
            fmt = "4;"

        # Manage the format
        parts = color.split('[')
        color = '{0}[{1}{2}'.format(parts[0], fmt, parts[1])

        if self.has_colors and self.colors_enabled:
            # Set brightness
            st = ''
            if style:
                st = self.st.COLORS.get(style, '')
            return "{0}{1}{2}{3}".format(st, color, text, self.st.COLORS['reset_all'])
        else:
            return text
[ "def", "_wrap_color", "(", "self", ",", "code", ",", "text", ",", "format", "=", "None", ",", "style", "=", "None", ")", ":", "color", "=", "None", "if", "code", "[", ":", "3", "]", "==", "self", ".", "bg", ".", "PREFIX", ":", "color", "=", "self", ".", "bg", ".", "COLORS", ".", "get", "(", "code", ",", "None", ")", "if", "not", "color", ":", "color", "=", "self", ".", "fg", ".", "COLORS", ".", "get", "(", "code", ",", "None", ")", "if", "not", "color", ":", "raise", "Exception", "(", "'Color code not found'", ")", "if", "format", "and", "format", "not", "in", "self", ".", "formats", ":", "raise", "Exception", "(", "'Color format not found'", ")", "fmt", "=", "\"0;\"", "if", "format", "==", "'bold'", ":", "fmt", "=", "\"1;\"", "elif", "format", "==", "'underline'", ":", "fmt", "=", "\"4;\"", "# Manage the format", "parts", "=", "color", ".", "split", "(", "'['", ")", "color", "=", "'{0}[{1}{2}'", ".", "format", "(", "parts", "[", "0", "]", ",", "fmt", ",", "parts", "[", "1", "]", ")", "if", "self", ".", "has_colors", "and", "self", ".", "colors_enabled", ":", "# Set brightness", "st", "=", "''", "if", "style", ":", "st", "=", "self", ".", "st", ".", "COLORS", ".", "get", "(", "style", ",", "''", ")", "return", "\"{0}{1}{2}{3}\"", ".", "format", "(", "st", ",", "color", ",", "text", ",", "self", ".", "st", ".", "COLORS", "[", "'reset_all'", "]", ")", "else", ":", "return", "text" ]
Colors text with code and given format
[ "Colors", "text", "with", "code", "and", "given", "format" ]
20e447005b70d29fc9f3852bcd526fc6fb337ea3
https://github.com/chrisgilmerproj/pycolors2/blob/20e447005b70d29fc9f3852bcd526fc6fb337ea3/colors.py#L122-L153
valid
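What the string surgery above produces, traced with example ANSI codes; the color value and format are assumptions, not the module's actual constants:

color = '\033[31m'   # assumed foreground-red table entry
fmt = '1;'           # the 'bold' format prefix
head, tail = color.split('[')
wrapped = '{0}[{1}{2}'.format(head, fmt, tail) + 'text' + '\033[0m'
print(repr(wrapped))  # -> '\x1b[1;31mtext\x1b[0m'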
ibelie/typy
typy/google/protobuf/symbol_database.py
SymbolDatabase.RegisterMessage
def RegisterMessage(self, message): """Registers the given message type in the local database. Args: message: a message.Message, to be registered. Returns: The provided message. """ desc = message.DESCRIPTOR self._symbols[desc.full_name] = message if desc.file.name not in self._symbols_by_file: self._symbols_by_file[desc.file.name] = {} self._symbols_by_file[desc.file.name][desc.full_name] = message self.pool.AddDescriptor(desc) return message
python
def RegisterMessage(self, message): """Registers the given message type in the local database. Args: message: a message.Message, to be registered. Returns: The provided message. """ desc = message.DESCRIPTOR self._symbols[desc.full_name] = message if desc.file.name not in self._symbols_by_file: self._symbols_by_file[desc.file.name] = {} self._symbols_by_file[desc.file.name][desc.full_name] = message self.pool.AddDescriptor(desc) return message
[ "def", "RegisterMessage", "(", "self", ",", "message", ")", ":", "desc", "=", "message", ".", "DESCRIPTOR", "self", ".", "_symbols", "[", "desc", ".", "full_name", "]", "=", "message", "if", "desc", ".", "file", ".", "name", "not", "in", "self", ".", "_symbols_by_file", ":", "self", ".", "_symbols_by_file", "[", "desc", ".", "file", ".", "name", "]", "=", "{", "}", "self", ".", "_symbols_by_file", "[", "desc", ".", "file", ".", "name", "]", "[", "desc", ".", "full_name", "]", "=", "message", "self", ".", "pool", ".", "AddDescriptor", "(", "desc", ")", "return", "message" ]
Registers the given message type in the local database. Args: message: a message.Message, to be registered. Returns: The provided message.
[ "Registers", "the", "given", "message", "type", "in", "the", "local", "database", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/symbol_database.py#L82-L98
valid
ibelie/typy
typy/google/protobuf/symbol_database.py
SymbolDatabase.GetMessages
def GetMessages(self, files): """Gets all the messages from a specified file. This will find and resolve dependencies, failing if they are not registered in the symbol database. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dependent messages as well as any messages defined in the same file as a specified message. Raises: KeyError: if a file could not be found. """ result = {} for f in files: result.update(self._symbols_by_file[f]) return result
python
def GetMessages(self, files): """Gets all the messages from a specified file. This will find and resolve dependencies, failing if they are not registered in the symbol database. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dependent messages as well as any messages defined in the same file as a specified message. Raises: KeyError: if a file could not be found. """ result = {} for f in files: result.update(self._symbols_by_file[f]) return result
[ "def", "GetMessages", "(", "self", ",", "files", ")", ":", "result", "=", "{", "}", "for", "f", "in", "files", ":", "result", ".", "update", "(", "self", ".", "_symbols_by_file", "[", "f", "]", ")", "return", "result" ]
Gets all the messages from a specified file. This will find and resolve dependencies, failing if they are not registered in the symbol database. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dependent messages as well as any messages defined in the same file as a specified message. Raises: KeyError: if a file could not be found.
[ "Gets", "all", "the", "messages", "from", "a", "specified", "file", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/symbol_database.py#L156-L178
valid
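A hedged usage sketch, assuming the vendored module mirrors upstream google.protobuf.symbol_database; the message and file names are placeholders:

from typy.google.protobuf import symbol_database

db = symbol_database.Default()
# db.RegisterMessage(MyMessage)            # keyed by full name and by file name
# db.GetMessages(['my/messages.proto'])    # -> {'pkg.MyMessage': MyMessage, ...}
# db.GetMessages(['missing.proto'])        # raises KeyError if never registered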
manicmaniac/headlessvim
headlessvim/runtimepath.py
RuntimePath.insert
def insert(self, index, value): """ Insert object before index. :param int index: index to insert in :param string value: path to insert """ self._list.insert(index, value) self._sync()
python
def insert(self, index, value): """ Insert object before index. :param int index: index to insert in :param string value: path to insert """ self._list.insert(index, value) self._sync()
[ "def", "insert", "(", "self", ",", "index", ",", "value", ")", ":", "self", ".", "_list", ".", "insert", "(", "index", ",", "value", ")", "self", ".", "_sync", "(", ")" ]
Insert object before index. :param int index: index to insert in :param string value: path to insert
[ "Insert", "object", "before", "index", "." ]
3e4657f95d981ddf21fd285b7e1b9da2154f9cb9
https://github.com/manicmaniac/headlessvim/blob/3e4657f95d981ddf21fd285b7e1b9da2154f9cb9/headlessvim/runtimepath.py#L41-L49
valid
manicmaniac/headlessvim
headlessvim/runtimepath.py
RuntimePath.parse
def parse(self, string): """ Parse runtime path representation to list. :param string string: runtime path string :return: list of runtime paths :rtype: list of string """ var, eq, values = string.strip().partition('=') assert var == 'runtimepath' assert eq == '=' return values.split(',')
python
def parse(self, string): """ Parse runtime path representation to list. :param string string: runtime path string :return: list of runtime paths :rtype: list of string """ var, eq, values = string.strip().partition('=') assert var == 'runtimepath' assert eq == '=' return values.split(',')
[ "def", "parse", "(", "self", ",", "string", ")", ":", "var", ",", "eq", ",", "values", "=", "string", ".", "strip", "(", ")", ".", "partition", "(", "'='", ")", "assert", "var", "==", "'runtimepath'", "assert", "eq", "==", "'='", "return", "values", ".", "split", "(", "','", ")" ]
Parse runtime path representation to list. :param string string: runtime path string :return: list of runtime paths :rtype: list of string
[ "Parse", "runtime", "path", "representation", "to", "list", "." ]
3e4657f95d981ddf21fd285b7e1b9da2154f9cb9
https://github.com/manicmaniac/headlessvim/blob/3e4657f95d981ddf21fd285b7e1b9da2154f9cb9/headlessvim/runtimepath.py#L63-L74
valid
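The parse contract, runnable standalone:

string = 'runtimepath=~/.vim,/usr/share/vim/vimfiles,/usr/share/vim/vim74'
var, eq, values = string.strip().partition('=')
assert (var, eq) == ('runtimepath', '=')
print(values.split(','))
# -> ['~/.vim', '/usr/share/vim/vimfiles', '/usr/share/vim/vim74']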
Rikanishu/static-bundle
static_bundle/builders.py
Asset.add_bundle
def add_bundle(self, *args):
        """
        Add some bundle to build group

        :type bundle: static_bundle.bundles.AbstractBundle
        @rtype: BuildGroup
        """
        for bundle in args:
            if not self.multitype and self.has_bundles():
                first_bundle = self.get_first_bundle()
                if first_bundle.get_type() != bundle.get_type():
                    raise Exception(
                        'Different bundle types for one Asset: %s[%s -> %s]; '
                        'check types or set multitype parameter to True'
                        % (self.name, first_bundle.get_type(), bundle.get_type())
                    )
            self.bundles.append(bundle)
        return self
python
def add_bundle(self, *args):
        """
        Add some bundle to build group

        :type bundle: static_bundle.bundles.AbstractBundle
        @rtype: BuildGroup
        """
        for bundle in args:
            if not self.multitype and self.has_bundles():
                first_bundle = self.get_first_bundle()
                if first_bundle.get_type() != bundle.get_type():
                    raise Exception(
                        'Different bundle types for one Asset: %s[%s -> %s]; '
                        'check types or set multitype parameter to True'
                        % (self.name, first_bundle.get_type(), bundle.get_type())
                    )
            self.bundles.append(bundle)
        return self
[ "def", "add_bundle", "(", "self", ",", "*", "args", ")", ":", "for", "bundle", "in", "args", ":", "if", "not", "self", ".", "multitype", "and", "self", ".", "has_bundles", "(", ")", ":", "first_bundle", "=", "self", ".", "get_first_bundle", "(", ")", "if", "first_bundle", ".", "get_type", "(", ")", "!=", "bundle", ".", "get_type", "(", ")", ":", "raise", "Exception", "(", "'Different bundle types for one Asset: %s[%s -> %s]'", "'check types or set multitype parameter to True'", "%", "(", "self", ".", "name", ",", "first_bundle", ".", "get_type", "(", ")", ",", "bundle", ".", "get_type", "(", ")", ")", ")", "self", ".", "bundles", ".", "append", "(", "bundle", ")", "return", "self" ]
Add some bundle to build group :type bundle: static_bundle.bundles.AbstractBundle @rtype: BuildGroup
[ "Add", "some", "bundle", "to", "build", "group" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L44-L61
valid
Rikanishu/static-bundle
static_bundle/builders.py
Asset.collect_files
def collect_files(self):
        """
        Collect file links into ``self.files``

        :rtype: Asset
        """
        self.files = []
        for bundle in self.bundles:
            bundle.init_build(self, self.builder)
            bundle_files = bundle.prepare()
            self.files.extend(bundle_files)
        return self
python
def collect_files(self):
        """
        Collect file links into ``self.files``

        :rtype: Asset
        """
        self.files = []
        for bundle in self.bundles:
            bundle.init_build(self, self.builder)
            bundle_files = bundle.prepare()
            self.files.extend(bundle_files)
        return self
[ "def", "collect_files", "(", "self", ")", ":", "self", ".", "files", "=", "[", "]", "for", "bundle", "in", "self", ".", "bundles", ":", "bundle", ".", "init_build", "(", "self", ",", "self", ".", "builder", ")", "bundle_files", "=", "bundle", ".", "prepare", "(", ")", "self", ".", "files", ".", "extend", "(", "bundle_files", ")", "return", "self" ]
Collect file links into ``self.files`` :rtype: Asset
[ "Return", "collected", "files", "links" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L63-L74
valid
Rikanishu/static-bundle
static_bundle/builders.py
Asset.get_minifier
def get_minifier(self): """ Asset minifier Uses default minifier in bundle if it's not defined :rtype: static_bundle.minifiers.DefaultMinifier|None """ if self.minifier is None: if not self.has_bundles(): raise Exception("Unable to get default minifier, no bundles in build group") minifier = self.get_first_bundle().get_default_minifier() else: minifier = self.minifier if minifier: minifier.init_asset(self) return minifier
python
def get_minifier(self): """ Asset minifier Uses default minifier in bundle if it's not defined :rtype: static_bundle.minifiers.DefaultMinifier|None """ if self.minifier is None: if not self.has_bundles(): raise Exception("Unable to get default minifier, no bundles in build group") minifier = self.get_first_bundle().get_default_minifier() else: minifier = self.minifier if minifier: minifier.init_asset(self) return minifier
[ "def", "get_minifier", "(", "self", ")", ":", "if", "self", ".", "minifier", "is", "None", ":", "if", "not", "self", ".", "has_bundles", "(", ")", ":", "raise", "Exception", "(", "\"Unable to get default minifier, no bundles in build group\"", ")", "minifier", "=", "self", ".", "get_first_bundle", "(", ")", ".", "get_default_minifier", "(", ")", "else", ":", "minifier", "=", "self", ".", "minifier", "if", "minifier", ":", "minifier", ".", "init_asset", "(", "self", ")", "return", "minifier" ]
Asset minifier Uses default minifier in bundle if it's not defined :rtype: static_bundle.minifiers.DefaultMinifier|None
[ "Asset", "minifier", "Uses", "default", "minifier", "in", "bundle", "if", "it", "s", "not", "defined" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L76-L91
valid
Rikanishu/static-bundle
static_bundle/builders.py
StandardBuilder.create_asset
def create_asset(self, name, **kwargs): """ Create asset :type name: unicode|str :rtype: Asset """ asset = Asset(self, name, **kwargs) self.assets[name] = asset return asset
python
def create_asset(self, name, **kwargs): """ Create asset :type name: unicode|str :rtype: Asset """ asset = Asset(self, name, **kwargs) self.assets[name] = asset return asset
[ "def", "create_asset", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "asset", "=", "Asset", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", "self", ".", "assets", "[", "name", "]", "=", "asset", "return", "asset" ]
Create asset :type name: unicode|str :rtype: Asset
[ "Create", "asset" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L120-L129
valid
Rikanishu/static-bundle
static_bundle/builders.py
StandardBuilder.render_asset
def render_asset(self, name): """ Render all includes in asset by names :type name: str|unicode :rtype: str|unicode """ result = "" if self.has_asset(name): asset = self.get_asset(name) if asset.files: for f in asset.files: result += f.render_include() + "\r\n" return result
python
def render_asset(self, name): """ Render all includes in asset by names :type name: str|unicode :rtype: str|unicode """ result = "" if self.has_asset(name): asset = self.get_asset(name) if asset.files: for f in asset.files: result += f.render_include() + "\r\n" return result
[ "def", "render_asset", "(", "self", ",", "name", ")", ":", "result", "=", "\"\"", "if", "self", ".", "has_asset", "(", "name", ")", ":", "asset", "=", "self", ".", "get_asset", "(", "name", ")", "if", "asset", ".", "files", ":", "for", "f", "in", "asset", ".", "files", ":", "result", "+=", "f", ".", "render_include", "(", ")", "+", "\"\\r\\n\"", "return", "result" ]
Render all includes in asset by names :type name: str|unicode :rtype: str|unicode
[ "Render", "all", "includes", "in", "asset", "by", "names" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L157-L170
valid
Rikanishu/static-bundle
static_bundle/builders.py
StandardBuilder.collect_links
def collect_links(self, env=None): """ Return links without build files """ for asset in self.assets.values(): if asset.has_bundles(): asset.collect_files() if env is None: env = self.config.env if env == static_bundle.ENV_PRODUCTION: self._minify(emulate=True) self._add_url_prefix()
python
def collect_links(self, env=None): """ Return links without build files """ for asset in self.assets.values(): if asset.has_bundles(): asset.collect_files() if env is None: env = self.config.env if env == static_bundle.ENV_PRODUCTION: self._minify(emulate=True) self._add_url_prefix()
[ "def", "collect_links", "(", "self", ",", "env", "=", "None", ")", ":", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "asset", ".", "has_bundles", "(", ")", ":", "asset", ".", "collect_files", "(", ")", "if", "env", "is", "None", ":", "env", "=", "self", ".", "config", ".", "env", "if", "env", "==", "static_bundle", ".", "ENV_PRODUCTION", ":", "self", ".", "_minify", "(", "emulate", "=", "True", ")", "self", ".", "_add_url_prefix", "(", ")" ]
Return links without build files
[ "Return", "links", "without", "build", "files" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L178-L189
valid
Rikanishu/static-bundle
static_bundle/builders.py
StandardBuilder.make_build
def make_build(self): """ Move files / make static build """ for asset in self.assets.values(): if asset.has_bundles(): asset.collect_files() if not os.path.exists(self.config.output_dir): os.makedirs(self.config.output_dir) if self.config.copy_only_bundles: for asset in self.assets.values(): if not asset.minify and asset.files: for f in asset.files: copy_file(f.abs_path, self._get_output_path(f.abs_path)) else: copy_excludes = {} for asset in self.assets.values(): if asset.minify and asset.files: for f in asset.files: copy_excludes[f.abs_path] = f for root, dirs, files in os.walk(self.config.input_dir): for fpath in files: current_file_path = os.path.join(root, fpath) if current_file_path not in copy_excludes: copy_file(current_file_path, self._get_output_path(current_file_path)) self._minify()
python
def make_build(self): """ Move files / make static build """ for asset in self.assets.values(): if asset.has_bundles(): asset.collect_files() if not os.path.exists(self.config.output_dir): os.makedirs(self.config.output_dir) if self.config.copy_only_bundles: for asset in self.assets.values(): if not asset.minify and asset.files: for f in asset.files: copy_file(f.abs_path, self._get_output_path(f.abs_path)) else: copy_excludes = {} for asset in self.assets.values(): if asset.minify and asset.files: for f in asset.files: copy_excludes[f.abs_path] = f for root, dirs, files in os.walk(self.config.input_dir): for fpath in files: current_file_path = os.path.join(root, fpath) if current_file_path not in copy_excludes: copy_file(current_file_path, self._get_output_path(current_file_path)) self._minify()
[ "def", "make_build", "(", "self", ")", ":", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "asset", ".", "has_bundles", "(", ")", ":", "asset", ".", "collect_files", "(", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "config", ".", "output_dir", ")", ":", "os", ".", "makedirs", "(", "self", ".", "config", ".", "output_dir", ")", "if", "self", ".", "config", ".", "copy_only_bundles", ":", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "not", "asset", ".", "minify", "and", "asset", ".", "files", ":", "for", "f", "in", "asset", ".", "files", ":", "copy_file", "(", "f", ".", "abs_path", ",", "self", ".", "_get_output_path", "(", "f", ".", "abs_path", ")", ")", "else", ":", "copy_excludes", "=", "{", "}", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "asset", ".", "minify", "and", "asset", ".", "files", ":", "for", "f", "in", "asset", ".", "files", ":", "copy_excludes", "[", "f", ".", "abs_path", "]", "=", "f", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "config", ".", "input_dir", ")", ":", "for", "fpath", "in", "files", ":", "current_file_path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fpath", ")", "if", "current_file_path", "not", "in", "copy_excludes", ":", "copy_file", "(", "current_file_path", ",", "self", ".", "_get_output_path", "(", "current_file_path", ")", ")", "self", ".", "_minify", "(", ")" ]
Move files / make static build
[ "Move", "files", "/", "make", "static", "build" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L191-L216
valid
Rikanishu/static-bundle
static_bundle/builders.py
StandardBuilder.clear
def clear(self, exclude=None): """ Clear build output dir :type exclude: list|None """ exclude = exclude or [] for root, dirs, files in os.walk(self.config.output_dir): for f in files: if f not in exclude: os.unlink(os.path.join(root, f)) for d in dirs: if d not in exclude: shutil.rmtree(os.path.join(root, d))
python
def clear(self, exclude=None): """ Clear build output dir :type exclude: list|None """ exclude = exclude or [] for root, dirs, files in os.walk(self.config.output_dir): for f in files: if f not in exclude: os.unlink(os.path.join(root, f)) for d in dirs: if d not in exclude: shutil.rmtree(os.path.join(root, d))
[ "def", "clear", "(", "self", ",", "exclude", "=", "None", ")", ":", "exclude", "=", "exclude", "or", "[", "]", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "config", ".", "output_dir", ")", ":", "for", "f", "in", "files", ":", "if", "f", "not", "in", "exclude", ":", "os", ".", "unlink", "(", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", ")", "for", "d", "in", "dirs", ":", "if", "d", "not", "in", "exclude", ":", "shutil", ".", "rmtree", "(", "os", ".", "path", ".", "join", "(", "root", ",", "d", ")", ")" ]
Clear build output dir :type exclude: list|None
[ "Clear", "build", "output", "dir", ":", "type", "exclude", ":", "list|None" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L218-L230
valid
zeeto/pyzlog
pyzlog/__init__.py
_default_json_default
def _default_json_default(obj): """ Coerce everything to strings. All objects representing time get output according to default_date_fmt. """ if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)): return obj.strftime(default_date_fmt) else: return str(obj)
python
def _default_json_default(obj): """ Coerce everything to strings. All objects representing time get output according to default_date_fmt. """ if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)): return obj.strftime(default_date_fmt) else: return str(obj)
[ "def", "_default_json_default", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "datetime", ".", "datetime", ",", "datetime", ".", "date", ",", "datetime", ".", "time", ")", ")", ":", "return", "obj", ".", "strftime", "(", "default_date_fmt", ")", "else", ":", "return", "str", "(", "obj", ")" ]
Coerce everything to strings. All objects representing time get output according to default_date_fmt.
[ "Coerce", "everything", "to", "strings", ".", "All", "objects", "representing", "time", "get", "output", "according", "to", "default_date_fmt", "." ]
c26d680bec04f9edd57ed5be733cae43ec828107
https://github.com/zeeto/pyzlog/blob/c26d680bec04f9edd57ed5be733cae43ec828107/pyzlog/__init__.py#L48-L55
valid
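How such a hook plugs into json.dumps; default_date_fmt is assumed to be a strftime pattern defined elsewhere in the module, so an example value is used here:

import datetime
import json

default_date_fmt = '%Y-%m-%dT%H:%M:%SZ'  # assumed value, for illustration

def json_default(obj):
    # coerce times via strftime, everything else via str()
    if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
        return obj.strftime(default_date_fmt)
    return str(obj)

print(json.dumps({'at': datetime.datetime(2014, 1, 2, 3, 4, 5)}, default=json_default))
# -> {"at": "2014-01-02T03:04:05Z"}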
zeeto/pyzlog
pyzlog/__init__.py
init_logs
def init_logs(path=None, target=None, logger_name='root', level=logging.DEBUG, maxBytes=1*1024*1024, backupCount=5, application_name='default', server_hostname=None, fields=None): """Initialize the zlogger. Sets up a rotating file handler to the specified path and file with the given size and backup count limits, sets the default application_name, server_hostname, and default/whitelist fields. :param path: path to write the log file :param target: name of the log file :param logger_name: name of the logger (defaults to root) :param level: log level for this logger (defaults to logging.DEBUG) :param maxBytes: size of the file before rotation (default 1MB) :param application_name: app name to add to each log entry :param server_hostname: hostname to add to each log entry :param fields: default/whitelist fields. :type path: string :type target: string :type logger_name: string :type level: int :type maxBytes: int :type backupCount: int :type application_name: string :type server_hostname: string :type fields: dict """ log_file = os.path.abspath( os.path.join(path, target)) logger = logging.getLogger(logger_name) logger.setLevel(level) handler = logging.handlers.RotatingFileHandler( log_file, maxBytes=maxBytes, backupCount=backupCount) handler.setLevel(level) handler.setFormatter( JsonFormatter( application_name=application_name, server_hostname=server_hostname, fields=fields)) logger.addHandler(handler)
python
def init_logs(path=None, target=None, logger_name='root', level=logging.DEBUG, maxBytes=1*1024*1024, backupCount=5, application_name='default', server_hostname=None, fields=None): """Initialize the zlogger. Sets up a rotating file handler to the specified path and file with the given size and backup count limits, sets the default application_name, server_hostname, and default/whitelist fields. :param path: path to write the log file :param target: name of the log file :param logger_name: name of the logger (defaults to root) :param level: log level for this logger (defaults to logging.DEBUG) :param maxBytes: size of the file before rotation (default 1MB) :param application_name: app name to add to each log entry :param server_hostname: hostname to add to each log entry :param fields: default/whitelist fields. :type path: string :type target: string :type logger_name: string :type level: int :type maxBytes: int :type backupCount: int :type application_name: string :type server_hostname: string :type fields: dict """ log_file = os.path.abspath( os.path.join(path, target)) logger = logging.getLogger(logger_name) logger.setLevel(level) handler = logging.handlers.RotatingFileHandler( log_file, maxBytes=maxBytes, backupCount=backupCount) handler.setLevel(level) handler.setFormatter( JsonFormatter( application_name=application_name, server_hostname=server_hostname, fields=fields)) logger.addHandler(handler)
[ "def", "init_logs", "(", "path", "=", "None", ",", "target", "=", "None", ",", "logger_name", "=", "'root'", ",", "level", "=", "logging", ".", "DEBUG", ",", "maxBytes", "=", "1", "*", "1024", "*", "1024", ",", "backupCount", "=", "5", ",", "application_name", "=", "'default'", ",", "server_hostname", "=", "None", ",", "fields", "=", "None", ")", ":", "log_file", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "path", ",", "target", ")", ")", "logger", "=", "logging", ".", "getLogger", "(", "logger_name", ")", "logger", ".", "setLevel", "(", "level", ")", "handler", "=", "logging", ".", "handlers", ".", "RotatingFileHandler", "(", "log_file", ",", "maxBytes", "=", "maxBytes", ",", "backupCount", "=", "backupCount", ")", "handler", ".", "setLevel", "(", "level", ")", "handler", ".", "setFormatter", "(", "JsonFormatter", "(", "application_name", "=", "application_name", ",", "server_hostname", "=", "server_hostname", ",", "fields", "=", "fields", ")", ")", "logger", ".", "addHandler", "(", "handler", ")" ]
Initialize the zlogger. Sets up a rotating file handler to the specified path and file with the given size and backup count limits, sets the default application_name, server_hostname, and default/whitelist fields. :param path: path to write the log file :param target: name of the log file :param logger_name: name of the logger (defaults to root) :param level: log level for this logger (defaults to logging.DEBUG) :param maxBytes: size of the file before rotation (default 1MB) :param application_name: app name to add to each log entry :param server_hostname: hostname to add to each log entry :param fields: default/whitelist fields. :type path: string :type target: string :type logger_name: string :type level: int :type maxBytes: int :type backupCount: int :type application_name: string :type server_hostname: string :type fields: dict
[ "Initialize", "the", "zlogger", "." ]
c26d680bec04f9edd57ed5be733cae43ec828107
https://github.com/zeeto/pyzlog/blob/c26d680bec04f9edd57ed5be733cae43ec828107/pyzlog/__init__.py#L225-L273
valid
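A hedged usage sketch for init_logs; the paths, names, and whitelist below are examples only:

import logging
from pyzlog import init_logs

init_logs(path='/tmp', target='app.json.log',
          application_name='billing', server_hostname='web-1',
          fields={'request_id': None})
logging.getLogger('root').info('started', extra={'event_name': 'startup'})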
zeeto/pyzlog
pyzlog/__init__.py
JsonFormatter.format
def format(self, record): """formats a logging.Record into a standard json log entry :param record: record to be formatted :type record: logging.Record :return: the formatted json string :rtype: string """ record_fields = record.__dict__.copy() self._set_exc_info(record_fields) event_name = 'default' if record_fields.get('event_name'): event_name = record_fields.pop('event_name') log_level = 'INFO' if record_fields.get('log_level'): log_level = record_fields.pop('log_level') [record_fields.pop(k) for k in record_fields.keys() if k not in self.fields] defaults = self.defaults.copy() fields = self.fields.copy() fields.update(record_fields) filtered_fields = {} for k, v in fields.iteritems(): if v is not None: filtered_fields[k] = v defaults.update({ 'event_timestamp': self._get_now(), 'event_name': event_name, 'log_level': log_level, 'fields': filtered_fields}) return json.dumps(defaults, default=self.json_default)
python
def format(self, record): """formats a logging.Record into a standard json log entry :param record: record to be formatted :type record: logging.Record :return: the formatted json string :rtype: string """ record_fields = record.__dict__.copy() self._set_exc_info(record_fields) event_name = 'default' if record_fields.get('event_name'): event_name = record_fields.pop('event_name') log_level = 'INFO' if record_fields.get('log_level'): log_level = record_fields.pop('log_level') [record_fields.pop(k) for k in record_fields.keys() if k not in self.fields] defaults = self.defaults.copy() fields = self.fields.copy() fields.update(record_fields) filtered_fields = {} for k, v in fields.iteritems(): if v is not None: filtered_fields[k] = v defaults.update({ 'event_timestamp': self._get_now(), 'event_name': event_name, 'log_level': log_level, 'fields': filtered_fields}) return json.dumps(defaults, default=self.json_default)
[ "def", "format", "(", "self", ",", "record", ")", ":", "record_fields", "=", "record", ".", "__dict__", ".", "copy", "(", ")", "self", ".", "_set_exc_info", "(", "record_fields", ")", "event_name", "=", "'default'", "if", "record_fields", ".", "get", "(", "'event_name'", ")", ":", "event_name", "=", "record_fields", ".", "pop", "(", "'event_name'", ")", "log_level", "=", "'INFO'", "if", "record_fields", ".", "get", "(", "'log_level'", ")", ":", "log_level", "=", "record_fields", ".", "pop", "(", "'log_level'", ")", "[", "record_fields", ".", "pop", "(", "k", ")", "for", "k", "in", "record_fields", ".", "keys", "(", ")", "if", "k", "not", "in", "self", ".", "fields", "]", "defaults", "=", "self", ".", "defaults", ".", "copy", "(", ")", "fields", "=", "self", ".", "fields", ".", "copy", "(", ")", "fields", ".", "update", "(", "record_fields", ")", "filtered_fields", "=", "{", "}", "for", "k", ",", "v", "in", "fields", ".", "iteritems", "(", ")", ":", "if", "v", "is", "not", "None", ":", "filtered_fields", "[", "k", "]", "=", "v", "defaults", ".", "update", "(", "{", "'event_timestamp'", ":", "self", ".", "_get_now", "(", ")", ",", "'event_name'", ":", "event_name", ",", "'log_level'", ":", "log_level", ",", "'fields'", ":", "filtered_fields", "}", ")", "return", "json", ".", "dumps", "(", "defaults", ",", "default", "=", "self", ".", "json_default", ")" ]
formats a logging.Record into a standard json log entry :param record: record to be formatted :type record: logging.Record :return: the formatted json string :rtype: string
[ "formats", "a", "logging", ".", "Record", "into", "a", "standard", "json", "log", "entry" ]
c26d680bec04f9edd57ed5be733cae43ec828107
https://github.com/zeeto/pyzlog/blob/c26d680bec04f9edd57ed5be733cae43ec828107/pyzlog/__init__.py#L173-L210
valid
suryakencana007/baka_model
baka_model/__init__.py
includeme
def includeme(config):
    """
    Initialize the model for a Pyramid app.

    Activate this setup using ``config.include('baka_model')``.
    """
    settings = config.get_settings()
    should_create = asbool(settings.get('baka_model.should_create_all', False))
    should_drop = asbool(settings.get('baka_model.should_drop_all', False))

    # Configure the transaction manager to support retrying retryable
    # exceptions. We also register the session factory with the thread-local
    # transaction manager, so that all sessions it creates are registered.
    #    "tm.attempts": 3,
    config.add_settings({
        "retry.attempts": 3,
        "tm.activate_hook": tm_activate_hook,
        "tm.annotate_user": False,
    })

    # use pyramid_retry because pyramid_tm disabled it
    config.include('pyramid_retry')

    # use pyramid_tm to hook the transaction lifecycle to the request
    config.include('pyramid_tm')

    engine = get_engine(settings)
    session_factory = get_session_factory(engine)
    config.registry['db_session_factory'] = session_factory

    # make request.db available for use in Pyramid
    config.add_request_method(
        # r.tm is the transaction manager used by pyramid_tm
        lambda r: get_tm_session(session_factory, r.tm),
        'db',
        reify=True
    )

    # service model factory
    config.include('.service')

    # Register a deferred action to bind the engine when the configuration is
    # committed. Deferring the action means that this module can be included
    # before model modules without ill effect.
    config.action(None, bind_engine, (engine,), {
        'should_create': should_create,
        'should_drop': should_drop
    }, order=10)
python
def includeme(config):
    """
    Initialize the model for a Pyramid app.

    Activate this setup using ``config.include('baka_model')``.
    """
    settings = config.get_settings()
    should_create = asbool(settings.get('baka_model.should_create_all', False))
    should_drop = asbool(settings.get('baka_model.should_drop_all', False))

    # Configure the transaction manager to support retrying retryable
    # exceptions. We also register the session factory with the thread-local
    # transaction manager, so that all sessions it creates are registered.
    #    "tm.attempts": 3,
    config.add_settings({
        "retry.attempts": 3,
        "tm.activate_hook": tm_activate_hook,
        "tm.annotate_user": False,
    })

    # use pyramid_retry because pyramid_tm disabled it
    config.include('pyramid_retry')

    # use pyramid_tm to hook the transaction lifecycle to the request
    config.include('pyramid_tm')

    engine = get_engine(settings)
    session_factory = get_session_factory(engine)
    config.registry['db_session_factory'] = session_factory

    # make request.db available for use in Pyramid
    config.add_request_method(
        # r.tm is the transaction manager used by pyramid_tm
        lambda r: get_tm_session(session_factory, r.tm),
        'db',
        reify=True
    )

    # service model factory
    config.include('.service')

    # Register a deferred action to bind the engine when the configuration is
    # committed. Deferring the action means that this module can be included
    # before model modules without ill effect.
    config.action(None, bind_engine, (engine,), {
        'should_create': should_create,
        'should_drop': should_drop
    }, order=10)
[ "def", "includeme", "(", "config", ")", ":", "settings", "=", "config", ".", "get_settings", "(", ")", "should_create", "=", "asbool", "(", "settings", ".", "get", "(", "'baka_model.should_create_all'", ",", "False", ")", ")", "should_drop", "=", "asbool", "(", "settings", ".", "get", "(", "'baka_model.should_drop_all'", ",", "False", ")", ")", "# Configure the transaction manager to support retrying retryable", "# exceptions. We also register the session factory with the thread-local", "# transaction manager, so that all sessions it creates are registered.", "# \"tm.attempts\": 3,", "config", ".", "add_settings", "(", "{", "\"retry.attempts\"", ":", "3", ",", "\"tm.activate_hook\"", ":", "tm_activate_hook", ",", "\"tm.annotate_user\"", ":", "False", ",", "}", ")", "# use pyramid_retry couse pyramid_tm disabled it", "config", ".", "include", "(", "'pyramid_retry'", ")", "# use pyramid_tm to hook the transaction lifecycle to the request", "config", ".", "include", "(", "'pyramid_tm'", ")", "engine", "=", "get_engine", "(", "settings", ")", "session_factory", "=", "get_session_factory", "(", "engine", ")", "config", ".", "registry", "[", "'db_session_factory'", "]", "=", "session_factory", "# make request.db available for use in Pyramid", "config", ".", "add_request_method", "(", "# r.tm is the transaction manager used by pyramid_tm", "lambda", "r", ":", "get_tm_session", "(", "session_factory", ",", "r", ".", "tm", ")", ",", "'db'", ",", "reify", "=", "True", ")", "# service model factory", "config", ".", "include", "(", "'.service'", ")", "# Register a deferred action to bind the engine when the configuration is", "# committed. Deferring the action means that this module can be included", "# before model modules without ill effect.", "config", ".", "action", "(", "None", ",", "bind_engine", ",", "(", "engine", ",", ")", ",", "{", "'should_create'", ":", "should_create", ",", "'should_drop'", ":", "should_drop", "}", ",", "order", "=", "10", ")" ]
Initialize the model for a Pyramid app. Activate this setup using ``config.include('baka_model')``.
[ "Initialize", "the", "model", "for", "a", "Pyramid", "app", "." ]
915c2da9920e973302f5764ae63799acd5ecf0b7
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/__init__.py#L38-L86
valid
charlesthomas/proauth2
proauth2/data_stores/mongo_ds.py
DataStore.store
def store( self, collection, **kwargs ): ''' validate the passed values in kwargs based on the collection, store them in the mongodb collection ''' key = validate( collection, **kwargs ) if self.fetch( collection, **{ key : kwargs[key] } ): raise Proauth2Error( 'duplicate_key' ) self.db[collection].insert( kwargs )
python
def store( self, collection, **kwargs ): ''' validate the passed values in kwargs based on the collection, store them in the mongodb collection ''' key = validate( collection, **kwargs ) if self.fetch( collection, **{ key : kwargs[key] } ): raise Proauth2Error( 'duplicate_key' ) self.db[collection].insert( kwargs )
[ "def", "store", "(", "self", ",", "collection", ",", "*", "*", "kwargs", ")", ":", "key", "=", "validate", "(", "collection", ",", "*", "*", "kwargs", ")", "if", "self", ".", "fetch", "(", "collection", ",", "*", "*", "{", "key", ":", "kwargs", "[", "key", "]", "}", ")", ":", "raise", "Proauth2Error", "(", "'duplicate_key'", ")", "self", ".", "db", "[", "collection", "]", ".", "insert", "(", "kwargs", ")" ]
validate the passed values in kwargs based on the collection, store them in the mongodb collection
[ "validate", "the", "passed", "values", "in", "kwargs", "based", "on", "the", "collection", "store", "them", "in", "the", "mongodb", "collection" ]
f88c8df966a1802414047ed304d02df1dd520097
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/data_stores/mongo_ds.py#L44-L52
valid
Rikanishu/static-bundle
static_bundle/paths.py
AbstractPath.get_abs_and_rel_paths
def get_abs_and_rel_paths(self, root_path, file_name, input_dir): """ Return absolute and relative path for file :type root_path: str|unicode :type file_name: str|unicode :type input_dir: str|unicode :rtype: tuple """ # todo: change relative path resolving [bug on duplicate dir names in path] relative_dir = root_path.replace(input_dir, '') return os.path.join(root_path, file_name), relative_dir + '/' + file_name
python
def get_abs_and_rel_paths(self, root_path, file_name, input_dir): """ Return absolute and relative path for file :type root_path: str|unicode :type file_name: str|unicode :type input_dir: str|unicode :rtype: tuple """ # todo: change relative path resolving [bug on duplicate dir names in path] relative_dir = root_path.replace(input_dir, '') return os.path.join(root_path, file_name), relative_dir + '/' + file_name
[ "def", "get_abs_and_rel_paths", "(", "self", ",", "root_path", ",", "file_name", ",", "input_dir", ")", ":", "# todo: change relative path resolving [bug on duplicate dir names in path]", "relative_dir", "=", "root_path", ".", "replace", "(", "input_dir", ",", "''", ")", "return", "os", ".", "path", ".", "join", "(", "root_path", ",", "file_name", ")", ",", "relative_dir", "+", "'/'", "+", "file_name" ]
Return absolute and relative path for file :type root_path: str|unicode :type file_name: str|unicode :type input_dir: str|unicode :rtype: tuple
[ "Return", "absolute", "and", "relative", "path", "for", "file" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/paths.py#L23-L35
valid
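The `# todo` in this record flags a real defect: `root_path.replace(input_dir, '')` removes *every* occurrence of `input_dir`, so the relative path collapses when directory runs repeat. `os.path.relpath` strips the prefix positionally; a sketch:

```python
import os

def get_abs_and_rel_paths(root_path, file_name, input_dir):
    # relpath removes input_dir as a path *prefix* rather than as a substring,
    # which is what the record's str.replace() gets wrong.
    abs_path = os.path.join(root_path, file_name)
    rel_path = os.path.relpath(abs_path, input_dir)
    return abs_path, '/' + rel_path.replace(os.sep, '/')

# With input_dir '/srv/static' and a repeated directory run, str.replace()
# would yield '/js/app.js'; relpath keeps the middle segments:
print(get_abs_and_rel_paths('/srv/static/srv/static/js', 'app.js', '/srv/static'))
# ('/srv/static/srv/static/js/app.js', '/srv/static/js/app.js')
```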
Rikanishu/static-bundle
static_bundle/paths.py
FilePath.get_files
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch file name with empty bundle' abs_path, rel_path = self.get_abs_and_rel_paths(self.bundle.path, self.file_path, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() return [file_cls(rel_path, abs_path)]
python
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch file name with empty bundle' abs_path, rel_path = self.get_abs_and_rel_paths(self.bundle.path, self.file_path, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() return [file_cls(rel_path, abs_path)]
[ "def", "get_files", "(", "self", ")", ":", "assert", "self", ".", "bundle", ",", "'Cannot fetch file name with empty bundle'", "abs_path", ",", "rel_path", "=", "self", ".", "get_abs_and_rel_paths", "(", "self", ".", "bundle", ".", "path", ",", "self", ".", "file_path", ",", "self", ".", "bundle", ".", "input_dir", ")", "file_cls", "=", "self", ".", "bundle", ".", "get_file_cls", "(", ")", "return", "[", "file_cls", "(", "rel_path", ",", "abs_path", ")", "]" ]
:inheritdoc
[ ":", "inheritdoc" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/paths.py#L51-L58
valid
Rikanishu/static-bundle
static_bundle/paths.py
DirectoryPath.get_files
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch directory name with empty bundle' result_files = [] bundle_ext = self.bundle.get_extension() ext = "." + bundle_ext if bundle_ext else None if self.directory_path == "": root_path = self.bundle.path else: root_path = os.path.join(self.bundle.path, self.directory_path) for root, dirs, files in os.walk(root_path): for fpath in files: if (not ext or fpath.endswith(ext)) and (not self.exclusions or all(fpath != n for n in self.exclusions)): abs_path, rel_path = self.get_abs_and_rel_paths(root, fpath, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() result_files.append(file_cls(rel_path, abs_path)) return result_files
python
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch directory name with empty bundle' result_files = [] bundle_ext = self.bundle.get_extension() ext = "." + bundle_ext if bundle_ext else None if self.directory_path == "": root_path = self.bundle.path else: root_path = os.path.join(self.bundle.path, self.directory_path) for root, dirs, files in os.walk(root_path): for fpath in files: if (not ext or fpath.endswith(ext)) and (not self.exclusions or all(fpath != n for n in self.exclusions)): abs_path, rel_path = self.get_abs_and_rel_paths(root, fpath, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() result_files.append(file_cls(rel_path, abs_path)) return result_files
[ "def", "get_files", "(", "self", ")", ":", "assert", "self", ".", "bundle", ",", "'Cannot fetch directory name with empty bundle'", "result_files", "=", "[", "]", "bundle_ext", "=", "self", ".", "bundle", ".", "get_extension", "(", ")", "ext", "=", "\".\"", "+", "bundle_ext", "if", "bundle_ext", "else", "None", "if", "self", ".", "directory_path", "==", "\"\"", ":", "root_path", "=", "self", ".", "bundle", ".", "path", "else", ":", "root_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "bundle", ".", "path", ",", "self", ".", "directory_path", ")", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "root_path", ")", ":", "for", "fpath", "in", "files", ":", "if", "(", "not", "ext", "or", "fpath", ".", "endswith", "(", "ext", ")", ")", "and", "(", "not", "self", ".", "exclusions", "or", "all", "(", "fpath", "!=", "n", "for", "n", "in", "self", ".", "exclusions", ")", ")", ":", "abs_path", ",", "rel_path", "=", "self", ".", "get_abs_and_rel_paths", "(", "root", ",", "fpath", ",", "self", ".", "bundle", ".", "input_dir", ")", "file_cls", "=", "self", ".", "bundle", ".", "get_file_cls", "(", ")", "result_files", ".", "append", "(", "file_cls", "(", "rel_path", ",", "abs_path", ")", ")", "return", "result_files" ]
:inheritdoc
[ ":", "inheritdoc" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/paths.py#L73-L91
valid
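The walk in `DirectoryPath.get_files` is an extension filter plus a name blacklist over `os.walk`. The same traversal as a standalone generator, assuming exclusions match bare file names as they do in the record:

```python
from pathlib import Path

def collect_files(root, ext=None, exclusions=()):
    # rglob('*') mirrors os.walk; the suffix check mirrors fpath.endswith(ext);
    # the name test mirrors all(fpath != n for n in exclusions).
    for path in sorted(Path(root).rglob('*')):
        if path.is_file() and (ext is None or path.suffix == ext) \
                and path.name not in exclusions:
            yield path

for f in collect_files('static/js', ext='.js', exclusions=('vendor.js',)):
    print(f)
```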
zvoase/django-relax
relax/couchdb/replicate.py
replicate_existing
def replicate_existing(source_db, target_db): """Replicate an existing database to another existing database.""" # Get the server from which to manage the replication. server = shortcuts.get_server() logger = logging.getLogger('relax.couchdb.replicate') logger.debug('POST ' + urlparse.urljoin(server.resource.uri, '/_replicate')) source, target = specifier_to_db(source_db), specifier_to_db(target_db) logger.debug('Source DB: %s' % (source,)) logger.debug('Target DB: %s' % (target,)) try: resp_headers, resp_body = server.resource.post(path='/_replicate', content=json.dumps({'source': source, 'target': target})) except couchdb.client.ServerError, exc: logger.error('Replication failed.') raise ReplicationError(exc.args) result = resp_body['history'][0] if resp_body['ok']: logger.info('Replication %s... successful!' % ( resp_body['session_id'][:6],)) logger.info('Replication started: ' + result['start_time']) logger.info('Replication finished: ' + result['end_time']) result['start_time'] = datetime.datetime.strptime(result['start_time'], '%a, %d %b %Y %H:%M:%S GMT') result['end_time'] = datetime.datetime.strptime(result['end_time'], '%a, %d %b %Y %H:%M:%S GMT') timedelta = result['end_time'] - result['start_time'] if timedelta.days: logger.info('Replication took %d days and %.2f seconds.' % ( timedelta.days, timedelta.seconds + (timedelta.microseconds * (1e-6)))) else: logger.info('Replication took %.2f seconds.' % ( timedelta.seconds + (timedelta.microseconds * (1e-6)))) # Prepare the 'result' dictionary. result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] # Info-log the number of docs read/written and checked/found. if result['docs_read'] == 1: docs_read = '1 document read' else: docs_read = '%d documents read' % (result['docs_read'],) if result['docs_written'] == 1: docs_written = '1 document written' else: docs_written = '%d documents written' % (result['docs_written'],) if result['missing_checked'] == 1: missing_checked = 'Checked for 1 missing document, found %d.' % ( result['missing_found'],) else: missing_checked = 'Checked for %d missing documents, found %d.' % ( result['missing_checked'], result['missing_found'],) logging.info('%s, %s' % (docs_read, docs_written)) logging.info(missing_checked) return result else: logger.error('Replication %s... failed.' % ( resp_body['session_id'][:6],)) result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] raise ReplicationFailure(resp_headers, result)
python
def replicate_existing(source_db, target_db): """Replicate an existing database to another existing database.""" # Get the server from which to manage the replication. server = shortcuts.get_server() logger = logging.getLogger('relax.couchdb.replicate') logger.debug('POST ' + urlparse.urljoin(server.resource.uri, '/_replicate')) source, target = specifier_to_db(source_db), specifier_to_db(target_db) logger.debug('Source DB: %s' % (source,)) logger.debug('Target DB: %s' % (target,)) try: resp_headers, resp_body = server.resource.post(path='/_replicate', content=json.dumps({'source': source, 'target': target})) except couchdb.client.ServerError, exc: logger.error('Replication failed.') raise ReplicationError(exc.args) result = resp_body['history'][0] if resp_body['ok']: logger.info('Replication %s... successful!' % ( resp_body['session_id'][:6],)) logger.info('Replication started: ' + result['start_time']) logger.info('Replication finished: ' + result['end_time']) result['start_time'] = datetime.datetime.strptime(result['start_time'], '%a, %d %b %Y %H:%M:%S GMT') result['end_time'] = datetime.datetime.strptime(result['end_time'], '%a, %d %b %Y %H:%M:%S GMT') timedelta = result['end_time'] - result['start_time'] if timedelta.days: logger.info('Replication took %d days and %.2f seconds.' % ( timedelta.days, timedelta.seconds + (timedelta.microseconds * (1e-6)))) else: logger.info('Replication took %.2f seconds.' % ( timedelta.seconds + (timedelta.microseconds * (1e-6)))) # Prepare the 'result' dictionary. result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] # Info-log the number of docs read/written and checked/found. if result['docs_read'] == 1: docs_read = '1 document read' else: docs_read = '%d documents read' % (result['docs_read'],) if result['docs_written'] == 1: docs_written = '1 document written' else: docs_written = '%d documents written' % (result['docs_written'],) if result['missing_checked'] == 1: missing_checked = 'Checked for 1 missing document, found %d.' % ( result['missing_found'],) else: missing_checked = 'Checked for %d missing documents, found %d.' % ( result['missing_checked'], result['missing_found'],) logging.info('%s, %s' % (docs_read, docs_written)) logging.info(missing_checked) return result else: logger.error('Replication %s... failed.' % ( resp_body['session_id'][:6],)) result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] raise ReplicationFailure(resp_headers, result)
[ "def", "replicate_existing", "(", "source_db", ",", "target_db", ")", ":", "# Get the server from which to manage the replication.", "server", "=", "shortcuts", ".", "get_server", "(", ")", "logger", "=", "logging", ".", "getLogger", "(", "'relax.couchdb.replicate'", ")", "logger", ".", "debug", "(", "'POST '", "+", "urlparse", ".", "urljoin", "(", "server", ".", "resource", ".", "uri", ",", "'/_replicate'", ")", ")", "source", ",", "target", "=", "specifier_to_db", "(", "source_db", ")", ",", "specifier_to_db", "(", "target_db", ")", "logger", ".", "debug", "(", "'Source DB: %s'", "%", "(", "source", ",", ")", ")", "logger", ".", "debug", "(", "'Target DB: %s'", "%", "(", "target", ",", ")", ")", "try", ":", "resp_headers", ",", "resp_body", "=", "server", ".", "resource", ".", "post", "(", "path", "=", "'/_replicate'", ",", "content", "=", "json", ".", "dumps", "(", "{", "'source'", ":", "source", ",", "'target'", ":", "target", "}", ")", ")", "except", "couchdb", ".", "client", ".", "ServerError", ",", "exc", ":", "logger", ".", "error", "(", "'Replication failed.'", ")", "raise", "ReplicationError", "(", "exc", ".", "args", ")", "result", "=", "resp_body", "[", "'history'", "]", "[", "0", "]", "if", "resp_body", "[", "'ok'", "]", ":", "logger", ".", "info", "(", "'Replication %s... successful!'", "%", "(", "resp_body", "[", "'session_id'", "]", "[", ":", "6", "]", ",", ")", ")", "logger", ".", "info", "(", "'Replication started: '", "+", "result", "[", "'start_time'", "]", ")", "logger", ".", "info", "(", "'Replication finished: '", "+", "result", "[", "'end_time'", "]", ")", "result", "[", "'start_time'", "]", "=", "datetime", ".", "datetime", ".", "strptime", "(", "result", "[", "'start_time'", "]", ",", "'%a, %d %b %Y %H:%M:%S GMT'", ")", "result", "[", "'end_time'", "]", "=", "datetime", ".", "datetime", ".", "strptime", "(", "result", "[", "'end_time'", "]", ",", "'%a, %d %b %Y %H:%M:%S GMT'", ")", "timedelta", "=", "result", "[", "'end_time'", "]", "-", "result", "[", "'start_time'", "]", "if", "timedelta", ".", "days", ":", "logger", ".", "info", "(", "'Replication took %d days and %.2f seconds.'", "%", "(", "timedelta", ".", "days", ",", "timedelta", ".", "seconds", "+", "(", "timedelta", ".", "microseconds", "*", "(", "1e-6", ")", ")", ")", ")", "else", ":", "logger", ".", "info", "(", "'Replication took %.2f seconds.'", "%", "(", "timedelta", ".", "seconds", "+", "(", "timedelta", ".", "microseconds", "*", "(", "1e-6", ")", ")", ")", ")", "# Prepare the 'result' dictionary.", "result", "[", "'ok'", "]", "=", "resp_body", "[", "'ok'", "]", "result", "[", "'session_id'", "]", "=", "resp_body", "[", "'session_id'", "]", "result", "[", "'source_last_seq'", "]", "=", "resp_body", "[", "'source_last_seq'", "]", "# Info-log the number of docs read/written and checked/found.", "if", "result", "[", "'docs_read'", "]", "==", "1", ":", "docs_read", "=", "'1 document read'", "else", ":", "docs_read", "=", "'%d documents read'", "%", "(", "result", "[", "'docs_read'", "]", ",", ")", "if", "result", "[", "'docs_written'", "]", "==", "1", ":", "docs_written", "=", "'1 document written'", "else", ":", "docs_written", "=", "'%d documents written'", "%", "(", "result", "[", "'docs_written'", "]", ",", ")", "if", "result", "[", "'missing_checked'", "]", "==", "1", ":", "missing_checked", "=", "'Checked for 1 missing document, found %d.'", "%", "(", "result", "[", "'missing_found'", "]", ",", ")", "else", ":", "missing_checked", "=", "'Checked for %d missing documents, found %d.'", "%", "(", 
"result", "[", "'missing_checked'", "]", ",", "result", "[", "'missing_found'", "]", ",", ")", "logging", ".", "info", "(", "'%s, %s'", "%", "(", "docs_read", ",", "docs_written", ")", ")", "logging", ".", "info", "(", "missing_checked", ")", "return", "result", "else", ":", "logger", ".", "error", "(", "'Replication %s... failed.'", "%", "(", "resp_body", "[", "'session_id'", "]", "[", ":", "6", "]", ",", ")", ")", "result", "[", "'ok'", "]", "=", "resp_body", "[", "'ok'", "]", "result", "[", "'session_id'", "]", "=", "resp_body", "[", "'session_id'", "]", "result", "[", "'source_last_seq'", "]", "=", "resp_body", "[", "'source_last_seq'", "]", "raise", "ReplicationFailure", "(", "resp_headers", ",", "result", ")" ]
Replicate an existing database to another existing database.
[ "Replicate", "an", "existing", "database", "to", "another", "existing", "database", "." ]
10bb37bf3a512b290816856a6877c17fa37e930f
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/replicate.py#L31-L92
valid
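This record is Python 2 (`except ..., exc`, `print` statements). The essential action is a POST of `{'source', 'target'}` to CouchDB's `/_replicate`; a Python 3 sketch with `requests`, with server URL and database names invented:

```python
# Python 3 sketch of the replication trigger; http://localhost:5984 and the
# database names are placeholders.
import requests

def replicate_existing(server_url, source, target):
    resp = requests.post(f'{server_url}/_replicate',
                         json={'source': source, 'target': target})
    resp.raise_for_status()
    body = resp.json()
    if not body.get('ok'):
        raise RuntimeError(f'replication failed: {body}')
    return body['history'][0]  # docs_read, docs_written, missing_checked, ...

stats = replicate_existing('http://localhost:5984', 'app_db', 'app_db_backup')
print('%d documents written' % stats['docs_written'])
```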
JohannesBuchner/jbopt
jbopt/mcmc.py
mcmc_advance
def mcmc_advance(start, stdevs, logp, nsteps = 1e300, adapt=True, callback=None): """ Generic Metropolis MCMC. Advances the chain by nsteps. Called by :func:`mcmc` :param adapt: enables adaptive stepwidth alteration (converges). """ import scipy from numpy import log import progressbar prob = logp(start) chain = [start] accepts = [True] probs = [prob] assert not numpy.isnan(start).any() assert not numpy.isnan(stdevs).any() i = 0 widgets=['AR', progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()] pbar = progressbar.ProgressBar(widgets=widgets, maxval=nsteps).start() prev = start prev_prob = prob print 'MCMC: start at prob', prob stepchange = 0.1 while len(chain) < nsteps: i = i + 1 next = scipy.random.normal(prev, stdevs) next[next > 1] = 1 next[next < 0] = 0 next_prob = logp(next) assert not numpy.isnan(next).any() assert not numpy.isnan(next_prob).any() delta = next_prob - prev_prob dice = log(scipy.random.uniform(0, 1)) accept = delta > dice if accept: prev = next prev_prob = next_prob if adapt: stdevs *= (1 + stepchange) else: if adapt: stdevs *= (1 + stepchange)**(-0.4) # aiming for 40% acceptance if callback: callback(prev_prob, prev, accept) chain.append(prev) accepts.append(accept) probs.append(prev_prob) if adapt: stepchange = min(0.1, 10. / i) #print 'STDEV', stdevs[:5], stepchange # compute stats widgets[0] = 'AR: %.03f' % numpy.mean(numpy.array(accepts[len(accepts)/3:])+0) pbar.update(pbar.currval + 1) pbar.finish() return chain, probs, accepts, stdevs
python
def mcmc_advance(start, stdevs, logp, nsteps = 1e300, adapt=True, callback=None): """ Generic Metropolis MCMC. Advances the chain by nsteps. Called by :func:`mcmc` :param adapt: enables adaptive stepwidth alteration (converges). """ import scipy from numpy import log import progressbar prob = logp(start) chain = [start] accepts = [True] probs = [prob] assert not numpy.isnan(start).any() assert not numpy.isnan(stdevs).any() i = 0 widgets=['AR', progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()] pbar = progressbar.ProgressBar(widgets=widgets, maxval=nsteps).start() prev = start prev_prob = prob print 'MCMC: start at prob', prob stepchange = 0.1 while len(chain) < nsteps: i = i + 1 next = scipy.random.normal(prev, stdevs) next[next > 1] = 1 next[next < 0] = 0 next_prob = logp(next) assert not numpy.isnan(next).any() assert not numpy.isnan(next_prob).any() delta = next_prob - prev_prob dice = log(scipy.random.uniform(0, 1)) accept = delta > dice if accept: prev = next prev_prob = next_prob if adapt: stdevs *= (1 + stepchange) else: if adapt: stdevs *= (1 + stepchange)**(-0.4) # aiming for 40% acceptance if callback: callback(prev_prob, prev, accept) chain.append(prev) accepts.append(accept) probs.append(prev_prob) if adapt: stepchange = min(0.1, 10. / i) #print 'STDEV', stdevs[:5], stepchange # compute stats widgets[0] = 'AR: %.03f' % numpy.mean(numpy.array(accepts[len(accepts)/3:])+0) pbar.update(pbar.currval + 1) pbar.finish() return chain, probs, accepts, stdevs
[ "def", "mcmc_advance", "(", "start", ",", "stdevs", ",", "logp", ",", "nsteps", "=", "1e300", ",", "adapt", "=", "True", ",", "callback", "=", "None", ")", ":", "import", "scipy", "from", "numpy", "import", "log", "import", "progressbar", "prob", "=", "logp", "(", "start", ")", "chain", "=", "[", "start", "]", "accepts", "=", "[", "True", "]", "probs", "=", "[", "prob", "]", "assert", "not", "numpy", ".", "isnan", "(", "start", ")", ".", "any", "(", ")", "assert", "not", "numpy", ".", "isnan", "(", "stdevs", ")", ".", "any", "(", ")", "i", "=", "0", "widgets", "=", "[", "'AR'", ",", "progressbar", ".", "Percentage", "(", ")", ",", "progressbar", ".", "Counter", "(", "'%5d'", ")", ",", "progressbar", ".", "Bar", "(", ")", ",", "progressbar", ".", "ETA", "(", ")", "]", "pbar", "=", "progressbar", ".", "ProgressBar", "(", "widgets", "=", "widgets", ",", "maxval", "=", "nsteps", ")", ".", "start", "(", ")", "prev", "=", "start", "prev_prob", "=", "prob", "print", "'MCMC: start at prob'", ",", "prob", "stepchange", "=", "0.1", "while", "len", "(", "chain", ")", "<", "nsteps", ":", "i", "=", "i", "+", "1", "next", "=", "scipy", ".", "random", ".", "normal", "(", "prev", ",", "stdevs", ")", "next", "[", "next", ">", "1", "]", "=", "1", "next", "[", "next", "<", "0", "]", "=", "0", "next_prob", "=", "logp", "(", "next", ")", "assert", "not", "numpy", ".", "isnan", "(", "next", ")", ".", "any", "(", ")", "assert", "not", "numpy", ".", "isnan", "(", "next_prob", ")", ".", "any", "(", ")", "delta", "=", "next_prob", "-", "prev_prob", "dice", "=", "log", "(", "scipy", ".", "random", ".", "uniform", "(", "0", ",", "1", ")", ")", "accept", "=", "delta", ">", "dice", "if", "accept", ":", "prev", "=", "next", "prev_prob", "=", "next_prob", "if", "adapt", ":", "stdevs", "*=", "(", "1", "+", "stepchange", ")", "else", ":", "if", "adapt", ":", "stdevs", "*=", "(", "1", "+", "stepchange", ")", "**", "(", "-", "0.4", ")", "# aiming for 40% acceptance", "if", "callback", ":", "callback", "(", "prev_prob", ",", "prev", ",", "accept", ")", "chain", ".", "append", "(", "prev", ")", "accepts", ".", "append", "(", "accept", ")", "probs", ".", "append", "(", "prev_prob", ")", "if", "adapt", ":", "stepchange", "=", "min", "(", "0.1", ",", "10.", "/", "i", ")", "#print 'STDEV', stdevs[:5], stepchange", "# compute stats", "widgets", "[", "0", "]", "=", "'AR: %.03f'", "%", "numpy", ".", "mean", "(", "numpy", ".", "array", "(", "accepts", "[", "len", "(", "accepts", ")", "/", "3", ":", "]", ")", "+", "0", ")", "pbar", ".", "update", "(", "pbar", ".", "currval", "+", "1", ")", "pbar", ".", "finish", "(", ")", "return", "chain", ",", "probs", ",", "accepts", ",", "stdevs" ]
Generic Metropolis MCMC. Advances the chain by nsteps. Called by :func:`mcmc` :param adapt: enables adaptive stepwidth alteration (converges).
[ "Generic", "Metropolis", "MCMC", ".", "Advances", "the", "chain", "by", "nsteps", ".", "Called", "by", ":", "func", ":", "mcmc", ":", "param", "adapt", ":", "enables", "adaptive", "stepwidth", "alteration", "(", "converges", ")", "." ]
11b721ea001625ad7820f71ff684723c71216646
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/mcmc.py#L6-L63
valid
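Stripped of the progress bar, the step-width adaptation, and the Python 2 prints, the core of `mcmc_advance` is the Metropolis accept rule: keep the proposal when `logp(next) - logp(prev)` beats a log-uniform draw. A compact sketch on the unit cube:

```python
import numpy as np

rng = np.random.default_rng(0)

def metropolis(logp, start, stdevs, nsteps):
    # Random-walk Metropolis with proposals clipped to [0, 1], as in the record.
    chain, x, lp = [start], start, logp(start)
    while len(chain) < nsteps:
        prop = np.clip(rng.normal(x, stdevs), 0.0, 1.0)
        lp_prop = logp(prop)
        if lp_prop - lp > np.log(rng.uniform()):  # the accept rule
            x, lp = prop, lp_prop
        chain.append(x)
    return np.array(chain)

chain = metropolis(lambda c: -((c - 0.5) ** 2).sum() / 0.02,
                   np.full(2, 0.5), np.full(2, 0.1), 2000)
print(chain.mean(axis=0))  # close to [0.5, 0.5]
```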
JohannesBuchner/jbopt
jbopt/mcmc.py
mcmc
def mcmc(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, stdevs=0.1, start = 0.5, **problem): """ **Metropolis Hastings MCMC** with automatic step width adaption. Burnin period is also used to guess steps. :param nburn: number of burnin steps :param stdevs: step widths to start with """ if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) start = start + numpy.zeros(n_params) stdevs = stdevs + numpy.zeros(n_params) def compute_stepwidths(chain): return numpy.std(chain, axis=0) / 3 import matplotlib.pyplot as plt plt.figure(figsize=(7, 7)) steps = numpy.array([0.1]*(n_params)) print 'burn-in (1/2)...' chain, prob, _, steps_ = mcmc_advance(start, steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'burn-in (2/2)...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'recording chain ...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nsteps) chain = numpy.array(chain) i = numpy.argmax(prob) final = chain[-1] print 'postprocessing...' chain = numpy.array([transform(params) for params in chain]) return dict(start=chain[-1], maximum=chain[i], seeds=[final, chain[i]], chain=chain, method='Metropolis MCMC')
python
def mcmc(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, stdevs=0.1, start = 0.5, **problem): """ **Metropolis Hastings MCMC** with automatic step width adaption. Burnin period is also used to guess steps. :param nburn: number of burnin steps :param stdevs: step widths to start with """ if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) start = start + numpy.zeros(n_params) stdevs = stdevs + numpy.zeros(n_params) def compute_stepwidths(chain): return numpy.std(chain, axis=0) / 3 import matplotlib.pyplot as plt plt.figure(figsize=(7, 7)) steps = numpy.array([0.1]*(n_params)) print 'burn-in (1/2)...' chain, prob, _, steps_ = mcmc_advance(start, steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'burn-in (2/2)...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'recording chain ...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nsteps) chain = numpy.array(chain) i = numpy.argmax(prob) final = chain[-1] print 'postprocessing...' chain = numpy.array([transform(params) for params in chain]) return dict(start=chain[-1], maximum=chain[i], seeds=[final, chain[i]], chain=chain, method='Metropolis MCMC')
[ "def", "mcmc", "(", "transform", ",", "loglikelihood", ",", "parameter_names", ",", "nsteps", "=", "40000", ",", "nburn", "=", "400", ",", "stdevs", "=", "0.1", ",", "start", "=", "0.5", ",", "*", "*", "problem", ")", ":", "if", "'seed'", "in", "problem", ":", "numpy", ".", "random", ".", "seed", "(", "problem", "[", "'seed'", "]", ")", "n_params", "=", "len", "(", "parameter_names", ")", "def", "like", "(", "cube", ")", ":", "cube", "=", "numpy", ".", "array", "(", "cube", ")", "if", "(", "cube", "<=", "1e-10", ")", ".", "any", "(", ")", "or", "(", "cube", ">=", "1", "-", "1e-10", ")", ".", "any", "(", ")", ":", "return", "-", "1e100", "params", "=", "transform", "(", "cube", ")", "return", "loglikelihood", "(", "params", ")", "start", "=", "start", "+", "numpy", ".", "zeros", "(", "n_params", ")", "stdevs", "=", "stdevs", "+", "numpy", ".", "zeros", "(", "n_params", ")", "def", "compute_stepwidths", "(", "chain", ")", ":", "return", "numpy", ".", "std", "(", "chain", ",", "axis", "=", "0", ")", "/", "3", "import", "matplotlib", ".", "pyplot", "as", "plt", "plt", ".", "figure", "(", "figsize", "=", "(", "7", ",", "7", ")", ")", "steps", "=", "numpy", ".", "array", "(", "[", "0.1", "]", "*", "(", "n_params", ")", ")", "print", "'burn-in (1/2)...'", "chain", ",", "prob", ",", "_", ",", "steps_", "=", "mcmc_advance", "(", "start", ",", "steps", ",", "like", ",", "nsteps", "=", "nburn", "/", "2", ",", "adapt", "=", "True", ")", "steps", "=", "compute_stepwidths", "(", "chain", ")", "print", "'burn-in (2/2)...'", "chain", ",", "prob", ",", "_", ",", "steps_", "=", "mcmc_advance", "(", "chain", "[", "-", "1", "]", ",", "steps", ",", "like", ",", "nsteps", "=", "nburn", "/", "2", ",", "adapt", "=", "True", ")", "steps", "=", "compute_stepwidths", "(", "chain", ")", "print", "'recording chain ...'", "chain", ",", "prob", ",", "_", ",", "steps_", "=", "mcmc_advance", "(", "chain", "[", "-", "1", "]", ",", "steps", ",", "like", ",", "nsteps", "=", "nsteps", ")", "chain", "=", "numpy", ".", "array", "(", "chain", ")", "i", "=", "numpy", ".", "argmax", "(", "prob", ")", "final", "=", "chain", "[", "-", "1", "]", "print", "'postprocessing...'", "chain", "=", "numpy", ".", "array", "(", "[", "transform", "(", "params", ")", "for", "params", "in", "chain", "]", ")", "return", "dict", "(", "start", "=", "chain", "[", "-", "1", "]", ",", "maximum", "=", "chain", "[", "i", "]", ",", "seeds", "=", "[", "final", ",", "chain", "[", "i", "]", "]", ",", "chain", "=", "chain", ",", "method", "=", "'Metropolis MCMC'", ")" ]
**Metropolis Hastings MCMC** with automatic step width adaption. Burnin period is also used to guess steps. :param nburn: number of burnin steps :param stdevs: step widths to start with
[ "**", "Metropolis", "Hastings", "MCMC", "**" ]
11b721ea001625ad7820f71ff684723c71216646
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/mcmc.py#L65-L112
valid
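The `like` closure in this record is the unit-cube pattern: the sampler walks in `[0, 1]^n`, `transform` maps coordinates onto physical priors, and points outside the cube get a `-1e100` wall. A sketch with two invented priors:

```python
import numpy as np

def transform(cube):
    # Invented priors for illustration: coordinate 0 uniform on [10, 20],
    # coordinate 1 log-uniform on [1, 1e4].
    return np.array([10 + 10 * cube[0], 10 ** (4 * cube[1])])

def make_like(loglikelihood):
    def like(cube):
        cube = np.asarray(cube)
        if (cube <= 1e-10).any() or (cube >= 1 - 1e-10).any():
            return -1e100  # hard wall keeps the walker inside the cube
        return loglikelihood(transform(cube))
    return like

like = make_like(lambda params: -0.5 * ((params[0] - 15) / 2) ** 2)
print(like(np.array([0.5, 0.5])))  # params = [15.0, 100.0] -> 0.0
```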
JohannesBuchner/jbopt
jbopt/mcmc.py
ensemble
def ensemble(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, start=0.5, **problem): """ **Ensemble MCMC** via `emcee <http://dan.iel.fm/emcee/>`_ """ import emcee import progressbar if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) nwalkers = 50 + n_params * 2 if nwalkers > 200: nwalkers = 200 p0 = [numpy.random.rand(n_params) for i in xrange(nwalkers)] start = start + numpy.zeros(n_params) p0[0] = start def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) sampler = emcee.EnsembleSampler(nwalkers, n_params, like, live_dangerously=True) print 'burn-in...' pos, prob, state = sampler.run_mcmc(p0, nburn / nwalkers) # Reset the chain to remove the burn-in samples. sampler.reset() print 'running ...' # Starting from the final position in the burn-in chain, sample pbar = progressbar.ProgressBar( widgets=[progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()], maxval=nsteps).start() for results in sampler.sample(pos, iterations=nsteps / nwalkers, rstate0=state): pbar.update(pbar.currval + 1) pbar.finish() print "Mean acceptance fraction:", numpy.mean(sampler.acceptance_fraction) chain = sampler.flatchain final = chain[-1] print 'postprocessing...' chain_post = numpy.array([transform(params) for params in chain]) chain_prob = sampler.flatlnprobability return dict(start=final, chain=chain_post, chain_prior=chain, chain_prob=chain_prob, method='Ensemble MCMC')
python
def ensemble(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, start=0.5, **problem): """ **Ensemble MCMC** via `emcee <http://dan.iel.fm/emcee/>`_ """ import emcee import progressbar if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) nwalkers = 50 + n_params * 2 if nwalkers > 200: nwalkers = 200 p0 = [numpy.random.rand(n_params) for i in xrange(nwalkers)] start = start + numpy.zeros(n_params) p0[0] = start def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) sampler = emcee.EnsembleSampler(nwalkers, n_params, like, live_dangerously=True) print 'burn-in...' pos, prob, state = sampler.run_mcmc(p0, nburn / nwalkers) # Reset the chain to remove the burn-in samples. sampler.reset() print 'running ...' # Starting from the final position in the burn-in chain, sample pbar = progressbar.ProgressBar( widgets=[progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()], maxval=nsteps).start() for results in sampler.sample(pos, iterations=nsteps / nwalkers, rstate0=state): pbar.update(pbar.currval + 1) pbar.finish() print "Mean acceptance fraction:", numpy.mean(sampler.acceptance_fraction) chain = sampler.flatchain final = chain[-1] print 'postprocessing...' chain_post = numpy.array([transform(params) for params in chain]) chain_prob = sampler.flatlnprobability return dict(start=final, chain=chain_post, chain_prior=chain, chain_prob=chain_prob, method='Ensemble MCMC')
[ "def", "ensemble", "(", "transform", ",", "loglikelihood", ",", "parameter_names", ",", "nsteps", "=", "40000", ",", "nburn", "=", "400", ",", "start", "=", "0.5", ",", "*", "*", "problem", ")", ":", "import", "emcee", "import", "progressbar", "if", "'seed'", "in", "problem", ":", "numpy", ".", "random", ".", "seed", "(", "problem", "[", "'seed'", "]", ")", "n_params", "=", "len", "(", "parameter_names", ")", "nwalkers", "=", "50", "+", "n_params", "*", "2", "if", "nwalkers", ">", "200", ":", "nwalkers", "=", "200", "p0", "=", "[", "numpy", ".", "random", ".", "rand", "(", "n_params", ")", "for", "i", "in", "xrange", "(", "nwalkers", ")", "]", "start", "=", "start", "+", "numpy", ".", "zeros", "(", "n_params", ")", "p0", "[", "0", "]", "=", "start", "def", "like", "(", "cube", ")", ":", "cube", "=", "numpy", ".", "array", "(", "cube", ")", "if", "(", "cube", "<=", "1e-10", ")", ".", "any", "(", ")", "or", "(", "cube", ">=", "1", "-", "1e-10", ")", ".", "any", "(", ")", ":", "return", "-", "1e100", "params", "=", "transform", "(", "cube", ")", "return", "loglikelihood", "(", "params", ")", "sampler", "=", "emcee", ".", "EnsembleSampler", "(", "nwalkers", ",", "n_params", ",", "like", ",", "live_dangerously", "=", "True", ")", "print", "'burn-in...'", "pos", ",", "prob", ",", "state", "=", "sampler", ".", "run_mcmc", "(", "p0", ",", "nburn", "/", "nwalkers", ")", "# Reset the chain to remove the burn-in samples.", "sampler", ".", "reset", "(", ")", "print", "'running ...'", "# Starting from the final position in the burn-in chain, sample", "pbar", "=", "progressbar", ".", "ProgressBar", "(", "widgets", "=", "[", "progressbar", ".", "Percentage", "(", ")", ",", "progressbar", ".", "Counter", "(", "'%5d'", ")", ",", "progressbar", ".", "Bar", "(", ")", ",", "progressbar", ".", "ETA", "(", ")", "]", ",", "maxval", "=", "nsteps", ")", ".", "start", "(", ")", "for", "results", "in", "sampler", ".", "sample", "(", "pos", ",", "iterations", "=", "nsteps", "/", "nwalkers", ",", "rstate0", "=", "state", ")", ":", "pbar", ".", "update", "(", "pbar", ".", "currval", "+", "1", ")", "pbar", ".", "finish", "(", ")", "print", "\"Mean acceptance fraction:\"", ",", "numpy", ".", "mean", "(", "sampler", ".", "acceptance_fraction", ")", "chain", "=", "sampler", ".", "flatchain", "final", "=", "chain", "[", "-", "1", "]", "print", "'postprocessing...'", "chain_post", "=", "numpy", ".", "array", "(", "[", "transform", "(", "params", ")", "for", "params", "in", "chain", "]", ")", "chain_prob", "=", "sampler", ".", "flatlnprobability", "return", "dict", "(", "start", "=", "final", ",", "chain", "=", "chain_post", ",", "chain_prior", "=", "chain", ",", "chain_prob", "=", "chain_prob", ",", "method", "=", "'Ensemble MCMC'", ")" ]
**Ensemble MCMC** via `emcee <http://dan.iel.fm/emcee/>`_
[ "**", "Ensemble", "MCMC", "**", "via", "emcee", "<http", ":", "//", "dan", ".", "iel", ".", "fm", "/", "emcee", "/", ">", "_" ]
11b721ea001625ad7820f71ff684723c71216646
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/mcmc.py#L114-L170
valid
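The record targets the emcee 2 API (`sampler.sample(..., rstate0=...)`, `flatchain`, `flatlnprobability`) and Python 2's `xrange`. A sketch of the same burn-in/reset/run shape against the emcee 3 API, with a toy log-probability:

```python
import numpy as np
import emcee

def log_prob(cube):
    cube = np.asarray(cube)
    if (cube <= 0).any() or (cube >= 1).any():
        return -np.inf
    return -((cube - 0.5) ** 2).sum() / 0.02

ndim = 2
nwalkers = 50 + ndim * 2           # the record's walker heuristic
p0 = np.random.rand(nwalkers, ndim)

sampler = emcee.EnsembleSampler(nwalkers, ndim, log_prob)
state = sampler.run_mcmc(p0, 200)  # burn-in
sampler.reset()                    # drop burn-in samples, as in the record
sampler.run_mcmc(state, 1000)

print('mean acceptance fraction:', sampler.acceptance_fraction.mean())
chain = sampler.get_chain(flat=True)  # emcee 3's replacement for flatchain
```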
telminov/park-worker-base
parkworker/task_processor.py
TaskProcessor._get_classes
def _get_classes(package_name, base_class): """ search monits or works classes. Class must have 'name' attribute :param package_name: 'monits' or 'works' :param base_class: Monit or Work :return: tuple of tuples monit/work-name and class """ classes = {} base_dir = os.getcwd() root_module_name = base_dir.split('/')[-1] package_dir = base_dir + '/%s' % package_name if os.path.isdir(package_dir): for module_path in os.listdir(package_dir): if not module_path.endswith('.py'): continue module_name = os.path.splitext(module_path)[0] module_full_name = '%s.%s.%s' % (root_module_name, package_name, module_name) __import__(module_full_name) work_module = sys.modules[module_full_name] for module_item in work_module.__dict__.values(): if type(module_item) is type \ and issubclass(module_item, base_class) \ and module_item is not base_class\ and hasattr(module_item, 'name') and module_item.name: classes.setdefault(module_item.name, []).append(module_item) # check no duplicated names for work_name, work_modules in classes.items(): if len(work_modules) > 1: raise DuplicatedNameException('Modules %s have same name "%s"' % ( ' and '.join(map(str, work_modules)), work_name )) # create immutable list of modules return tuple([(work_name, work_modules[0]) for work_name, work_modules in classes.items()])
python
def _get_classes(package_name, base_class): """ search monits or works classes. Class must have 'name' attribute :param package_name: 'monits' or 'works' :param base_class: Monit or Work :return: tuple of tuples monit/work-name and class """ classes = {} base_dir = os.getcwd() root_module_name = base_dir.split('/')[-1] package_dir = base_dir + '/%s' % package_name if os.path.isdir(package_dir): for module_path in os.listdir(package_dir): if not module_path.endswith('.py'): continue module_name = os.path.splitext(module_path)[0] module_full_name = '%s.%s.%s' % (root_module_name, package_name, module_name) __import__(module_full_name) work_module = sys.modules[module_full_name] for module_item in work_module.__dict__.values(): if type(module_item) is type \ and issubclass(module_item, base_class) \ and module_item is not base_class\ and hasattr(module_item, 'name') and module_item.name: classes.setdefault(module_item.name, []).append(module_item) # check no duplicated names for work_name, work_modules in classes.items(): if len(work_modules) > 1: raise DuplicatedNameException('Modules %s have same name "%s"' % ( ' and '.join(map(str, work_modules)), work_name )) # create immutable list of modules return tuple([(work_name, work_modules[0]) for work_name, work_modules in classes.items()])
[ "def", "_get_classes", "(", "package_name", ",", "base_class", ")", ":", "classes", "=", "{", "}", "base_dir", "=", "os", ".", "getcwd", "(", ")", "root_module_name", "=", "base_dir", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "package_dir", "=", "base_dir", "+", "'/%s'", "%", "package_name", "if", "os", ".", "path", ".", "isdir", "(", "package_dir", ")", ":", "for", "module_path", "in", "os", ".", "listdir", "(", "package_dir", ")", ":", "if", "not", "module_path", ".", "endswith", "(", "'.py'", ")", ":", "continue", "module_name", "=", "os", ".", "path", ".", "splitext", "(", "module_path", ")", "[", "0", "]", "module_full_name", "=", "'%s.%s.%s'", "%", "(", "root_module_name", ",", "package_name", ",", "module_name", ")", "__import__", "(", "module_full_name", ")", "work_module", "=", "sys", ".", "modules", "[", "module_full_name", "]", "for", "module_item", "in", "work_module", ".", "__dict__", ".", "values", "(", ")", ":", "if", "type", "(", "module_item", ")", "is", "type", "and", "issubclass", "(", "module_item", ",", "base_class", ")", "and", "module_item", "is", "not", "base_class", "and", "hasattr", "(", "module_item", ",", "'name'", ")", "and", "module_item", ".", "name", ":", "classes", ".", "setdefault", "(", "module_item", ".", "name", ",", "[", "]", ")", ".", "append", "(", "module_item", ")", "# check no duplicated names", "for", "work_name", ",", "work_modules", "in", "classes", ".", "items", "(", ")", ":", "if", "len", "(", "work_modules", ")", ">", "1", ":", "raise", "DuplicatedNameException", "(", "'Modules %s have same name \"%s\"'", "%", "(", "' and '", ".", "join", "(", "map", "(", "str", ",", "work_modules", ")", ")", ",", "work_name", ")", ")", "# create immutable list of modules", "return", "tuple", "(", "[", "(", "work_name", ",", "work_modules", "[", "0", "]", ")", "for", "work_name", ",", "work_modules", "in", "classes", ".", "items", "(", ")", "]", ")" ]
search monits or works classes. Class must have 'name' attribute :param package_name: 'monits' or 'works' :param base_class: Monit or Work :return: tuple of tuples monit/work-name and class
[ "search", "monits", "or", "works", "classes", ".", "Class", "must", "have", "name", "attribute", ":", "param", "package_name", ":", "monits", "or", "works", ":", "param", "base_class", ":", "Monit", "or", "Work", ":", "return", ":", "tuple", "of", "tuples", "monit", "/", "work", "-", "name", "and", "class" ]
35fa90939b68bd47f1c82ba49fc80d419a079964
https://github.com/telminov/park-worker-base/blob/35fa90939b68bd47f1c82ba49fc80d419a079964/parkworker/task_processor.py#L90-L127
valid
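`_get_classes` is plugin discovery: import every module in a package, keep subclasses of a base class that define a truthy `name`, and refuse duplicate names. A sketch of the same contract on `importlib`/`pkgutil` rather than path munging and bare `__import__`:

```python
import importlib
import pkgutil

class Work:
    name = None  # plugin subclasses must set a unique, truthy name

def discover(package_name, base_class=Work):
    # Same contract as _get_classes: name -> class, duplicates rejected.
    found = {}
    package = importlib.import_module(package_name)
    for info in pkgutil.iter_modules(package.__path__):
        module = importlib.import_module(f'{package_name}.{info.name}')
        for obj in vars(module).values():
            if (isinstance(obj, type) and issubclass(obj, base_class)
                    and obj is not base_class and obj.name):
                if obj.name in found:
                    raise RuntimeError(f'duplicate name {obj.name!r}')
                found[obj.name] = obj
    return tuple(found.items())
```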
ibelie/typy
typy/google/protobuf/descriptor_pool.py
DescriptorPool.AddEnumDescriptor
def AddEnumDescriptor(self, enum_desc): """Adds an EnumDescriptor to the pool. This method also registers the FileDescriptor associated with the message. Args: enum_desc: An EnumDescriptor. """ if not isinstance(enum_desc, descriptor.EnumDescriptor): raise TypeError('Expected instance of descriptor.EnumDescriptor.') self._enum_descriptors[enum_desc.full_name] = enum_desc self.AddFileDescriptor(enum_desc.file)
python
def AddEnumDescriptor(self, enum_desc): """Adds an EnumDescriptor to the pool. This method also registers the FileDescriptor associated with the message. Args: enum_desc: An EnumDescriptor. """ if not isinstance(enum_desc, descriptor.EnumDescriptor): raise TypeError('Expected instance of descriptor.EnumDescriptor.') self._enum_descriptors[enum_desc.full_name] = enum_desc self.AddFileDescriptor(enum_desc.file)
[ "def", "AddEnumDescriptor", "(", "self", ",", "enum_desc", ")", ":", "if", "not", "isinstance", "(", "enum_desc", ",", "descriptor", ".", "EnumDescriptor", ")", ":", "raise", "TypeError", "(", "'Expected instance of descriptor.EnumDescriptor.'", ")", "self", ".", "_enum_descriptors", "[", "enum_desc", ".", "full_name", "]", "=", "enum_desc", "self", ".", "AddFileDescriptor", "(", "enum_desc", ".", "file", ")" ]
Adds an EnumDescriptor to the pool. This method also registers the FileDescriptor associated with the message. Args: enum_desc: An EnumDescriptor.
[ "Adds", "an", "EnumDescriptor", "to", "the", "pool", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L150-L163
valid
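Registration indexes the enum by full name and pulls its file into the pool. A self-contained sketch that builds a `FileDescriptorProto`, adds it to a fresh pool, and reads the enum back; the `demo.Color` names are invented:

```python
from google.protobuf import descriptor_pb2, descriptor_pool

pool = descriptor_pool.DescriptorPool()

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'demo.proto'
file_proto.package = 'demo'
enum_proto = file_proto.enum_type.add()
enum_proto.name = 'Color'
enum_proto.value.add(name='RED', number=0)
enum_proto.value.add(name='GREEN', number=1)

pool.Add(file_proto)  # indexes the enum so the lookup below succeeds
color = pool.FindEnumTypeByName('demo.Color')
print(color.values_by_name['GREEN'].number)  # -> 1
```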
ibelie/typy
typy/google/protobuf/descriptor_pool.py
DescriptorPool.FindFileContainingSymbol
def FindFileContainingSymbol(self, symbol): """Gets the FileDescriptor for the file containing the specified symbol. Args: symbol: The name of the symbol to search for. Returns: A FileDescriptor that contains the specified symbol. Raises: KeyError: if the file can not be found in the pool. """ symbol = _NormalizeFullyQualifiedName(symbol) try: return self._descriptors[symbol].file except KeyError: pass try: return self._enum_descriptors[symbol].file except KeyError: pass try: file_proto = self._internal_db.FindFileContainingSymbol(symbol) except KeyError as error: if self._descriptor_db: file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) else: raise error if not file_proto: raise KeyError('Cannot find a file containing %s' % symbol) return self._ConvertFileProtoToFileDescriptor(file_proto)
python
def FindFileContainingSymbol(self, symbol): """Gets the FileDescriptor for the file containing the specified symbol. Args: symbol: The name of the symbol to search for. Returns: A FileDescriptor that contains the specified symbol. Raises: KeyError: if the file can not be found in the pool. """ symbol = _NormalizeFullyQualifiedName(symbol) try: return self._descriptors[symbol].file except KeyError: pass try: return self._enum_descriptors[symbol].file except KeyError: pass try: file_proto = self._internal_db.FindFileContainingSymbol(symbol) except KeyError as error: if self._descriptor_db: file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) else: raise error if not file_proto: raise KeyError('Cannot find a file containing %s' % symbol) return self._ConvertFileProtoToFileDescriptor(file_proto)
[ "def", "FindFileContainingSymbol", "(", "self", ",", "symbol", ")", ":", "symbol", "=", "_NormalizeFullyQualifiedName", "(", "symbol", ")", "try", ":", "return", "self", ".", "_descriptors", "[", "symbol", "]", ".", "file", "except", "KeyError", ":", "pass", "try", ":", "return", "self", ".", "_enum_descriptors", "[", "symbol", "]", ".", "file", "except", "KeyError", ":", "pass", "try", ":", "file_proto", "=", "self", ".", "_internal_db", ".", "FindFileContainingSymbol", "(", "symbol", ")", "except", "KeyError", "as", "error", ":", "if", "self", ".", "_descriptor_db", ":", "file_proto", "=", "self", ".", "_descriptor_db", ".", "FindFileContainingSymbol", "(", "symbol", ")", "else", ":", "raise", "error", "if", "not", "file_proto", ":", "raise", "KeyError", "(", "'Cannot find a file containing %s'", "%", "symbol", ")", "return", "self", ".", "_ConvertFileProtoToFileDescriptor", "(", "file_proto", ")" ]
Gets the FileDescriptor for the file containing the specified symbol. Args: symbol: The name of the symbol to search for. Returns: A FileDescriptor that contains the specified symbol. Raises: KeyError: if the file can not be found in the pool.
[ "Gets", "the", "FileDescriptor", "for", "the", "file", "containing", "the", "specified", "symbol", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L208-L241
valid
ibelie/typy
typy/google/protobuf/descriptor_pool.py
DescriptorPool.FindMessageTypeByName
def FindMessageTypeByName(self, full_name): """Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._descriptors: self.FindFileContainingSymbol(full_name) return self._descriptors[full_name]
python
def FindMessageTypeByName(self, full_name): """Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._descriptors: self.FindFileContainingSymbol(full_name) return self._descriptors[full_name]
[ "def", "FindMessageTypeByName", "(", "self", ",", "full_name", ")", ":", "full_name", "=", "_NormalizeFullyQualifiedName", "(", "full_name", ")", "if", "full_name", "not", "in", "self", ".", "_descriptors", ":", "self", ".", "FindFileContainingSymbol", "(", "full_name", ")", "return", "self", ".", "_descriptors", "[", "full_name", "]" ]
Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type.
[ "Loads", "the", "named", "descriptor", "from", "the", "pool", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L243-L256
valid
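The lookup is lazy: a miss falls through to `FindFileContainingSymbol`, which loads and indexes the containing file. A quick demonstration against the default pool, using a well-known type so the file is guaranteed to be available:

```python
from google.protobuf import descriptor_pool
from google.protobuf import timestamp_pb2  # noqa: F401 -- registers Timestamp

pool = descriptor_pool.Default()
desc = pool.FindMessageTypeByName('google.protobuf.Timestamp')
print(desc.file.name)                 # google/protobuf/timestamp.proto
print([f.name for f in desc.fields])  # ['seconds', 'nanos']
```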
ibelie/typy
typy/google/protobuf/descriptor_pool.py
DescriptorPool.FindEnumTypeByName
def FindEnumTypeByName(self, full_name): """Loads the named enum descriptor from the pool. Args: full_name: The full name of the enum descriptor to load. Returns: The enum descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._enum_descriptors: self.FindFileContainingSymbol(full_name) return self._enum_descriptors[full_name]
python
def FindEnumTypeByName(self, full_name): """Loads the named enum descriptor from the pool. Args: full_name: The full name of the enum descriptor to load. Returns: The enum descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._enum_descriptors: self.FindFileContainingSymbol(full_name) return self._enum_descriptors[full_name]
[ "def", "FindEnumTypeByName", "(", "self", ",", "full_name", ")", ":", "full_name", "=", "_NormalizeFullyQualifiedName", "(", "full_name", ")", "if", "full_name", "not", "in", "self", ".", "_enum_descriptors", ":", "self", ".", "FindFileContainingSymbol", "(", "full_name", ")", "return", "self", ".", "_enum_descriptors", "[", "full_name", "]" ]
Loads the named enum descriptor from the pool. Args: full_name: The full name of the enum descriptor to load. Returns: The enum descriptor for the named type.
[ "Loads", "the", "named", "enum", "descriptor", "from", "the", "pool", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L258-L271
valid
ibelie/typy
typy/google/protobuf/descriptor_pool.py
DescriptorPool.FindExtensionByName
def FindExtensionByName(self, full_name): """Loads the named extension descriptor from the pool. Args: full_name: The full name of the extension descriptor to load. Returns: A FieldDescriptor, describing the named extension. """ full_name = _NormalizeFullyQualifiedName(full_name) message_name, _, extension_name = full_name.rpartition('.') try: # Most extensions are nested inside a message. scope = self.FindMessageTypeByName(message_name) except KeyError: # Some extensions are defined at file scope. scope = self.FindFileContainingSymbol(full_name) return scope.extensions_by_name[extension_name]
python
def FindExtensionByName(self, full_name): """Loads the named extension descriptor from the pool. Args: full_name: The full name of the extension descriptor to load. Returns: A FieldDescriptor, describing the named extension. """ full_name = _NormalizeFullyQualifiedName(full_name) message_name, _, extension_name = full_name.rpartition('.') try: # Most extensions are nested inside a message. scope = self.FindMessageTypeByName(message_name) except KeyError: # Some extensions are defined at file scope. scope = self.FindFileContainingSymbol(full_name) return scope.extensions_by_name[extension_name]
[ "def", "FindExtensionByName", "(", "self", ",", "full_name", ")", ":", "full_name", "=", "_NormalizeFullyQualifiedName", "(", "full_name", ")", "message_name", ",", "_", ",", "extension_name", "=", "full_name", ".", "rpartition", "(", "'.'", ")", "try", ":", "# Most extensions are nested inside a message.", "scope", "=", "self", ".", "FindMessageTypeByName", "(", "message_name", ")", "except", "KeyError", ":", "# Some extensions are defined at file scope.", "scope", "=", "self", ".", "FindFileContainingSymbol", "(", "full_name", ")", "return", "scope", ".", "extensions_by_name", "[", "extension_name", "]" ]
Loads the named extension descriptor from the pool. Args: full_name: The full name of the extension descriptor to load. Returns: A FieldDescriptor, describing the named extension.
[ "Loads", "the", "named", "extension", "descriptor", "from", "the", "pool", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L287-L304
valid
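The scope resolution here rides on `str.rpartition`: everything before the last dot is first tried as a containing message, and a `KeyError` sends the lookup to file scope. Illustrated with an invented name:

```python
# rpartition splits on the *last* dot, so nested extension names resolve to
# their containing message first; 'demo.Outer.ext_field' is hypothetical.
full_name = 'demo.Outer.ext_field'
message_name, _, extension_name = full_name.rpartition('.')
print(message_name)    # demo.Outer -- candidate containing message
print(extension_name)  # ext_field

# For a file-scope extension like 'demo.top_level_ext', the message lookup on
# 'demo' (a package, not a message) raises KeyError, and the except branch
# resolves the containing file instead.
```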
ibelie/typy
typy/google/protobuf/descriptor_pool.py
DescriptorPool._ConvertEnumDescriptor
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, containing_type=None, scope=None): """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor """ if package: enum_name = '.'.join((package, enum_proto.name)) else: enum_name = enum_proto.name if file_desc is None: file_name = None else: file_name = file_desc.name values = [self._MakeEnumValueDescriptor(value, index) for index, value in enumerate(enum_proto.value)] desc = descriptor.EnumDescriptor(name=enum_proto.name, full_name=enum_name, filename=file_name, file=file_desc, values=values, containing_type=containing_type, options=enum_proto.options) scope['.%s' % enum_name] = desc self._enum_descriptors[enum_name] = desc return desc
python
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, containing_type=None, scope=None): """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor """ if package: enum_name = '.'.join((package, enum_proto.name)) else: enum_name = enum_proto.name if file_desc is None: file_name = None else: file_name = file_desc.name values = [self._MakeEnumValueDescriptor(value, index) for index, value in enumerate(enum_proto.value)] desc = descriptor.EnumDescriptor(name=enum_proto.name, full_name=enum_name, filename=file_name, file=file_desc, values=values, containing_type=containing_type, options=enum_proto.options) scope['.%s' % enum_name] = desc self._enum_descriptors[enum_name] = desc return desc
[ "def", "_ConvertEnumDescriptor", "(", "self", ",", "enum_proto", ",", "package", "=", "None", ",", "file_desc", "=", "None", ",", "containing_type", "=", "None", ",", "scope", "=", "None", ")", ":", "if", "package", ":", "enum_name", "=", "'.'", ".", "join", "(", "(", "package", ",", "enum_proto", ".", "name", ")", ")", "else", ":", "enum_name", "=", "enum_proto", ".", "name", "if", "file_desc", "is", "None", ":", "file_name", "=", "None", "else", ":", "file_name", "=", "file_desc", ".", "name", "values", "=", "[", "self", ".", "_MakeEnumValueDescriptor", "(", "value", ",", "index", ")", "for", "index", ",", "value", "in", "enumerate", "(", "enum_proto", ".", "value", ")", "]", "desc", "=", "descriptor", ".", "EnumDescriptor", "(", "name", "=", "enum_proto", ".", "name", ",", "full_name", "=", "enum_name", ",", "filename", "=", "file_name", ",", "file", "=", "file_desc", ",", "values", "=", "values", ",", "containing_type", "=", "containing_type", ",", "options", "=", "enum_proto", ".", "options", ")", "scope", "[", "'.%s'", "%", "enum_name", "]", "=", "desc", "self", ".", "_enum_descriptors", "[", "enum_name", "]", "=", "desc", "return", "desc" ]
Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor
[ "Make", "a", "protobuf", "EnumDescriptor", "given", "an", "EnumDescriptorProto", "protobuf", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L482-L518
valid
ibelie/typy
typy/google/protobuf/descriptor_pool.py
DescriptorPool._MakeFieldDescriptor
def _MakeFieldDescriptor(self, field_proto, message_name, index, is_extension=False): """Creates a field descriptor from a FieldDescriptorProto. For message and enum type fields, this method will do a look up in the pool for the appropriate descriptor for that type. If it is unavailable, it will fall back to the _source function to create it. If this type is still unavailable, construction will fail. Args: field_proto: The proto describing the field. message_name: The name of the containing message. index: Index of the field is_extension: Indication that this field is for an extension. Returns: An initialized FieldDescriptor object """ if message_name: full_name = '.'.join((message_name, field_proto.name)) else: full_name = field_proto.name return descriptor.FieldDescriptor( name=field_proto.name, full_name=full_name, index=index, number=field_proto.number, type=field_proto.type, cpp_type=None, message_type=None, enum_type=None, containing_type=None, label=field_proto.label, has_default_value=False, default_value=None, is_extension=is_extension, extension_scope=None, options=field_proto.options)
python
def _MakeFieldDescriptor(self, field_proto, message_name, index, is_extension=False): """Creates a field descriptor from a FieldDescriptorProto. For message and enum type fields, this method will do a look up in the pool for the appropriate descriptor for that type. If it is unavailable, it will fall back to the _source function to create it. If this type is still unavailable, construction will fail. Args: field_proto: The proto describing the field. message_name: The name of the containing message. index: Index of the field is_extension: Indication that this field is for an extension. Returns: An initialized FieldDescriptor object """ if message_name: full_name = '.'.join((message_name, field_proto.name)) else: full_name = field_proto.name return descriptor.FieldDescriptor( name=field_proto.name, full_name=full_name, index=index, number=field_proto.number, type=field_proto.type, cpp_type=None, message_type=None, enum_type=None, containing_type=None, label=field_proto.label, has_default_value=False, default_value=None, is_extension=is_extension, extension_scope=None, options=field_proto.options)
[ "def", "_MakeFieldDescriptor", "(", "self", ",", "field_proto", ",", "message_name", ",", "index", ",", "is_extension", "=", "False", ")", ":", "if", "message_name", ":", "full_name", "=", "'.'", ".", "join", "(", "(", "message_name", ",", "field_proto", ".", "name", ")", ")", "else", ":", "full_name", "=", "field_proto", ".", "name", "return", "descriptor", ".", "FieldDescriptor", "(", "name", "=", "field_proto", ".", "name", ",", "full_name", "=", "full_name", ",", "index", "=", "index", ",", "number", "=", "field_proto", ".", "number", ",", "type", "=", "field_proto", ".", "type", ",", "cpp_type", "=", "None", ",", "message_type", "=", "None", ",", "enum_type", "=", "None", ",", "containing_type", "=", "None", ",", "label", "=", "field_proto", ".", "label", ",", "has_default_value", "=", "False", ",", "default_value", "=", "None", ",", "is_extension", "=", "is_extension", ",", "extension_scope", "=", "None", ",", "options", "=", "field_proto", ".", "options", ")" ]
Creates a field descriptor from a FieldDescriptorProto. For message and enum type fields, this method will do a look up in the pool for the appropriate descriptor for that type. If it is unavailable, it will fall back to the _source function to create it. If this type is still unavailable, construction will fail. Args: field_proto: The proto describing the field. message_name: The name of the containing message. index: Index of the field is_extension: Indication that this field is for an extension. Returns: An initialized FieldDescriptor object
[ "Creates", "a", "field", "descriptor", "from", "a", "FieldDescriptorProto", "." ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L520-L560
valid
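A minimal usage sketch for the row above. The pool instance, package name, and field definition are assumptions, and _MakeFieldDescriptor is a private helper, so this is illustrative rather than a supported API:

from google.protobuf import descriptor_pb2

# Hypothetical FieldDescriptorProto for an optional int32 field named "id".
field_proto = descriptor_pb2.FieldDescriptorProto(
    name='id',
    number=1,
    type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
    label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)

# `pool` is assumed to be a DescriptorPool instance exposing this helper.
field_desc = pool._MakeFieldDescriptor(field_proto, 'my.package.Message', 0)
print(field_desc.full_name)  # my.package.Message.id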
charlesthomas/proauth2
proauth2/data_stores/validate.py
validate
def validate( table, **data ):
    '''
    theoretically, any data store can be implemented to work with this package,
    which means basic data validation must be done in-package, so that weird
    stuff can't be stored in the data store.
    this function raises an exception if an invalid table name is passed, not
    all of the required fields are in the data kwargs, or if a field that was
    passed is not expected.
    it also returns the key field name, for ensuring uniqueness (again, that
    may not be built into whatever data store is implemented.)
    '''
    if table not in good.keys():
        raise Proauth2Error( 'invalid_request', 'invalid name: %s' % table )
    for req in good[table]['required']:
        if not data.get( req, None ):
            raise Proauth2Error( 'invalid_request',
                                 'missing required field: %s' % req )
    for key in data.keys():
        if key not in good[table]['required'] and \
           key not in good[table]['optional']:
            raise Proauth2Error( 'invalid_request', 'invalid field: %s' % key )
    return good[table]['key']
python
def validate( table, **data ):
    '''
    theoretically, any data store can be implemented to work with this package,
    which means basic data validation must be done in-package, so that weird
    stuff can't be stored in the data store.
    this function raises an exception if an invalid table name is passed, not
    all of the required fields are in the data kwargs, or if a field that was
    passed is not expected.
    it also returns the key field name, for ensuring uniqueness (again, that
    may not be built into whatever data store is implemented.)
    '''
    if table not in good.keys():
        raise Proauth2Error( 'invalid_request', 'invalid name: %s' % table )
    for req in good[table]['required']:
        if not data.get( req, None ):
            raise Proauth2Error( 'invalid_request',
                                 'missing required field: %s' % req )
    for key in data.keys():
        if key not in good[table]['required'] and \
           key not in good[table]['optional']:
            raise Proauth2Error( 'invalid_request', 'invalid field: %s' % key )
    return good[table]['key']
[ "def", "validate", "(", "table", ",", "*", "*", "data", ")", ":", "if", "table", "not", "in", "good", ".", "keys", "(", ")", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'invalid name: %s'", "%", "table", ")", "for", "req", "in", "good", "[", "table", "]", "[", "'required'", "]", ":", "if", "not", "data", ".", "get", "(", "req", ",", "None", ")", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'missing required field: %s'", "%", "req", ")", "for", "key", "in", "data", ".", "keys", "(", ")", ":", "if", "key", "not", "in", "good", "[", "table", "]", "[", "'required'", "]", "and", "key", "not", "in", "good", "[", "table", "]", "[", "'optional'", "]", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'invalid field: %s'", "%", "key", ")", "return", "good", "[", "table", "]", "[", "'key'", "]" ]
theoretically, any data store can be implemented to work with this package,
which means basic data validation must be done in-package, so that weird
stuff can't be stored in the data store.
this function raises an exception if an invalid table name is passed, not
all of the required fields are in the data kwargs, or if a field that was
passed is not expected.
it also returns the key field name, for ensuring uniqueness (again, that
may not be built into whatever data store is implemented.)
[ "theoretically", "any", "data", "store", "can", "be", "implemented", "to", "work", "with", "this", "package", "which", "means", "basic", "data", "validation", "must", "be", "done", "in", "-", "package", "so", "that", "weird", "stuff", "can", "t", "be", "stored", "in", "the", "data", "store", ".", "this", "function", "raises", "an", "exception", "if", "an", "invalid", "table", "name", "is", "passed", "not", "all", "of", "the", "required", "fields", "are", "in", "the", "data", "kwargs", "or", "if", "a", "field", "that", "was", "passed", "is", "not", "expected", ".", "it", "also", "returns", "the", "key", "field", "name", "for", "ensuring", "uniqueness", "(", "again", "that", "may", "not", "be", "built", "into", "whatever", "data", "store", "is", "impelemented", ".", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/data_stores/validate.py#L22-L43
valid
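A hedged sketch of calling validate(); the table name and field names below are assumptions, since the `good` schema dictionary is defined elsewhere in the module:

from proauth2.data_stores.validate import validate

# Table and field names here are made up; the real schema lives in `good`.
try:
    key_field = validate('accounts', name='demo', password='hunter2')
    print('unique key field:', key_field)
except Exception as e:  # Proauth2Error in the real package
    print('validation failed:', e)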
mallamanis/experimenter
experimenter/experimentlogger.py
ExperimentLogger.record_results
def record_results(self, results):
    """
    Record the results of this experiment, by updating the tag.
    :param results: A dictionary containing the results of the experiment.
    :type results: dict
    """
    repository = Repo(self.__repository_directory, search_parent_directories=True)
    for tag in repository.tags:
        if tag.name == self.__tag_name:
            tag_object = tag
            break
    else:
        raise Exception("Experiment tag has been deleted since experiment started")
    data = json.loads(tag_object.tag.message)
    data["results"] = results
    TagReference.create(repository, self.__tag_name, message=json.dumps(data),
                        ref=tag_object.tag.object, force=True)
    self.__results_recorded = True
python
def record_results(self, results):
    """
    Record the results of this experiment, by updating the tag.
    :param results: A dictionary containing the results of the experiment.
    :type results: dict
    """
    repository = Repo(self.__repository_directory, search_parent_directories=True)
    for tag in repository.tags:
        if tag.name == self.__tag_name:
            tag_object = tag
            break
    else:
        raise Exception("Experiment tag has been deleted since experiment started")
    data = json.loads(tag_object.tag.message)
    data["results"] = results
    TagReference.create(repository, self.__tag_name, message=json.dumps(data),
                        ref=tag_object.tag.object, force=True)
    self.__results_recorded = True
[ "def", "record_results", "(", "self", ",", "results", ")", ":", "repository", "=", "Repo", "(", "self", ".", "__repository_directory", ",", "search_parent_directories", "=", "True", ")", "for", "tag", "in", "repository", ".", "tags", ":", "if", "tag", ".", "name", "==", "self", ".", "__tag_name", ":", "tag_object", "=", "tag", "break", "else", ":", "raise", "Exception", "(", "\"Experiment tag has been deleted since experiment started\"", ")", "data", "=", "json", ".", "loads", "(", "tag_object", ".", "tag", ".", "message", ")", "data", "[", "\"results\"", "]", "=", "results", "TagReference", ".", "create", "(", "repository", ",", "self", ".", "__tag_name", ",", "message", "=", "json", ".", "dumps", "(", "data", ")", ",", "ref", "=", "tag_object", ".", "tag", ".", "object", ",", "force", "=", "True", ")", "self", ".", "__results_recorded", "=", "True" ]
Record the results of this experiment, by updating the tag.

:param results: A dictionary containing the results of the experiment.
:type results: dict
[ "Record", "the", "results", "of", "this", "experiment", "by", "updating", "the", "tag", ".", ":", "param", "results", ":", "A", "dictionary", "containing", "the", "results", "of", "the", "experiment", ".", ":", "type", "results", ":", "dict" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L46-L63
valid
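A usage sketch for record_results; the ExperimentLogger constructor call is an assumption inferred from the attributes this class uses, not a confirmed signature:

from experimenter.experimentlogger import ExperimentLogger

exp = ExperimentLogger('demo-run', {'learning_rate': 0.01})
# ... run the experiment ...
exp.record_results({'accuracy': 0.93, 'loss': 0.21})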
mallamanis/experimenter
experimenter/experimentlogger.py
ExperimentLogger.__tag_repo
def __tag_repo(self, data, repository):
    """
    Tag the current repository.
    :param data: a dictionary containing the data about the experiment
    :type data: dict
    """
    assert self.__tag_name not in [t.name for t in repository.tags]
    return TagReference.create(repository, self.__tag_name, message=json.dumps(data))
python
def __tag_repo(self, data, repository):
    """
    Tag the current repository.
    :param data: a dictionary containing the data about the experiment
    :type data: dict
    """
    assert self.__tag_name not in [t.name for t in repository.tags]
    return TagReference.create(repository, self.__tag_name, message=json.dumps(data))
[ "def", "__tag_repo", "(", "self", ",", "data", ",", "repository", ")", ":", "assert", "self", ".", "__tag_name", "not", "in", "[", "t", ".", "name", "for", "t", "in", "repository", ".", "tags", "]", "return", "TagReference", ".", "create", "(", "repository", ",", "self", ".", "__tag_name", ",", "message", "=", "json", ".", "dumps", "(", "data", ")", ")" ]
Tag the current repository.

:param data: a dictionary containing the data about the experiment
:type data: dict
[ "Tag", "the", "current", "repository", ".", ":", "param", "data", ":", "a", "dictionary", "containing", "the", "data", "about", "the", "experiment", ":", "type", "data", ":", "dict" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L80-L87
valid
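The tagging pattern above can be reproduced with GitPython directly. A minimal sketch, assuming it runs inside an existing repository with at least one commit, and with a made-up tag name:

import json
from git import Repo, TagReference

repo = Repo('.', search_parent_directories=True)
tag = TagReference.create(repo, 'exp-demo-0001',
                          message=json.dumps({'description': 'demo run'}))
# Annotated tags expose the stored JSON via the tag object's message.
print(json.loads(tag.tag.message))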
mallamanis/experimenter
experimenter/experimentlogger.py
ExperimentLogger.__get_files_to_be_added
def __get_files_to_be_added(self, repository):
    """
    :return: the files that have been modified and can be added
    """
    for root, dirs, files in os.walk(repository.working_dir):
        for f in files:
            relative_path = os.path.join(root, f)[len(repository.working_dir) + 1:]
            try:
                repository.head.commit.tree[relative_path]  # will fail if not tracked
                yield relative_path
            except:
                pass
python
def __get_files_to_be_added(self, repository):
    """
    :return: the files that have been modified and can be added
    """
    for root, dirs, files in os.walk(repository.working_dir):
        for f in files:
            relative_path = os.path.join(root, f)[len(repository.working_dir) + 1:]
            try:
                repository.head.commit.tree[relative_path]  # will fail if not tracked
                yield relative_path
            except:
                pass
[ "def", "__get_files_to_be_added", "(", "self", ",", "repository", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "repository", ".", "working_dir", ")", ":", "for", "f", "in", "files", ":", "relative_path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", "[", "len", "(", "repository", ".", "working_dir", ")", "+", "1", ":", "]", "try", ":", "repository", ".", "head", ".", "commit", ".", "tree", "[", "relative_path", "]", "# will fail if not tracked", "yield", "relative_path", "except", ":", "pass" ]
:return: the files that have been modified and can be added
[ ":", "return", ":", "the", "files", "that", "have", "been", "modified", "and", "can", "be", "added" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L89-L100
valid
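The generator above hinges on one GitPython behavior: indexing a commit tree with a relative path raises KeyError when the path is not tracked. A standalone sketch (the file name is an assumption):

from git import Repo

repo = Repo('.', search_parent_directories=True)
try:
    repo.head.commit.tree['README.md']  # raises KeyError if untracked
    print('tracked')
except KeyError:
    print('not tracked')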
mallamanis/experimenter
experimenter/experimentlogger.py
ExperimentLogger.__start_experiment
def __start_experiment(self, parameters):
    """
    Start an experiment by capturing the state of the code
    :param parameters: a dictionary containing the parameters of the experiment
    :type parameters: dict
    :return: the tag representing this experiment
    :rtype: TagReference
    """
    repository = Repo(self.__repository_directory, search_parent_directories=True)
    if len(repository.untracked_files) > 0:
        logging.warning("Untracked files will not be recorded: %s", repository.untracked_files)
    current_commit = repository.head.commit
    started_state_is_dirty = repository.is_dirty()

    if started_state_is_dirty:
        repository.index.add([p for p in self.__get_files_to_be_added(repository)])
        commit_obj = repository.index.commit("Temporary commit for experiment " + self.__experiment_name)
        sha = commit_obj.hexsha
    else:
        sha = repository.head.object.hexsha

    data = {"parameters": parameters, "started": time.time(), "description": self.__description,
            "commit_sha": sha}
    tag_object = self.__tag_repo(data, repository)

    if started_state_is_dirty:
        repository.head.reset(current_commit, working_tree=False, index=True)

    return tag_object
python
def __start_experiment(self, parameters):
    """
    Start an experiment by capturing the state of the code
    :param parameters: a dictionary containing the parameters of the experiment
    :type parameters: dict
    :return: the tag representing this experiment
    :rtype: TagReference
    """
    repository = Repo(self.__repository_directory, search_parent_directories=True)
    if len(repository.untracked_files) > 0:
        logging.warning("Untracked files will not be recorded: %s", repository.untracked_files)
    current_commit = repository.head.commit
    started_state_is_dirty = repository.is_dirty()

    if started_state_is_dirty:
        repository.index.add([p for p in self.__get_files_to_be_added(repository)])
        commit_obj = repository.index.commit("Temporary commit for experiment " + self.__experiment_name)
        sha = commit_obj.hexsha
    else:
        sha = repository.head.object.hexsha

    data = {"parameters": parameters, "started": time.time(), "description": self.__description,
            "commit_sha": sha}
    tag_object = self.__tag_repo(data, repository)

    if started_state_is_dirty:
        repository.head.reset(current_commit, working_tree=False, index=True)

    return tag_object
[ "def", "__start_experiment", "(", "self", ",", "parameters", ")", ":", "repository", "=", "Repo", "(", "self", ".", "__repository_directory", ",", "search_parent_directories", "=", "True", ")", "if", "len", "(", "repository", ".", "untracked_files", ")", ">", "0", ":", "logging", ".", "warning", "(", "\"Untracked files will not be recorded: %s\"", ",", "repository", ".", "untracked_files", ")", "current_commit", "=", "repository", ".", "head", ".", "commit", "started_state_is_dirty", "=", "repository", ".", "is_dirty", "(", ")", "if", "started_state_is_dirty", ":", "repository", ".", "index", ".", "add", "(", "[", "p", "for", "p", "in", "self", ".", "__get_files_to_be_added", "(", "repository", ")", "]", ")", "commit_obj", "=", "repository", ".", "index", ".", "commit", "(", "\"Temporary commit for experiment \"", "+", "self", ".", "__experiment_name", ")", "sha", "=", "commit_obj", ".", "hexsha", "else", ":", "sha", "=", "repository", ".", "head", ".", "object", ".", "hexsha", "data", "=", "{", "\"parameters\"", ":", "parameters", ",", "\"started\"", ":", "time", ".", "time", "(", ")", ",", "\"description\"", ":", "self", ".", "__description", ",", "\"commit_sha\"", ":", "sha", "}", "tag_object", "=", "self", ".", "__tag_repo", "(", "data", ",", "repository", ")", "if", "started_state_is_dirty", ":", "repository", ".", "head", ".", "reset", "(", "current_commit", ",", "working_tree", "=", "False", ",", "index", "=", "True", ")", "return", "tag_object" ]
Start an experiment by capturing the state of the code

:param parameters: a dictionary containing the parameters of the experiment
:type parameters: dict
:return: the tag representing this experiment
:rtype: TagReference
[ "Start", "an", "experiment", "by", "capturing", "the", "state", "of", "the", "code", ":", "param", "parameters", ":", "a", "dictionary", "containing", "the", "parameters", "of", "the", "experiment", ":", "type", "parameters", ":", "dict", ":", "return", ":", "the", "tag", "representing", "this", "experiment", ":", "rtype", ":", "TagReference" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L102-L130
valid
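Since __start_experiment stores the commit sha inside the tag message, the exact code state of a run can be recovered later. A sketch with an assumed tag name:

import json
from git import Repo

repo = Repo('.', search_parent_directories=True)
data = json.loads(repo.tags['exp-demo-0001'].tag.message)
repo.git.checkout(data['commit_sha'])  # detached HEAD at the recorded state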
suryakencana007/baka_model
baka_model/model/meta/base.py
get_tm_session
def get_tm_session(session_factory, transaction_manager):
    """
    Get a ``sqlalchemy.orm.Session`` instance backed by a transaction.

    This function will hook the session to the transaction manager which
    will take care of committing any changes.

    - When using pyramid_tm it will automatically be committed or aborted
      depending on whether an exception is raised.

    - When using scripts you should wrap the session in a manager yourself.
      For example::

          import transaction

          engine = get_engine(settings)
          session_factory = get_session_factory(engine)
          with transaction.manager:
              dbsession = get_tm_session(session_factory, transaction.manager)

    """
    dbsession = session_factory()
    zope.sqlalchemy.register(
        dbsession, transaction_manager=transaction_manager)
    return dbsession
python
def get_tm_session(session_factory, transaction_manager):
    """
    Get a ``sqlalchemy.orm.Session`` instance backed by a transaction.

    This function will hook the session to the transaction manager which
    will take care of committing any changes.

    - When using pyramid_tm it will automatically be committed or aborted
      depending on whether an exception is raised.

    - When using scripts you should wrap the session in a manager yourself.
      For example::

          import transaction

          engine = get_engine(settings)
          session_factory = get_session_factory(engine)
          with transaction.manager:
              dbsession = get_tm_session(session_factory, transaction.manager)

    """
    dbsession = session_factory()
    zope.sqlalchemy.register(
        dbsession, transaction_manager=transaction_manager)
    return dbsession
[ "def", "get_tm_session", "(", "session_factory", ",", "transaction_manager", ")", ":", "dbsession", "=", "session_factory", "(", ")", "zope", ".", "sqlalchemy", ".", "register", "(", "dbsession", ",", "transaction_manager", "=", "transaction_manager", ")", "return", "dbsession" ]
Get a ``sqlalchemy.orm.Session`` instance backed by a transaction.

This function will hook the session to the transaction manager which
will take care of committing any changes.

- When using pyramid_tm it will automatically be committed or aborted
  depending on whether an exception is raised.

- When using scripts you should wrap the session in a manager yourself.
  For example::

      import transaction

      engine = get_engine(settings)
      session_factory = get_session_factory(engine)
      with transaction.manager:
          dbsession = get_tm_session(session_factory, transaction.manager)
[ "Get", "a", "sqlalchemy", ".", "orm", ".", "Session", "instance", "backed", "by", "a", "transaction", "." ]
915c2da9920e973302f5764ae63799acd5ecf0b7
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/base.py#L71-L95
valid
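A self-contained sketch of the script-side pattern from the docstring, using an in-memory SQLite engine; the import path is taken from this row's file path, and the model setup is omitted:

import transaction
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from baka_model.model.meta.base import get_tm_session

engine = create_engine('sqlite://')
session_factory = sessionmaker(bind=engine)

with transaction.manager:
    dbsession = get_tm_session(session_factory, transaction.manager)
    # work done on dbsession is committed on clean exit, aborted on error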