repo (stringclasses, 679 values) | path (stringlengths, 6-122) | func_name (stringlengths, 2-76) | original_string (stringlengths, 87-70.9k) | language (stringclasses, 1 value) | code (stringlengths, 87-70.9k) | code_tokens (sequencelengths, 20-6.91k) | docstring (stringlengths, 1-21.7k) | docstring_tokens (sequencelengths, 1-1.6k) | sha (stringclasses, 679 values) | url (stringlengths, 92-213) | partition (stringclasses, 1 value)
---|---|---|---|---|---|---|---|---|---|---|---
ecmadao/threads-creator | threads_creator/threads/main_thread.py | MainThread.run | def run(self):
"""run your main spider here, and get a list/tuple of url as result
then make the instance of branch thread
:return: None
"""
global existed_urls_list
config = config_creator()
debug = config.debug
main_thread_sleep = config.main_thread_sleep
branch_thread_num = config.branch_thread_num
while 1:
url = self.main_queue.get()
if debug:
print('main thread-{} start'.format(url))
main_spider = self.main_spider(url)
sleep(random.randrange(*main_thread_sleep))
links = main_spider.request_urls()
try:
assert type(links) in VALIDATE_URLS
except AssertionError:
                error_message('expect to return a list or tuple which contains url')
links = list()
branch_queue = queue.Queue(branch_thread_num)
for i in range(branch_thread_num):
branch_thread = BranchThread(branch_queue=branch_queue,
branch_spider=self.branch_spider)
branch_thread.daemon = True
branch_thread.start()
for link in links:
if link not in existed_urls_list:
existed_urls_list.append(link)
branch_queue.put(link)
branch_queue.join()
if debug:
                print('main thread-{}\'s child threads are all finished'.format(url))
self.main_queue.task_done() | python | def run(self):
"""run your main spider here, and get a list/tuple of url as result
then make the instance of branch thread
:return: None
"""
global existed_urls_list
config = config_creator()
debug = config.debug
main_thread_sleep = config.main_thread_sleep
branch_thread_num = config.branch_thread_num
while 1:
url = self.main_queue.get()
if debug:
print('main thread-{} start'.format(url))
main_spider = self.main_spider(url)
sleep(random.randrange(*main_thread_sleep))
links = main_spider.request_urls()
try:
assert type(links) in VALIDATE_URLS
except AssertionError:
                error_message('expect to return a list or tuple which contains url')
links = list()
branch_queue = queue.Queue(branch_thread_num)
for i in range(branch_thread_num):
branch_thread = BranchThread(branch_queue=branch_queue,
branch_spider=self.branch_spider)
branch_thread.daemon = True
branch_thread.start()
for link in links:
if link not in existed_urls_list:
existed_urls_list.append(link)
branch_queue.put(link)
branch_queue.join()
if debug:
                print('main thread-{}\'s child threads are all finished'.format(url))
self.main_queue.task_done() | [
"def",
"run",
"(",
"self",
")",
":",
"global",
"existed_urls_list",
"config",
"=",
"config_creator",
"(",
")",
"debug",
"=",
"config",
".",
"debug",
"main_thread_sleep",
"=",
"config",
".",
"main_thread_sleep",
"branch_thread_num",
"=",
"config",
".",
"branch_thread_num",
"while",
"1",
":",
"url",
"=",
"self",
".",
"main_queue",
".",
"get",
"(",
")",
"if",
"debug",
":",
"print",
"(",
"'main thread-{} start'",
".",
"format",
"(",
"url",
")",
")",
"main_spider",
"=",
"self",
".",
"main_spider",
"(",
"url",
")",
"sleep",
"(",
"random",
".",
"randrange",
"(",
"*",
"main_thread_sleep",
")",
")",
"links",
"=",
"main_spider",
".",
"request_urls",
"(",
")",
"try",
":",
"assert",
"type",
"(",
"links",
")",
"in",
"VALIDATE_URLS",
"except",
"AssertionError",
":",
"error_message",
"(",
"'except to return a list or tuple which contains url'",
")",
"links",
"=",
"list",
"(",
")",
"branch_queue",
"=",
"queue",
".",
"Queue",
"(",
"branch_thread_num",
")",
"for",
"i",
"in",
"range",
"(",
"branch_thread_num",
")",
":",
"branch_thread",
"=",
"BranchThread",
"(",
"branch_queue",
"=",
"branch_queue",
",",
"branch_spider",
"=",
"self",
".",
"branch_spider",
")",
"branch_thread",
".",
"daemon",
"=",
"True",
"branch_thread",
".",
"start",
"(",
")",
"for",
"link",
"in",
"links",
":",
"if",
"link",
"not",
"in",
"existed_urls_list",
":",
"existed_urls_list",
".",
"append",
"(",
"link",
")",
"branch_queue",
".",
"put",
"(",
"link",
")",
"branch_queue",
".",
"join",
"(",
")",
"if",
"debug",
":",
"print",
"(",
"'main thread-{}\\'s child threads is all finish'",
".",
"format",
"(",
"url",
")",
")",
"self",
".",
"main_queue",
".",
"task_done",
"(",
")"
] | run your main spider here, and get a list/tuple of url as result
then make the instance of branch thread
:return: None | [
"run",
"your",
"main",
"spider",
"here",
"and",
"get",
"a",
"list",
"/",
"tuple",
"of",
"url",
"as",
"result",
"then",
"make",
"the",
"instance",
"of",
"branch",
"thread"
] | f081091425d4382e5e9776c395c20e1af2332657 | https://github.com/ecmadao/threads-creator/blob/f081091425d4382e5e9776c395c20e1af2332657/threads_creator/threads/main_thread.py#L27-L68 | valid |
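The `run` loop above is a two-level producer/consumer: the main thread pulls a URL, feeds daemon `BranchThread` workers through a bounded `queue.Queue`, and uses `join()`/`task_done()` to wait for each batch. A minimal self-contained sketch of the same pattern (all names below are illustrative, not from the library):

```python
import queue
import threading

def branch_worker(branch_queue):
    while True:
        link = branch_queue.get()      # blocks until a link arrives
        print('fetched', link)         # stand-in for the branch_spider work
        branch_queue.task_done()       # lets join() account for this item

branch_queue = queue.Queue(4)          # bounded, like Queue(branch_thread_num)
for _ in range(4):
    t = threading.Thread(target=branch_worker, args=(branch_queue,))
    t.daemon = True                    # dies with the main thread
    t.start()

for link in ['https://example.com/a', 'https://example.com/b']:
    branch_queue.put(link)
branch_queue.join()                    # returns once every put() is task_done()
```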
dstufft/recliner | tools/check_render.py | pypi_render | def pypi_render(source):
"""
Copied (and slightly adapted) from pypi.description_tools
"""
ALLOWED_SCHEMES = '''file ftp gopher hdl http https imap mailto mms news
nntp prospero rsync rtsp rtspu sftp shttp sip sips snews svn svn+ssh
telnet wais irc'''.split()
settings_overrides = {
"raw_enabled": 0, # no raw HTML code
"file_insertion_enabled": 0, # no file/URL access
"halt_level": 2, # at warnings or errors, raise an exception
"report_level": 5, # never report problems with the reST code
}
# capture publishing errors, they go to stderr
old_stderr = sys.stderr
sys.stderr = s = StringIO.StringIO()
parts = None
try:
# Convert reStructuredText to HTML using Docutils.
document = publish_doctree(source=source,
settings_overrides=settings_overrides)
for node in document.traverse():
if node.tagname == '#text':
continue
if node.hasattr('refuri'):
uri = node['refuri']
elif node.hasattr('uri'):
uri = node['uri']
else:
continue
o = urlparse.urlparse(uri)
if o.scheme not in ALLOWED_SCHEMES:
raise TransformError('link scheme not allowed')
# now turn the transformed document into HTML
reader = readers.doctree.Reader(parser_name='null')
pub = Publisher(reader, source=io.DocTreeInput(document),
destination_class=io.StringOutput)
pub.set_writer('html')
pub.process_programmatic_settings(None, settings_overrides, None)
pub.set_destination(None, None)
pub.publish()
parts = pub.writer.parts
except:
pass
sys.stderr = old_stderr
# original text if publishing errors occur
if parts is None or len(s.getvalue()) > 0:
return None
else:
return parts['body'] | python | def pypi_render(source):
"""
Copied (and slightly adapted) from pypi.description_tools
"""
ALLOWED_SCHEMES = '''file ftp gopher hdl http https imap mailto mms news
nntp prospero rsync rtsp rtspu sftp shttp sip sips snews svn svn+ssh
telnet wais irc'''.split()
settings_overrides = {
"raw_enabled": 0, # no raw HTML code
"file_insertion_enabled": 0, # no file/URL access
"halt_level": 2, # at warnings or errors, raise an exception
"report_level": 5, # never report problems with the reST code
}
# capture publishing errors, they go to stderr
old_stderr = sys.stderr
sys.stderr = s = StringIO.StringIO()
parts = None
try:
# Convert reStructuredText to HTML using Docutils.
document = publish_doctree(source=source,
settings_overrides=settings_overrides)
for node in document.traverse():
if node.tagname == '#text':
continue
if node.hasattr('refuri'):
uri = node['refuri']
elif node.hasattr('uri'):
uri = node['uri']
else:
continue
o = urlparse.urlparse(uri)
if o.scheme not in ALLOWED_SCHEMES:
raise TransformError('link scheme not allowed')
# now turn the transformed document into HTML
reader = readers.doctree.Reader(parser_name='null')
pub = Publisher(reader, source=io.DocTreeInput(document),
destination_class=io.StringOutput)
pub.set_writer('html')
pub.process_programmatic_settings(None, settings_overrides, None)
pub.set_destination(None, None)
pub.publish()
parts = pub.writer.parts
except:
pass
sys.stderr = old_stderr
# original text if publishing errors occur
if parts is None or len(s.getvalue()) > 0:
return None
else:
return parts['body'] | [
"def",
"pypi_render",
"(",
"source",
")",
":",
"ALLOWED_SCHEMES",
"=",
"'''file ftp gopher hdl http https imap mailto mms news\n nntp prospero rsync rtsp rtspu sftp shttp sip sips snews svn svn+ssh\n telnet wais irc'''",
".",
"split",
"(",
")",
"settings_overrides",
"=",
"{",
"\"raw_enabled\"",
":",
"0",
",",
"# no raw HTML code",
"\"file_insertion_enabled\"",
":",
"0",
",",
"# no file/URL access",
"\"halt_level\"",
":",
"2",
",",
"# at warnings or errors, raise an exception",
"\"report_level\"",
":",
"5",
",",
"# never report problems with the reST code",
"}",
"# capture publishing errors, they go to stderr",
"old_stderr",
"=",
"sys",
".",
"stderr",
"sys",
".",
"stderr",
"=",
"s",
"=",
"StringIO",
".",
"StringIO",
"(",
")",
"parts",
"=",
"None",
"try",
":",
"# Convert reStructuredText to HTML using Docutils.",
"document",
"=",
"publish_doctree",
"(",
"source",
"=",
"source",
",",
"settings_overrides",
"=",
"settings_overrides",
")",
"for",
"node",
"in",
"document",
".",
"traverse",
"(",
")",
":",
"if",
"node",
".",
"tagname",
"==",
"'#text'",
":",
"continue",
"if",
"node",
".",
"hasattr",
"(",
"'refuri'",
")",
":",
"uri",
"=",
"node",
"[",
"'refuri'",
"]",
"elif",
"node",
".",
"hasattr",
"(",
"'uri'",
")",
":",
"uri",
"=",
"node",
"[",
"'uri'",
"]",
"else",
":",
"continue",
"o",
"=",
"urlparse",
".",
"urlparse",
"(",
"uri",
")",
"if",
"o",
".",
"scheme",
"not",
"in",
"ALLOWED_SCHEMES",
":",
"raise",
"TransformError",
"(",
"'link scheme not allowed'",
")",
"# now turn the transformed document into HTML",
"reader",
"=",
"readers",
".",
"doctree",
".",
"Reader",
"(",
"parser_name",
"=",
"'null'",
")",
"pub",
"=",
"Publisher",
"(",
"reader",
",",
"source",
"=",
"io",
".",
"DocTreeInput",
"(",
"document",
")",
",",
"destination_class",
"=",
"io",
".",
"StringOutput",
")",
"pub",
".",
"set_writer",
"(",
"'html'",
")",
"pub",
".",
"process_programmatic_settings",
"(",
"None",
",",
"settings_overrides",
",",
"None",
")",
"pub",
".",
"set_destination",
"(",
"None",
",",
"None",
")",
"pub",
".",
"publish",
"(",
")",
"parts",
"=",
"pub",
".",
"writer",
".",
"parts",
"except",
":",
"pass",
"sys",
".",
"stderr",
"=",
"old_stderr",
"# original text if publishing errors occur",
"if",
"parts",
"is",
"None",
"or",
"len",
"(",
"s",
".",
"getvalue",
"(",
")",
")",
">",
"0",
":",
"return",
"None",
"else",
":",
"return",
"parts",
"[",
"'body'",
"]"
] | Copied (and slightly adapted) from pypi.description_tools | [
"Copied",
"(",
"and",
"slightly",
"adapted",
")",
"from",
"pypi",
".",
"description_tools"
] | 64248e46805cbe75491933e9f75e5eb9961cf04e | https://github.com/dstufft/recliner/blob/64248e46805cbe75491933e9f75e5eb9961cf04e/tools/check_render.py#L19-L76 | valid |
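A hypothetical call to `pypi_render` showing the contract it enforces: valid reST with an allowed link scheme renders to an HTML body, while anything that raises during publishing comes back as `None` (the sample text is illustrative):

```python
sample = """\
Example
=======

A `safe link <https://example.com>`_ and a paragraph.
"""
html = pypi_render(sample)
if html is None:
    print('rendering failed; fall back to plain text')
else:
    print(html)  # the HTML <body> fragment produced by docutils
```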
suryakencana007/baka_model | baka_model/model/pubid.py | generate | def generate(length=DEFAULT_LENGTH):
"""
Generate a random string of the specified length.
The returned string is composed of an alphabet that shouldn't include any
characters that are easily mistakeable for one another (I, 1, O, 0), and
hopefully won't accidentally contain any English-language curse words.
"""
return ''.join(random.SystemRandom().choice(ALPHABET)
for _ in range(length)) | python | def generate(length=DEFAULT_LENGTH):
"""
Generate a random string of the specified length.
The returned string is composed of an alphabet that shouldn't include any
characters that are easily mistakeable for one another (I, 1, O, 0), and
hopefully won't accidentally contain any English-language curse words.
"""
return ''.join(random.SystemRandom().choice(ALPHABET)
for _ in range(length)) | [
"def",
"generate",
"(",
"length",
"=",
"DEFAULT_LENGTH",
")",
":",
"return",
"''",
".",
"join",
"(",
"random",
".",
"SystemRandom",
"(",
")",
".",
"choice",
"(",
"ALPHABET",
")",
"for",
"_",
"in",
"range",
"(",
"length",
")",
")"
] | Generate a random string of the specified length.
The returned string is composed of an alphabet that shouldn't include any
characters that are easily mistakeable for one another (I, 1, O, 0), and
hopefully won't accidentally contain any English-language curse words. | [
"Generate",
"a",
"random",
"string",
"of",
"the",
"specified",
"length",
"."
] | 915c2da9920e973302f5764ae63799acd5ecf0b7 | https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/pubid.py#L36-L45 | valid |
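The module constants `ALPHABET` and `DEFAULT_LENGTH` are not shown in this row; the sketch below supplies plausible stand-ins so the function runs on its own:

```python
import random

# Assumed values: the real module defines its own confusion-free alphabet.
ALPHABET = '23456789bcdfghjkmnpqrstvwxyz'
DEFAULT_LENGTH = 8

def generate(length=DEFAULT_LENGTH):
    return ''.join(random.SystemRandom().choice(ALPHABET)
                   for _ in range(length))

print(generate())    # e.g. 'k7mp3wqx'
print(generate(12))  # longer IDs lower the collision probability
```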
nyaruka/python-librato-bg | librato_bg/client.py | require | def require(name, field, data_type):
"""Require that the named `field` has the right `data_type`"""
if not isinstance(field, data_type):
msg = '{0} must have {1}, got: {2}'.format(name, data_type, field)
raise AssertionError(msg) | python | def require(name, field, data_type):
"""Require that the named `field` has the right `data_type`"""
if not isinstance(field, data_type):
msg = '{0} must have {1}, got: {2}'.format(name, data_type, field)
raise AssertionError(msg) | [
"def",
"require",
"(",
"name",
",",
"field",
",",
"data_type",
")",
":",
"if",
"not",
"isinstance",
"(",
"field",
",",
"data_type",
")",
":",
"msg",
"=",
"'{0} must have {1}, got: {2}'",
".",
"format",
"(",
"name",
",",
"data_type",
",",
"field",
")",
"raise",
"AssertionError",
"(",
"msg",
")"
] | Require that the named `field` has the right `data_type` | [
"Require",
"that",
"the",
"named",
"field",
"has",
"the",
"right",
"data_type"
] | e541092838694de31d256becea8391a9cfe086c7 | https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/librato_bg/client.py#L63-L67 | valid |
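A short usage sketch of `require`: it is a type guard that passes silently on a match and raises a descriptive `AssertionError` otherwise:

```python
def require(name, field, data_type):
    if not isinstance(field, data_type):
        raise AssertionError('{0} must have {1}, got: {2}'.format(
            name, data_type, field))

require('tags', ['a', 'b'], list)        # passes silently
try:
    require('tags', 'not-a-list', list)
except AssertionError as exc:
    print(exc)  # tags must have <class 'list'>, got: not-a-list
```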
nyaruka/python-librato-bg | librato_bg/client.py | Client._enqueue | def _enqueue(self, msg):
"""Push a new `msg` onto the queue, return `(success, msg)`"""
self.log.debug('queueing: %s', msg)
if self.queue.full():
self.log.warn('librato_bg queue is full')
return False, msg
self.queue.put(msg)
self.log.debug('enqueued %s.', msg)
return True, msg | python | def _enqueue(self, msg):
"""Push a new `msg` onto the queue, return `(success, msg)`"""
self.log.debug('queueing: %s', msg)
if self.queue.full():
self.log.warn('librato_bg queue is full')
return False, msg
self.queue.put(msg)
self.log.debug('enqueued %s.', msg)
return True, msg | [
"def",
"_enqueue",
"(",
"self",
",",
"msg",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'queueing: %s'",
",",
"msg",
")",
"if",
"self",
".",
"queue",
".",
"full",
"(",
")",
":",
"self",
".",
"log",
".",
"warn",
"(",
"'librato_bg queue is full'",
")",
"return",
"False",
",",
"msg",
"self",
".",
"queue",
".",
"put",
"(",
"msg",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'enqueued %s.'",
",",
"msg",
")",
"return",
"True",
",",
"msg"
] | Push a new `msg` onto the queue, return `(success, msg)` | [
"Push",
"a",
"new",
"msg",
"onto",
"the",
"queue",
"return",
"(",
"success",
"msg",
")"
] | e541092838694de31d256becea8391a9cfe086c7 | https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/librato_bg/client.py#L38-L48 | valid |
nyaruka/python-librato-bg | librato_bg/client.py | Client.flush | def flush(self):
"""Forces a flush from the internal queue to the server"""
queue = self.queue
size = queue.qsize()
queue.join()
self.log.debug('successfully flushed %s items.', size) | python | def flush(self):
"""Forces a flush from the internal queue to the server"""
queue = self.queue
size = queue.qsize()
queue.join()
self.log.debug('successfully flushed %s items.', size) | [
"def",
"flush",
"(",
"self",
")",
":",
"queue",
"=",
"self",
".",
"queue",
"size",
"=",
"queue",
".",
"qsize",
"(",
")",
"queue",
".",
"join",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'successfully flushed %s items.'",
",",
"size",
")"
] | Forces a flush from the internal queue to the server | [
"Forces",
"a",
"flush",
"from",
"the",
"internal",
"queue",
"to",
"the",
"server"
] | e541092838694de31d256becea8391a9cfe086c7 | https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/librato_bg/client.py#L50-L55 | valid |
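`_enqueue` and `flush` only make sense next to a consumer thread that drains the queue and calls `task_done()`; the sketch below shows that missing half (the consumer here just prints, where the real library posts metrics to Librato):

```python
import queue
import threading

q = queue.Queue(maxsize=100)

def consumer():
    while True:
        msg = q.get()
        print('sending', msg)   # stand-in for the HTTP submission
        q.task_done()

threading.Thread(target=consumer, daemon=True).start()

if not q.full():                # mirrors the full() check in _enqueue
    q.put({'gauge': 'requests', 'value': 1})
q.join()                        # what flush() ultimately waits on
```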
cecton/destream | destream/guesser.py | open | def open(name=None, fileobj=None, closefd=True):
"""
Use all decompressor possible to make the stream
"""
return Guesser().open(name=name, fileobj=fileobj, closefd=closefd) | python | def open(name=None, fileobj=None, closefd=True):
"""
Use all decompressor possible to make the stream
"""
return Guesser().open(name=name, fileobj=fileobj, closefd=closefd) | [
"def",
"open",
"(",
"name",
"=",
"None",
",",
"fileobj",
"=",
"None",
",",
"closefd",
"=",
"True",
")",
":",
"return",
"Guesser",
"(",
")",
".",
"open",
"(",
"name",
"=",
"name",
",",
"fileobj",
"=",
"fileobj",
",",
"closefd",
"=",
"closefd",
")"
] | Use all decompressor possible to make the stream | [
"Use",
"all",
"decompressor",
"possible",
"to",
"make",
"the",
"stream"
] | a9e12b4ac7d41bcd9af54a820c235d77a68a9b8c | https://github.com/cecton/destream/blob/a9e12b4ac7d41bcd9af54a820c235d77a68a9b8c/destream/guesser.py#L46-L50 | valid |
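A hypothetical use of the module-level `open` above: because `Guesser` sniffs the stream content rather than trusting the file extension, the caller never names a codec (the file name is illustrative, and the sketch assumes the returned object exposes the usual file API):

```python
import destream

stream = destream.open('logs.gz')   # compression is guessed from content
data = stream.read()
stream.close()
```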
ternaris/marv-cli | marv_cli/__init__.py | marv | def marv(ctx, config, loglevel, logfilter, verbosity):
"""Manage a Marv site"""
if config is None:
cwd = os.path.abspath(os.path.curdir)
while cwd != os.path.sep:
config = os.path.join(cwd, 'marv.conf')
if os.path.exists(config):
break
cwd = os.path.dirname(cwd)
else:
config = '/etc/marv/marv.conf'
if not os.path.exists(config):
config = None
ctx.obj = config
setup_logging(loglevel, verbosity, logfilter) | python | def marv(ctx, config, loglevel, logfilter, verbosity):
"""Manage a Marv site"""
if config is None:
cwd = os.path.abspath(os.path.curdir)
while cwd != os.path.sep:
config = os.path.join(cwd, 'marv.conf')
if os.path.exists(config):
break
cwd = os.path.dirname(cwd)
else:
config = '/etc/marv/marv.conf'
if not os.path.exists(config):
config = None
ctx.obj = config
setup_logging(loglevel, verbosity, logfilter) | [
"def",
"marv",
"(",
"ctx",
",",
"config",
",",
"loglevel",
",",
"logfilter",
",",
"verbosity",
")",
":",
"if",
"config",
"is",
"None",
":",
"cwd",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"curdir",
")",
"while",
"cwd",
"!=",
"os",
".",
"path",
".",
"sep",
":",
"config",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cwd",
",",
"'marv.conf'",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"config",
")",
":",
"break",
"cwd",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"cwd",
")",
"else",
":",
"config",
"=",
"'/etc/marv/marv.conf'",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"config",
")",
":",
"config",
"=",
"None",
"ctx",
".",
"obj",
"=",
"config",
"setup_logging",
"(",
"loglevel",
",",
"verbosity",
",",
"logfilter",
")"
] | Manage a Marv site | [
"Manage",
"a",
"Marv",
"site"
] | c06abf4f527c22035dd3b602849f6906877c6e68 | https://github.com/ternaris/marv-cli/blob/c06abf4f527c22035dd3b602849f6906877c6e68/marv_cli/__init__.py#L110-L124 | valid |
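The option handling above walks from the current directory to the filesystem root looking for `marv.conf`, then falls back to `/etc/marv/marv.conf`. The same discovery logic as a standalone helper (names are mine, not the CLI's):

```python
import os

def find_config(start=None, filename='marv.conf',
                fallback='/etc/marv/marv.conf'):
    cwd = os.path.abspath(start or os.curdir)
    while cwd != os.path.sep:
        candidate = os.path.join(cwd, filename)
        if os.path.exists(candidate):
            return candidate
        cwd = os.path.dirname(cwd)      # step one directory up
    return fallback if os.path.exists(fallback) else None

print(find_config())
```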
universalcore/unicore-cms | cms/__init__.py | main | def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
set_cache_regions_from_settings(settings)
config = Configurator(settings=settings)
config.include('cms')
config.configure_celery(global_config['__file__'])
return config.make_wsgi_app() | python | def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
set_cache_regions_from_settings(settings)
config = Configurator(settings=settings)
config.include('cms')
config.configure_celery(global_config['__file__'])
return config.make_wsgi_app() | [
"def",
"main",
"(",
"global_config",
",",
"*",
"*",
"settings",
")",
":",
"set_cache_regions_from_settings",
"(",
"settings",
")",
"config",
"=",
"Configurator",
"(",
"settings",
"=",
"settings",
")",
"config",
".",
"include",
"(",
"'cms'",
")",
"config",
".",
"configure_celery",
"(",
"global_config",
"[",
"'__file__'",
"]",
")",
"return",
"config",
".",
"make_wsgi_app",
"(",
")"
] | This function returns a Pyramid WSGI application. | [
"This",
"function",
"returns",
"a",
"Pyramid",
"WSGI",
"application",
"."
] | f68385fe742eb7efcce0d8f04f42f26ccf05d624 | https://github.com/universalcore/unicore-cms/blob/f68385fe742eb7efcce0d8f04f42f26ccf05d624/cms/__init__.py#L30-L37 | valid |
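A hedged sketch of exercising a factory like `main` locally; the settings keys are placeholders for what the real `.ini` would provide (Beaker cache regions, Celery configuration), so treat this as illustrative rather than a working deployment:

```python
from wsgiref.simple_server import make_server

settings = {                       # illustrative Beaker cache settings
    'cache.regions': 'default_term',
    'cache.type': 'memory',
    'cache.default_term.expire': '60',
}
app = main({'__file__': 'development.ini'}, **settings)
make_server('127.0.0.1', 8080, app).serve_forever()
```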
ibelie/typy | typy/google/protobuf/internal/decoder.py | _SignedVarintDecoder | def _SignedVarintDecoder(mask, result_type):
"""Like _VarintDecoder() but decodes signed values."""
def DecodeVarint(buffer, pos):
result = 0
shift = 0
while 1:
b = six.indexbytes(buffer, pos)
result |= ((b & 0x7f) << shift)
pos += 1
if not (b & 0x80):
if result > 0x7fffffffffffffff:
result -= (1 << 64)
result |= ~mask
else:
result &= mask
result = result_type(result)
return (result, pos)
shift += 7
if shift >= 64:
raise _DecodeError('Too many bytes when decoding varint.')
return DecodeVarint | python | def _SignedVarintDecoder(mask, result_type):
"""Like _VarintDecoder() but decodes signed values."""
def DecodeVarint(buffer, pos):
result = 0
shift = 0
while 1:
b = six.indexbytes(buffer, pos)
result |= ((b & 0x7f) << shift)
pos += 1
if not (b & 0x80):
if result > 0x7fffffffffffffff:
result -= (1 << 64)
result |= ~mask
else:
result &= mask
result = result_type(result)
return (result, pos)
shift += 7
if shift >= 64:
raise _DecodeError('Too many bytes when decoding varint.')
return DecodeVarint | [
"def",
"_SignedVarintDecoder",
"(",
"mask",
",",
"result_type",
")",
":",
"def",
"DecodeVarint",
"(",
"buffer",
",",
"pos",
")",
":",
"result",
"=",
"0",
"shift",
"=",
"0",
"while",
"1",
":",
"b",
"=",
"six",
".",
"indexbytes",
"(",
"buffer",
",",
"pos",
")",
"result",
"|=",
"(",
"(",
"b",
"&",
"0x7f",
")",
"<<",
"shift",
")",
"pos",
"+=",
"1",
"if",
"not",
"(",
"b",
"&",
"0x80",
")",
":",
"if",
"result",
">",
"0x7fffffffffffffff",
":",
"result",
"-=",
"(",
"1",
"<<",
"64",
")",
"result",
"|=",
"~",
"mask",
"else",
":",
"result",
"&=",
"mask",
"result",
"=",
"result_type",
"(",
"result",
")",
"return",
"(",
"result",
",",
"pos",
")",
"shift",
"+=",
"7",
"if",
"shift",
">=",
"64",
":",
"raise",
"_DecodeError",
"(",
"'Too many bytes when decoding varint.'",
")",
"return",
"DecodeVarint"
] | Like _VarintDecoder() but decodes signed values. | [
"Like",
"_VarintDecoder",
"()",
"but",
"decodes",
"signed",
"values",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/decoder.py#L134-L155 | valid |
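The decoder above reverses protobuf's varint encoding: seven payload bits per byte, with the high bit set on every byte except the last. A tiny encoder makes the framing concrete (`300` is the canonical example from the protobuf docs):

```python
def encode_varint(value):
    out = bytearray()
    while True:
        byte = value & 0x7F          # low seven payload bits
        value >>= 7
        if value:
            out.append(byte | 0x80)  # continuation bit: more bytes follow
        else:
            out.append(byte)
            return bytes(out)

assert encode_varint(300) == b'\xac\x02'  # 300 = 0b10_0101100
```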
ibelie/typy | typy/google/protobuf/internal/decoder.py | MessageSetItemDecoder | def MessageSetItemDecoder(extensions_by_number):
"""Returns a decoder for a MessageSet item.
The parameter is the _extensions_by_number map for the message class.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
}
"""
type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)
local_ReadTag = ReadTag
local_DecodeVarint = _DecodeVarint
local_SkipField = SkipField
def DecodeItem(buffer, pos, end, message, field_dict):
message_set_item_start = pos
type_id = -1
message_start = -1
message_end = -1
# Technically, type_id and message can appear in any order, so we need
# a little loop here.
while 1:
(tag_bytes, pos) = local_ReadTag(buffer, pos)
if tag_bytes == type_id_tag_bytes:
(type_id, pos) = local_DecodeVarint(buffer, pos)
elif tag_bytes == message_tag_bytes:
(size, message_start) = local_DecodeVarint(buffer, pos)
pos = message_end = message_start + size
elif tag_bytes == item_end_tag_bytes:
break
else:
pos = SkipField(buffer, pos, end, tag_bytes)
if pos == -1:
raise _DecodeError('Missing group end tag.')
if pos > end:
raise _DecodeError('Truncated message.')
if type_id == -1:
raise _DecodeError('MessageSet item missing type_id.')
if message_start == -1:
raise _DecodeError('MessageSet item missing message.')
extension = extensions_by_number.get(type_id)
if extension is not None:
value = field_dict.get(extension)
if value is None:
value = field_dict.setdefault(
extension, extension.message_type._concrete_class())
if value._InternalParse(buffer, message_start,message_end) != message_end:
# The only reason _InternalParse would return early is if it encountered
# an end-group tag.
raise _DecodeError('Unexpected end-group tag.')
else:
if not message._unknown_fields:
message._unknown_fields = []
message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
buffer[message_set_item_start:pos]))
return pos
return DecodeItem | python | def MessageSetItemDecoder(extensions_by_number):
"""Returns a decoder for a MessageSet item.
The parameter is the _extensions_by_number map for the message class.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
}
"""
type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)
local_ReadTag = ReadTag
local_DecodeVarint = _DecodeVarint
local_SkipField = SkipField
def DecodeItem(buffer, pos, end, message, field_dict):
message_set_item_start = pos
type_id = -1
message_start = -1
message_end = -1
# Technically, type_id and message can appear in any order, so we need
# a little loop here.
while 1:
(tag_bytes, pos) = local_ReadTag(buffer, pos)
if tag_bytes == type_id_tag_bytes:
(type_id, pos) = local_DecodeVarint(buffer, pos)
elif tag_bytes == message_tag_bytes:
(size, message_start) = local_DecodeVarint(buffer, pos)
pos = message_end = message_start + size
elif tag_bytes == item_end_tag_bytes:
break
else:
pos = SkipField(buffer, pos, end, tag_bytes)
if pos == -1:
raise _DecodeError('Missing group end tag.')
if pos > end:
raise _DecodeError('Truncated message.')
if type_id == -1:
raise _DecodeError('MessageSet item missing type_id.')
if message_start == -1:
raise _DecodeError('MessageSet item missing message.')
extension = extensions_by_number.get(type_id)
if extension is not None:
value = field_dict.get(extension)
if value is None:
value = field_dict.setdefault(
extension, extension.message_type._concrete_class())
if value._InternalParse(buffer, message_start,message_end) != message_end:
# The only reason _InternalParse would return early is if it encountered
# an end-group tag.
raise _DecodeError('Unexpected end-group tag.')
else:
if not message._unknown_fields:
message._unknown_fields = []
message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
buffer[message_set_item_start:pos]))
return pos
return DecodeItem | [
"def",
"MessageSetItemDecoder",
"(",
"extensions_by_number",
")",
":",
"type_id_tag_bytes",
"=",
"encoder",
".",
"TagBytes",
"(",
"2",
",",
"wire_format",
".",
"WIRETYPE_VARINT",
")",
"message_tag_bytes",
"=",
"encoder",
".",
"TagBytes",
"(",
"3",
",",
"wire_format",
".",
"WIRETYPE_LENGTH_DELIMITED",
")",
"item_end_tag_bytes",
"=",
"encoder",
".",
"TagBytes",
"(",
"1",
",",
"wire_format",
".",
"WIRETYPE_END_GROUP",
")",
"local_ReadTag",
"=",
"ReadTag",
"local_DecodeVarint",
"=",
"_DecodeVarint",
"local_SkipField",
"=",
"SkipField",
"def",
"DecodeItem",
"(",
"buffer",
",",
"pos",
",",
"end",
",",
"message",
",",
"field_dict",
")",
":",
"message_set_item_start",
"=",
"pos",
"type_id",
"=",
"-",
"1",
"message_start",
"=",
"-",
"1",
"message_end",
"=",
"-",
"1",
"# Technically, type_id and message can appear in any order, so we need",
"# a little loop here.",
"while",
"1",
":",
"(",
"tag_bytes",
",",
"pos",
")",
"=",
"local_ReadTag",
"(",
"buffer",
",",
"pos",
")",
"if",
"tag_bytes",
"==",
"type_id_tag_bytes",
":",
"(",
"type_id",
",",
"pos",
")",
"=",
"local_DecodeVarint",
"(",
"buffer",
",",
"pos",
")",
"elif",
"tag_bytes",
"==",
"message_tag_bytes",
":",
"(",
"size",
",",
"message_start",
")",
"=",
"local_DecodeVarint",
"(",
"buffer",
",",
"pos",
")",
"pos",
"=",
"message_end",
"=",
"message_start",
"+",
"size",
"elif",
"tag_bytes",
"==",
"item_end_tag_bytes",
":",
"break",
"else",
":",
"pos",
"=",
"SkipField",
"(",
"buffer",
",",
"pos",
",",
"end",
",",
"tag_bytes",
")",
"if",
"pos",
"==",
"-",
"1",
":",
"raise",
"_DecodeError",
"(",
"'Missing group end tag.'",
")",
"if",
"pos",
">",
"end",
":",
"raise",
"_DecodeError",
"(",
"'Truncated message.'",
")",
"if",
"type_id",
"==",
"-",
"1",
":",
"raise",
"_DecodeError",
"(",
"'MessageSet item missing type_id.'",
")",
"if",
"message_start",
"==",
"-",
"1",
":",
"raise",
"_DecodeError",
"(",
"'MessageSet item missing message.'",
")",
"extension",
"=",
"extensions_by_number",
".",
"get",
"(",
"type_id",
")",
"if",
"extension",
"is",
"not",
"None",
":",
"value",
"=",
"field_dict",
".",
"get",
"(",
"extension",
")",
"if",
"value",
"is",
"None",
":",
"value",
"=",
"field_dict",
".",
"setdefault",
"(",
"extension",
",",
"extension",
".",
"message_type",
".",
"_concrete_class",
"(",
")",
")",
"if",
"value",
".",
"_InternalParse",
"(",
"buffer",
",",
"message_start",
",",
"message_end",
")",
"!=",
"message_end",
":",
"# The only reason _InternalParse would return early is if it encountered",
"# an end-group tag.",
"raise",
"_DecodeError",
"(",
"'Unexpected end-group tag.'",
")",
"else",
":",
"if",
"not",
"message",
".",
"_unknown_fields",
":",
"message",
".",
"_unknown_fields",
"=",
"[",
"]",
"message",
".",
"_unknown_fields",
".",
"append",
"(",
"(",
"MESSAGE_SET_ITEM_TAG",
",",
"buffer",
"[",
"message_set_item_start",
":",
"pos",
"]",
")",
")",
"return",
"pos",
"return",
"DecodeItem"
] | Returns a decoder for a MessageSet item.
The parameter is the _extensions_by_number map for the message class.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
} | [
"Returns",
"a",
"decoder",
"for",
"a",
"MessageSet",
"item",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/decoder.py#L645-L715 | valid |
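The three `tag_bytes` constants matched in `DecodeItem` are varint-encoded field tags, `(field_number << 3) | wire_type`. For field numbers below 16 the varint fits in a single byte, so the constants can be checked by hand:

```python
WIRETYPE_VARINT, WIRETYPE_LENGTH_DELIMITED, WIRETYPE_END_GROUP = 0, 2, 4

def tag(field_number, wire_type):
    # single-byte form; valid only while (field_number << 3) | wire_type < 128
    return bytes([(field_number << 3) | wire_type])

assert tag(2, WIRETYPE_VARINT) == b'\x10'            # type_id = 2
assert tag(3, WIRETYPE_LENGTH_DELIMITED) == b'\x1a'  # message = 3
assert tag(1, WIRETYPE_END_GROUP) == b'\x0c'         # end of the Item group
```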
sivel/happymongo | happymongo/__init__.py | get_app_name | def get_app_name():
"""Flask like implementation of getting the applicaiton name via
the filename of the including file
"""
fn = getattr(sys.modules['__main__'], '__file__', None)
if fn is None:
return '__main__'
return os.path.splitext(os.path.basename(fn))[0] | python | def get_app_name():
"""Flask like implementation of getting the applicaiton name via
the filename of the including file
"""
fn = getattr(sys.modules['__main__'], '__file__', None)
if fn is None:
return '__main__'
return os.path.splitext(os.path.basename(fn))[0] | [
"def",
"get_app_name",
"(",
")",
":",
"fn",
"=",
"getattr",
"(",
"sys",
".",
"modules",
"[",
"'__main__'",
"]",
",",
"'__file__'",
",",
"None",
")",
"if",
"fn",
"is",
"None",
":",
"return",
"'__main__'",
"return",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"fn",
")",
")",
"[",
"0",
"]"
] | Flask like implementation of getting the application name via
the filename of the including file | [
"Flask",
"like",
"implementation",
"of",
"getting",
"the",
"applicaiton",
"name",
"via",
"the",
"filename",
"of",
"the",
"including",
"file"
] | 05831465ef9b88210a67d00c35b37d7f114c6a63 | https://github.com/sivel/happymongo/blob/05831465ef9b88210a67d00c35b37d7f114c6a63/happymongo/__init__.py#L33-L41 | valid |
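Usage sketch: run from a script, the helper returns the script's stem; in a REPL, where `__main__` has no `__file__`, it falls back to the literal `'__main__'`:

```python
import os
import sys

def get_app_name():
    fn = getattr(sys.modules['__main__'], '__file__', None)
    if fn is None:
        return '__main__'
    return os.path.splitext(os.path.basename(fn))[0]

# saved as reporter.py and run with `python reporter.py`:
print(get_app_name())   # -> 'reporter'
```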
zvoase/django-relax | relax/viewserver.py | get_function | def get_function(function_name):
"""
Given a Python function name, return the function it refers to.
"""
module, basename = str(function_name).rsplit('.', 1)
try:
return getattr(__import__(module, fromlist=[basename]), basename)
except (ImportError, AttributeError):
raise FunctionNotFound(function_name) | python | def get_function(function_name):
"""
Given a Python function name, return the function it refers to.
"""
module, basename = str(function_name).rsplit('.', 1)
try:
return getattr(__import__(module, fromlist=[basename]), basename)
except (ImportError, AttributeError):
raise FunctionNotFound(function_name) | [
"def",
"get_function",
"(",
"function_name",
")",
":",
"module",
",",
"basename",
"=",
"str",
"(",
"function_name",
")",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"try",
":",
"return",
"getattr",
"(",
"__import__",
"(",
"module",
",",
"fromlist",
"=",
"[",
"basename",
"]",
")",
",",
"basename",
")",
"except",
"(",
"ImportError",
",",
"AttributeError",
")",
":",
"raise",
"FunctionNotFound",
"(",
"function_name",
")"
] | Given a Python function name, return the function it refers to. | [
"Given",
"a",
"Python",
"function",
"name",
"return",
"the",
"function",
"it",
"refers",
"to",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L16-L24 | valid |
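`get_function` is a dotted-path resolver, so it works for anything importable, e.g. the standard library; a failing lookup exercises the module's `FunctionNotFound` path:

```python
join = get_function('os.path.join')
print(join('a', 'b'))              # -> a/b  (a\b on Windows)

try:
    get_function('os.path.nonexistent_name')
except FunctionNotFound as exc:    # raised by the module above
    print('unresolvable:', exc)
```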
zvoase/django-relax | relax/viewserver.py | ViewServerRequestHandler.handle_add_fun | def handle_add_fun(self, function_name):
"""Add a function to the function list, in order."""
function_name = function_name.strip()
try:
function = get_function(function_name)
except Exception, exc:
self.wfile.write(js_error(exc) + NEWLINE)
return
# This tests to see if the function has been decorated with the view
# server synchronisation decorator (``decorate_view``).
if not getattr(function, 'view_decorated', None):
self.functions[function_name] = (self.function_counter, function)
# The decorator gets called with the logger function.
else:
self.functions[function_name] = (self.function_counter,
function(self.log))
self.function_counter += 1
return True | python | def handle_add_fun(self, function_name):
"""Add a function to the function list, in order."""
function_name = function_name.strip()
try:
function = get_function(function_name)
except Exception, exc:
self.wfile.write(js_error(exc) + NEWLINE)
return
# This tests to see if the function has been decorated with the view
# server synchronisation decorator (``decorate_view``).
if not getattr(function, 'view_decorated', None):
self.functions[function_name] = (self.function_counter, function)
# The decorator gets called with the logger function.
else:
self.functions[function_name] = (self.function_counter,
function(self.log))
self.function_counter += 1
return True | [
"def",
"handle_add_fun",
"(",
"self",
",",
"function_name",
")",
":",
"function_name",
"=",
"function_name",
".",
"strip",
"(",
")",
"try",
":",
"function",
"=",
"get_function",
"(",
"function_name",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"wfile",
".",
"write",
"(",
"js_error",
"(",
"exc",
")",
"+",
"NEWLINE",
")",
"return",
"# This tests to see if the function has been decorated with the view",
"# server synchronisation decorator (``decorate_view``).",
"if",
"not",
"getattr",
"(",
"function",
",",
"'view_decorated'",
",",
"None",
")",
":",
"self",
".",
"functions",
"[",
"function_name",
"]",
"=",
"(",
"self",
".",
"function_counter",
",",
"function",
")",
"# The decorator gets called with the logger function.",
"else",
":",
"self",
".",
"functions",
"[",
"function_name",
"]",
"=",
"(",
"self",
".",
"function_counter",
",",
"function",
"(",
"self",
".",
"log",
")",
")",
"self",
".",
"function_counter",
"+=",
"1",
"return",
"True"
] | Add a function to the function list, in order. | [
"Add",
"a",
"function",
"to",
"the",
"function",
"list",
"in",
"order",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L63-L80 | valid |
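A sketch of the `view_decorated` convention checked above (the real `decorate_view` in this package may differ): the decorator marks a factory so the server calls it once with its logger before use:

```python
def decorate_view(func):
    func.view_decorated = True      # the flag handle_add_fun looks for
    return func

@decorate_view
def make_view(log):                 # receives the server's log function
    def view(doc):
        log('mapping %s' % doc.get('_id'))
        yield doc.get('author'), 1
    return view
```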
zvoase/django-relax | relax/viewserver.py | ViewServerRequestHandler.handle_map_doc | def handle_map_doc(self, document):
"""Return the mapping of a document according to the function list."""
# This uses the stored set of functions, sorted by order of addition.
for function in sorted(self.functions.values(), key=lambda x: x[0]):
try:
# It has to be run through ``list``, because it may be a
# generator function.
yield [list(function(document))]
except Exception, exc:
# Otherwise, return an empty list and log the event.
yield []
self.log(repr(exc)) | python | def handle_map_doc(self, document):
"""Return the mapping of a document according to the function list."""
# This uses the stored set of functions, sorted by order of addition.
for function in sorted(self.functions.values(), key=lambda x: x[0]):
try:
# It has to be run through ``list``, because it may be a
# generator function.
yield [list(function(document))]
except Exception, exc:
# Otherwise, return an empty list and log the event.
yield []
self.log(repr(exc)) | [
"def",
"handle_map_doc",
"(",
"self",
",",
"document",
")",
":",
"# This uses the stored set of functions, sorted by order of addition.",
"for",
"function",
"in",
"sorted",
"(",
"self",
".",
"functions",
".",
"values",
"(",
")",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
")",
":",
"try",
":",
"# It has to be run through ``list``, because it may be a",
"# generator function.",
"yield",
"[",
"list",
"(",
"function",
"(",
"document",
")",
")",
"]",
"except",
"Exception",
",",
"exc",
":",
"# Otherwise, return an empty list and log the event.",
"yield",
"[",
"]",
"self",
".",
"log",
"(",
"repr",
"(",
"exc",
")",
")"
] | Return the mapping of a document according to the function list. | [
"Return",
"the",
"mapping",
"of",
"a",
"document",
"according",
"to",
"the",
"function",
"list",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L83-L94 | valid |
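Anything `handle_map_doc` dispatches to is just a callable taking a document and yielding `(key, value)` rows, which is why each result is wrapped in `list(...)`. An illustrative map function:

```python
def by_tag(doc):
    for tag in doc.get('tags', []):
        yield tag, doc['_id']       # one output row per tag

rows = list(by_tag({'_id': 'doc1', 'tags': ['a', 'b']}))
print(rows)                         # [('a', 'doc1'), ('b', 'doc1')]
```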
zvoase/django-relax | relax/viewserver.py | ViewServerRequestHandler.handle_reduce | def handle_reduce(self, reduce_function_names, mapped_docs):
"""Reduce several mapped documents by several reduction functions."""
reduce_functions = []
# This gets a large list of reduction functions, given their names.
for reduce_function_name in reduce_function_names:
try:
reduce_function = get_function(reduce_function_name)
if getattr(reduce_function, 'view_decorated', None):
reduce_function = reduce_function(self.log)
reduce_functions.append(reduce_function)
except Exception, exc:
self.log(repr(exc))
reduce_functions.append(lambda *args, **kwargs: None)
# Transform lots of (key, value) pairs into one (keys, values) pair.
        keys, values = zip(
            *((key, value) for ((key, doc_id), value) in mapped_docs))
# This gets the list of results from the reduction functions.
results = []
for reduce_function in reduce_functions:
try:
results.append(reduce_function(keys, values, rereduce=False))
except Exception, exc:
self.log(repr(exc))
results.append(None)
return [True, results] | python | def handle_reduce(self, reduce_function_names, mapped_docs):
"""Reduce several mapped documents by several reduction functions."""
reduce_functions = []
# This gets a large list of reduction functions, given their names.
for reduce_function_name in reduce_function_names:
try:
reduce_function = get_function(reduce_function_name)
if getattr(reduce_function, 'view_decorated', None):
reduce_function = reduce_function(self.log)
reduce_functions.append(reduce_function)
except Exception, exc:
self.log(repr(exc))
reduce_functions.append(lambda *args, **kwargs: None)
# Transform lots of (key, value) pairs into one (keys, values) pair.
        keys, values = zip(
            *((key, value) for ((key, doc_id), value) in mapped_docs))
# This gets the list of results from the reduction functions.
results = []
for reduce_function in reduce_functions:
try:
results.append(reduce_function(keys, values, rereduce=False))
except Exception, exc:
self.log(repr(exc))
results.append(None)
return [True, results] | [
"def",
"handle_reduce",
"(",
"self",
",",
"reduce_function_names",
",",
"mapped_docs",
")",
":",
"reduce_functions",
"=",
"[",
"]",
"# This gets a large list of reduction functions, given their names.",
"for",
"reduce_function_name",
"in",
"reduce_function_names",
":",
"try",
":",
"reduce_function",
"=",
"get_function",
"(",
"reduce_function_name",
")",
"if",
"getattr",
"(",
"reduce_function",
",",
"'view_decorated'",
",",
"None",
")",
":",
"reduce_function",
"=",
"reduce_function",
"(",
"self",
".",
"log",
")",
"reduce_functions",
".",
"append",
"(",
"reduce_function",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"log",
"(",
"repr",
"(",
"exc",
")",
")",
"reduce_functions",
".",
"append",
"(",
"lambda",
"*",
"args",
",",
"*",
"*",
"kwargs",
":",
"None",
")",
"# Transform lots of (key, value) pairs into one (keys, values) pair.",
"keys",
",",
"values",
"=",
"zip",
"(",
"(",
"key",
",",
"value",
")",
"for",
"(",
"(",
"key",
",",
"doc_id",
")",
",",
"value",
")",
"in",
"mapped_docs",
")",
"# This gets the list of results from the reduction functions.",
"results",
"=",
"[",
"]",
"for",
"reduce_function",
"in",
"reduce_functions",
":",
"try",
":",
"results",
".",
"append",
"(",
"reduce_function",
"(",
"keys",
",",
"values",
",",
"rereduce",
"=",
"False",
")",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"log",
"(",
"repr",
"(",
"exc",
")",
")",
"results",
".",
"append",
"(",
"None",
")",
"return",
"[",
"True",
",",
"results",
"]"
] | Reduce several mapped documents by several reduction functions. | [
"Reduce",
"several",
"mapped",
"documents",
"by",
"several",
"reduction",
"functions",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L96-L120 | valid |
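The `zip(*...)` transposition used above turns an iterable of `(key, value)` pairs into separate key and value sequences; a bare `zip(pairs)` would instead yield 1-tuples and break the two-name unpacking:

```python
mapped = [(('k1', 'doc1'), 1), (('k2', 'doc2'), 2), (('k3', 'doc3'), 3)]
keys, values = zip(*((key, value) for ((key, doc_id), value) in mapped))
assert keys == ('k1', 'k2', 'k3')
assert values == (1, 2, 3)
```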
zvoase/django-relax | relax/viewserver.py | ViewServerRequestHandler.handle_rereduce | def handle_rereduce(self, reduce_function_names, values):
"""Re-reduce a set of values, with a list of rereduction functions."""
# This gets a large list of reduction functions, given their names.
reduce_functions = []
for reduce_function_name in reduce_function_names:
try:
reduce_function = get_function(reduce_function_name)
if getattr(reduce_function, 'view_decorated', None):
reduce_function = reduce_function(self.log)
reduce_functions.append(reduce_function)
except Exception, exc:
self.log(repr(exc))
reduce_functions.append(lambda *args, **kwargs: None)
# This gets the list of results from those functions.
results = []
for reduce_function in reduce_functions:
try:
results.append(reduce_function(None, values, rereduce=True))
except Exception, exc:
self.log(repr(exc))
results.append(None)
return [True, results] | python | def handle_rereduce(self, reduce_function_names, values):
"""Re-reduce a set of values, with a list of rereduction functions."""
# This gets a large list of reduction functions, given their names.
reduce_functions = []
for reduce_function_name in reduce_function_names:
try:
reduce_function = get_function(reduce_function_name)
if getattr(reduce_function, 'view_decorated', None):
reduce_function = reduce_function(self.log)
reduce_functions.append(reduce_function)
except Exception, exc:
self.log(repr(exc))
reduce_functions.append(lambda *args, **kwargs: None)
# This gets the list of results from those functions.
results = []
for reduce_function in reduce_functions:
try:
results.append(reduce_function(None, values, rereduce=True))
except Exception, exc:
self.log(repr(exc))
results.append(None)
return [True, results] | [
"def",
"handle_rereduce",
"(",
"self",
",",
"reduce_function_names",
",",
"values",
")",
":",
"# This gets a large list of reduction functions, given their names.",
"reduce_functions",
"=",
"[",
"]",
"for",
"reduce_function_name",
"in",
"reduce_function_names",
":",
"try",
":",
"reduce_function",
"=",
"get_function",
"(",
"reduce_function_name",
")",
"if",
"getattr",
"(",
"reduce_function",
",",
"'view_decorated'",
",",
"None",
")",
":",
"reduce_function",
"=",
"reduce_function",
"(",
"self",
".",
"log",
")",
"reduce_functions",
".",
"append",
"(",
"reduce_function",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"log",
"(",
"repr",
"(",
"exc",
")",
")",
"reduce_functions",
".",
"append",
"(",
"lambda",
"*",
"args",
",",
"*",
"*",
"kwargs",
":",
"None",
")",
"# This gets the list of results from those functions.",
"results",
"=",
"[",
"]",
"for",
"reduce_function",
"in",
"reduce_functions",
":",
"try",
":",
"results",
".",
"append",
"(",
"reduce_function",
"(",
"None",
",",
"values",
",",
"rereduce",
"=",
"True",
")",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"log",
"(",
"repr",
"(",
"exc",
")",
")",
"results",
".",
"append",
"(",
"None",
")",
"return",
"[",
"True",
",",
"results",
"]"
] | Re-reduce a set of values, with a list of rereduction functions. | [
"Re",
"-",
"reduce",
"a",
"set",
"of",
"values",
"with",
"a",
"list",
"of",
"rereduction",
"functions",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L122-L143 | valid |
zvoase/django-relax | relax/viewserver.py | ViewServerRequestHandler.handle_validate | def handle_validate(self, function_name, new_doc, old_doc, user_ctx):
"""Validate...this function is undocumented, but still in CouchDB."""
try:
function = get_function(function_name)
except Exception, exc:
self.log(repr(exc))
return False
try:
return function(new_doc, old_doc, user_ctx)
except Exception, exc:
self.log(repr(exc))
return repr(exc) | python | def handle_validate(self, function_name, new_doc, old_doc, user_ctx):
"""Validate...this function is undocumented, but still in CouchDB."""
try:
function = get_function(function_name)
except Exception, exc:
self.log(repr(exc))
return False
try:
return function(new_doc, old_doc, user_ctx)
except Exception, exc:
self.log(repr(exc))
return repr(exc) | [
"def",
"handle_validate",
"(",
"self",
",",
"function_name",
",",
"new_doc",
",",
"old_doc",
",",
"user_ctx",
")",
":",
"try",
":",
"function",
"=",
"get_function",
"(",
"function_name",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"log",
"(",
"repr",
"(",
"exc",
")",
")",
"return",
"False",
"try",
":",
"return",
"function",
"(",
"new_doc",
",",
"old_doc",
",",
"user_ctx",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"log",
"(",
"repr",
"(",
"exc",
")",
")",
"return",
"repr",
"(",
"exc",
")"
] | Validate...this function is undocumented, but still in CouchDB. | [
"Validate",
"...",
"this",
"function",
"is",
"undocumented",
"but",
"still",
"in",
"CouchDB",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L145-L156 | valid |
zvoase/django-relax | relax/viewserver.py | ViewServerRequestHandler.handle | def handle(self):
"""The main function called to handle a request."""
while True:
try:
line = self.rfile.readline()
try:
# All input data are lines of JSON like the following:
# ["<cmd_name>" "<cmd_arg1>" "<cmd_arg2>" ...]
# So I handle this by dispatching to various methods.
cmd = json.loads(line)
except Exception, exc:
# Sometimes errors come up. Once again, I can't predict
# anything, but can at least tell CouchDB about the error.
self.wfile.write(repr(exc) + NEWLINE)
continue
else:
# Automagically get the command handler.
handler = getattr(self, 'handle_' + cmd[0], None)
if not handler:
# We are ready to not find commands. It probably won't
# happen, but fortune favours the prepared.
self.wfile.write(
repr(CommandNotFound(cmd[0])) + NEWLINE)
continue
return_value = handler(*cmd[1:])
if not return_value:
continue
# We write the output back to CouchDB.
self.wfile.write(
one_lineify(json.dumps(return_value)) + NEWLINE)
except Exception, exc:
self.wfile.write(repr(exc) + NEWLINE)
continue | python | def handle(self):
"""The main function called to handle a request."""
while True:
try:
line = self.rfile.readline()
try:
# All input data are lines of JSON like the following:
# ["<cmd_name>" "<cmd_arg1>" "<cmd_arg2>" ...]
# So I handle this by dispatching to various methods.
cmd = json.loads(line)
except Exception, exc:
# Sometimes errors come up. Once again, I can't predict
# anything, but can at least tell CouchDB about the error.
self.wfile.write(repr(exc) + NEWLINE)
continue
else:
# Automagically get the command handler.
handler = getattr(self, 'handle_' + cmd[0], None)
if not handler:
# We are ready to not find commands. It probably won't
# happen, but fortune favours the prepared.
self.wfile.write(
repr(CommandNotFound(cmd[0])) + NEWLINE)
continue
return_value = handler(*cmd[1:])
if not return_value:
continue
# We write the output back to CouchDB.
self.wfile.write(
one_lineify(json.dumps(return_value)) + NEWLINE)
except Exception, exc:
self.wfile.write(repr(exc) + NEWLINE)
continue | [
"def",
"handle",
"(",
"self",
")",
":",
"while",
"True",
":",
"try",
":",
"line",
"=",
"self",
".",
"rfile",
".",
"readline",
"(",
")",
"try",
":",
"# All input data are lines of JSON like the following:",
"# [\"<cmd_name>\" \"<cmd_arg1>\" \"<cmd_arg2>\" ...]",
"# So I handle this by dispatching to various methods.",
"cmd",
"=",
"json",
".",
"loads",
"(",
"line",
")",
"except",
"Exception",
",",
"exc",
":",
"# Sometimes errors come up. Once again, I can't predict",
"# anything, but can at least tell CouchDB about the error.",
"self",
".",
"wfile",
".",
"write",
"(",
"repr",
"(",
"exc",
")",
"+",
"NEWLINE",
")",
"continue",
"else",
":",
"# Automagically get the command handler.",
"handler",
"=",
"getattr",
"(",
"self",
",",
"'handle_'",
"+",
"cmd",
"[",
"0",
"]",
",",
"None",
")",
"if",
"not",
"handler",
":",
"# We are ready to not find commands. It probably won't",
"# happen, but fortune favours the prepared.",
"self",
".",
"wfile",
".",
"write",
"(",
"repr",
"(",
"CommandNotFound",
"(",
"cmd",
"[",
"0",
"]",
")",
")",
"+",
"NEWLINE",
")",
"continue",
"return_value",
"=",
"handler",
"(",
"*",
"cmd",
"[",
"1",
":",
"]",
")",
"if",
"not",
"return_value",
":",
"continue",
"# We write the output back to CouchDB.",
"self",
".",
"wfile",
".",
"write",
"(",
"one_lineify",
"(",
"json",
".",
"dumps",
"(",
"return_value",
")",
")",
"+",
"NEWLINE",
")",
"except",
"Exception",
",",
"exc",
":",
"self",
".",
"wfile",
".",
"write",
"(",
"repr",
"(",
"exc",
")",
"+",
"NEWLINE",
")",
"continue"
] | The main function called to handle a request. | [
"The",
"main",
"function",
"called",
"to",
"handle",
"a",
"request",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L158-L190 | valid |
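The loop above implements CouchDB's external view-server protocol: newline-delimited JSON commands in, one-line JSON replies out (hence `one_lineify` plus `NEWLINE`). A round trip might look like this, with illustrative view and document names:

```python
import json

# one JSON array per line in, one JSON value per line out:
command = json.dumps(['add_fun', 'myapp.views.by_author'])
print(command)   # -> ["add_fun", "myapp.views.by_author"]
# a successful registration is answered with a single line: true
```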
zvoase/django-relax | relax/viewserver.py | ViewServerRequestHandler.log | def log(self, string):
"""Log an event on the CouchDB server."""
self.wfile.write(json.dumps({'log': string}) + NEWLINE) | python | def log(self, string):
"""Log an event on the CouchDB server."""
self.wfile.write(json.dumps({'log': string}) + NEWLINE) | [
"def",
"log",
"(",
"self",
",",
"string",
")",
":",
"self",
".",
"wfile",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"{",
"'log'",
":",
"string",
"}",
")",
"+",
"NEWLINE",
")"
] | Log an event on the CouchDB server. | [
"Log",
"an",
"event",
"on",
"the",
"CouchDB",
"server",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L192-L194 | valid |
suryakencana007/baka_model | baka_model/model/helper.py | guid | def guid(*args):
"""
Generates a universally unique ID.
Any arguments only create more randomness.
"""
t = float(time.time() * 1000)
r = float(random.random()*10000000000000)
a = random.random() * 10000000000000
data = str(t) + ' ' + str(r) + ' ' + str(a) + ' ' + str(args)
data = hashlib.md5(data.encode()).hexdigest()[:10]
return data | python | def guid(*args):
"""
Generates a universally unique ID.
Any arguments only create more randomness.
"""
t = float(time.time() * 1000)
r = float(random.random()*10000000000000)
a = random.random() * 10000000000000
data = str(t) + ' ' + str(r) + ' ' + str(a) + ' ' + str(args)
data = hashlib.md5(data.encode()).hexdigest()[:10]
return data | [
"def",
"guid",
"(",
"*",
"args",
")",
":",
"t",
"=",
"float",
"(",
"time",
".",
"time",
"(",
")",
"*",
"1000",
")",
"r",
"=",
"float",
"(",
"random",
".",
"random",
"(",
")",
"*",
"10000000000000",
")",
"a",
"=",
"random",
".",
"random",
"(",
")",
"*",
"10000000000000",
"data",
"=",
"str",
"(",
"t",
")",
"+",
"' '",
"+",
"str",
"(",
"r",
")",
"+",
"' '",
"+",
"str",
"(",
"a",
")",
"+",
"' '",
"+",
"str",
"(",
"args",
")",
"data",
"=",
"hashlib",
".",
"md5",
"(",
"data",
".",
"encode",
"(",
")",
")",
".",
"hexdigest",
"(",
")",
"[",
":",
"10",
"]",
"return",
"data"
] | Generates a universally unique ID.
Any arguments only create more randomness. | [
"Generates",
"a",
"universally",
"unique",
"ID",
".",
"Any",
"arguments",
"only",
"create",
"more",
"randomness",
"."
] | 915c2da9920e973302f5764ae63799acd5ecf0b7 | https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/helper.py#L42-L54 | valid |
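Usage sketch for `guid`; note the ID is only 10 hex characters (40 bits), so this favours brevity over collision resistance, where the standard-library `uuid.uuid4()` would be the stronger choice:

```python
import hashlib
import random
import time

def guid(*args):
    t = float(time.time() * 1000)
    r = float(random.random() * 10000000000000)
    a = random.random() * 10000000000000
    data = '{} {} {} {}'.format(t, r, a, args)
    return hashlib.md5(data.encode()).hexdigest()[:10]

print(guid())             # e.g. '9f1c2ab3de'
print(guid('order', 42))  # extra args only add entropy to the hash input
```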
universalcore/unicore-cms | cms/views/cms_views.py | CmsViews.get_pages | def get_pages(self, limit=5, order_by=('position', '-modified_at')):
"""
Return pages the GitModel knows about.
:param int limit:
The number of pages to return, defaults to 5.
:param tuple order_by:
The attributes to order on,
defaults to ('position', '-modified_at')
"""
return to_eg_objects(self.workspace.S(Page).filter(
language=self.locale).order_by(*order_by)[:limit]) | python | def get_pages(self, limit=5, order_by=('position', '-modified_at')):
"""
Return pages the GitModel knows about.
:param int limit:
The number of pages to return, defaults to 5.
:param tuple order_by:
The attributes to order on,
defaults to ('position', '-modified_at')
"""
return to_eg_objects(self.workspace.S(Page).filter(
language=self.locale).order_by(*order_by)[:limit]) | [
"def",
"get_pages",
"(",
"self",
",",
"limit",
"=",
"5",
",",
"order_by",
"=",
"(",
"'position'",
",",
"'-modified_at'",
")",
")",
":",
"return",
"to_eg_objects",
"(",
"self",
".",
"workspace",
".",
"S",
"(",
"Page",
")",
".",
"filter",
"(",
"language",
"=",
"self",
".",
"locale",
")",
".",
"order_by",
"(",
"*",
"order_by",
")",
"[",
":",
"limit",
"]",
")"
] | Return pages the GitModel knows about.
:param int limit:
The number of pages to return, defaults to 5.
:param tuple order_by:
The attributes to order on,
defaults to ('position', '-modified_at') | [
"Return",
"pages",
"the",
"GitModel",
"knows",
"about",
".",
":",
"param",
"int",
"limit",
":",
"The",
"number",
"of",
"pages",
"to",
"return",
"defaults",
"to",
"5",
".",
":",
"param",
"tuple",
"order_by",
":",
"The",
"attributes",
"to",
"order",
"on",
"defaults",
"to",
"(",
"position",
"-",
"modified_at",
")"
] | f68385fe742eb7efcce0d8f04f42f26ccf05d624 | https://github.com/universalcore/unicore-cms/blob/f68385fe742eb7efcce0d8f04f42f26ccf05d624/cms/views/cms_views.py#L159-L169 | valid |
universalcore/unicore-cms | cms/views/cms_views.py | CmsViews.get_featured_pages | def get_featured_pages(
self, limit=5, order_by=('position', '-modified_at')):
"""
Return featured pages the GitModel knows about.
:param str locale:
The locale string, like `eng_UK`.
:param int limit:
The number of pages to return, defaults to 5.
:param tuple order_by:
The attributes to order on,
defaults to ('position', '-modified_at').
"""
return self._get_featured_pages(self.locale, limit, order_by) | python | def get_featured_pages(
self, limit=5, order_by=('position', '-modified_at')):
"""
Return featured pages the GitModel knows about.
:param str locale:
The locale string, like `eng_UK`.
:param int limit:
The number of pages to return, defaults to 5.
:param tuple order_by:
The attributes to order on,
defaults to ('position', '-modified_at').
"""
return self._get_featured_pages(self.locale, limit, order_by) | [
"def",
"get_featured_pages",
"(",
"self",
",",
"limit",
"=",
"5",
",",
"order_by",
"=",
"(",
"'position'",
",",
"'-modified_at'",
")",
")",
":",
"return",
"self",
".",
"_get_featured_pages",
"(",
"self",
".",
"locale",
",",
"limit",
",",
"order_by",
")"
] | Return featured pages the GitModel knows about.
:param str locale:
The locale string, like `eng_UK`.
:param int limit:
The number of pages to return, defaults to 5.
:param tuple order_by:
The attributes to order on,
defaults to ('position', '-modified_at'). | [
"Return",
"featured",
"pages",
"the",
"GitModel",
"knows",
"about",
".",
":",
"param",
"str",
"locale",
":",
"The",
"locale",
"string",
"like",
"eng_UK",
".",
":",
"param",
"int",
"limit",
":",
"The",
"number",
"of",
"pages",
"to",
"return",
"defaults",
"to",
"5",
".",
":",
"param",
"tuple",
"order_by",
":",
"The",
"attributes",
"to",
"order",
"on",
"defaults",
"to",
"(",
"position",
"-",
"modified_at",
")",
"."
] | f68385fe742eb7efcce0d8f04f42f26ccf05d624 | https://github.com/universalcore/unicore-cms/blob/f68385fe742eb7efcce0d8f04f42f26ccf05d624/cms/views/cms_views.py#L176-L188 | valid |
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2.register_app | def register_app(self, name, redirect_uri, callback):
'''
register_app takes an application name and redirect_uri
It generates client_id (client_key) and client_secret,
then stores all of the above in the data_store,
and returns a dictionary containing the client_id and client_secret.
'''
client_id = self._generate_token()
client_secret = self._generate_token(64)
yield Task(self.data_store.store, 'applications', client_id=client_id,
client_secret=client_secret, name=name,
redirect_uri=redirect_uri)
callback({'client_id':client_id, 'client_secret':client_secret}) | python | def register_app(self, name, redirect_uri, callback):
'''
register_app takes an application name and redirect_uri
It generates client_id (client_key) and client_secret,
then stores all of the above in the data_store,
and returns a dictionary containing the client_id and client_secret.
'''
client_id = self._generate_token()
client_secret = self._generate_token(64)
yield Task(self.data_store.store, 'applications', client_id=client_id,
client_secret=client_secret, name=name,
redirect_uri=redirect_uri)
callback({'client_id':client_id, 'client_secret':client_secret}) | [
"def",
"register_app",
"(",
"self",
",",
"name",
",",
"redirect_uri",
",",
"callback",
")",
":",
"client_id",
"=",
"self",
".",
"_generate_token",
"(",
")",
"client_secret",
"=",
"self",
".",
"_generate_token",
"(",
"64",
")",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"store",
",",
"'applications'",
",",
"client_id",
"=",
"client_id",
",",
"client_secret",
"=",
"client_secret",
",",
"name",
"=",
"name",
",",
"redirect_uri",
"=",
"redirect_uri",
")",
"callback",
"(",
"{",
"'client_id'",
":",
"client_id",
",",
"'client_secret'",
":",
"client_secret",
"}",
")"
] | register_app takes an application name and redirect_uri
It generates client_id (client_key) and client_secret,
then stores all of the above in the data_store,
and returns a dictionary containing the client_id and client_secret. | [
"register_app",
"takes",
"an",
"application",
"name",
"and",
"redirect_uri",
"It",
"generates",
"client_id",
"(",
"client_key",
")",
"and",
"client_secret",
"then",
"stores",
"all",
"of",
"the",
"above",
"in",
"the",
"data_store",
"and",
"returns",
"a",
"dictionary",
"containing",
"the",
"client_id",
"and",
"client_secret",
"."
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L22-L34 | valid |
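For reference, a minimal sketch of the credential shapes register_app produces: a 32-character client_id and a 64-character client_secret drawn from the same alphanumeric alphabet the class's _generate_token helper uses. The generator below is a stand-in for illustration, not the library call, and the dict mirrors the callback payload:

from random import choice
from string import ascii_letters, digits

def generate_token(length=32):
    # Same idea as the _generate_token helper: random alphanumerics.
    return ''.join(choice(ascii_letters + digits) for _ in range(length))

creds = {'client_id': generate_token(), 'client_secret': generate_token(64)}
print(len(creds['client_id']), len(creds['client_secret']))  # 32 64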
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2.request_authorization | def request_authorization(self, client_id, user_id, response_type,
redirect_uri=None, scope=None, state=None,
expires=600, callback=None):
'''
request_authorization generates a nonce, and stores it in the data_store along with the
client_id, user_id, and expiration timestamp.
It then returns a dictionary containing the nonce as "code," and the passed
state.
---
response_type MUST be "code." this is directly from the OAuth2 spec.
this probably doesn't need to be checked here, but if it's in the spec I
guess it should be verified somewhere.
scope has not been implemented here. it will be stored, but there is no
scope-checking built in here at this time.
if a redirect_uri is passed, it must match the registered redirect_uri.
again, this is per spec.
'''
if response_type != 'code':
raise Proauth2Error('invalid_request',
'response_type must be "code"', state=state)
client = yield Task(self.data_store.fetch, 'applications',
client_id=client_id)
if not client: raise Proauth2Error('access_denied')
if redirect_uri and client['redirect_uri'] != redirect_uri:
raise Proauth2Error('invalid_request', "redirect_uris don't match")
nonce_code = self._generate_token()
expires = time() + expires
try:
yield Task(self.data_store.store, 'nonce_codes', code=nonce_code,
client_id=client_id, expires=expires, user_id=user_id,
scope=scope)
except Proauth2Error as e:
e.state = state
raise e
callback({'code':nonce_code, 'state':state}) | python | def request_authorization(self, client_id, user_id, response_type,
redirect_uri=None, scope=None, state=None,
expires=600, callback=None):
'''
request_authorization generates a nonce, and stores it in the data_store along with the
client_id, user_id, and expiration timestamp.
It then returns a dictionary containing the nonce as "code," and the passed
state.
---
response_type MUST be "code." this is directly from the OAuth2 spec.
this probably doesn't need to be checked here, but if it's in the spec I
guess it should be verified somewhere.
scope has not been implemented here. it will be stored, but there is no
scope-checking built in here at this time.
if a redirect_uri is passed, it must match the registered redirect_uri.
again, this is per spec.
'''
if response_type != 'code':
raise Proauth2Error('invalid_request',
'response_type must be "code"', state=state)
client = yield Task(self.data_store.fetch, 'applications',
client_id=client_id)
if not client: raise Proauth2Error('access_denied')
if redirect_uri and client['redirect_uri'] != redirect_uri:
raise Proauth2Error('invalid_request', "redirect_uris don't match")
nonce_code = self._generate_token()
expires = time() + expires
try:
yield Task(self.data_store.store, 'nonce_codes', code=nonce_code,
client_id=client_id, expires=expires, user_id=user_id,
scope=scope)
except Proauth2Error as e:
e.state = state
raise e
callback({'code':nonce_code, 'state':state}) | [
"def",
"request_authorization",
"(",
"self",
",",
"client_id",
",",
"user_id",
",",
"response_type",
",",
"redirect_uri",
"=",
"None",
",",
"scope",
"=",
"None",
",",
"state",
"=",
"None",
",",
"expires",
"=",
"600",
",",
"callback",
"=",
"None",
")",
":",
"if",
"response_type",
"!=",
"'code'",
":",
"raise",
"Proauth2Error",
"(",
"'invalid_request'",
",",
"'response_type must be \"code\"'",
",",
"state",
"=",
"state",
")",
"client",
"=",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"fetch",
",",
"'applications'",
",",
"client_id",
"=",
"client_id",
")",
"if",
"not",
"client",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
")",
"if",
"redirect_uri",
"and",
"client",
"[",
"'redirect_uri'",
"]",
"!=",
"redirect_uri",
":",
"raise",
"Proauth2Error",
"(",
"'invalid_request'",
",",
"\"redirect_uris don't match\"",
")",
"nonce_code",
"=",
"self",
".",
"_generate_token",
"(",
")",
"expires",
"=",
"time",
"(",
")",
"+",
"expires",
"try",
":",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"store",
",",
"'nonce_codes'",
",",
"code",
"=",
"nonce_code",
",",
"client_id",
"=",
"client_id",
",",
"expires",
"=",
"expires",
",",
"user_id",
"=",
"user_id",
",",
"scope",
"=",
"scope",
")",
"except",
"Proauth2Error",
"as",
"e",
":",
"e",
".",
"state",
"=",
"state",
"raise",
"e",
"callback",
"(",
"{",
"'code'",
":",
"nonce_code",
",",
"'state'",
":",
"state",
"}",
")"
] | request_authorization generates a nonce, and stores it in the data_store along with the
client_id, user_id, and expiration timestamp.
It then returns a dictionary containing the nonce as "code," and the passed
state.
---
response_type MUST be "code." this is directly from the OAuth2 spec.
this probably doesn't need to be checked here, but if it's in the spec I
guess it should be verified somewhere.
scope has not been implemented here. it will be stored, but there is no
scope-checking built in here at this time.
if a redirect_uri is passed, it must match the registered redirect_uri.
again, this is per spec. | [
"request_authorization",
"generates",
"a",
"nonce",
"and",
"stores",
"it",
"in",
"the",
"data_store",
"along",
"with",
"the",
"client_id",
"user_id",
"and",
"expiration",
"timestamp",
".",
"It",
"then",
"returns",
"a",
"dictionary",
"containing",
"the",
"nonce",
"as",
"code",
"and",
"the",
"passed",
"state",
".",
"---",
"response_type",
"MUST",
"be",
"code",
".",
"this",
"is",
"directly",
"from",
"the",
"OAuth2",
"spec",
".",
"this",
"probably",
"doesn",
"t",
"need",
"to",
"be",
"checked",
"here",
"but",
"if",
"it",
"s",
"in",
"the",
"spec",
"I",
"guess",
"it",
"should",
"be",
"verified",
"somewhere",
".",
"scope",
"has",
"not",
"been",
"implemented",
"here",
".",
"it",
"will",
"be",
"stored",
"but",
"there",
"is",
"no",
"scope",
"-",
"checking",
"built",
"in",
"here",
"at",
"this",
"time",
".",
"if",
"a",
"redirect_uri",
"is",
"passed",
"it",
"must",
"match",
"the",
"registered",
"redirect_uri",
".",
"again",
"this",
"is",
"per",
"spec",
"."
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L37-L74 | valid |
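The nonce bookkeeping above stores an absolute expiry timestamp (time() + expires), so checking for expiry later is a single comparison. A self-contained sketch, with a plain dict standing in for the data_store record:

from time import time

expires_in = 600  # seconds; matches the method's default 'expires' argument
record = {'code': 'abc123', 'expires': time() + expires_in}

def is_expired(record):
    # Mirrors the later check in _validate_request_code: time() > expires.
    return time() > record['expires']

print(is_expired(record))  # False until roughly ten minutes from now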
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2.request_access_token | def request_access_token(self, client_id, key, code, grant_type,
redirect_uri=None, method='direct_auth',
callback=None):
'''
request_access_token validates the client_id and client_secret, using the
provided method, then generates an access_token, stores it with the user_id
from the nonce, and returns a dictionary containing an access_token and
bearer token.
---
from the spec, it looks like there are different types of
tokens, but I don't understand the distinctions, so someone else can fix
this if need be.
regarding the method: it appears that it is intended for there to be
multiple ways to verify the client_id. my assumption is that you use the
secret as the salt and pass the hash of the client_id or something, and
then compare hashes on the server end. currently the only implemented method
is direct comparison of the client_ids and client_secrets.
additional methods can be added to proauth2.auth_methods
'''
if grant_type != 'authorization_code':
raise Proauth2Error('invalid_request',
'grant_type must be "authorization_code"')
yield Task(self._auth, client_id, key, method)
user_id = yield Task(self._validate_request_code, code, client_id)
access_token = self._generate_token(64)
yield Task(self.data_store.store, 'tokens', token=access_token,
user_id=user_id, client_id=client_id)
callback({'access_token':access_token, 'token_type':'bearer'}) | python | def request_access_token(self, client_id, key, code, grant_type,
redirect_uri=None, method='direct_auth',
callback=None):
'''
request_access_token validates the client_id and client_secret, using the
provided method, then generates an access_token, stores it with the user_id
from the nonce, and returns a dictionary containing an access_token and
bearer token.
---
from the spec, it looks like there are different types of
tokens, but I don't understand the distinctions, so someone else can fix
this if need be.
regarding the method: it appears that it is intended for there to be
multiple ways to verify the client_id. my assumption is that you use the
secret as the salt and pass the hash of the client_id or something, and
then compare hashes on the server end. currently the only implemented method
is direct comparison of the client_ids and client_secrets.
additional methods can be added to proauth2.auth_methods
'''
if grant_type != 'authorization_code':
raise Proauth2Error('invalid_request',
'grant_type must be "authorization_code"')
yield Task(self._auth, client_id, key, method)
user_id = yield Task(self._validate_request_code, code, client_id)
access_token = self._generate_token(64)
yield Task(self.data_store.store, 'tokens', token=access_token,
user_id=user_id, client_id=client_id)
callback({'access_token':access_token, 'token_type':'bearer'}) | [
"def",
"request_access_token",
"(",
"self",
",",
"client_id",
",",
"key",
",",
"code",
",",
"grant_type",
",",
"redirect_uri",
"=",
"None",
",",
"method",
"=",
"'direct_auth'",
",",
"callback",
"=",
"None",
")",
":",
"if",
"grant_type",
"!=",
"'authorization_code'",
":",
"raise",
"Proauth2Error",
"(",
"'invalid_request'",
",",
"'grant_type must be \"authorization_code\"'",
")",
"yield",
"Task",
"(",
"self",
".",
"_auth",
",",
"client_id",
",",
"key",
",",
"method",
")",
"user_id",
"=",
"yield",
"Task",
"(",
"self",
".",
"_validate_request_code",
",",
"code",
",",
"client_id",
")",
"access_token",
"=",
"self",
".",
"_generate_token",
"(",
"64",
")",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"store",
",",
"'tokens'",
",",
"token",
"=",
"access_token",
",",
"user_id",
"=",
"user_id",
",",
"client_id",
"=",
"client_id",
")",
"callback",
"(",
"{",
"'access_token'",
":",
"access_token",
",",
"'token_type'",
":",
"'bearer'",
"}",
")"
] | request_access_token validates the client_id and client_secret, using the
provided method, then generates an access_token, stores it with the user_id
from the nonce, and returns a dictionary containing an access_token and
bearer token.
---
from the spec, it looks like there are different types of
tokens, but I don't understand the distinctions, so someone else can fix
this if need be.
regarding the method: it appears that it is intended for there to be
multiple ways to verify the client_id. my assumption is that you use the
secret as the salt and pass the hash of the client_id or something, and
then compare hashes on the server end. currently the only implemented method
is direct comparison of the client_ids and client_secrets.
additional methods can be added to proauth2.auth_methods | [
"request_access_token",
"validates",
"the",
"client_id",
"and",
"client_secret",
"using",
"the",
"provided",
"method",
"then",
"generates",
"an",
"access_token",
"stores",
"it",
"with",
"the",
"user_id",
"from",
"the",
"nonce",
"and",
"returns",
"a",
"dictionary",
"containing",
"an",
"access_token",
"and",
"bearer",
"token",
".",
"---",
"from",
"the",
"spec",
"it",
"looks",
"like",
"there",
"are",
"different",
"types",
"of",
"tokens",
"but",
"i",
"don",
"t",
"understand",
"the",
"disctintions",
"so",
"someone",
"else",
"can",
"fix",
"this",
"if",
"need",
"be",
".",
"regarding",
"the",
"method",
":",
"it",
"appears",
"that",
"it",
"is",
"intended",
"for",
"there",
"to",
"be",
"multiple",
"ways",
"to",
"verify",
"the",
"client_id",
".",
"my",
"assumption",
"is",
"that",
"you",
"use",
"the",
"secret",
"as",
"the",
"salt",
"and",
"pass",
"the",
"hashed",
"of",
"the",
"client_id",
"or",
"something",
"and",
"then",
"compare",
"hashes",
"on",
"the",
"server",
"end",
".",
"currently",
"the",
"only",
"implemented",
"method",
"is",
"direct",
"comparison",
"of",
"the",
"client_ids",
"and",
"client_secrets",
".",
"additional",
"methods",
"can",
"be",
"added",
"to",
"proauth2",
".",
"auth_methods"
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L77-L106 | valid |
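Two concrete details worth pinning down from the row above: the method rejects any grant_type other than 'authorization_code', and its success payload is a two-key dict. A sketch with ValueError standing in for Proauth2Error:

def issue_token(grant_type, access_token='x' * 64):
    if grant_type != 'authorization_code':
        raise ValueError('grant_type must be "authorization_code"')
    return {'access_token': access_token, 'token_type': 'bearer'}

print(issue_token('authorization_code')['token_type'])  # bearer
try:
    issue_token('implicit')
except ValueError as e:
    print(e)  # grant_type must be "authorization_code"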
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2.authenticate_token | def authenticate_token(self, token, callback):
'''
authenticate_token checks the passed token and returns the user_id it is
associated with. it is assumed that this method won't be directly exposed to
the oauth client, but some kind of framework or wrapper. this allows the
framework to have the user_id without doing additional DB calls.
'''
token_data = yield Task(self.data_store.fetch, 'tokens', token=token)
if not token_data:
raise Proauth2Error('access_denied',
'token does not exist or has been revoked')
callback(token_data['user_id']) | python | def authenticate_token(self, token, callback):
'''
authenticate_token checks the passed token and returns the user_id it is
associated with. it is assumed that this method won't be directly exposed to
the oauth client, but some kind of framework or wrapper. this allows the
framework to have the user_id without doing additional DB calls.
'''
token_data = yield Task(self.data_store.fetch, 'tokens', token=token)
if not token_data:
raise Proauth2Error('access_denied',
'token does not exist or has been revoked')
callback(token_data['user_id']) | [
"def",
"authenticate_token",
"(",
"self",
",",
"token",
",",
"callback",
")",
":",
"token_data",
"=",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"fetch",
",",
"'tokens'",
",",
"token",
"=",
"token",
")",
"if",
"not",
"token_data",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
",",
"'token does not exist or has been revoked'",
")",
"callback",
"(",
"token_data",
"[",
"'user_id'",
"]",
")"
] | authenticate_token checks the passed token and returns the user_id it is
associated with. it is assumed that this method won't be directly exposed to
the oauth client, but some kind of framework or wrapper. this allows the
framework to have the user_id without doing additional DB calls. | [
"authenticate_token",
"checks",
"the",
"passed",
"token",
"and",
"returns",
"the",
"user_id",
"it",
"is",
"associated",
"with",
".",
"it",
"is",
"assumed",
"that",
"this",
"method",
"won",
"t",
"be",
"directly",
"exposed",
"to",
"the",
"oauth",
"client",
"but",
"some",
"kind",
"of",
"framework",
"or",
"wrapper",
".",
"this",
"allows",
"the",
"framework",
"to",
"have",
"the",
"user_id",
"without",
"doing",
"additional",
"DB",
"calls",
"."
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L109-L120 | valid |
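A dict-backed sketch of the lookup authenticate_token performs; the real data_store.fetch is asynchronous and callback-driven, and LookupError here stands in for Proauth2Error('access_denied', ...):

tokens = {'tok1': {'user_id': 42, 'client_id': 'abc'}}

def authenticate(token):
    data = tokens.get(token)
    if not data:
        raise LookupError('token does not exist or has been revoked')
    return data['user_id']

print(authenticate('tok1'))  # 42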
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2.revoke_token | def revoke_token(self, token, callback):
'''
revoke_token removes the access token from the data_store
'''
yield Task(self.data_store.remove, 'tokens', token=token)
callback() | python | def revoke_token(self, token, callback):
'''
revoke_token removes the access token from the data_store
'''
yield Task(self.data_store.remove, 'tokens', token=token)
callback() | [
"def",
"revoke_token",
"(",
"self",
",",
"token",
",",
"callback",
")",
":",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"remove",
",",
"'tokens'",
",",
"token",
"=",
"token",
")",
"callback",
"(",
")"
] | revoke_token removes the access token from the data_store | [
"revoke_token",
"removes",
"the",
"access",
"token",
"from",
"the",
"data_store"
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L123-L128 | valid |
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2._auth | def _auth(self, client_id, key, method, callback):
'''
_auth - internal method to ensure the client_id and client_secret passed with
the nonce match
'''
available = auth_methods.keys()
if method not in available:
raise Proauth2Error('invalid_request',
'unsupported authentication method: %s'
'available methods: %s' % \
(method, '\n'.join(available)))
client = yield Task(self.data_store.fetch, 'applications',
client_id=client_id)
if not client: raise Proauth2Error('access_denied')
if not auth_methods[method](key, client['client_secret']):
raise Proauth2Error('access_denied')
callback() | python | def _auth(self, client_id, key, method, callback):
'''
_auth - internal method to ensure the client_id and client_secret passed with
the nonce match
'''
available = auth_methods.keys()
if method not in available:
raise Proauth2Error('invalid_request',
'unsupported authentication method: %s'
'available methods: %s' % \
(method, '\n'.join(available)))
client = yield Task(self.data_store.fetch, 'applications',
client_id=client_id)
if not client: raise Proauth2Error('access_denied')
if not auth_methods[method](key, client['client_secret']):
raise Proauth2Error('access_denied')
callback() | [
"def",
"_auth",
"(",
"self",
",",
"client_id",
",",
"key",
",",
"method",
",",
"callback",
")",
":",
"available",
"=",
"auth_methods",
".",
"keys",
"(",
")",
"if",
"method",
"not",
"in",
"available",
":",
"raise",
"Proauth2Error",
"(",
"'invalid_request'",
",",
"'unsupported authentication method: %s'",
"'available methods: %s'",
"%",
"(",
"method",
",",
"'\\n'",
".",
"join",
"(",
"available",
")",
")",
")",
"client",
"=",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"fetch",
",",
"'applications'",
",",
"client_id",
"=",
"client_id",
")",
"if",
"not",
"client",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
")",
"if",
"not",
"auth_methods",
"[",
"method",
"]",
"(",
"key",
",",
"client",
"[",
"'client_secret'",
"]",
")",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
")",
"callback",
"(",
")"
] | _auth - internal method to ensure the client_id and client_secret passed with
the nonce match | [
"_auth",
"-",
"internal",
"method",
"to",
"ensure",
"the",
"client_id",
"and",
"client_secret",
"passed",
"with",
"the",
"nonce",
"match"
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L131-L147 | valid |
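The rows never show proauth2.auth_methods itself, but given how _auth calls it, a dict of callables keyed by method name is a plausible shape. An illustrative stand-in with the one method the docstrings mention, direct comparison:

auth_methods = {'direct_auth': lambda key, secret: key == secret}

def check(method, key, secret):
    if method not in auth_methods:
        raise ValueError('unsupported authentication method: %s' % method)
    return auth_methods[method](key, secret)

print(check('direct_auth', 's3cret', 's3cret'))  # True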
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2._validate_request_code | def _validate_request_code(self, code, client_id, callback):
'''
_validate_request_code - internal method for verifying the given nonce.
also removes the nonce from the data_store, as they are intended for
one-time use.
'''
nonce = yield Task(self.data_store.fetch, 'nonce_codes', code=code)
if not nonce:
raise Proauth2Error('access_denied', 'invalid request code: %s' % code)
if client_id != nonce['client_id']:
raise Proauth2Error('access_denied', 'invalid request code: %s' % code)
user_id = nonce['user_id']
expires = nonce['expires']
yield Task(self.data_store.remove, 'nonce_codes', code=code,
client_id=client_id, user_id=user_id)
if time() > expires:
raise Proauth2Error('access_denied', 'request code %s expired' % code)
callback(user_id) | python | def _validate_request_code(self, code, client_id, callback):
'''
_validate_request_code - internal method for verifying the given nonce.
also removes the nonce from the data_store, as they are intended for
one-time use.
'''
nonce = yield Task(self.data_store.fetch, 'nonce_codes', code=code)
if not nonce:
raise Proauth2Error('access_denied', 'invalid request code: %s' % code)
if client_id != nonce['client_id']:
raise Proauth2Error('access_denied', 'invalid request code: %s' % code)
user_id = nonce['user_id']
expires = nonce['expires']
yield Task(self.data_store.remove, 'nonce_codes', code=code,
client_id=client_id, user_id=user_id)
if time() > expires:
raise Proauth2Error('access_denied', 'request code %s expired' % code)
callback(user_id) | [
"def",
"_validate_request_code",
"(",
"self",
",",
"code",
",",
"client_id",
",",
"callback",
")",
":",
"nonce",
"=",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"fetch",
",",
"'nonce_codes'",
",",
"code",
"=",
"code",
")",
"if",
"not",
"nonce",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
",",
"'invalid request code: %s'",
"%",
"code",
")",
"if",
"client_id",
"!=",
"nonce",
"[",
"'client_id'",
"]",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
",",
"'invalid request code: %s'",
"%",
"code",
")",
"user_id",
"=",
"nonce",
"[",
"'user_id'",
"]",
"expires",
"=",
"nonce",
"[",
"'expires'",
"]",
"yield",
"Task",
"(",
"self",
".",
"data_store",
".",
"remove",
",",
"'nonce_codes'",
",",
"code",
"=",
"code",
",",
"client_id",
"=",
"client_id",
",",
"user_id",
"=",
"user_id",
")",
"if",
"time",
"(",
")",
">",
"expires",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
",",
"'request code %s expired'",
"%",
"code",
")",
"callback",
"(",
"user_id",
")"
] | _validate_request_code - internal method for verifying the given nonce.
also removes the nonce from the data_store, as they are intended for
one-time use. | [
"_validate_request_code",
"-",
"internal",
"method",
"for",
"verifying",
"the",
"the",
"given",
"nonce",
".",
"also",
"removes",
"the",
"nonce",
"from",
"the",
"data_store",
"as",
"they",
"are",
"intended",
"for",
"one",
"-",
"time",
"use",
"."
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L150-L169 | valid |
charlesthomas/proauth2 | proauth2/async_proauth2.py | AsyncProauth2._generate_token | def _generate_token(self, length=32):
'''
_generate_token - internal function for generating randomized alphanumeric
strings of a given length
'''
return ''.join(choice(ascii_letters + digits) for x in range(length)) | python | def _generate_token(self, length=32):
'''
_generate_token - internal function for generating randomized alphanumeric
strings of a given length
'''
return ''.join(choice(ascii_letters + digits) for x in range(length)) | [
"def",
"_generate_token",
"(",
"self",
",",
"length",
"=",
"32",
")",
":",
"return",
"''",
".",
"join",
"(",
"choice",
"(",
"ascii_letters",
"+",
"digits",
")",
"for",
"x",
"in",
"range",
"(",
"length",
")",
")"
] | _generate_token - internal function for generating randomized alphanumeric
strings of a given length | [
"_generate_token",
"-",
"internal",
"function",
"for",
"generating",
"randomized",
"alphanumberic",
"strings",
"of",
"a",
"given",
"length"
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L171-L176 | valid |
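The helper above uses random.choice, which is fine for illustration; for real tokens the standard-library secrets module is the usual choice today because it draws from a CSPRNG. A drop-in variant with the same output shape (a suggestion, not the library's code):

import secrets
import string

def generate_token(length=32):
    # Cryptographically strong alphanumeric token of the given length.
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))

print(len(generate_token()), len(generate_token(64)))  # 32 64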
takaomag/chatora.util | chatora/util/functional.py | merge_ordered | def merge_ordered(ordereds: typing.Iterable[typing.Any]) -> typing.Iterable[typing.Any]:
"""Merge multiple ordered so that within-ordered order is preserved
"""
seen_set = set()
add_seen = seen_set.add
return reversed(tuple(map(
lambda obj: add_seen(obj) or obj,
filterfalse(
seen_set.__contains__,
chain.from_iterable(map(reversed, reversed(ordereds))),
),
))) | python | def merge_ordered(ordereds: typing.Iterable[typing.Any]) -> typing.Iterable[typing.Any]:
"""Merge multiple ordered so that within-ordered order is preserved
"""
seen_set = set()
add_seen = seen_set.add
return reversed(tuple(map(
lambda obj: add_seen(obj) or obj,
filterfalse(
seen_set.__contains__,
chain.from_iterable(map(reversed, reversed(ordereds))),
),
))) | [
"def",
"merge_ordered",
"(",
"ordereds",
":",
"typing",
".",
"Iterable",
"[",
"typing",
".",
"Any",
"]",
")",
"->",
"typing",
".",
"Iterable",
"[",
"typing",
".",
"Any",
"]",
":",
"seen_set",
"=",
"set",
"(",
")",
"add_seen",
"=",
"seen_set",
".",
"add",
"return",
"reversed",
"(",
"tuple",
"(",
"map",
"(",
"lambda",
"obj",
":",
"add_seen",
"(",
"obj",
")",
"or",
"obj",
",",
"filterfalse",
"(",
"seen_set",
".",
"__contains__",
",",
"chain",
".",
"from_iterable",
"(",
"map",
"(",
"reversed",
",",
"reversed",
"(",
"ordereds",
")",
")",
")",
",",
")",
",",
")",
")",
")"
] | Merge multiple ordered sequences so that within-sequence order is preserved | [
"Merge",
"multiple",
"ordered",
"so",
"that",
"within",
"-",
"ordered",
"order",
"is",
"preserved"
] | 0fb36aca5da93bdd8e23a0c783095d621b582d89 | https://github.com/takaomag/chatora.util/blob/0fb36aca5da93bdd8e23a0c783095d621b582d89/chatora/util/functional.py#L86-L97 | valid |
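A usage sketch for the merge above; the function body is restated without type annotations so the snippet runs standalone. One caveat worth noting: despite the Iterable annotation, reversed() requires the argument to be a sequence.

from itertools import chain, filterfalse

def merge_ordered(ordereds):
    seen_set = set()
    add_seen = seen_set.add
    return reversed(tuple(map(
        lambda obj: add_seen(obj) or obj,
        filterfalse(seen_set.__contains__,
                    chain.from_iterable(map(reversed, reversed(ordereds)))))))

# Each inner list is one observed ordering; duplicates collapse to their
# last occurrence, and within-sequence order is kept for consistent inputs.
print(list(merge_ordered([['a', 'b'], ['b', 'c']])))  # ['a', 'b', 'c']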
tklovett/PyShirtsIO | ShirtsIO/helpers.py | validate_params | def validate_params(required, optional, params):
"""
Helps us validate the parameters for the request
:param required: a list of strings of required options for the
api request
:param optional: a list of strings of optional, allowed options for the
api request
:param params: a dict, the key-value store which we really only care about
the key which tells us what the user is using for the
API request
:returns: None or throws an exception if the validation fails
"""
missing_fields = [x for x in required if x not in params]
if missing_fields:
field_strings = ", ".join(missing_fields)
raise Exception("Missing fields: %s" % field_strings)
disallowed_fields = [x for x in params if x not in optional and x not in required]
if disallowed_fields:
field_strings = ", ".join(disallowed_fields)
raise Exception("Disallowed fields: %s" % field_strings) | python | def validate_params(required, optional, params):
"""
Helps us validate the parameters for the request
:param required: a list of strings of required options for the
api request
:param optional: a list of strings of optional, allowed options for the
api request
:param params: a dict, the key-value store which we really only care about
the key which tells us what the user is using for the
API request
:returns: None or throws an exception if the validation fails
"""
missing_fields = [x for x in required if x not in params]
if missing_fields:
field_strings = ", ".join(missing_fields)
raise Exception("Missing fields: %s" % field_strings)
disallowed_fields = [x for x in params if x not in optional and x not in required]
if disallowed_fields:
field_strings = ", ".join(disallowed_fields)
raise Exception("Disallowed fields: %s" % field_strings) | [
"def",
"validate_params",
"(",
"required",
",",
"optional",
",",
"params",
")",
":",
"missing_fields",
"=",
"[",
"x",
"for",
"x",
"in",
"required",
"if",
"x",
"not",
"in",
"params",
"]",
"if",
"missing_fields",
":",
"field_strings",
"=",
"\", \"",
".",
"join",
"(",
"missing_fields",
")",
"raise",
"Exception",
"(",
"\"Missing fields: %s\"",
"%",
"field_strings",
")",
"disallowed_fields",
"=",
"[",
"x",
"for",
"x",
"in",
"params",
"if",
"x",
"not",
"in",
"optional",
"and",
"x",
"not",
"in",
"required",
"]",
"if",
"disallowed_fields",
":",
"field_strings",
"=",
"\", \"",
".",
"join",
"(",
"disallowed_fields",
")",
"raise",
"Exception",
"(",
"\"Disallowed fields: %s\"",
"%",
"field_strings",
")"
] | Helps us validate the parameters for the request
:param required: a list of strings of required options for the
api request
:param optional: a list of strings of optional, allowed options for the
api request
:param params: a dict, the key-value store which we really only care about
the key which tells us what the user is using for the
API request
:returns: None or throws an exception if the validation fails | [
"Helps",
"us",
"validate",
"the",
"parameters",
"for",
"the",
"request"
] | ff2f2d3b5e4ab2813abbce8545b27319c6af0def | https://github.com/tklovett/PyShirtsIO/blob/ff2f2d3b5e4ab2813abbce8545b27319c6af0def/ShirtsIO/helpers.py#L1-L22 | valid |
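Usage sketch for the validator above, restated inline so it runs on its own:

def validate_params(required, optional, params):
    missing = [x for x in required if x not in params]
    if missing:
        raise Exception("Missing fields: %s" % ", ".join(missing))
    disallowed = [x for x in params if x not in optional and x not in required]
    if disallowed:
        raise Exception("Disallowed fields: %s" % ", ".join(disallowed))

validate_params(['name'], ['color'], {'name': 'tee', 'color': 'red'})  # passes
try:
    validate_params(['name'], ['color'], {'name': 'tee', 'size': 'M'})
except Exception as e:
    print(e)  # Disallowed fields: size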
charlesthomas/proauth2 | proauth2/proauth2.py | Proauth2.authenticate_token | def authenticate_token( self, token ):
'''
authenticate_token checks the passed token and returns the user_id it is
associated with. it is assumed that this method won't be directly exposed to
the oauth client, but some kind of framework or wrapper. this allows the
framework to have the user_id without doing additional DB calls.
'''
token_data = self.data_store.fetch( 'tokens', token=token )
if not token_data:
raise Proauth2Error( 'access_denied',
'token does not exist or has been revoked' )
return token_data['user_id'] | python | def authenticate_token( self, token ):
'''
authenticate_token checks the passed token and returns the user_id it is
associated with. it is assumed that this method won't be directly exposed to
the oauth client, but some kind of framework or wrapper. this allows the
framework to have the user_id without doing additional DB calls.
'''
token_data = self.data_store.fetch( 'tokens', token=token )
if not token_data:
raise Proauth2Error( 'access_denied',
'token does not exist or has been revoked' )
return token_data['user_id'] | [
"def",
"authenticate_token",
"(",
"self",
",",
"token",
")",
":",
"token_data",
"=",
"self",
".",
"data_store",
".",
"fetch",
"(",
"'tokens'",
",",
"token",
"=",
"token",
")",
"if",
"not",
"token_data",
":",
"raise",
"Proauth2Error",
"(",
"'access_denied'",
",",
"'token does not exist or has been revoked'",
")",
"return",
"token_data",
"[",
"'user_id'",
"]"
] | authenticate_token checks the passed token and returns the user_id it is
associated with. it is assumed that this method won't be directly exposed to
the oauth client, but some kind of framework or wrapper. this allows the
framework to have the user_id without doing additional DB calls. | [
"authenticate_token",
"checks",
"the",
"passed",
"token",
"and",
"returns",
"the",
"user_id",
"it",
"is",
"associated",
"with",
".",
"it",
"is",
"assumed",
"that",
"this",
"method",
"won",
"t",
"be",
"directly",
"exposed",
"to",
"the",
"oauth",
"client",
"but",
"some",
"kind",
"of",
"framework",
"or",
"wrapper",
".",
"this",
"allows",
"the",
"framework",
"to",
"have",
"the",
"user_id",
"without",
"doing",
"additional",
"DB",
"calls",
"."
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/proauth2.py#L107-L118 | valid |
crazy-canux/arguspy | scripts/check_wmi_sh.py | main | def main():
"""Register your own mode and handle method here."""
plugin = Register()
if plugin.args.option == 'filenumber':
plugin.filenumber_handle()
elif plugin.args.option == 'fileage':
plugin.fileage_handle()
elif plugin.args.option == 'sqlserverlocks':
plugin.sqlserverlocks_handle()
else:
plugin.unknown("Unknown actions.") | python | def main():
"""Register your own mode and handle method here."""
plugin = Register()
if plugin.args.option == 'filenumber':
plugin.filenumber_handle()
elif plugin.args.option == 'fileage':
plugin.fileage_handle()
elif plugin.args.option == 'sqlserverlocks':
plugin.sqlserverlocks_handle()
else:
plugin.unknown("Unknown actions.") | [
"def",
"main",
"(",
")",
":",
"plugin",
"=",
"Register",
"(",
")",
"if",
"plugin",
".",
"args",
".",
"option",
"==",
"'filenumber'",
":",
"plugin",
".",
"filenumber_handle",
"(",
")",
"elif",
"plugin",
".",
"args",
".",
"option",
"==",
"'fileage'",
":",
"plugin",
".",
"fileage_handle",
"(",
")",
"elif",
"plugin",
".",
"args",
".",
"option",
"==",
"'sqlserverlocks'",
":",
"plugin",
".",
"sqlserverlocks_handle",
"(",
")",
"else",
":",
"plugin",
".",
"unknown",
"(",
"\"Unknown actions.\"",
")"
] | Register your own mode and handle method here. | [
"Register",
"your",
"own",
"mode",
"and",
"handle",
"method",
"here",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L436-L446 | valid |
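A design note on the if/elif chain above: since every option maps to an <option>_handle method, the same dispatch can be table-driven via getattr. An illustrative sketch with a stub plugin (not the arguspy code):

class Plugin:
    def fileage_handle(self):
        print('handling fileage')
    def unknown(self, msg):
        print(msg)

plugin, option = Plugin(), 'fileage'
handler = getattr(plugin, option + '_handle', None)
if handler:
    handler()
else:
    plugin.unknown('Unknown actions.')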
crazy-canux/arguspy | scripts/check_wmi_sh.py | FileNumber.filenumber_handle | def filenumber_handle(self):
"""Get the number of file in the folder."""
self.file_list = []
self.count = 0
status = self.ok
if self.args.recursion:
self.__result, self.__file_list = self.__get_folder(self.args.path)
else:
self.__result, self.__file_list = self.__get_file(self.args.path)
# Compare the value.
if self.__result > self.args.critical:
status = self.critical
elif self.__result > self.args.warning:
status = self.warning
else:
status = self.ok
# Output
self.shortoutput = "Found {0} files in {1}.".format(self.__result,
self.args.path)
self.logger.debug("file_list: {}".format(self.__file_list))
[self.longoutput.append(file_data.get('Name'))
for file_data in self.__file_list]
self.perfdata.append("{path}={result};{warn};{crit};0;".format(
crit=self.args.critical,
warn=self.args.warning,
result=self.__result,
path=self.args.path))
# Return status with message to Nagios.
status(self.output(long_output_limit=None))
self.logger.debug("Return status and exit to Nagios.") | python | def filenumber_handle(self):
"""Get the number of file in the folder."""
self.file_list = []
self.count = 0
status = self.ok
if self.args.recursion:
self.__result, self.__file_list = self.__get_folder(self.args.path)
else:
self.__result, self.__file_list = self.__get_file(self.args.path)
# Compare the value.
if self.__result > self.args.critical:
status = self.critical
elif self.__result > self.args.warning:
status = self.warning
else:
status = self.ok
# Output
self.shortoutput = "Found {0} files in {1}.".format(self.__result,
self.args.path)
self.logger.debug("file_list: {}".format(self.__file_list))
[self.longoutput.append(file_data.get('Name'))
for file_data in self.__file_list]
self.perfdata.append("{path}={result};{warn};{crit};0;".format(
crit=self.args.critical,
warn=self.args.warning,
result=self.__result,
path=self.args.path))
# Return status with message to Nagios.
status(self.output(long_output_limit=None))
self.logger.debug("Return status and exit to Nagios.") | [
"def",
"filenumber_handle",
"(",
"self",
")",
":",
"self",
".",
"file_list",
"=",
"[",
"]",
"self",
".",
"count",
"=",
"0",
"status",
"=",
"self",
".",
"ok",
"if",
"self",
".",
"args",
".",
"recursion",
":",
"self",
".",
"__result",
",",
"self",
".",
"__file_list",
"=",
"self",
".",
"__get_folder",
"(",
"self",
".",
"args",
".",
"path",
")",
"else",
":",
"self",
".",
"__result",
",",
"self",
".",
"__file_list",
"=",
"self",
".",
"__get_file",
"(",
"self",
".",
"args",
".",
"path",
")",
"# Compare the vlaue.",
"if",
"self",
".",
"__result",
">",
"self",
".",
"args",
".",
"critical",
":",
"status",
"=",
"self",
".",
"critical",
"elif",
"self",
".",
"__result",
">",
"self",
".",
"args",
".",
"warning",
":",
"status",
"=",
"self",
".",
"warning",
"else",
":",
"status",
"=",
"self",
".",
"ok",
"# Output",
"self",
".",
"shortoutput",
"=",
"\"Found {0} files in {1}.\"",
".",
"format",
"(",
"self",
".",
"__result",
",",
"self",
".",
"args",
".",
"path",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"file_list: {}\"",
".",
"format",
"(",
"self",
".",
"__file_list",
")",
")",
"[",
"self",
".",
"longoutput",
".",
"append",
"(",
"file_data",
".",
"get",
"(",
"'Name'",
")",
")",
"for",
"file_data",
"in",
"self",
".",
"__file_list",
"]",
"self",
".",
"perfdata",
".",
"append",
"(",
"\"{path}={result};{warn};{crit};0;\"",
".",
"format",
"(",
"crit",
"=",
"self",
".",
"args",
".",
"critical",
",",
"warn",
"=",
"self",
".",
"args",
".",
"warning",
",",
"result",
"=",
"self",
".",
"__result",
",",
"path",
"=",
"self",
".",
"args",
".",
"path",
")",
")",
"# Return status with message to Nagios.",
"status",
"(",
"self",
".",
"output",
"(",
"long_output_limit",
"=",
"None",
")",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Return status and exit to Nagios.\"",
")"
] | Get the number of files in the folder. | [
"Get",
"the",
"number",
"of",
"file",
"in",
"the",
"folder",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L106-L139 | valid |
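The status/perfdata logic above follows the usual Nagios plugin pattern: compare a measured value against warning and critical thresholds, then emit a 'label=value;warn;crit;min;' performance string. A standalone sketch of just that arithmetic:

result, warn, crit = 12, 10, 20
if result > crit:
    status = 'CRITICAL'
elif result > warn:
    status = 'WARNING'
else:
    status = 'OK'

perfdata = "{path}={result};{warn};{crit};0;".format(
    path=r'd:\test', result=result, warn=warn, crit=crit)
print(status, perfdata)  # WARNING d:\test=12;10;20;0;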
crazy-canux/arguspy | scripts/check_wmi_sh.py | FileAge.__get_current_datetime | def __get_current_datetime(self):
"""Get current datetime for every file."""
self.wql_time = "SELECT LocalDateTime FROM Win32_OperatingSystem"
self.current_time = self.query(self.wql_time)
# [{'LocalDateTime': '20160824161431.977000+480'}]'
self.current_time_string = str(
self.current_time[0].get('LocalDateTime').split('.')[0])
# '20160824161431'
self.current_time_format = datetime.datetime.strptime(
self.current_time_string, '%Y%m%d%H%M%S')
# param: datetime.datetime(2016, 8, 24, 16, 14, 31) -> type:
# datetime.datetime
return self.current_time_format | python | def __get_current_datetime(self):
"""Get current datetime for every file."""
self.wql_time = "SELECT LocalDateTime FROM Win32_OperatingSystem"
self.current_time = self.query(self.wql_time)
# [{'LocalDateTime': '20160824161431.977000+480'}]'
self.current_time_string = str(
self.current_time[0].get('LocalDateTime').split('.')[0])
# '20160824161431'
self.current_time_format = datetime.datetime.strptime(
self.current_time_string, '%Y%m%d%H%M%S')
# param: datetime.datetime(2016, 8, 24, 16, 14, 31) -> type:
# datetime.datetime
return self.current_time_format | [
"def",
"__get_current_datetime",
"(",
"self",
")",
":",
"self",
".",
"wql_time",
"=",
"\"SELECT LocalDateTime FROM Win32_OperatingSystem\"",
"self",
".",
"current_time",
"=",
"self",
".",
"query",
"(",
"self",
".",
"wql_time",
")",
"# [{'LocalDateTime': '20160824161431.977000+480'}]'",
"self",
".",
"current_time_string",
"=",
"str",
"(",
"self",
".",
"current_time",
"[",
"0",
"]",
".",
"get",
"(",
"'LocalDateTime'",
")",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
"# '20160824161431'",
"self",
".",
"current_time_format",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"self",
".",
"current_time_string",
",",
"'%Y%m%d%H%M%S'",
")",
"# param: datetime.datetime(2016, 8, 24, 16, 14, 31) -> type:",
"# datetime.datetime",
"return",
"self",
".",
"current_time_format"
] | Get current datetime for every file. | [
"Get",
"current",
"datetime",
"for",
"every",
"file",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L226-L238 | valid |
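Parsing sketch for the WMI LocalDateTime value handled above, using the sample from the row's own comment. Note that both the fractional seconds and the '+480' minutes-from-UTC offset are discarded, exactly as the method does:

import datetime

raw = '20160824161431.977000+480'   # WMI-style CIM datetime string
stamp = raw.split('.')[0]            # '20160824161431'
parsed = datetime.datetime.strptime(stamp, '%Y%m%d%H%M%S')
print(parsed)  # 2016-08-24 16:14:31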
crazy-canux/arguspy | scripts/check_wmi_sh.py | FileAge.fileage_handle | def fileage_handle(self):
"""Get the number of file in the folder."""
self.file_list = []
self.ok_file = []
self.warn_file = []
self.crit_file = []
status = self.ok
if self.args.recursion:
self.__file_list = self.__get_folder(self.args.path)
else:
self.__file_list = self.__get_file(self.args.path)
self.logger.debug("file_list: {}".format(self.__file_list))
# [{'LastModified': '20160824142017.737101+480', 'Name': 'd:\\test\\1.txt'},
# {'LastModified': '20160824142021.392101+480', 'Name': 'd:\\test\\2.txt'},
# {'LastModified': '20160824142106.460101+480', 'Name': 'd:\\test\\test1\\21.txt'}]
for file_dict in self.__file_list:
self.filename = file_dict.get('Name')
if self.filename and self.filename != 'Name':
self.logger.debug(
"===== start to compare {} =====".format(
self.filename))
self.file_datetime_string = file_dict.get(
'LastModified').split('.')[0]
self.file_datetime = datetime.datetime.strptime(
self.file_datetime_string, '%Y%m%d%H%M%S')
self.logger.debug(
"file_datetime: {}".format(
self.file_datetime))
self.current_datetime = self.__get_current_datetime()
self.logger.debug(
"current_datetime: {}".format(
self.current_datetime))
self.__delta_datetime = self.current_datetime - self.file_datetime
self.logger.debug(
"delta_datetime: {}".format(
self.__delta_datetime))
self.logger.debug(
"warn_datetime: {}".format(
datetime.timedelta(
minutes=self.args.warning)))
self.logger.debug(
"crit_datetime: {}".format(
datetime.timedelta(
minutes=self.args.critical)))
if self.__delta_datetime > datetime.timedelta(
minutes=self.args.critical):
self.crit_file.append(self.filename)
elif self.__delta_datetime > datetime.timedelta(minutes=self.args.warning):
self.warn_file.append(self.filename)
else:
self.ok_file.append(self.filename)
# Compare the value.
if self.crit_file:
status = self.critical
elif self.warn_file:
status = self.warning
else:
status = self.ok
# Output
self.shortoutput = "Found {0} files out of date.".format(
len(self.crit_file))
if self.crit_file:
self.longoutput.append("===== Critical File out of date ====")
[self.longoutput.append(filename)
for filename in self.crit_file if self.crit_file]
if self.warn_file:
self.longoutput.append("===== Warning File out of date ====")
[self.longoutput.append(filename)
for filename in self.warn_file if self.warn_file]
if self.ok_file:
self.longoutput.append("===== OK File out of date ====")
[self.longoutput.append(filename)
for filename in self.ok_file if self.ok_file]
self.perfdata.append("{path}={result};{warn};{crit};0;".format(
crit=self.args.critical,
warn=self.args.warning,
result=len(self.crit_file),
path=self.args.drive + self.args.path))
# Return status with message to Nagios.
status(self.output(long_output_limit=None))
self.logger.debug("Return status and exit to Nagios.") | python | def fileage_handle(self):
"""Get the number of file in the folder."""
self.file_list = []
self.ok_file = []
self.warn_file = []
self.crit_file = []
status = self.ok
if self.args.recursion:
self.__file_list = self.__get_folder(self.args.path)
else:
self.__file_list = self.__get_file(self.args.path)
self.logger.debug("file_list: {}".format(self.__file_list))
# [{'LastModified': '20160824142017.737101+480', 'Name': 'd:\\test\\1.txt'},
# {'LastModified': '20160824142021.392101+480', 'Name': 'd:\\test\\2.txt'},
# {'LastModified': '20160824142106.460101+480', 'Name': 'd:\\test\\test1\\21.txt'}]
for file_dict in self.__file_list:
self.filename = file_dict.get('Name')
if self.filename and self.filename != 'Name':
self.logger.debug(
"===== start to compare {} =====".format(
self.filename))
self.file_datetime_string = file_dict.get(
'LastModified').split('.')[0]
self.file_datetime = datetime.datetime.strptime(
self.file_datetime_string, '%Y%m%d%H%M%S')
self.logger.debug(
"file_datetime: {}".format(
self.file_datetime))
self.current_datetime = self.__get_current_datetime()
self.logger.debug(
"current_datetime: {}".format(
self.current_datetime))
self.__delta_datetime = self.current_datetime - self.file_datetime
self.logger.debug(
"delta_datetime: {}".format(
self.__delta_datetime))
self.logger.debug(
"warn_datetime: {}".format(
datetime.timedelta(
minutes=self.args.warning)))
self.logger.debug(
"crit_datetime: {}".format(
datetime.timedelta(
minutes=self.args.critical)))
if self.__delta_datetime > datetime.timedelta(
minutes=self.args.critical):
self.crit_file.append(self.filename)
elif self.__delta_datetime > datetime.timedelta(minutes=self.args.warning):
self.warn_file.append(self.filename)
else:
self.ok_file.append(self.filename)
# Compare the value.
if self.crit_file:
status = self.critical
elif self.warn_file:
status = self.warning
else:
status = self.ok
# Output
self.shortoutput = "Found {0} files out of date.".format(
len(self.crit_file))
if self.crit_file:
self.longoutput.append("===== Critical File out of date ====")
[self.longoutput.append(filename)
for filename in self.crit_file if self.crit_file]
if self.warn_file:
self.longoutput.append("===== Warning File out of date ====")
[self.longoutput.append(filename)
for filename in self.warn_file if self.warn_file]
if self.ok_file:
self.longoutput.append("===== OK File out of date ====")
[self.longoutput.append(filename)
for filename in self.ok_file if self.ok_file]
self.perfdata.append("{path}={result};{warn};{crit};0;".format(
crit=self.args.critical,
warn=self.args.warning,
result=len(self.crit_file),
path=self.args.drive + self.args.path))
# Return status with message to Nagios.
status(self.output(long_output_limit=None))
self.logger.debug("Return status and exit to Nagios.") | [
"def",
"fileage_handle",
"(",
"self",
")",
":",
"self",
".",
"file_list",
"=",
"[",
"]",
"self",
".",
"ok_file",
"=",
"[",
"]",
"self",
".",
"warn_file",
"=",
"[",
"]",
"self",
".",
"crit_file",
"=",
"[",
"]",
"status",
"=",
"self",
".",
"ok",
"if",
"self",
".",
"args",
".",
"recursion",
":",
"self",
".",
"__file_list",
"=",
"self",
".",
"__get_folder",
"(",
"self",
".",
"args",
".",
"path",
")",
"else",
":",
"self",
".",
"__file_list",
"=",
"self",
".",
"__get_file",
"(",
"self",
".",
"args",
".",
"path",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"file_list: {}\"",
".",
"format",
"(",
"self",
".",
"__file_list",
")",
")",
"# [{'LastModified': '20160824142017.737101+480', 'Name': 'd:\\\\test\\\\1.txt'},",
"# {'LastModified': '20160824142021.392101+480', 'Name': 'd:\\\\test\\\\2.txt'},",
"# {'LastModified': '20160824142106.460101+480', 'Name': 'd:\\\\test\\\\test1\\\\21.txt'}]",
"for",
"file_dict",
"in",
"self",
".",
"__file_list",
":",
"self",
".",
"filename",
"=",
"file_dict",
".",
"get",
"(",
"'Name'",
")",
"if",
"self",
".",
"filename",
"and",
"self",
".",
"filename",
"!=",
"'Name'",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"===== start to compare {} =====\"",
".",
"format",
"(",
"self",
".",
"filename",
")",
")",
"self",
".",
"file_datetime_string",
"=",
"file_dict",
".",
"get",
"(",
"'LastModified'",
")",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"self",
".",
"file_datetime",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"self",
".",
"file_datetime_string",
",",
"'%Y%m%d%H%M%S'",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"file_datetime: {}\"",
".",
"format",
"(",
"self",
".",
"file_datetime",
")",
")",
"self",
".",
"current_datetime",
"=",
"self",
".",
"__get_current_datetime",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"current_datetime: {}\"",
".",
"format",
"(",
"self",
".",
"current_datetime",
")",
")",
"self",
".",
"__delta_datetime",
"=",
"self",
".",
"current_datetime",
"-",
"self",
".",
"file_datetime",
"self",
".",
"logger",
".",
"debug",
"(",
"\"delta_datetime: {}\"",
".",
"format",
"(",
"self",
".",
"__delta_datetime",
")",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"warn_datetime: {}\"",
".",
"format",
"(",
"datetime",
".",
"timedelta",
"(",
"minutes",
"=",
"self",
".",
"args",
".",
"warning",
")",
")",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"crit_datetime: {}\"",
".",
"format",
"(",
"datetime",
".",
"timedelta",
"(",
"minutes",
"=",
"self",
".",
"args",
".",
"critical",
")",
")",
")",
"if",
"self",
".",
"__delta_datetime",
">",
"datetime",
".",
"timedelta",
"(",
"minutes",
"=",
"self",
".",
"args",
".",
"critical",
")",
":",
"self",
".",
"crit_file",
".",
"append",
"(",
"self",
".",
"filename",
")",
"elif",
"self",
".",
"__delta_datetime",
">",
"datetime",
".",
"timedelta",
"(",
"minutes",
"=",
"self",
".",
"args",
".",
"warning",
")",
":",
"self",
".",
"warn_file",
".",
"append",
"(",
"self",
".",
"filename",
")",
"else",
":",
"self",
".",
"ok_file",
".",
"append",
"(",
"self",
".",
"filename",
")",
"# Compare the vlaue.",
"if",
"self",
".",
"crit_file",
":",
"status",
"=",
"self",
".",
"critical",
"elif",
"self",
".",
"warn_file",
":",
"status",
"=",
"self",
".",
"warning",
"else",
":",
"status",
"=",
"self",
".",
"ok",
"# Output",
"self",
".",
"shortoutput",
"=",
"\"Found {0} files out of date.\"",
".",
"format",
"(",
"len",
"(",
"self",
".",
"crit_file",
")",
")",
"if",
"self",
".",
"crit_file",
":",
"self",
".",
"longoutput",
".",
"append",
"(",
"\"===== Critical File out of date ====\"",
")",
"[",
"self",
".",
"longoutput",
".",
"append",
"(",
"filename",
")",
"for",
"filename",
"in",
"self",
".",
"crit_file",
"if",
"self",
".",
"crit_file",
"]",
"if",
"self",
".",
"warn_file",
":",
"self",
".",
"longoutput",
".",
"append",
"(",
"\"===== Warning File out of date ====\"",
")",
"[",
"self",
".",
"longoutput",
".",
"append",
"(",
"filename",
")",
"for",
"filename",
"in",
"self",
".",
"warn_file",
"if",
"self",
".",
"warn_file",
"]",
"if",
"self",
".",
"ok_file",
":",
"self",
".",
"longoutput",
".",
"append",
"(",
"\"===== OK File out of date ====\"",
")",
"[",
"self",
".",
"longoutput",
".",
"append",
"(",
"filename",
")",
"for",
"filename",
"in",
"self",
".",
"ok_file",
"if",
"self",
".",
"ok_file",
"]",
"self",
".",
"perfdata",
".",
"append",
"(",
"\"{path}={result};{warn};{crit};0;\"",
".",
"format",
"(",
"crit",
"=",
"self",
".",
"args",
".",
"critical",
",",
"warn",
"=",
"self",
".",
"args",
".",
"warning",
",",
"result",
"=",
"len",
"(",
"self",
".",
"crit_file",
")",
",",
"path",
"=",
"self",
".",
"args",
".",
"drive",
"+",
"self",
".",
"args",
".",
"path",
")",
")",
"# Return status with message to Nagios.",
"status",
"(",
"self",
".",
"output",
"(",
"long_output_limit",
"=",
"None",
")",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Return status and exit to Nagios.\"",
")"
] | Check the age of files in the folder. | [
"Get",
"the",
"number",
"of",
"file",
"in",
"the",
"folder",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L240-L328 | valid |
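The core of the age check above is timedelta arithmetic: file age is the difference between the host's current time and the file's LastModified time, compared against thresholds given in minutes. Standalone sketch:

import datetime

file_time = datetime.datetime(2016, 8, 24, 14, 20, 17)
now = datetime.datetime(2016, 8, 24, 16, 14, 31)
age = now - file_time  # 1:54:14
warn, crit = 60, 120   # minutes

if age > datetime.timedelta(minutes=crit):
    print('critical')
elif age > datetime.timedelta(minutes=warn):
    print('warning')   # fires here: ~114 minutes is between 60 and 120
else:
    print('ok')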
ecmadao/threads-creator | threads_creator/threads/branch_thread.py | BranchThread.run | def run(self):
"""run your main spider here
as for branch spider result data, you can return everything or do whatever with it
in your own code
:return: None
"""
config = config_creator()
debug = config.debug
branch_thread_sleep = config.branch_thread_sleep
while 1:
url = self.branch_queue.get()
if debug:
print('branch thread-{} start'.format(url))
branch_spider = self.branch_spider(url)
sleep(random.randrange(*branch_thread_sleep))
branch_spider.request_page()
if debug:
print('branch thread-{} end'.format(url))
self.branch_queue.task_done() | python | def run(self):
"""run your main spider here
as for branch spider result data, you can return everything or do whatever with it
in your own code
:return: None
"""
config = config_creator()
debug = config.debug
branch_thread_sleep = config.branch_thread_sleep
while 1:
url = self.branch_queue.get()
if debug:
print('branch thread-{} start'.format(url))
branch_spider = self.branch_spider(url)
sleep(random.randrange(*branch_thread_sleep))
branch_spider.request_page()
if debug:
print('branch thread-{} end'.format(url))
self.branch_queue.task_done() | [
"def",
"run",
"(",
"self",
")",
":",
"config",
"=",
"config_creator",
"(",
")",
"debug",
"=",
"config",
".",
"debug",
"branch_thread_sleep",
"=",
"config",
".",
"branch_thread_sleep",
"while",
"1",
":",
"url",
"=",
"self",
".",
"branch_queue",
".",
"get",
"(",
")",
"if",
"debug",
":",
"print",
"(",
"'branch thread-{} start'",
".",
"format",
"(",
"url",
")",
")",
"branch_spider",
"=",
"self",
".",
"branch_spider",
"(",
"url",
")",
"sleep",
"(",
"random",
".",
"randrange",
"(",
"*",
"branch_thread_sleep",
")",
")",
"branch_spider",
".",
"request_page",
"(",
")",
"if",
"debug",
":",
"print",
"(",
"'branch thread-{} end'",
".",
"format",
"(",
"url",
")",
")",
"self",
".",
"branch_queue",
".",
"task_done",
"(",
")"
] | run your branch spider here
as for branch spider result data, you can return everything or do whatever with it
in your own code
:return: None | [
"run",
"your",
"main",
"spider",
"here",
"as",
"for",
"branch",
"spider",
"result",
"data",
"you",
"can",
"return",
"everything",
"or",
"do",
"whatever",
"with",
"it",
"in",
"your",
"own",
"code"
] | f081091425d4382e5e9776c395c20e1af2332657 | https://github.com/ecmadao/threads-creator/blob/f081091425d4382e5e9776c395c20e1af2332657/threads_creator/threads/branch_thread.py#L20-L39 | valid |
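A self-contained sketch of the daemon-worker pattern the thread above participates in: workers loop forever on Queue.get(), call task_done() after each item, and the producer blocks on join() until everything queued has been processed.

import queue
import threading

q = queue.Queue()

def worker():
    while 1:
        url = q.get()
        print('processing', url)
        q.task_done()

t = threading.Thread(target=worker)
t.daemon = True   # lets the program exit even though the loop never returns
t.start()

for u in ('http://a', 'http://b'):
    q.put(u)
q.join()  # blocks until every put() has a matching task_done()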
nyaruka/python-librato-bg | setup.py | get_version | def get_version(relpath):
"""Read version info from a file without importing it"""
from os.path import dirname, join
if '__file__' not in globals():
# Allow to use function interactively
root = '.'
else:
root = dirname(__file__)
# The code below reads text file with unknown encoding in
# a Python2/3 compatible way. Reading this text file
# without specifying encoding will fail in Python 3 on some
# systems (see http://goo.gl/5XmOH). Specifying encoding as
# open() parameter is incompatible with Python 2
# cp437 is the encoding without missing points, safe against:
# UnicodeDecodeError: 'charmap' codec can't decode byte...
for line in open(join(root, relpath), 'rb'):
line = line.decode('cp437')
if '__version__' in line:
if '"' in line:
# __version__ = "0.9"
return line.split('"')[1]
elif "'" in line:
return line.split("'")[1] | python | def get_version(relpath):
"""Read version info from a file without importing it"""
from os.path import dirname, join
if '__file__' not in globals():
# Allow to use function interactively
root = '.'
else:
root = dirname(__file__)
# The code below reads text file with unknown encoding in
# a Python2/3 compatible way. Reading this text file
# without specifying encoding will fail in Python 3 on some
# systems (see http://goo.gl/5XmOH). Specifying encoding as
# open() parameter is incompatible with Python 2
# cp437 is the encoding without missing points, safe against:
# UnicodeDecodeError: 'charmap' codec can't decode byte...
for line in open(join(root, relpath), 'rb'):
line = line.decode('cp437')
if '__version__' in line:
if '"' in line:
# __version__ = "0.9"
return line.split('"')[1]
elif "'" in line:
return line.split("'")[1] | [
"def",
"get_version",
"(",
"relpath",
")",
":",
"from",
"os",
".",
"path",
"import",
"dirname",
",",
"join",
"if",
"'__file__'",
"not",
"in",
"globals",
"(",
")",
":",
"# Allow to use function interactively",
"root",
"=",
"'.'",
"else",
":",
"root",
"=",
"dirname",
"(",
"__file__",
")",
"# The code below reads text file with unknown encoding in",
"# in Python2/3 compatible way. Reading this text file",
"# without specifying encoding will fail in Python 3 on some",
"# systems (see http://goo.gl/5XmOH). Specifying encoding as",
"# open() parameter is incompatible with Python 2",
"# cp437 is the encoding without missing points, safe against:",
"# UnicodeDecodeError: 'charmap' codec can't decode byte...",
"for",
"line",
"in",
"open",
"(",
"join",
"(",
"root",
",",
"relpath",
")",
",",
"'rb'",
")",
":",
"line",
"=",
"line",
".",
"decode",
"(",
"'cp437'",
")",
"if",
"'__version__'",
"in",
"line",
":",
"if",
"'\"'",
"in",
"line",
":",
"# __version__ = \"0.9\"",
"return",
"line",
".",
"split",
"(",
"'\"'",
")",
"[",
"1",
"]",
"elif",
"\"'\"",
"in",
"line",
":",
"return",
"line",
".",
"split",
"(",
"\"'\"",
")",
"[",
"1",
"]"
] | Read version info from a file without importing it | [
"Read",
"version",
"info",
"from",
"a",
"file",
"without",
"importing",
"it"
] | e541092838694de31d256becea8391a9cfe086c7 | https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/setup.py#L32-L58 | valid |
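A minimal restatement of the line-parsing core so it can be tried standalone; the full function additionally walks the file in binary mode and decodes as cp437 for the Python 2/3 reasons its comments describe:

def parse_version_line(line):
    if '__version__' in line:
        if '"' in line:
            return line.split('"')[1]     # __version__ = "0.9"
        elif "'" in line:
            return line.split("'")[1]     # __version__ = '0.9'

print(parse_version_line('__version__ = "0.9"'))    # 0.9
print(parse_version_line("__version__ = '1.2.3'"))  # 1.2.3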
ibelie/typy | typy/google/protobuf/descriptor.py | MakeDescriptor | def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
syntax=None):
"""Make a protobuf Descriptor given a DescriptorProto protobuf.
Handles nested descriptors. Note that this is limited to the scope of defining
a message inside of another message. Composite fields can currently only be
resolved if the message is defined in the same scope as the field.
Args:
desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
package: Optional package name for the new message Descriptor (string).
build_file_if_cpp: Update the C++ descriptor pool if api matches.
Set to False on recursion, so no duplicates are created.
syntax: The syntax/semantics that should be used. Set to "proto3" to get
proto3 field presence semantics.
Returns:
A Descriptor for protobuf messages.
"""
if api_implementation.Type() == 'cpp' and build_file_if_cpp:
# The C++ implementation requires all descriptors to be backed by the same
# definition in the C++ descriptor pool. To do this, we build a
# FileDescriptorProto with the same definition as this descriptor and build
# it into the pool.
from typy.google.protobuf import descriptor_pb2
file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
# Generate a random name for this proto file to prevent conflicts with any
# imported ones. We need to specify a file name so the descriptor pool
# accepts our FileDescriptorProto, but it is not important what that file
# name is actually set to.
proto_name = str(uuid.uuid4())
if package:
file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
proto_name + '.proto')
file_descriptor_proto.package = package
else:
file_descriptor_proto.name = proto_name + '.proto'
_message.default_pool.Add(file_descriptor_proto)
result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
if _USE_C_DESCRIPTORS:
return result.message_types_by_name[desc_proto.name]
full_message_name = [desc_proto.name]
if package: full_message_name.insert(0, package)
# Create Descriptors for enum types
enum_types = {}
for enum_proto in desc_proto.enum_type:
full_name = '.'.join(full_message_name + [enum_proto.name])
enum_desc = EnumDescriptor(
enum_proto.name, full_name, None, [
EnumValueDescriptor(enum_val.name, ii, enum_val.number)
for ii, enum_val in enumerate(enum_proto.value)])
enum_types[full_name] = enum_desc
# Create Descriptors for nested types
nested_types = {}
for nested_proto in desc_proto.nested_type:
full_name = '.'.join(full_message_name + [nested_proto.name])
# Nested types are just those defined inside of the message, not all types
# used by fields in the message, so no loops are possible here.
nested_desc = MakeDescriptor(nested_proto,
package='.'.join(full_message_name),
build_file_if_cpp=False,
syntax=syntax)
nested_types[full_name] = nested_desc
fields = []
for field_proto in desc_proto.field:
full_name = '.'.join(full_message_name + [field_proto.name])
enum_desc = None
nested_desc = None
if field_proto.HasField('type_name'):
type_name = field_proto.type_name
full_type_name = '.'.join(full_message_name +
[type_name[type_name.rfind('.')+1:]])
if full_type_name in nested_types:
nested_desc = nested_types[full_type_name]
elif full_type_name in enum_types:
enum_desc = enum_types[full_type_name]
# Else type_name references a non-local type, which isn't implemented
field = FieldDescriptor(
field_proto.name, full_name, field_proto.number - 1,
field_proto.number, field_proto.type,
FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
field_proto.label, None, nested_desc, enum_desc, None, False, None,
options=field_proto.options, has_default_value=False)
fields.append(field)
desc_name = '.'.join(full_message_name)
return Descriptor(desc_proto.name, desc_name, None, None, fields,
list(nested_types.values()), list(enum_types.values()), [],
options=desc_proto.options) | python | def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
syntax=None):
"""Make a protobuf Descriptor given a DescriptorProto protobuf.
Handles nested descriptors. Note that this is limited to the scope of defining
a message inside of another message. Composite fields can currently only be
resolved if the message is defined in the same scope as the field.
Args:
desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
package: Optional package name for the new message Descriptor (string).
build_file_if_cpp: Update the C++ descriptor pool if api matches.
Set to False on recursion, so no duplicates are created.
syntax: The syntax/semantics that should be used. Set to "proto3" to get
proto3 field presence semantics.
Returns:
A Descriptor for protobuf messages.
"""
if api_implementation.Type() == 'cpp' and build_file_if_cpp:
# The C++ implementation requires all descriptors to be backed by the same
# definition in the C++ descriptor pool. To do this, we build a
# FileDescriptorProto with the same definition as this descriptor and build
# it into the pool.
from typy.google.protobuf import descriptor_pb2
file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
# Generate a random name for this proto file to prevent conflicts with any
# imported ones. We need to specify a file name so the descriptor pool
# accepts our FileDescriptorProto, but it is not important what that file
# name is actually set to.
proto_name = str(uuid.uuid4())
if package:
file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
proto_name + '.proto')
file_descriptor_proto.package = package
else:
file_descriptor_proto.name = proto_name + '.proto'
_message.default_pool.Add(file_descriptor_proto)
result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
if _USE_C_DESCRIPTORS:
return result.message_types_by_name[desc_proto.name]
full_message_name = [desc_proto.name]
if package: full_message_name.insert(0, package)
# Create Descriptors for enum types
enum_types = {}
for enum_proto in desc_proto.enum_type:
full_name = '.'.join(full_message_name + [enum_proto.name])
enum_desc = EnumDescriptor(
enum_proto.name, full_name, None, [
EnumValueDescriptor(enum_val.name, ii, enum_val.number)
for ii, enum_val in enumerate(enum_proto.value)])
enum_types[full_name] = enum_desc
# Create Descriptors for nested types
nested_types = {}
for nested_proto in desc_proto.nested_type:
full_name = '.'.join(full_message_name + [nested_proto.name])
# Nested types are just those defined inside of the message, not all types
# used by fields in the message, so no loops are possible here.
nested_desc = MakeDescriptor(nested_proto,
package='.'.join(full_message_name),
build_file_if_cpp=False,
syntax=syntax)
nested_types[full_name] = nested_desc
fields = []
for field_proto in desc_proto.field:
full_name = '.'.join(full_message_name + [field_proto.name])
enum_desc = None
nested_desc = None
if field_proto.HasField('type_name'):
type_name = field_proto.type_name
full_type_name = '.'.join(full_message_name +
[type_name[type_name.rfind('.')+1:]])
if full_type_name in nested_types:
nested_desc = nested_types[full_type_name]
elif full_type_name in enum_types:
enum_desc = enum_types[full_type_name]
# Else type_name references a non-local type, which isn't implemented
field = FieldDescriptor(
field_proto.name, full_name, field_proto.number - 1,
field_proto.number, field_proto.type,
FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
field_proto.label, None, nested_desc, enum_desc, None, False, None,
options=field_proto.options, has_default_value=False)
fields.append(field)
desc_name = '.'.join(full_message_name)
return Descriptor(desc_proto.name, desc_name, None, None, fields,
list(nested_types.values()), list(enum_types.values()), [],
options=desc_proto.options) | [
"def",
"MakeDescriptor",
"(",
"desc_proto",
",",
"package",
"=",
"''",
",",
"build_file_if_cpp",
"=",
"True",
",",
"syntax",
"=",
"None",
")",
":",
"if",
"api_implementation",
".",
"Type",
"(",
")",
"==",
"'cpp'",
"and",
"build_file_if_cpp",
":",
"# The C++ implementation requires all descriptors to be backed by the same",
"# definition in the C++ descriptor pool. To do this, we build a",
"# FileDescriptorProto with the same definition as this descriptor and build",
"# it into the pool.",
"from",
"typy",
".",
"google",
".",
"protobuf",
"import",
"descriptor_pb2",
"file_descriptor_proto",
"=",
"descriptor_pb2",
".",
"FileDescriptorProto",
"(",
")",
"file_descriptor_proto",
".",
"message_type",
".",
"add",
"(",
")",
".",
"MergeFrom",
"(",
"desc_proto",
")",
"# Generate a random name for this proto file to prevent conflicts with any",
"# imported ones. We need to specify a file name so the descriptor pool",
"# accepts our FileDescriptorProto, but it is not important what that file",
"# name is actually set to.",
"proto_name",
"=",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
"if",
"package",
":",
"file_descriptor_proto",
".",
"name",
"=",
"os",
".",
"path",
".",
"join",
"(",
"package",
".",
"replace",
"(",
"'.'",
",",
"'/'",
")",
",",
"proto_name",
"+",
"'.proto'",
")",
"file_descriptor_proto",
".",
"package",
"=",
"package",
"else",
":",
"file_descriptor_proto",
".",
"name",
"=",
"proto_name",
"+",
"'.proto'",
"_message",
".",
"default_pool",
".",
"Add",
"(",
"file_descriptor_proto",
")",
"result",
"=",
"_message",
".",
"default_pool",
".",
"FindFileByName",
"(",
"file_descriptor_proto",
".",
"name",
")",
"if",
"_USE_C_DESCRIPTORS",
":",
"return",
"result",
".",
"message_types_by_name",
"[",
"desc_proto",
".",
"name",
"]",
"full_message_name",
"=",
"[",
"desc_proto",
".",
"name",
"]",
"if",
"package",
":",
"full_message_name",
".",
"insert",
"(",
"0",
",",
"package",
")",
"# Create Descriptors for enum types",
"enum_types",
"=",
"{",
"}",
"for",
"enum_proto",
"in",
"desc_proto",
".",
"enum_type",
":",
"full_name",
"=",
"'.'",
".",
"join",
"(",
"full_message_name",
"+",
"[",
"enum_proto",
".",
"name",
"]",
")",
"enum_desc",
"=",
"EnumDescriptor",
"(",
"enum_proto",
".",
"name",
",",
"full_name",
",",
"None",
",",
"[",
"EnumValueDescriptor",
"(",
"enum_val",
".",
"name",
",",
"ii",
",",
"enum_val",
".",
"number",
")",
"for",
"ii",
",",
"enum_val",
"in",
"enumerate",
"(",
"enum_proto",
".",
"value",
")",
"]",
")",
"enum_types",
"[",
"full_name",
"]",
"=",
"enum_desc",
"# Create Descriptors for nested types",
"nested_types",
"=",
"{",
"}",
"for",
"nested_proto",
"in",
"desc_proto",
".",
"nested_type",
":",
"full_name",
"=",
"'.'",
".",
"join",
"(",
"full_message_name",
"+",
"[",
"nested_proto",
".",
"name",
"]",
")",
"# Nested types are just those defined inside of the message, not all types",
"# used by fields in the message, so no loops are possible here.",
"nested_desc",
"=",
"MakeDescriptor",
"(",
"nested_proto",
",",
"package",
"=",
"'.'",
".",
"join",
"(",
"full_message_name",
")",
",",
"build_file_if_cpp",
"=",
"False",
",",
"syntax",
"=",
"syntax",
")",
"nested_types",
"[",
"full_name",
"]",
"=",
"nested_desc",
"fields",
"=",
"[",
"]",
"for",
"field_proto",
"in",
"desc_proto",
".",
"field",
":",
"full_name",
"=",
"'.'",
".",
"join",
"(",
"full_message_name",
"+",
"[",
"field_proto",
".",
"name",
"]",
")",
"enum_desc",
"=",
"None",
"nested_desc",
"=",
"None",
"if",
"field_proto",
".",
"HasField",
"(",
"'type_name'",
")",
":",
"type_name",
"=",
"field_proto",
".",
"type_name",
"full_type_name",
"=",
"'.'",
".",
"join",
"(",
"full_message_name",
"+",
"[",
"type_name",
"[",
"type_name",
".",
"rfind",
"(",
"'.'",
")",
"+",
"1",
":",
"]",
"]",
")",
"if",
"full_type_name",
"in",
"nested_types",
":",
"nested_desc",
"=",
"nested_types",
"[",
"full_type_name",
"]",
"elif",
"full_type_name",
"in",
"enum_types",
":",
"enum_desc",
"=",
"enum_types",
"[",
"full_type_name",
"]",
"# Else type_name references a non-local type, which isn't implemented",
"field",
"=",
"FieldDescriptor",
"(",
"field_proto",
".",
"name",
",",
"full_name",
",",
"field_proto",
".",
"number",
"-",
"1",
",",
"field_proto",
".",
"number",
",",
"field_proto",
".",
"type",
",",
"FieldDescriptor",
".",
"ProtoTypeToCppProtoType",
"(",
"field_proto",
".",
"type",
")",
",",
"field_proto",
".",
"label",
",",
"None",
",",
"nested_desc",
",",
"enum_desc",
",",
"None",
",",
"False",
",",
"None",
",",
"options",
"=",
"field_proto",
".",
"options",
",",
"has_default_value",
"=",
"False",
")",
"fields",
".",
"append",
"(",
"field",
")",
"desc_name",
"=",
"'.'",
".",
"join",
"(",
"full_message_name",
")",
"return",
"Descriptor",
"(",
"desc_proto",
".",
"name",
",",
"desc_name",
",",
"None",
",",
"None",
",",
"fields",
",",
"list",
"(",
"nested_types",
".",
"values",
"(",
")",
")",
",",
"list",
"(",
"enum_types",
".",
"values",
"(",
")",
")",
",",
"[",
"]",
",",
"options",
"=",
"desc_proto",
".",
"options",
")"
] | Make a protobuf Descriptor given a DescriptorProto protobuf.
Handles nested descriptors. Note that this is limited to the scope of defining
a message inside of another message. Composite fields can currently only be
resolved if the message is defined in the same scope as the field.
Args:
desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
package: Optional package name for the new message Descriptor (string).
build_file_if_cpp: Update the C++ descriptor pool if api matches.
Set to False on recursion, so no duplicates are created.
syntax: The syntax/semantics that should be used. Set to "proto3" to get
proto3 field presence semantics.
Returns:
A Descriptor for protobuf messages. | [
"Make",
"a",
"protobuf",
"Descriptor",
"given",
"a",
"DescriptorProto",
"protobuf",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor.py#L875-L971 | valid |
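Hedged usage sketch for MakeDescriptor(): the message and field names are invented, and the vendored import path mirrors the row above; the enum constants come from descriptor.proto itself.
from typy.google.protobuf import descriptor_pb2

desc_proto = descriptor_pb2.DescriptorProto()
desc_proto.name = 'Point'                      # hypothetical message name
field = desc_proto.field.add()
field.name = 'x'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

point_desc = MakeDescriptor(desc_proto, package='demo')
print(point_desc.full_name)                    # -> demo.Point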
ibelie/typy | typy/google/protobuf/descriptor.py | _NestedDescriptorBase.GetTopLevelContainingType | def GetTopLevelContainingType(self):
"""Returns the root if this is a nested type, or itself if its the root."""
desc = self
while desc.containing_type is not None:
desc = desc.containing_type
return desc | python | def GetTopLevelContainingType(self):
"""Returns the root if this is a nested type, or itself if its the root."""
desc = self
while desc.containing_type is not None:
desc = desc.containing_type
return desc | [
"def",
"GetTopLevelContainingType",
"(",
"self",
")",
":",
"desc",
"=",
"self",
"while",
"desc",
".",
"containing_type",
"is",
"not",
"None",
":",
"desc",
"=",
"desc",
".",
"containing_type",
"return",
"desc"
] | Returns the root if this is a nested type, or itself if its the root. | [
"Returns",
"the",
"root",
"if",
"this",
"is",
"a",
"nested",
"type",
"or",
"itself",
"if",
"its",
"the",
"root",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor.py#L174-L179 | valid |
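Runnable sketch for GetTopLevelContainingType() using a nested message that ships with protobuf itself (ExtensionRange is declared inside DescriptorProto); the vendored import path is an assumption.
from typy.google.protobuf import descriptor_pb2

nested = descriptor_pb2.DescriptorProto.ExtensionRange.DESCRIPTOR
print(nested.GetTopLevelContainingType().name)   # -> DescriptorProto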
ibelie/typy | typy/google/protobuf/descriptor.py | ServiceDescriptor.FindMethodByName | def FindMethodByName(self, name):
"""Searches for the specified method, and returns its descriptor."""
for method in self.methods:
if name == method.name:
return method
return None | python | def FindMethodByName(self, name):
"""Searches for the specified method, and returns its descriptor."""
for method in self.methods:
if name == method.name:
return method
return None | [
"def",
"FindMethodByName",
"(",
"self",
",",
"name",
")",
":",
"for",
"method",
"in",
"self",
".",
"methods",
":",
"if",
"name",
"==",
"method",
".",
"name",
":",
"return",
"method",
"return",
"None"
] | Searches for the specified method, and returns its descriptor. | [
"Searches",
"for",
"the",
"specified",
"method",
"and",
"returns",
"its",
"descriptor",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor.py#L725-L730 | valid |
crazy-canux/arguspy | scripts/check_winrm.py | main | def main():
"""Register your own mode and handle method here."""
plugin = Register()
if plugin.args.option == 'sqlserverlocks':
plugin.sqlserverlocks_handle()
else:
plugin.unknown("Unknown actions.") | python | def main():
"""Register your own mode and handle method here."""
plugin = Register()
if plugin.args.option == 'sqlserverlocks':
plugin.sqlserverlocks_handle()
else:
plugin.unknown("Unknown actions.") | [
"def",
"main",
"(",
")",
":",
"plugin",
"=",
"Register",
"(",
")",
"if",
"plugin",
".",
"args",
".",
"option",
"==",
"'sqlserverlocks'",
":",
"plugin",
".",
"sqlserverlocks_handle",
"(",
")",
"else",
":",
"plugin",
".",
"unknown",
"(",
"\"Unknown actions.\"",
")"
] | Register your own mode and handle method here. | [
"Register",
"your",
"own",
"mode",
"and",
"handle",
"method",
"here",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_winrm.py#L115-L121 | valid |
ibelie/typy | typy/google/protobuf/json_format.py | MessageToJson | def MessageToJson(message, including_default_value_fields=False):
"""Converts protobuf message to JSON format.
Args:
message: The protocol buffers message instance to serialize.
including_default_value_fields: If True, singular primitive fields,
repeated fields, and map fields will always be serialized. If
False, only serialize non-empty fields. Singular message fields
and oneof fields are not affected by this option.
Returns:
A string containing the JSON formatted protocol buffer message.
"""
js = _MessageToJsonObject(message, including_default_value_fields)
return json.dumps(js, indent=2) | python | def MessageToJson(message, including_default_value_fields=False):
"""Converts protobuf message to JSON format.
Args:
message: The protocol buffers message instance to serialize.
including_default_value_fields: If True, singular primitive fields,
repeated fields, and map fields will always be serialized. If
False, only serialize non-empty fields. Singular message fields
and oneof fields are not affected by this option.
Returns:
A string containing the JSON formatted protocol buffer message.
"""
js = _MessageToJsonObject(message, including_default_value_fields)
return json.dumps(js, indent=2) | [
"def",
"MessageToJson",
"(",
"message",
",",
"including_default_value_fields",
"=",
"False",
")",
":",
"js",
"=",
"_MessageToJsonObject",
"(",
"message",
",",
"including_default_value_fields",
")",
"return",
"json",
".",
"dumps",
"(",
"js",
",",
"indent",
"=",
"2",
")"
] | Converts protobuf message to JSON format.
Args:
message: The protocol buffers message instance to serialize.
including_default_value_fields: If True, singular primitive fields,
repeated fields, and map fields will always be serialized. If
False, only serialize non-empty fields. Singular message fields
and oneof fields are not affected by this option.
Returns:
A string containing the JSON formatted protocol buffer message. | [
"Converts",
"protobuf",
"message",
"to",
"JSON",
"format",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L80-L94 | valid |
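Usage sketch for MessageToJson() with the Struct well-known type, so no .proto compilation is needed; the vendored struct_pb2 path is an assumption.
from typy.google.protobuf import struct_pb2

s = struct_pb2.Struct()
s.fields['name'].string_value = 'demo'   # map<string, Value> auto-inserts entries
print(MessageToJson(s))                  # pretty-printed JSON object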
ibelie/typy | typy/google/protobuf/json_format.py | _MessageToJsonObject | def _MessageToJsonObject(message, including_default_value_fields):
"""Converts message to an object according to Proto3 JSON Specification."""
message_descriptor = message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
return _WrapperMessageToJsonObject(message)
if full_name in _WKTJSONMETHODS:
return _WKTJSONMETHODS[full_name][0](
message, including_default_value_fields)
js = {}
return _RegularMessageToJsonObject(
message, js, including_default_value_fields) | python | def _MessageToJsonObject(message, including_default_value_fields):
"""Converts message to an object according to Proto3 JSON Specification."""
message_descriptor = message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
return _WrapperMessageToJsonObject(message)
if full_name in _WKTJSONMETHODS:
return _WKTJSONMETHODS[full_name][0](
message, including_default_value_fields)
js = {}
return _RegularMessageToJsonObject(
message, js, including_default_value_fields) | [
"def",
"_MessageToJsonObject",
"(",
"message",
",",
"including_default_value_fields",
")",
":",
"message_descriptor",
"=",
"message",
".",
"DESCRIPTOR",
"full_name",
"=",
"message_descriptor",
".",
"full_name",
"if",
"_IsWrapperMessage",
"(",
"message_descriptor",
")",
":",
"return",
"_WrapperMessageToJsonObject",
"(",
"message",
")",
"if",
"full_name",
"in",
"_WKTJSONMETHODS",
":",
"return",
"_WKTJSONMETHODS",
"[",
"full_name",
"]",
"[",
"0",
"]",
"(",
"message",
",",
"including_default_value_fields",
")",
"js",
"=",
"{",
"}",
"return",
"_RegularMessageToJsonObject",
"(",
"message",
",",
"js",
",",
"including_default_value_fields",
")"
] | Converts message to an object according to Proto3 JSON Specification. | [
"Converts",
"message",
"to",
"an",
"object",
"according",
"to",
"Proto3",
"JSON",
"Specification",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L97-L108 | valid |
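Sketch of the wrapper special case handled above: a wrappers.proto message collapses to its bare value instead of a JSON object (the wrappers_pb2 path is assumed).
from typy.google.protobuf import wrappers_pb2

w = wrappers_pb2.Int32Value(value=7)
print(_MessageToJsonObject(w, False))   # -> 7, not {"value": 7}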
ibelie/typy | typy/google/protobuf/json_format.py | _StructMessageToJsonObject | def _StructMessageToJsonObject(message, unused_including_default=False):
"""Converts Struct message according to Proto3 JSON Specification."""
fields = message.fields
ret = {}
for key in fields:
ret[key] = _ValueMessageToJsonObject(fields[key])
return ret | python | def _StructMessageToJsonObject(message, unused_including_default=False):
"""Converts Struct message according to Proto3 JSON Specification."""
fields = message.fields
ret = {}
for key in fields:
ret[key] = _ValueMessageToJsonObject(fields[key])
return ret | [
"def",
"_StructMessageToJsonObject",
"(",
"message",
",",
"unused_including_default",
"=",
"False",
")",
":",
"fields",
"=",
"message",
".",
"fields",
"ret",
"=",
"{",
"}",
"for",
"key",
"in",
"fields",
":",
"ret",
"[",
"key",
"]",
"=",
"_ValueMessageToJsonObject",
"(",
"fields",
"[",
"key",
"]",
")",
"return",
"ret"
] | Converts Struct message according to Proto3 JSON Specification. | [
"Converts",
"Struct",
"message",
"according",
"to",
"Proto3",
"JSON",
"Specification",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L271-L277 | valid |
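Sketch of the Struct-to-dict conversion this private helper performs; the public entry point is MessageToJson(), so calling the helper directly is purely illustrative (struct_pb2 path assumed).
from typy.google.protobuf import struct_pb2

s = struct_pb2.Struct()
s.fields['ok'].bool_value = True
s.fields['count'].number_value = 3
print(_StructMessageToJsonObject(s))   # -> {'ok': True, 'count': 3.0}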
ibelie/typy | typy/google/protobuf/json_format.py | Parse | def Parse(text, message):
"""Parses a JSON representation of a protocol message into a message.
Args:
text: Message JSON representation.
message: A protocol buffer message to merge into.
Returns:
The same message passed as argument.
Raises:
ParseError: On JSON parsing problems.
"""
if not isinstance(text, six.text_type): text = text.decode('utf-8')
try:
if sys.version_info < (2, 7):
# object_pair_hook is not supported before python2.7
js = json.loads(text)
else:
js = json.loads(text, object_pairs_hook=_DuplicateChecker)
except ValueError as e:
raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
_ConvertMessage(js, message)
return message | python | def Parse(text, message):
"""Parses a JSON representation of a protocol message into a message.
Args:
text: Message JSON representation.
message: A protocol buffer message to merge into.
Returns:
The same message passed as argument.
Raises:
ParseError: On JSON parsing problems.
"""
if not isinstance(text, six.text_type): text = text.decode('utf-8')
try:
if sys.version_info < (2, 7):
# object_pair_hook is not supported before python2.7
js = json.loads(text)
else:
js = json.loads(text, object_pairs_hook=_DuplicateChecker)
except ValueError as e:
raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
_ConvertMessage(js, message)
return message | [
"def",
"Parse",
"(",
"text",
",",
"message",
")",
":",
"if",
"not",
"isinstance",
"(",
"text",
",",
"six",
".",
"text_type",
")",
":",
"text",
"=",
"text",
".",
"decode",
"(",
"'utf-8'",
")",
"try",
":",
"if",
"sys",
".",
"version_info",
"<",
"(",
"2",
",",
"7",
")",
":",
"# object_pair_hook is not supported before python2.7",
"js",
"=",
"json",
".",
"loads",
"(",
"text",
")",
"else",
":",
"js",
"=",
"json",
".",
"loads",
"(",
"text",
",",
"object_pairs_hook",
"=",
"_DuplicateChecker",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"ParseError",
"(",
"'Failed to load JSON: {0}.'",
".",
"format",
"(",
"str",
"(",
"e",
")",
")",
")",
"_ConvertMessage",
"(",
"js",
",",
"message",
")",
"return",
"message"
] | Parses a JSON representation of a protocol message into a message.
Args:
text: Message JSON representation.
message: A protocol buffer message to merge into.
Returns:
The same message passed as argument.
Raises::
ParseError: On JSON parsing problems. | [
"Parses",
"a",
"JSON",
"representation",
"of",
"a",
"protocol",
"message",
"into",
"a",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L298-L321 | valid |
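Round-trip sketch for Parse(): Struct is a well-known type, so parsing dispatches to the struct handler; the field values are illustrative and the struct_pb2 path is assumed.
from typy.google.protobuf import struct_pb2

msg = struct_pb2.Struct()
Parse('{"name": "demo", "size": 2}', msg)
print(msg.fields['name'].string_value)   # -> demo
print(msg.fields['size'].number_value)   # -> 2.0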
ibelie/typy | typy/google/protobuf/json_format.py | _ConvertFieldValuePair | def _ConvertFieldValuePair(js, message):
"""Convert field value pairs into regular message.
Args:
js: A JSON object to convert the field value pairs.
message: A regular protocol message to record the data.
Raises:
ParseError: In case of problems converting.
"""
names = []
message_descriptor = message.DESCRIPTOR
for name in js:
try:
field = message_descriptor.fields_by_camelcase_name.get(name, None)
if not field:
raise ParseError(
'Message type "{0}" has no field named "{1}".'.format(
message_descriptor.full_name, name))
if name in names:
raise ParseError(
'Message type "{0}" should not have multiple "{1}" fields.'.format(
message.DESCRIPTOR.full_name, name))
names.append(name)
# Check no other oneof field is parsed.
if field.containing_oneof is not None:
oneof_name = field.containing_oneof.name
if oneof_name in names:
raise ParseError('Message type "{0}" should not have multiple "{1}" '
'oneof fields.'.format(
message.DESCRIPTOR.full_name, oneof_name))
names.append(oneof_name)
value = js[name]
if value is None:
message.ClearField(field.name)
continue
# Parse field value.
if _IsMapEntry(field):
message.ClearField(field.name)
_ConvertMapFieldValue(value, message, field)
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
message.ClearField(field.name)
if not isinstance(value, list):
raise ParseError('repeated field {0} must be in [] which is '
'{1}.'.format(name, value))
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
# Repeated message field.
for item in value:
sub_message = getattr(message, field.name).add()
# None is a null_value in Value.
if (item is None and
sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
raise ParseError('null is not allowed to be used as an element'
' in a repeated field.')
_ConvertMessage(item, sub_message)
else:
# Repeated scalar field.
for item in value:
if item is None:
raise ParseError('null is not allowed to be used as an element'
' in a repeated field.')
getattr(message, field.name).append(
_ConvertScalarFieldValue(item, field))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
sub_message = getattr(message, field.name)
_ConvertMessage(value, sub_message)
else:
setattr(message, field.name, _ConvertScalarFieldValue(value, field))
except ParseError as e:
if field and field.containing_oneof is None:
raise ParseError('Failed to parse {0} field: {1}'.format(name, e))
else:
raise ParseError(str(e))
except ValueError as e:
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
except TypeError as e:
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | python | def _ConvertFieldValuePair(js, message):
"""Convert field value pairs into regular message.
Args:
js: A JSON object to convert the field value pairs.
message: A regular protocol message to record the data.
Raises:
ParseError: In case of problems converting.
"""
names = []
message_descriptor = message.DESCRIPTOR
for name in js:
try:
field = message_descriptor.fields_by_camelcase_name.get(name, None)
if not field:
raise ParseError(
'Message type "{0}" has no field named "{1}".'.format(
message_descriptor.full_name, name))
if name in names:
raise ParseError(
'Message type "{0}" should not have multiple "{1}" fields.'.format(
message.DESCRIPTOR.full_name, name))
names.append(name)
# Check no other oneof field is parsed.
if field.containing_oneof is not None:
oneof_name = field.containing_oneof.name
if oneof_name in names:
raise ParseError('Message type "{0}" should not have multiple "{1}" '
'oneof fields.'.format(
message.DESCRIPTOR.full_name, oneof_name))
names.append(oneof_name)
value = js[name]
if value is None:
message.ClearField(field.name)
continue
# Parse field value.
if _IsMapEntry(field):
message.ClearField(field.name)
_ConvertMapFieldValue(value, message, field)
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
message.ClearField(field.name)
if not isinstance(value, list):
raise ParseError('repeated field {0} must be in [] which is '
'{1}.'.format(name, value))
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
# Repeated message field.
for item in value:
sub_message = getattr(message, field.name).add()
# None is a null_value in Value.
if (item is None and
sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
raise ParseError('null is not allowed to be used as an element'
' in a repeated field.')
_ConvertMessage(item, sub_message)
else:
# Repeated scalar field.
for item in value:
if item is None:
raise ParseError('null is not allowed to be used as an element'
' in a repeated field.')
getattr(message, field.name).append(
_ConvertScalarFieldValue(item, field))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
sub_message = getattr(message, field.name)
_ConvertMessage(value, sub_message)
else:
setattr(message, field.name, _ConvertScalarFieldValue(value, field))
except ParseError as e:
if field and field.containing_oneof is None:
raise ParseError('Failed to parse {0} field: {1}'.format(name, e))
else:
raise ParseError(str(e))
except ValueError as e:
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
except TypeError as e:
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | [
"def",
"_ConvertFieldValuePair",
"(",
"js",
",",
"message",
")",
":",
"names",
"=",
"[",
"]",
"message_descriptor",
"=",
"message",
".",
"DESCRIPTOR",
"for",
"name",
"in",
"js",
":",
"try",
":",
"field",
"=",
"message_descriptor",
".",
"fields_by_camelcase_name",
".",
"get",
"(",
"name",
",",
"None",
")",
"if",
"not",
"field",
":",
"raise",
"ParseError",
"(",
"'Message type \"{0}\" has no field named \"{1}\".'",
".",
"format",
"(",
"message_descriptor",
".",
"full_name",
",",
"name",
")",
")",
"if",
"name",
"in",
"names",
":",
"raise",
"ParseError",
"(",
"'Message type \"{0}\" should not have multiple \"{1}\" fields.'",
".",
"format",
"(",
"message",
".",
"DESCRIPTOR",
".",
"full_name",
",",
"name",
")",
")",
"names",
".",
"append",
"(",
"name",
")",
"# Check no other oneof field is parsed.",
"if",
"field",
".",
"containing_oneof",
"is",
"not",
"None",
":",
"oneof_name",
"=",
"field",
".",
"containing_oneof",
".",
"name",
"if",
"oneof_name",
"in",
"names",
":",
"raise",
"ParseError",
"(",
"'Message type \"{0}\" should not have multiple \"{1}\" '",
"'oneof fields.'",
".",
"format",
"(",
"message",
".",
"DESCRIPTOR",
".",
"full_name",
",",
"oneof_name",
")",
")",
"names",
".",
"append",
"(",
"oneof_name",
")",
"value",
"=",
"js",
"[",
"name",
"]",
"if",
"value",
"is",
"None",
":",
"message",
".",
"ClearField",
"(",
"field",
".",
"name",
")",
"continue",
"# Parse field value.",
"if",
"_IsMapEntry",
"(",
"field",
")",
":",
"message",
".",
"ClearField",
"(",
"field",
".",
"name",
")",
"_ConvertMapFieldValue",
"(",
"value",
",",
"message",
",",
"field",
")",
"elif",
"field",
".",
"label",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"LABEL_REPEATED",
":",
"message",
".",
"ClearField",
"(",
"field",
".",
"name",
")",
"if",
"not",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"raise",
"ParseError",
"(",
"'repeated field {0} must be in [] which is '",
"'{1}.'",
".",
"format",
"(",
"name",
",",
"value",
")",
")",
"if",
"field",
".",
"cpp_type",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
":",
"# Repeated message field.",
"for",
"item",
"in",
"value",
":",
"sub_message",
"=",
"getattr",
"(",
"message",
",",
"field",
".",
"name",
")",
".",
"add",
"(",
")",
"# None is a null_value in Value.",
"if",
"(",
"item",
"is",
"None",
"and",
"sub_message",
".",
"DESCRIPTOR",
".",
"full_name",
"!=",
"'google.protobuf.Value'",
")",
":",
"raise",
"ParseError",
"(",
"'null is not allowed to be used as an element'",
"' in a repeated field.'",
")",
"_ConvertMessage",
"(",
"item",
",",
"sub_message",
")",
"else",
":",
"# Repeated scalar field.",
"for",
"item",
"in",
"value",
":",
"if",
"item",
"is",
"None",
":",
"raise",
"ParseError",
"(",
"'null is not allowed to be used as an element'",
"' in a repeated field.'",
")",
"getattr",
"(",
"message",
",",
"field",
".",
"name",
")",
".",
"append",
"(",
"_ConvertScalarFieldValue",
"(",
"item",
",",
"field",
")",
")",
"elif",
"field",
".",
"cpp_type",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
":",
"sub_message",
"=",
"getattr",
"(",
"message",
",",
"field",
".",
"name",
")",
"_ConvertMessage",
"(",
"value",
",",
"sub_message",
")",
"else",
":",
"setattr",
"(",
"message",
",",
"field",
".",
"name",
",",
"_ConvertScalarFieldValue",
"(",
"value",
",",
"field",
")",
")",
"except",
"ParseError",
"as",
"e",
":",
"if",
"field",
"and",
"field",
".",
"containing_oneof",
"is",
"None",
":",
"raise",
"ParseError",
"(",
"'Failed to parse {0} field: {1}'",
".",
"format",
"(",
"name",
",",
"e",
")",
")",
"else",
":",
"raise",
"ParseError",
"(",
"str",
"(",
"e",
")",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"ParseError",
"(",
"'Failed to parse {0} field: {1}.'",
".",
"format",
"(",
"name",
",",
"e",
")",
")",
"except",
"TypeError",
"as",
"e",
":",
"raise",
"ParseError",
"(",
"'Failed to parse {0} field: {1}.'",
".",
"format",
"(",
"name",
",",
"e",
")",
")"
] | Convert field value pairs into regular message.
Args:
js: A JSON object to convert the field value pairs.
message: A regular protocol message to record the data.
Raises:
ParseError: In case of problems converting. | [
"Convert",
"field",
"value",
"pairs",
"into",
"regular",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L324-L402 | valid |
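Sketch of the unknown-field error path above: the camelcase lookup returns None and a ParseError is raised. Struct is used only because it ships with protobuf (struct_pb2 path assumed; ParseError comes from the same module).
from typy.google.protobuf import struct_pb2

try:
    _ConvertFieldValuePair({'bogus': 1}, struct_pb2.Struct())
except ParseError as e:
    print(e)   # ... has no field named "bogus".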
ibelie/typy | typy/google/protobuf/json_format.py | _ConvertMessage | def _ConvertMessage(value, message):
"""Convert a JSON object into a message.
Args:
value: A JSON object.
message: A WKT or regular protocol message to record the data.
Raises:
ParseError: In case of convert problems.
"""
message_descriptor = message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
_ConvertWrapperMessage(value, message)
elif full_name in _WKTJSONMETHODS:
_WKTJSONMETHODS[full_name][1](value, message)
else:
_ConvertFieldValuePair(value, message) | python | def _ConvertMessage(value, message):
"""Convert a JSON object into a message.
Args:
value: A JSON object.
message: A WKT or regular protocol message to record the data.
Raises:
ParseError: In case of convert problems.
"""
message_descriptor = message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
_ConvertWrapperMessage(value, message)
elif full_name in _WKTJSONMETHODS:
_WKTJSONMETHODS[full_name][1](value, message)
else:
_ConvertFieldValuePair(value, message) | [
"def",
"_ConvertMessage",
"(",
"value",
",",
"message",
")",
":",
"message_descriptor",
"=",
"message",
".",
"DESCRIPTOR",
"full_name",
"=",
"message_descriptor",
".",
"full_name",
"if",
"_IsWrapperMessage",
"(",
"message_descriptor",
")",
":",
"_ConvertWrapperMessage",
"(",
"value",
",",
"message",
")",
"elif",
"full_name",
"in",
"_WKTJSONMETHODS",
":",
"_WKTJSONMETHODS",
"[",
"full_name",
"]",
"[",
"1",
"]",
"(",
"value",
",",
"message",
")",
"else",
":",
"_ConvertFieldValuePair",
"(",
"value",
",",
"message",
")"
] | Convert a JSON object into a message.
Args:
value: A JSON object.
message: A WKT or regular protocol message to record the data.
Raises:
ParseError: In case of convert problems. | [
"Convert",
"a",
"JSON",
"object",
"into",
"a",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L405-L422 | valid |
ibelie/typy | typy/google/protobuf/json_format.py | _ConvertValueMessage | def _ConvertValueMessage(value, message):
"""Convert a JSON representation into Value message."""
if isinstance(value, dict):
_ConvertStructMessage(value, message.struct_value)
elif isinstance(value, list):
_ConvertListValueMessage(value, message.list_value)
elif value is None:
message.null_value = 0
elif isinstance(value, bool):
message.bool_value = value
elif isinstance(value, six.string_types):
message.string_value = value
elif isinstance(value, _INT_OR_FLOAT):
message.number_value = value
else:
raise ParseError('Unexpected type for Value message.') | python | def _ConvertValueMessage(value, message):
"""Convert a JSON representation into Value message."""
if isinstance(value, dict):
_ConvertStructMessage(value, message.struct_value)
elif isinstance(value, list):
_ConvertListValueMessage(value, message.list_value)
elif value is None:
message.null_value = 0
elif isinstance(value, bool):
message.bool_value = value
elif isinstance(value, six.string_types):
message.string_value = value
elif isinstance(value, _INT_OR_FLOAT):
message.number_value = value
else:
raise ParseError('Unexpected type for Value message.') | [
"def",
"_ConvertValueMessage",
"(",
"value",
",",
"message",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"_ConvertStructMessage",
"(",
"value",
",",
"message",
".",
"struct_value",
")",
"elif",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"_ConvertListValueMessage",
"(",
"value",
",",
"message",
".",
"list_value",
")",
"elif",
"value",
"is",
"None",
":",
"message",
".",
"null_value",
"=",
"0",
"elif",
"isinstance",
"(",
"value",
",",
"bool",
")",
":",
"message",
".",
"bool_value",
"=",
"value",
"elif",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
":",
"message",
".",
"string_value",
"=",
"value",
"elif",
"isinstance",
"(",
"value",
",",
"_INT_OR_FLOAT",
")",
":",
"message",
".",
"number_value",
"=",
"value",
"else",
":",
"raise",
"ParseError",
"(",
"'Unexpected type for Value message.'",
")"
] | Convert a JSON representation into Value message. | [
"Convert",
"a",
"JSON",
"representation",
"into",
"Value",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L459-L474 | valid |
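Sketch showing how each Python type selects a different branch of Value's 'kind' oneof (struct_pb2 path assumed).
from typy.google.protobuf import struct_pb2

v = struct_pb2.Value()
_ConvertValueMessage(3.5, v)
print(v.WhichOneof('kind'))   # -> number_value
_ConvertValueMessage(None, v)
print(v.WhichOneof('kind'))   # -> null_value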
ibelie/typy | typy/google/protobuf/json_format.py | _ConvertListValueMessage | def _ConvertListValueMessage(value, message):
"""Convert a JSON representation into ListValue message."""
if not isinstance(value, list):
raise ParseError(
'ListValue must be in [] which is {0}.'.format(value))
message.ClearField('values')
for item in value:
_ConvertValueMessage(item, message.values.add()) | python | def _ConvertListValueMessage(value, message):
"""Convert a JSON representation into ListValue message."""
if not isinstance(value, list):
raise ParseError(
'ListValue must be in [] which is {0}.'.format(value))
message.ClearField('values')
for item in value:
_ConvertValueMessage(item, message.values.add()) | [
"def",
"_ConvertListValueMessage",
"(",
"value",
",",
"message",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"raise",
"ParseError",
"(",
"'ListValue must be in [] which is {0}.'",
".",
"format",
"(",
"value",
")",
")",
"message",
".",
"ClearField",
"(",
"'values'",
")",
"for",
"item",
"in",
"value",
":",
"_ConvertValueMessage",
"(",
"item",
",",
"message",
".",
"values",
".",
"add",
"(",
")",
")"
] | Convert a JSON representation into ListValue message. | [
"Convert",
"a",
"JSON",
"representation",
"into",
"ListValue",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L477-L484 | valid |
ibelie/typy | typy/google/protobuf/json_format.py | _ConvertStructMessage | def _ConvertStructMessage(value, message):
"""Convert a JSON representation into Struct message."""
if not isinstance(value, dict):
raise ParseError(
'Struct must be in a dict which is {0}.'.format(value))
for key in value:
_ConvertValueMessage(value[key], message.fields[key])
return | python | def _ConvertStructMessage(value, message):
"""Convert a JSON representation into Struct message."""
if not isinstance(value, dict):
raise ParseError(
'Struct must be in a dict which is {0}.'.format(value))
for key in value:
_ConvertValueMessage(value[key], message.fields[key])
return | [
"def",
"_ConvertStructMessage",
"(",
"value",
",",
"message",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"raise",
"ParseError",
"(",
"'Struct must be in a dict which is {0}.'",
".",
"format",
"(",
"value",
")",
")",
"for",
"key",
"in",
"value",
":",
"_ConvertValueMessage",
"(",
"value",
"[",
"key",
"]",
",",
"message",
".",
"fields",
"[",
"key",
"]",
")",
"return"
] | Convert a JSON representation into Struct message. | [
"Convert",
"a",
"JSON",
"representation",
"into",
"Struct",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L487-L494 | valid |
robchambers/nbserve | nbserve/app.py | update_config | def update_config(new_config):
""" Update config options with the provided dictionary of options.
"""
flask_app.base_config.update(new_config)
# Check for changed working directory.
if new_config.has_key('working_directory'):
wd = os.path.abspath(new_config['working_directory'])
if nbmanager.notebook_dir != wd:
if not os.path.exists(wd):
raise IOError('Path not found: %s' % wd)
nbmanager.notebook_dir = wd | python | def update_config(new_config):
""" Update config options with the provided dictionary of options.
"""
flask_app.base_config.update(new_config)
# Check for changed working directory.
if new_config.has_key('working_directory'):
wd = os.path.abspath(new_config['working_directory'])
if nbmanager.notebook_dir != wd:
if not os.path.exists(wd):
raise IOError('Path not found: %s' % wd)
nbmanager.notebook_dir = wd | [
"def",
"update_config",
"(",
"new_config",
")",
":",
"flask_app",
".",
"base_config",
".",
"update",
"(",
"new_config",
")",
"# Check for changed working directory.",
"if",
"new_config",
".",
"has_key",
"(",
"'working_directory'",
")",
":",
"wd",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"new_config",
"[",
"'working_directory'",
"]",
")",
"if",
"nbmanager",
".",
"notebook_dir",
"!=",
"wd",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"wd",
")",
":",
"raise",
"IOError",
"(",
"'Path not found: %s'",
"%",
"wd",
")",
"nbmanager",
".",
"notebook_dir",
"=",
"wd"
] | Update config options with the provided dictionary of options. | [
"Update",
"config",
"options",
"with",
"the",
"provided",
"dictionary",
"of",
"options",
"."
] | 74d820fdd5dd7cdaafae22698dcba9487974bcc5 | https://github.com/robchambers/nbserve/blob/74d820fdd5dd7cdaafae22698dcba9487974bcc5/nbserve/app.py#L62-L73 | valid |
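Usage sketch for update_config(); the directory is an invented example, and has_key() means this helper expects Python 2.
update_config({'working_directory': '/srv/notebooks',   # assumed path
               'template': 'collapse-input'})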
robchambers/nbserve | nbserve/app.py | set_config | def set_config(new_config={}):
""" Reset config options to defaults, and then update (optionally)
with the provided dictionary of options. """
# The default base configuration.
flask_app.base_config = dict(working_directory='.',
template='collapse-input',
debug=False,
port=None)
update_config(new_config) | python | def set_config(new_config={}):
""" Reset config options to defaults, and then update (optionally)
with the provided dictionary of options. """
# The default base configuration.
flask_app.base_config = dict(working_directory='.',
template='collapse-input',
debug=False,
port=None)
update_config(new_config) | [
"def",
"set_config",
"(",
"new_config",
"=",
"{",
"}",
")",
":",
"# The default base configuration.",
"flask_app",
".",
"base_config",
"=",
"dict",
"(",
"working_directory",
"=",
"'.'",
",",
"template",
"=",
"'collapse-input'",
",",
"debug",
"=",
"False",
",",
"port",
"=",
"None",
")",
"update_config",
"(",
"new_config",
")"
] | Reset config options to defaults, and then update (optionally)
with the provided dictionary of options. | [
"Reset",
"config",
"options",
"to",
"defaults",
"and",
"then",
"update",
"(",
"optionally",
")",
"with",
"the",
"provided",
"dictionary",
"of",
"options",
"."
] | 74d820fdd5dd7cdaafae22698dcba9487974bcc5 | https://github.com/robchambers/nbserve/blob/74d820fdd5dd7cdaafae22698dcba9487974bcc5/nbserve/app.py#L76-L84 | valid |
pip-services/pip-services-commons-python | pip_services_commons/commands/Command.py | Command.execute | def execute(self, correlation_id, args):
"""
Executes the command given specific arguments as an input.
Args:
correlation_id: a unique correlation/transaction id
args: command arguments
Returns: an execution result.
Raises:
ApplicationException: when execution fails for whatever reason.
"""
# Validate arguments
if self._schema != None:
self.validate_and_throw_exception(correlation_id, args)
# Call the function
try:
return self._function(correlation_id, args)
# Intercept unhandled errors
except Exception as ex:
raise InvocationException(
correlation_id,
"EXEC_FAILED",
"Execution " + self._name + " failed: " + str(ex)
).with_details("command", self._name).wrap(ex) | python | def execute(self, correlation_id, args):
"""
Executes the command given specific arguments as an input.
Args:
correlation_id: a unique correlation/transaction id
args: command arguments
Returns: an execution result.
Raises:
ApplicationException: when execution fails for whatever reason.
"""
# Validate arguments
if self._schema != None:
self.validate_and_throw_exception(correlation_id, args)
# Call the function
try:
return self._function(correlation_id, args)
# Intercept unhandled errors
except Exception as ex:
raise InvocationException(
correlation_id,
"EXEC_FAILED",
"Execution " + self._name + " failed: " + str(ex)
).with_details("command", self._name).wrap(ex) | [
"def",
"execute",
"(",
"self",
",",
"correlation_id",
",",
"args",
")",
":",
"# Validate arguments\r",
"if",
"self",
".",
"_schema",
"!=",
"None",
":",
"self",
".",
"validate_and_throw_exception",
"(",
"correlation_id",
",",
"args",
")",
"# Call the function\r",
"try",
":",
"return",
"self",
".",
"_function",
"(",
"correlation_id",
",",
"args",
")",
"# Intercept unhandled errors\r",
"except",
"Exception",
"as",
"ex",
":",
"raise",
"InvocationException",
"(",
"correlation_id",
",",
"\"EXEC_FAILED\"",
",",
"\"Execution \"",
"+",
"self",
".",
"_name",
"+",
"\" failed: \"",
"+",
"str",
"(",
"ex",
")",
")",
".",
"with_details",
"(",
"\"command\"",
",",
"self",
".",
"_name",
")",
".",
"wrap",
"(",
"ex",
")"
] | Executes the command given specific arguments as an input.
Args:
correlation_id: a unique correlation/transaction id
args: command arguments
Returns: an execution result.
Raises:
ApplicationException: when execution fails for whatever reason. | [
"Executes",
"the",
"command",
"given",
"specific",
"arguments",
"as",
"an",
"input",
".",
"Args",
":",
"correlation_id",
":",
"a",
"unique",
"correlation",
"/",
"transaction",
"id",
"args",
":",
"command",
"arguments",
"Returns",
":",
"an",
"execution",
"result",
".",
"Raises",
":",
"ApplicationException",
":",
"when",
"execution",
"fails",
"for",
"whatever",
"reason",
"."
] | 2205b18c45c60372966c62c1f23ac4fbc31e11b3 | https://github.com/pip-services/pip-services-commons-python/blob/2205b18c45c60372966c62c1f23ac4fbc31e11b3/pip_services_commons/commands/Command.py#L50-L76 | valid |
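Hedged sketch of executing a Command: the constructor signature (name, schema, function) is inferred from the attributes used above and may differ from the real class.
def greet(correlation_id, args):
    return 'hello ' + args.get('name', 'world')

cmd = Command('greet', None, greet)               # assumed argument order
print(cmd.execute('corr-123', {'name': 'pip'}))   # -> hello pip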
JohannesBuchner/jbopt | jbopt/optimize1d.py | optimize | def optimize(function, x0, cons=[], ftol=0.2, disp=0, plot=False):
"""
**Optimization method based on Brent's method**
First, a bracket (a b c) is sought that contains the minimum (b value is
smaller than both a and c).
The bracket is then recursively halved. Here we apply some modifications
to ensure our suggested point is not too close to either a or c,
because that could be problematic with the local approximation.
Also, if the bracket does not seem to include the minimum,
it is expanded generously in the right direction until it covers it.
Thus, this function is fail safe, and will always find a local minimum.
"""
if disp > 0:
print
print ' ===== custom 1d optimization routine ==== '
print
print 'initial suggestion on', function, ':', x0
points = []
values = []
def recordfunction(x):
v = function(x)
points.append(x)
values.append(v)
return v
(a, b, c), (va, vb, vc) = seek_minimum_bracket(recordfunction, x0, cons=cons, ftol=ftol, disp=disp, plot=plot)
if disp > 0:
print '---------------------------------------------------'
print 'found usable minimum bracket after %d evaluations:' % len(points), (a, b, c), (va, vb, vc)
if disp > 2:
if plot:
plot_values(values, points, lastpoint=-1, ftol=ftol)
pause()
result = brent(recordfunction, a, b, c, va, vb, vc, cons=cons, ftol=ftol, disp=disp, plot=plot)
if disp > 0:
print '---------------------------------------------------'
print 'found minimum after %d evaluations:' % len(points), result
if disp > 1 or len(points) > 20:
if plot:
plot_values(values, points, lastpoint=-1, ftol=ftol)
if disp > 2:
pause()
if disp > 0:
print '---------------------------------------------------'
print
print ' ===== end of custom 1d optimization routine ==== '
print
global neval
neval += len(points)
return result | python | def optimize(function, x0, cons=[], ftol=0.2, disp=0, plot=False):
"""
**Optimization method based on Brent's method**
First, a bracket (a b c) is sought that contains the minimum (b value is
smaller than both a and c).
The bracket is then recursively halved. Here we apply some modifications
to ensure our suggested point is not too close to either a or c,
because that could be problematic with the local approximation.
Also, if the bracket does not seem to include the minimum,
it is expanded generously in the right direction until it covers it.
Thus, this function is fail safe, and will always find a local minimum.
"""
if disp > 0:
print
print ' ===== custom 1d optimization routine ==== '
print
print 'initial suggestion on', function, ':', x0
points = []
values = []
def recordfunction(x):
v = function(x)
points.append(x)
values.append(v)
return v
(a, b, c), (va, vb, vc) = seek_minimum_bracket(recordfunction, x0, cons=cons, ftol=ftol, disp=disp, plot=plot)
if disp > 0:
print '---------------------------------------------------'
print 'found usable minimum bracket after %d evaluations:' % len(points), (a, b, c), (va, vb, vc)
if disp > 2:
if plot:
plot_values(values, points, lastpoint=-1, ftol=ftol)
pause()
result = brent(recordfunction, a, b, c, va, vb, vc, cons=cons, ftol=ftol, disp=disp, plot=plot)
if disp > 0:
print '---------------------------------------------------'
print 'found minimum after %d evaluations:' % len(points), result
if disp > 1 or len(points) > 20:
if plot:
plot_values(values, points, lastpoint=-1, ftol=ftol)
if disp > 2:
pause()
if disp > 0:
print '---------------------------------------------------'
print
print ' ===== end of custom 1d optimization routine ==== '
print
global neval
neval += len(points)
return result | [
"def",
"optimize",
"(",
"function",
",",
"x0",
",",
"cons",
"=",
"[",
"]",
",",
"ftol",
"=",
"0.2",
",",
"disp",
"=",
"0",
",",
"plot",
"=",
"False",
")",
":",
"if",
"disp",
">",
"0",
":",
"print",
"print",
"' ===== custom 1d optimization routine ==== '",
"print",
"print",
"'initial suggestion on'",
",",
"function",
",",
"':'",
",",
"x0",
"points",
"=",
"[",
"]",
"values",
"=",
"[",
"]",
"def",
"recordfunction",
"(",
"x",
")",
":",
"v",
"=",
"function",
"(",
"x",
")",
"points",
".",
"append",
"(",
"x",
")",
"values",
".",
"append",
"(",
"v",
")",
"return",
"v",
"(",
"a",
",",
"b",
",",
"c",
")",
",",
"(",
"va",
",",
"vb",
",",
"vc",
")",
"=",
"seek_minimum_bracket",
"(",
"recordfunction",
",",
"x0",
",",
"cons",
"=",
"cons",
",",
"ftol",
"=",
"ftol",
",",
"disp",
"=",
"disp",
",",
"plot",
"=",
"plot",
")",
"if",
"disp",
">",
"0",
":",
"print",
"'---------------------------------------------------'",
"print",
"'found useable minimum bracker after %d evaluations:'",
"%",
"len",
"(",
"points",
")",
",",
"(",
"a",
",",
"b",
",",
"c",
")",
",",
"(",
"va",
",",
"vb",
",",
"vc",
")",
"if",
"disp",
">",
"2",
":",
"if",
"plot",
":",
"plot_values",
"(",
"values",
",",
"points",
",",
"lastpoint",
"=",
"-",
"1",
",",
"ftol",
"=",
"ftol",
")",
"pause",
"(",
")",
"result",
"=",
"brent",
"(",
"recordfunction",
",",
"a",
",",
"b",
",",
"c",
",",
"va",
",",
"vb",
",",
"vc",
",",
"cons",
"=",
"cons",
",",
"ftol",
"=",
"ftol",
",",
"disp",
"=",
"disp",
",",
"plot",
"=",
"plot",
")",
"if",
"disp",
">",
"0",
":",
"print",
"'---------------------------------------------------'",
"print",
"'found minimum after %d evaluations:'",
"%",
"len",
"(",
"points",
")",
",",
"result",
"if",
"disp",
">",
"1",
"or",
"len",
"(",
"points",
")",
">",
"20",
":",
"if",
"plot",
":",
"plot_values",
"(",
"values",
",",
"points",
",",
"lastpoint",
"=",
"-",
"1",
",",
"ftol",
"=",
"ftol",
")",
"if",
"disp",
">",
"2",
":",
"pause",
"(",
")",
"if",
"disp",
">",
"0",
":",
"print",
"'---------------------------------------------------'",
"print",
"print",
"' ===== end of custom 1d optimization routine ==== '",
"print",
"global",
"neval",
"neval",
"+=",
"len",
"(",
"points",
")",
"return",
"result"
] | **Optimization method based on Brent's method**
First, a bracket (a b c) is sought that contains the minimum (b value is
smaller than both a and c).
The bracket is then recursively halved. Here we apply some modifications
to ensure our suggested point is not too close to either a or c,
because that could be problematic with the local approximation.
Also, if the bracket does not seem to include the minimum,
it is expanded generously in the right direction until it covers it.
Thus, this function is fail safe, and will always find a local minimum. | [
"**",
"Optimization",
"method",
"based",
"on",
"Brent",
"s",
"method",
"**",
"First",
"a",
"bracket",
"(",
"a",
"b",
"c",
")",
"is",
"sought",
"that",
"contains",
"the",
"minimum",
"(",
"b",
"value",
"is",
"smaller",
"than",
"both",
"a",
"or",
"c",
")",
".",
"The",
"bracket",
"is",
"then",
"recursively",
"halfed",
".",
"Here",
"we",
"apply",
"some",
"modifications",
"to",
"ensure",
"our",
"suggested",
"point",
"is",
"not",
"too",
"close",
"to",
"either",
"a",
"or",
"c",
"because",
"that",
"could",
"be",
"problematic",
"with",
"the",
"local",
"approximation",
".",
"Also",
"if",
"the",
"bracket",
"does",
"not",
"seem",
"to",
"include",
"the",
"minimum",
"it",
"is",
"expanded",
"generously",
"in",
"the",
"right",
"direction",
"until",
"it",
"covers",
"it",
".",
"Thus",
"this",
"function",
"is",
"fail",
"safe",
"and",
"will",
"always",
"find",
"a",
"local",
"minimum",
"."
] | 11b721ea001625ad7820f71ff684723c71216646 | https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/optimize1d.py#L270-L322 | valid |
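Usage sketch for optimize(): minimize a one-dimensional parabola; disp=0 keeps the Python 2 print statements quiet.
result = optimize(lambda x: (x - 2.0) ** 2, x0=0.0, ftol=0.05)
print(result)   # close to 2.0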
JohannesBuchner/jbopt | jbopt/optimize1d.py | cache2errors | def cache2errors(function, cache, disp=0, ftol=0.05):
"""
This function will attempt to identify 1 sigma errors, assuming your
function is a chi^2. For this, the 1-sigma is bracketed.
If you were smart enough to build a cache list of [x,y] into your function,
you can pass it here. The values bracketing 1 sigma will be used as
starting values.
If no such values exist, e.g. because all values were very close to the
optimum (good starting values), the bracket is expanded.
"""
vals = numpy.array(sorted(cache, key=lambda x: x[0]))
if disp > 0: print ' --- cache2errors --- ', vals
vi = vals[:,1].min()
def renormedfunc(x):
y = function(x)
cache.append([x, y])
if disp > 1: print ' renormed:', x, y, y - (vi + 1)
return y - (vi + 1)
vals[:,1] -= vi + 1
lowmask = vals[:,1] < 0
highmask = vals[:,1] > 0
indices = numpy.arange(len(vals))
b, vb = vals[indices[lowmask][ 0],:]
c, vc = vals[indices[lowmask][-1],:]
if any(vals[:,0][highmask] < b):
if disp > 0: print 'already have bracket'
a, va = vals[indices[highmask][vals[:,0][highmask] < b][-1],:]
else:
a = b
va = vb
while b > -50:
a = b - max(vals[-1,0] - vals[0,0], 1)
va = renormedfunc(a)
if disp > 0: print 'going further left: %.1f [%.1f] --> %.1f [%.1f]' % (b, vb, a, va)
if va > 0:
if disp > 0: print 'found outer part'
break
else:
# need to go further
b = a
vb = va
if disp > 0: print 'left bracket', a, b, va, vb
if va > 0 and vb < 0:
leftroot = scipy.optimize.brentq(renormedfunc, a, b, rtol=ftol)
else:
if disp > 0: print 'WARNING: border problem found.'
leftroot = a
if disp > 0: print 'left root', leftroot
if any(vals[:,0][highmask] > c):
if disp > 0: print 'already have bracket'
d, vd = vals[indices[highmask][vals[:,0][highmask] > c][ 0],:]
else:
d = c
vd = vc
while c < 50:
d = c + max(vals[-1,0] - vals[0,0], 1)
vd = renormedfunc(d)
if disp > 0: print 'going further right: %.1f [%.1f] --> %.1f [%.1f]' % (c, vc, d, vd)
if vd > 0:
if disp > 0: print 'found outer part'
break
else:
# need to go further
c = d
vc = vd
if disp > 0: print 'right bracket', c, d, vc, vd
if vd > 0 and vc < 0:
rightroot = scipy.optimize.brentq(renormedfunc, c, d, rtol=ftol)
else:
if disp > 0: print 'WARNING: border problem found.'
rightroot = d
if disp > 0: print 'right root', rightroot
assert leftroot < rightroot
if disp > 2:
fullvals = numpy.array(sorted(cache, key=lambda x: x[0]))
fullvals[:,1] -= vi + 1
plt.figure()
plt.plot(fullvals[:,0], fullvals[:,1], 's')
plt.plot(vals[:,0], vals[:,1], 'o')
plt.xlim(a, d)
plt.ylim(min(va, vb, vc, vd), max(va, vb, vc, vd))
ymin, ymax = plt.ylim()
plt.vlines([leftroot, rightroot], ymin, ymax, linestyles='dotted')
plt.savefig('cache_brent.pdf')
return leftroot, rightroot | python | def cache2errors(function, cache, disp=0, ftol=0.05):
"""
This function will attempt to identify 1 sigma errors, assuming your
function is a chi^2. For this, the 1-sigma is bracketed.
If you were smart enough to build a cache list of [x,y] into your function,
you can pass it here. The values bracketing 1 sigma will be used as
starting values.
If no such values exist, e.g. because all values were very close to the
optimum (good starting values), the bracket is expanded.
"""
vals = numpy.array(sorted(cache, key=lambda x: x[0]))
if disp > 0: print ' --- cache2errors --- ', vals
vi = vals[:,1].min()
def renormedfunc(x):
y = function(x)
cache.append([x, y])
if disp > 1: print ' renormed:', x, y, y - (vi + 1)
return y - (vi + 1)
vals[:,1] -= vi + 1
lowmask = vals[:,1] < 0
highmask = vals[:,1] > 0
indices = numpy.arange(len(vals))
b, vb = vals[indices[lowmask][ 0],:]
c, vc = vals[indices[lowmask][-1],:]
if any(vals[:,0][highmask] < b):
if disp > 0: print 'already have bracket'
a, va = vals[indices[highmask][vals[:,0][highmask] < b][-1],:]
else:
a = b
va = vb
while b > -50:
a = b - max(vals[-1,0] - vals[0,0], 1)
va = renormedfunc(a)
if disp > 0: print 'going further left: %.1f [%.1f] --> %.1f [%.1f]' % (b, vb, a, va)
if va > 0:
if disp > 0: print 'found outer part'
break
else:
# need to go further
b = a
vb = va
if disp > 0: print 'left bracket', a, b, va, vb
if va > 0 and vb < 0:
leftroot = scipy.optimize.brentq(renormedfunc, a, b, rtol=ftol)
else:
if disp > 0: print 'WARNING: border problem found.'
leftroot = a
if disp > 0: print 'left root', leftroot
if any(vals[:,0][highmask] > c):
if disp > 0: print 'already have bracket'
d, vd = vals[indices[highmask][vals[:,0][highmask] > c][ 0],:]
else:
d = c
vd = vc
while c < 50:
d = c + max(vals[-1,0] - vals[0,0], 1)
vd = renormedfunc(d)
if disp > 0: print 'going further right: %.1f [%.1f] --> %.1f [%.1f]' % (c, vc, d, vd)
if vd > 0:
if disp > 0: print 'found outer part'
break
else:
# need to go further
c = d
vc = vd
if disp > 0: print 'right bracket', c, d, vc, vd
if vd > 0 and vc < 0:
rightroot = scipy.optimize.brentq(renormedfunc, c, d, rtol=ftol)
else:
if disp > 0: print 'WARNING: border problem found.'
rightroot = d
if disp > 0: print 'right root', rightroot
assert leftroot < rightroot
if disp > 2:
fullvals = numpy.array(sorted(cache, key=lambda x: x[0]))
fullvals[:,1] -= vi + 1
plt.figure()
plt.plot(fullvals[:,0], fullvals[:,1], 's')
plt.plot(vals[:,0], vals[:,1], 'o')
plt.xlim(a, d)
plt.ylim(min(va, vb, vc, vd), max(va, vb, vc, vd))
ymin, ymax = plt.ylim()
plt.vlines([leftroot, rightroot], ymin, ymax, linestyles='dotted')
plt.savefig('cache_brent.pdf')
return leftroot, rightroot | [
"def",
"cache2errors",
"(",
"function",
",",
"cache",
",",
"disp",
"=",
"0",
",",
"ftol",
"=",
"0.05",
")",
":",
"vals",
"=",
"numpy",
".",
"array",
"(",
"sorted",
"(",
"cache",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
")",
")",
"if",
"disp",
">",
"0",
":",
"print",
"' --- cache2errors --- '",
",",
"vals",
"vi",
"=",
"vals",
"[",
":",
",",
"1",
"]",
".",
"min",
"(",
")",
"def",
"renormedfunc",
"(",
"x",
")",
":",
"y",
"=",
"function",
"(",
"x",
")",
"cache",
".",
"append",
"(",
"[",
"x",
",",
"y",
"]",
")",
"if",
"disp",
">",
"1",
":",
"print",
"' renormed:'",
",",
"x",
",",
"y",
",",
"y",
"-",
"(",
"vi",
"+",
"1",
")",
"return",
"y",
"-",
"(",
"vi",
"+",
"1",
")",
"vals",
"[",
":",
",",
"1",
"]",
"-=",
"vi",
"+",
"1",
"lowmask",
"=",
"vals",
"[",
":",
",",
"1",
"]",
"<",
"0",
"highmask",
"=",
"vals",
"[",
":",
",",
"1",
"]",
">",
"0",
"indices",
"=",
"numpy",
".",
"arange",
"(",
"len",
"(",
"vals",
")",
")",
"b",
",",
"vb",
"=",
"vals",
"[",
"indices",
"[",
"lowmask",
"]",
"[",
"0",
"]",
",",
":",
"]",
"c",
",",
"vc",
"=",
"vals",
"[",
"indices",
"[",
"lowmask",
"]",
"[",
"-",
"1",
"]",
",",
":",
"]",
"if",
"any",
"(",
"vals",
"[",
":",
",",
"0",
"]",
"[",
"highmask",
"]",
"<",
"b",
")",
":",
"if",
"disp",
">",
"0",
":",
"print",
"'already have bracket'",
"a",
",",
"va",
"=",
"vals",
"[",
"indices",
"[",
"highmask",
"]",
"[",
"vals",
"[",
":",
",",
"0",
"]",
"[",
"highmask",
"]",
"<",
"b",
"]",
"[",
"-",
"1",
"]",
",",
":",
"]",
"else",
":",
"a",
"=",
"b",
"va",
"=",
"vb",
"while",
"b",
">",
"-",
"50",
":",
"a",
"=",
"b",
"-",
"max",
"(",
"vals",
"[",
"-",
"1",
",",
"0",
"]",
"-",
"vals",
"[",
"0",
",",
"0",
"]",
",",
"1",
")",
"va",
"=",
"renormedfunc",
"(",
"a",
")",
"if",
"disp",
">",
"0",
":",
"print",
"'going further left: %.1f [%.1f] --> %.1f [%.1f]'",
"%",
"(",
"b",
",",
"vb",
",",
"a",
",",
"va",
")",
"if",
"va",
">",
"0",
":",
"if",
"disp",
">",
"0",
":",
"print",
"'found outer part'",
"break",
"else",
":",
"# need to go further",
"b",
"=",
"a",
"vb",
"=",
"va",
"if",
"disp",
">",
"0",
":",
"print",
"'left bracket'",
",",
"a",
",",
"b",
",",
"va",
",",
"vb",
"if",
"va",
">",
"0",
"and",
"vb",
"<",
"0",
":",
"leftroot",
"=",
"scipy",
".",
"optimize",
".",
"brentq",
"(",
"renormedfunc",
",",
"a",
",",
"b",
",",
"rtol",
"=",
"ftol",
")",
"else",
":",
"if",
"disp",
">",
"0",
":",
"print",
"'WARNING: border problem found.'",
"leftroot",
"=",
"a",
"if",
"disp",
">",
"0",
":",
"print",
"'left root'",
",",
"leftroot",
"if",
"any",
"(",
"vals",
"[",
":",
",",
"0",
"]",
"[",
"highmask",
"]",
">",
"c",
")",
":",
"if",
"disp",
">",
"0",
":",
"print",
"'already have bracket'",
"d",
",",
"vd",
"=",
"vals",
"[",
"indices",
"[",
"highmask",
"]",
"[",
"vals",
"[",
":",
",",
"0",
"]",
"[",
"highmask",
"]",
">",
"c",
"]",
"[",
"0",
"]",
",",
":",
"]",
"else",
":",
"d",
"=",
"c",
"vd",
"=",
"vc",
"while",
"c",
"<",
"50",
":",
"d",
"=",
"c",
"+",
"max",
"(",
"vals",
"[",
"-",
"1",
",",
"0",
"]",
"-",
"vals",
"[",
"0",
",",
"0",
"]",
",",
"1",
")",
"vd",
"=",
"renormedfunc",
"(",
"d",
")",
"if",
"disp",
">",
"0",
":",
"print",
"'going further right: %.1f [%.1f] --> %.1f [%.1f]'",
"%",
"(",
"c",
",",
"vc",
",",
"d",
",",
"vd",
")",
"if",
"vd",
">",
"0",
":",
"if",
"disp",
">",
"0",
":",
"print",
"'found outer part'",
"break",
"else",
":",
"# need to go further",
"c",
"=",
"d",
"vc",
"=",
"vd",
"if",
"disp",
">",
"0",
":",
"print",
"'right bracket'",
",",
"c",
",",
"d",
",",
"vc",
",",
"vd",
"if",
"vd",
">",
"0",
"and",
"vc",
"<",
"0",
":",
"rightroot",
"=",
"scipy",
".",
"optimize",
".",
"brentq",
"(",
"renormedfunc",
",",
"c",
",",
"d",
",",
"rtol",
"=",
"ftol",
")",
"else",
":",
"if",
"disp",
">",
"0",
":",
"print",
"'WARNING: border problem found.'",
"rightroot",
"=",
"d",
"if",
"disp",
">",
"0",
":",
"print",
"'right root'",
",",
"rightroot",
"assert",
"leftroot",
"<",
"rightroot",
"if",
"disp",
">",
"2",
":",
"fullvals",
"=",
"numpy",
".",
"array",
"(",
"sorted",
"(",
"cache",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
")",
")",
"fullvals",
"[",
":",
",",
"1",
"]",
"-=",
"vi",
"+",
"1",
"plt",
".",
"figure",
"(",
")",
"plt",
".",
"plot",
"(",
"fullvals",
"[",
":",
",",
"0",
"]",
",",
"fullvals",
"[",
":",
",",
"1",
"]",
",",
"'s'",
")",
"plt",
".",
"plot",
"(",
"vals",
"[",
":",
",",
"0",
"]",
",",
"vals",
"[",
":",
",",
"1",
"]",
",",
"'o'",
")",
"plt",
".",
"xlim",
"(",
"a",
",",
"d",
")",
"plt",
".",
"ylim",
"(",
"min",
"(",
"va",
",",
"vb",
",",
"vc",
",",
"vd",
")",
",",
"max",
"(",
"va",
",",
"vb",
",",
"vc",
",",
"vd",
")",
")",
"ymin",
",",
"ymax",
"=",
"plt",
".",
"ylim",
"(",
")",
"plt",
".",
"vlines",
"(",
"[",
"leftroot",
",",
"rightroot",
"]",
",",
"ymin",
",",
"ymax",
",",
"linestyles",
"=",
"'dotted'",
")",
"plt",
".",
"savefig",
"(",
"'cache_brent.pdf'",
")",
"return",
"leftroot",
",",
"rightroot"
] | This function will attempt to identify 1 sigma errors, assuming your
function is a chi^2. For this, the 1-sigma is bracketed.
If you were smart enough to build a cache list of [x,y] into your function,
you can pass it here. The values bracketing 1 sigma will be used as
starting values.
If no such values exist, e.g. because all values were very close to the
optimum (good starting values), the bracket is expanded. | [
"This",
"function",
"will",
"attempt",
"to",
"identify",
"1",
"sigma",
"errors",
"assuming",
"your",
"function",
"is",
"a",
"chi^2",
".",
"For",
"this",
"the",
"1",
"-",
"sigma",
"is",
"bracketed",
".",
"If",
"you",
"were",
"smart",
"enough",
"to",
"build",
"a",
"cache",
"list",
"of",
"[",
"x",
"y",
"]",
"into",
"your",
"function",
"you",
"can",
"pass",
"it",
"here",
".",
"The",
"values",
"bracketing",
"1",
"sigma",
"will",
"be",
"used",
"as",
"starting",
"values",
".",
"If",
"no",
"such",
"values",
"exist",
"e",
".",
"g",
".",
"because",
"all",
"values",
"were",
"very",
"close",
"to",
"the",
"optimum",
"(",
"good",
"starting",
"values",
")",
"the",
"bracket",
"is",
"expanded",
"."
] | 11b721ea001625ad7820f71ff684723c71216646 | https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/optimize1d.py#L324-L415 | valid |
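The search above reduces to bracketing the two points where chi^2 rises one unit above its minimum and handing each bracket to brentq. A self-contained sketch of that core logic, with illustrative names, step size and expansion limit:

import scipy.optimize

def one_sigma_interval(chi2, xmin, step=1.0, max_expand=50.0):
    """Find the x values where chi2 crosses chi2(xmin) + 1 on either side of xmin."""
    target = chi2(xmin) + 1.0
    f = lambda x: chi2(x) - target  # the 1-sigma boundaries are roots of f
    roots = []
    for direction in (-1.0, 1.0):
        b = xmin + direction * step
        # expand outward until the crossing is bracketed (f changes sign)
        while f(b) < 0 and abs(b - xmin) < max_expand:
            b += direction * step
        roots.append(scipy.optimize.brentq(f, min(xmin, b), max(xmin, b)))
    return tuple(sorted(roots))

# e.g. one_sigma_interval(lambda x: x * x, 0.0) gives (-1.0, 1.0)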
pip-services/pip-services-commons-python | pip_services_commons/count/Timing.py | Timing.end_timing | def end_timing(self):
"""
Completes measuring time interval and updates counter.
"""
if self._callback != None:
elapsed = time.clock() * 1000 - self._start
self._callback.end_timing(self._counter, elapsed) | python | def end_timing(self):
"""
Completes measuring time interval and updates counter.
"""
if self._callback != None:
elapsed = time.clock() * 1000 - self._start
self._callback.end_timing(self._counter, elapsed) | [
"def",
"end_timing",
"(",
"self",
")",
":",
"if",
"self",
".",
"_callback",
"!=",
"None",
":",
"elapsed",
"=",
"time",
".",
"clock",
"(",
")",
"*",
"1000",
"-",
"self",
".",
"_start",
"self",
".",
"_callback",
".",
"end_timing",
"(",
"self",
".",
"_counter",
",",
"elapsed",
")"
] | Completes measuring time interval and updates counter. | [
"Completes",
"measuring",
"time",
"interval",
"and",
"updates",
"counter",
"."
] | 2205b18c45c60372966c62c1f23ac4fbc31e11b3 | https://github.com/pip-services/pip-services-commons-python/blob/2205b18c45c60372966c62c1f23ac4fbc31e11b3/pip_services_commons/count/Timing.py#L37-L44 | valid |
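One caveat about the record above: time.clock() was deprecated in Python 3.3 and removed in 3.8, so the code only runs on older interpreters. On modern Python the same pattern is usually written with time.perf_counter(); a hedged sketch, not the pip-services API:

import time

class Timing(object):
    """Measures one elapsed interval in milliseconds and reports it to a callback."""

    def __init__(self, callback=None, counter=None):
        self._callback = callback
        self._counter = counter
        self._start = time.perf_counter() * 1000  # milliseconds

    def end_timing(self):
        if self._callback is not None:
            elapsed = time.perf_counter() * 1000 - self._start
            self._callback.end_timing(self._counter, elapsed)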
ibelie/typy | typy/google/protobuf/internal/well_known_types.py | Duration.ToJsonString | def ToJsonString(self):
"""Converts Duration to string format.
Returns:
A string converted from self. The string format will contains
3, 6, or 9 fractional digits depending on the precision required to
represent the exact Duration value. For example: "1s", "1.010s",
"1.000000100s", "-3.100s"
"""
if self.seconds < 0 or self.nanos < 0:
result = '-'
seconds = - self.seconds + int((0 - self.nanos) // 1e9)
nanos = (0 - self.nanos) % 1e9
else:
result = ''
seconds = self.seconds + int(self.nanos // 1e9)
nanos = self.nanos % 1e9
result += '%d' % seconds
if (nanos % 1e9) == 0:
# If there are 0 fractional digits, the fractional
# point '.' should be omitted when serializing.
return result + 's'
if (nanos % 1e6) == 0:
# Serialize 3 fractional digits.
return result + '.%03ds' % (nanos / 1e6)
if (nanos % 1e3) == 0:
# Serialize 6 fractional digits.
return result + '.%06ds' % (nanos / 1e3)
# Serialize 9 fractional digits.
return result + '.%09ds' % nanos | python | def ToJsonString(self):
"""Converts Duration to string format.
Returns:
A string converted from self. The string format will contain
3, 6, or 9 fractional digits depending on the precision required to
represent the exact Duration value. For example: "1s", "1.010s",
"1.000000100s", "-3.100s"
"""
if self.seconds < 0 or self.nanos < 0:
result = '-'
seconds = - self.seconds + int((0 - self.nanos) // 1e9)
nanos = (0 - self.nanos) % 1e9
else:
result = ''
seconds = self.seconds + int(self.nanos // 1e9)
nanos = self.nanos % 1e9
result += '%d' % seconds
if (nanos % 1e9) == 0:
# If there are 0 fractional digits, the fractional
# point '.' should be omitted when serializing.
return result + 's'
if (nanos % 1e6) == 0:
# Serialize 3 fractional digits.
return result + '.%03ds' % (nanos / 1e6)
if (nanos % 1e3) == 0:
# Serialize 6 fractional digits.
return result + '.%06ds' % (nanos / 1e3)
# Serialize 9 fractional digits.
return result + '.%09ds' % nanos | [
"def",
"ToJsonString",
"(",
"self",
")",
":",
"if",
"self",
".",
"seconds",
"<",
"0",
"or",
"self",
".",
"nanos",
"<",
"0",
":",
"result",
"=",
"'-'",
"seconds",
"=",
"-",
"self",
".",
"seconds",
"+",
"int",
"(",
"(",
"0",
"-",
"self",
".",
"nanos",
")",
"//",
"1e9",
")",
"nanos",
"=",
"(",
"0",
"-",
"self",
".",
"nanos",
")",
"%",
"1e9",
"else",
":",
"result",
"=",
"''",
"seconds",
"=",
"self",
".",
"seconds",
"+",
"int",
"(",
"self",
".",
"nanos",
"//",
"1e9",
")",
"nanos",
"=",
"self",
".",
"nanos",
"%",
"1e9",
"result",
"+=",
"'%d'",
"%",
"seconds",
"if",
"(",
"nanos",
"%",
"1e9",
")",
"==",
"0",
":",
"# If there are 0 fractional digits, the fractional",
"# point '.' should be omitted when serializing.",
"return",
"result",
"+",
"'s'",
"if",
"(",
"nanos",
"%",
"1e6",
")",
"==",
"0",
":",
"# Serialize 3 fractional digits.",
"return",
"result",
"+",
"'.%03ds'",
"%",
"(",
"nanos",
"/",
"1e6",
")",
"if",
"(",
"nanos",
"%",
"1e3",
")",
"==",
"0",
":",
"# Serialize 6 fractional digits.",
"return",
"result",
"+",
"'.%06ds'",
"%",
"(",
"nanos",
"/",
"1e3",
")",
"# Serialize 9 fractional digits.",
"return",
"result",
"+",
"'.%09ds'",
"%",
"nanos"
] | Converts Duration to string format.
Returns:
A string converted from self. The string format will contain
3, 6, or 9 fractional digits depending on the precision required to
represent the exact Duration value. For example: "1s", "1.010s",
"1.000000100s", "-3.100s" | [
"Converts",
"Duration",
"to",
"string",
"format",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/well_known_types.py#L241-L270 | valid |
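The 0/3/6/9 fractional-digit rule above is easy to exercise on its own. A standalone mirror of the formatting logic, using the docstring's own examples as checks (integer nanos assumed, as in the protobuf message):

def duration_to_json(seconds, nanos):
    """Render (seconds, nanos) with the shortest of 0, 3, 6 or 9 fractional digits."""
    sign = '-' if (seconds < 0 or nanos < 0) else ''
    seconds, nanos = abs(seconds), abs(nanos)
    if nanos == 0:
        return '%s%ds' % (sign, seconds)
    if nanos % 1000000 == 0:
        return '%s%d.%03ds' % (sign, seconds, nanos // 1000000)
    if nanos % 1000 == 0:
        return '%s%d.%06ds' % (sign, seconds, nanos // 1000)
    return '%s%d.%09ds' % (sign, seconds, nanos)

assert duration_to_json(1, 0) == '1s'
assert duration_to_json(1, 10000000) == '1.010s'
assert duration_to_json(1, 100) == '1.000000100s'
assert duration_to_json(-3, -100000000) == '-3.100s'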
ibelie/typy | typy/google/protobuf/internal/well_known_types.py | Duration.FromJsonString | def FromJsonString(self, value):
"""Converts a string to Duration.
Args:
value: A string to be converted. The string must end with 's'. Any
fractional digits (or none) are accepted as long as they fit into
precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s"
Raises:
ParseError: On parsing problems.
"""
if len(value) < 1 or value[-1] != 's':
raise ParseError(
'Duration must end with letter "s": {0}.'.format(value))
try:
pos = value.find('.')
if pos == -1:
self.seconds = int(value[:-1])
self.nanos = 0
else:
self.seconds = int(value[:pos])
if value[0] == '-':
self.nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
else:
self.nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
except ValueError:
raise ParseError(
'Couldn\'t parse duration: {0}.'.format(value)) | python | def FromJsonString(self, value):
"""Converts a string to Duration.
Args:
value: A string to be converted. The string must end with 's'. Any
fractional digits (or none) are accepted as long as they fit into
precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s"
Raises:
ParseError: On parsing problems.
"""
if len(value) < 1 or value[-1] != 's':
raise ParseError(
'Duration must end with letter "s": {0}.'.format(value))
try:
pos = value.find('.')
if pos == -1:
self.seconds = int(value[:-1])
self.nanos = 0
else:
self.seconds = int(value[:pos])
if value[0] == '-':
self.nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
else:
self.nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
except ValueError:
raise ParseError(
'Couldn\'t parse duration: {0}.'.format(value)) | [
"def",
"FromJsonString",
"(",
"self",
",",
"value",
")",
":",
"if",
"len",
"(",
"value",
")",
"<",
"1",
"or",
"value",
"[",
"-",
"1",
"]",
"!=",
"'s'",
":",
"raise",
"ParseError",
"(",
"'Duration must end with letter \"s\": {0}.'",
".",
"format",
"(",
"value",
")",
")",
"try",
":",
"pos",
"=",
"value",
".",
"find",
"(",
"'.'",
")",
"if",
"pos",
"==",
"-",
"1",
":",
"self",
".",
"seconds",
"=",
"int",
"(",
"value",
"[",
":",
"-",
"1",
"]",
")",
"self",
".",
"nanos",
"=",
"0",
"else",
":",
"self",
".",
"seconds",
"=",
"int",
"(",
"value",
"[",
":",
"pos",
"]",
")",
"if",
"value",
"[",
"0",
"]",
"==",
"'-'",
":",
"self",
".",
"nanos",
"=",
"int",
"(",
"round",
"(",
"float",
"(",
"'-0{0}'",
".",
"format",
"(",
"value",
"[",
"pos",
":",
"-",
"1",
"]",
")",
")",
"*",
"1e9",
")",
")",
"else",
":",
"self",
".",
"nanos",
"=",
"int",
"(",
"round",
"(",
"float",
"(",
"'0{0}'",
".",
"format",
"(",
"value",
"[",
"pos",
":",
"-",
"1",
"]",
")",
")",
"*",
"1e9",
")",
")",
"except",
"ValueError",
":",
"raise",
"ParseError",
"(",
"'Couldn\\'t parse duration: {0}.'",
".",
"format",
"(",
"value",
")",
")"
] | Converts a string to Duration.
Args:
value: A string to be converted. The string must end with 's'. Any
fractional digits (or none) are accepted as long as they fit into
precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s"
Raises:
ParseError: On parsing problems. | [
"Converts",
"a",
"string",
"to",
"Duration",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/well_known_types.py#L272-L299 | valid |
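The '-0{0}' trick in the code above keeps the fractional nanoseconds negative when the whole duration is negative. A simplified standalone parser showing the same sign handling (names and error handling are illustrative, not the protobuf API):

def duration_from_json(value):
    """Parse '<seconds>[.<frac>]s' into (seconds, nanos) with matching signs."""
    assert value.endswith('s'), 'duration must end with "s"'
    body = value[:-1]
    if '.' not in body:
        return int(body), 0
    whole, frac = body.split('.', 1)
    nanos = int(round(float('0.' + frac) * 1e9))
    if whole.startswith('-'):
        nanos = -nanos
    return int(whole), nanos

assert duration_from_json('-3.100s') == (-3, -100000000)
assert duration_from_json('1.010s') == (1, 10000000)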
ibelie/typy | typy/google/protobuf/internal/well_known_types.py | FieldMask.FromJsonString | def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
self.Clear()
for path in value.split(','):
self.paths.append(path) | python | def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
self.Clear()
for path in value.split(','):
self.paths.append(path) | [
"def",
"FromJsonString",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"Clear",
"(",
")",
"for",
"path",
"in",
"value",
".",
"split",
"(",
"','",
")",
":",
"self",
".",
"paths",
".",
"append",
"(",
"path",
")"
] | Converts string to FieldMask according to proto3 JSON spec. | [
"Converts",
"string",
"to",
"FieldMask",
"according",
"to",
"proto3",
"JSON",
"spec",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/well_known_types.py#L384-L388 | valid |
zvoase/django-relax | relax/couchdb/shortcuts.py | get_doc | def get_doc(doc_id, db_name, server_url='http://127.0.0.1:5984/', rev=None):
"""Return a CouchDB document, given its ID, revision and database name."""
db = get_server(server_url)[db_name]
if rev:
headers, response = db.resource.get(doc_id, rev=rev)
return couchdb.client.Document(response)
return db[doc_id] | python | def get_doc(doc_id, db_name, server_url='http://127.0.0.1:5984/', rev=None):
"""Return a CouchDB document, given its ID, revision and database name."""
db = get_server(server_url)[db_name]
if rev:
headers, response = db.resource.get(doc_id, rev=rev)
return couchdb.client.Document(response)
return db[doc_id] | [
"def",
"get_doc",
"(",
"doc_id",
",",
"db_name",
",",
"server_url",
"=",
"'http://127.0.0.1:5984/'",
",",
"rev",
"=",
"None",
")",
":",
"db",
"=",
"get_server",
"(",
"server_url",
")",
"[",
"db_name",
"]",
"if",
"rev",
":",
"headers",
",",
"response",
"=",
"db",
".",
"resource",
".",
"get",
"(",
"doc_id",
",",
"rev",
"=",
"rev",
")",
"return",
"couchdb",
".",
"client",
".",
"Document",
"(",
"response",
")",
"return",
"db",
"[",
"doc_id",
"]"
] | Return a CouchDB document, given its ID, revision and database name. | [
"Return",
"a",
"CouchDB",
"document",
"given",
"its",
"ID",
"revision",
"and",
"database",
"name",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/shortcuts.py#L20-L26 | valid |
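Hypothetical usage of the shortcut above against a local CouchDB, assuming a database named 'blog' containing a document 'post-42':

doc = get_doc('post-42', 'blog')                   # latest revision
old = get_doc('post-42', 'blog', rev=doc['_rev'])  # pin an explicit revision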
zvoase/django-relax | relax/couchdb/shortcuts.py | get_or_create_db | def get_or_create_db(db_name, server_url='http://127.0.0.1:5984/'):
"""Return an (optionally existing) CouchDB database instance."""
server = get_server(server_url)
if db_name in server:
return server[db_name]
return server.create(db_name) | python | def get_or_create_db(db_name, server_url='http://127.0.0.1:5984/'):
"""Return an (optionally existing) CouchDB database instance."""
server = get_server(server_url)
if db_name in server:
return server[db_name]
return server.create(db_name) | [
"def",
"get_or_create_db",
"(",
"db_name",
",",
"server_url",
"=",
"'http://127.0.0.1:5984/'",
")",
":",
"server",
"=",
"get_server",
"(",
"server_url",
")",
"if",
"db_name",
"in",
"server",
":",
"return",
"server",
"[",
"db_name",
"]",
"return",
"server",
".",
"create",
"(",
"db_name",
")"
] | Return an (optionally existing) CouchDB database instance. | [
"Return",
"an",
"(",
"optionally",
"existing",
")",
"CouchDB",
"database",
"instance",
"."
] | 10bb37bf3a512b290816856a6877c17fa37e930f | https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/shortcuts.py#L28-L33 | valid |
crazy-canux/arguspy | setup.py | read | def read(readme):
"""Give reST format README for pypi."""
extend = os.path.splitext(readme)[1]
if (extend == '.rst'):
import codecs
return codecs.open(readme, 'r', 'utf-8').read()
elif (extend == '.md'):
import pypandoc
return pypandoc.convert(readme, 'rst') | python | def read(readme):
"""Give reST format README for pypi."""
extend = os.path.splitext(readme)[1]
if (extend == '.rst'):
import codecs
return codecs.open(readme, 'r', 'utf-8').read()
elif (extend == '.md'):
import pypandoc
return pypandoc.convert(readme, 'rst') | [
"def",
"read",
"(",
"readme",
")",
":",
"extend",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"readme",
")",
"[",
"1",
"]",
"if",
"(",
"extend",
"==",
"'.rst'",
")",
":",
"import",
"codecs",
"return",
"codecs",
".",
"open",
"(",
"readme",
",",
"'r'",
",",
"'utf-8'",
")",
".",
"read",
"(",
")",
"elif",
"(",
"extend",
"==",
"'.md'",
")",
":",
"import",
"pypandoc",
"return",
"pypandoc",
".",
"convert",
"(",
"readme",
",",
"'rst'",
")"
] | Return the README in reST format for PyPI. | [
"Give",
"reST",
"format",
"README",
"for",
"pypi",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/setup.py#L29-L37 | valid |
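A typical call site for the helper above is the long_description argument of setup(); a sketch with assumed metadata:

from setuptools import setup

setup(
    name='arguspy',                      # assumed package name
    version='0.1.0',                     # assumed version
    long_description=read('README.md'),  # Markdown converted to reST via pypandoc
)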
crazy-canux/arguspy | scripts/check_mssql.py | main | def main():
"""Register your own mode and handle method here."""
plugin = Register()
if plugin.args.option == 'sql':
plugin.sql_handle()
elif plugin.args.option == 'database-used':
plugin.database_used_handle()
elif plugin.args.option == 'databaselog-used':
plugin.database_log_used_handle()
else:
plugin.unknown("Unknown actions.") | python | def main():
"""Register your own mode and handle method here."""
plugin = Register()
if plugin.args.option == 'sql':
plugin.sql_handle()
elif plugin.args.option == 'database-used':
plugin.database_used_handle()
elif plugin.args.option == 'databaselog-used':
plugin.database_log_used_handle()
else:
plugin.unknown("Unknown actions.") | [
"def",
"main",
"(",
")",
":",
"plugin",
"=",
"Register",
"(",
")",
"if",
"plugin",
".",
"args",
".",
"option",
"==",
"'sql'",
":",
"plugin",
".",
"sql_handle",
"(",
")",
"elif",
"plugin",
".",
"args",
".",
"option",
"==",
"'database-used'",
":",
"plugin",
".",
"database_used_handle",
"(",
")",
"elif",
"plugin",
".",
"args",
".",
"option",
"==",
"'databaselog-used'",
":",
"plugin",
".",
"database_log_used_handle",
"(",
")",
"else",
":",
"plugin",
".",
"unknown",
"(",
"\"Unknown actions.\"",
")"
] | Register your own mode and its handler method here. | [
"Register",
"your",
"own",
"mode",
"and",
"handle",
"method",
"here",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_mssql.py#L475-L485 | valid |
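The if/elif chain above is a dispatch table in disguise; an equivalent dict-based sketch using the same names as the record:

def main():
    plugin = Register()
    handlers = {
        'sql': plugin.sql_handle,
        'database-used': plugin.database_used_handle,
        'databaselog-used': plugin.database_log_used_handle,
    }
    handler = handlers.get(plugin.args.option)
    if handler is None:
        plugin.unknown("Unknown actions.")
    else:
        handler()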
manicmaniac/headlessvim | headlessvim/arguments.py | Parser.parse | def parse(self, args):
"""
:param args: arguments
:type args: None or string or list of string
:return: formatted arguments if specified else ``self.default_args``
:rtype: list of string
"""
if args is None:
args = self._default_args
if isinstance(args, six.string_types):
args = shlex.split(args)
return args | python | def parse(self, args):
"""
:param args: arguments
:type args: None or string or list of string
:return: formatted arguments if specified else ``self.default_args``
:rtype: list of string
"""
if args is None:
args = self._default_args
if isinstance(args, six.string_types):
args = shlex.split(args)
return args | [
"def",
"parse",
"(",
"self",
",",
"args",
")",
":",
"if",
"args",
"is",
"None",
":",
"args",
"=",
"self",
".",
"_default_args",
"if",
"isinstance",
"(",
"args",
",",
"six",
".",
"string_types",
")",
":",
"args",
"=",
"shlex",
".",
"split",
"(",
"args",
")",
"return",
"args"
] | :param args: arguments
:type args: None or string or list of string
:return: formatted arguments if specified else ``self.default_args``
:rtype: list of string | [
":",
"param",
"args",
":",
"arguments",
":",
"type",
"args",
":",
"None",
"or",
"string",
"or",
"list",
"of",
"string",
":",
"return",
":",
"formatted",
"arguments",
"if",
"specified",
"else",
"self",
".",
"default_args",
":",
"rtype",
":",
"list",
"of",
"string"
] | 3e4657f95d981ddf21fd285b7e1b9da2154f9cb9 | https://github.com/manicmaniac/headlessvim/blob/3e4657f95d981ddf21fd285b7e1b9da2154f9cb9/headlessvim/arguments.py#L24-L35 | valid |
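Concretely, the string branch above leans on shlex to honor shell-style quoting; this is standard-library behavior:

import shlex

assert shlex.split('-u NONE --cmd "set number"') == \
    ['-u', 'NONE', '--cmd', 'set number']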
agsimeonov/cbexchange | cbexchange/private.py | PrivateClient._request | def _request(self, method, *relative_path_parts, **kwargs):
"""Sends an HTTP request to the REST API and receives the requested data.
:param str method: HTTP method name
:param relative_path_parts: the relative paths for the request URI
:param kwargs: argument keywords
:returns: requested data
:raises APIError: for non-2xx responses
"""
uri = self._create_api_uri(*relative_path_parts)
if method == 'get':
response = get(uri, auth=self.auth, params=kwargs.get('params', None))
elif method == 'post':
response = post(uri, auth=self.auth, json=kwargs.get('data', None))
else:
response = delete(uri, auth=self.auth, json=kwargs.get('data', None))
return self._handle_response(response).json() | python | def _request(self, method, *relative_path_parts, **kwargs):
"""Sends an HTTP request to the REST API and receives the requested data.
:param str method: HTTP method name
:param relative_path_parts: the relative paths for the request URI
:param kwargs: argument keywords
:returns: requested data
:raises APIError: for non-2xx responses
"""
uri = self._create_api_uri(*relative_path_parts)
if method == 'get':
response = get(uri, auth=self.auth, params=kwargs.get('params', None))
elif method == 'post':
response = post(uri, auth=self.auth, json=kwargs.get('data', None))
else:
response = delete(uri, auth=self.auth, json=kwargs.get('data', None))
return self._handle_response(response).json() | [
"def",
"_request",
"(",
"self",
",",
"method",
",",
"*",
"relative_path_parts",
",",
"*",
"*",
"kwargs",
")",
":",
"uri",
"=",
"self",
".",
"_create_api_uri",
"(",
"*",
"relative_path_parts",
")",
"if",
"method",
"==",
"'get'",
":",
"response",
"=",
"get",
"(",
"uri",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"params",
"=",
"kwargs",
".",
"get",
"(",
"'params'",
",",
"None",
")",
")",
"elif",
"method",
"==",
"'post'",
":",
"response",
"=",
"post",
"(",
"uri",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"json",
"=",
"kwargs",
".",
"get",
"(",
"'data'",
",",
"None",
")",
")",
"else",
":",
"response",
"=",
"delete",
"(",
"uri",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"json",
"=",
"kwargs",
".",
"get",
"(",
"'data'",
",",
"None",
")",
")",
"return",
"self",
".",
"_handle_response",
"(",
"response",
")",
".",
"json",
"(",
")"
] | Sends an HTTP request to the REST API and receives the requested data.
:param str method: HTTP method name
:param relative_path_parts: the relative paths for the request URI
:param kwargs: argument keywords
:returns: requested data
:raises APIError: for non-2xx responses | [
"Sends",
"an",
"HTTP",
"request",
"to",
"the",
"REST",
"API",
"and",
"receives",
"the",
"requested",
"data",
"."
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L72-L89 | valid |
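Because requests exposes one module-level function per HTTP verb, the branching above can also be collapsed with getattr, as the TrelloAPI record further down in this file does. A generic sketch, not the cbexchange API:

import requests

def request(method, uri, auth=None, **kwargs):
    # requests.get / requests.post / requests.delete all accept this call shape
    http_method = getattr(requests, method)
    return http_method(uri, auth=auth, **kwargs)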
agsimeonov/cbexchange | cbexchange/private.py | PrivateClient._place_order | def _place_order(self,
side,
product_id='BTC-USD',
client_oid=None,
type=None,
stp=None,
price=None,
size=None,
funds=None,
time_in_force=None,
cancel_after=None,
post_only=None):
"""`<https://docs.exchange.coinbase.com/#orders>`_"""
data = {
'side':side,
'product_id':product_id,
'client_oid':client_oid,
'type':type,
'stp':stp,
'price':price,
'size':size,
'funds':funds,
'time_in_force':time_in_force,
'cancel_after':cancel_after,
'post_only':post_only
}
return self._post('orders', data=data) | python | def _place_order(self,
side,
product_id='BTC-USD',
client_oid=None,
type=None,
stp=None,
price=None,
size=None,
funds=None,
time_in_force=None,
cancel_after=None,
post_only=None):
"""`<https://docs.exchange.coinbase.com/#orders>`_"""
data = {
'side':side,
'product_id':product_id,
'client_oid':client_oid,
'type':type,
'stp':stp,
'price':price,
'size':size,
'funds':funds,
'time_in_force':time_in_force,
'cancel_after':cancel_after,
'post_only':post_only
}
return self._post('orders', data=data) | [
"def",
"_place_order",
"(",
"self",
",",
"side",
",",
"product_id",
"=",
"'BTC-USD'",
",",
"client_oid",
"=",
"None",
",",
"type",
"=",
"None",
",",
"stp",
"=",
"None",
",",
"price",
"=",
"None",
",",
"size",
"=",
"None",
",",
"funds",
"=",
"None",
",",
"time_in_force",
"=",
"None",
",",
"cancel_after",
"=",
"None",
",",
"post_only",
"=",
"None",
")",
":",
"data",
"=",
"{",
"'side'",
":",
"side",
",",
"'product_id'",
":",
"product_id",
",",
"'client_oid'",
":",
"client_oid",
",",
"'type'",
":",
"type",
",",
"'stp'",
":",
"stp",
",",
"'price'",
":",
"price",
",",
"'size'",
":",
"size",
",",
"'funds'",
":",
"funds",
",",
"'time_in_force'",
":",
"time_in_force",
",",
"'cancel_after'",
":",
"cancel_after",
",",
"'post_only'",
":",
"post_only",
"}",
"return",
"self",
".",
"_post",
"(",
"'orders'",
",",
"data",
"=",
"data",
")"
] | `<https://docs.exchange.coinbase.com/#orders>`_ | [
"<https",
":",
"//",
"docs",
".",
"exchange",
".",
"coinbase",
".",
"com",
"/",
"#orders",
">",
"_"
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L107-L133 | valid |
agsimeonov/cbexchange | cbexchange/private.py | PrivateClient.place_limit_order | def place_limit_order(self,
side,
price,
size,
product_id='BTC-USD',
client_oid=None,
stp=None,
time_in_force=None,
cancel_after=None,
post_only=None):
"""`<https://docs.exchange.coinbase.com/#orders>`_"""
return self._place_order(side,
product_id=product_id,
client_oid=client_oid,
type='limit',
stp=stp,
price=price,
size=size,
time_in_force=time_in_force,
cancel_after=cancel_after,
post_only=post_only) | python | def place_limit_order(self,
side,
price,
size,
product_id='BTC-USD',
client_oid=None,
stp=None,
time_in_force=None,
cancel_after=None,
post_only=None):
"""`<https://docs.exchange.coinbase.com/#orders>`_"""
return self._place_order(side,
product_id=product_id,
client_oid=client_oid,
type='limit',
stp=stp,
price=price,
size=size,
time_in_force=time_in_force,
cancel_after=cancel_after,
post_only=post_only) | [
"def",
"place_limit_order",
"(",
"self",
",",
"side",
",",
"price",
",",
"size",
",",
"product_id",
"=",
"'BTC-USD'",
",",
"client_oid",
"=",
"None",
",",
"stp",
"=",
"None",
",",
"time_in_force",
"=",
"None",
",",
"cancel_after",
"=",
"None",
",",
"post_only",
"=",
"None",
")",
":",
"return",
"self",
".",
"_place_order",
"(",
"side",
",",
"product_id",
"=",
"product_id",
",",
"client_oid",
"=",
"client_oid",
",",
"type",
"=",
"'limit'",
",",
"stp",
"=",
"stp",
",",
"price",
"=",
"price",
",",
"size",
"=",
"size",
",",
"time_in_force",
"=",
"time_in_force",
",",
"cancel_after",
"=",
"cancel_after",
",",
"post_only",
"=",
"post_only",
")"
] | `<https://docs.exchange.coinbase.com/#orders>`_ | [
"<https",
":",
"//",
"docs",
".",
"exchange",
".",
"coinbase",
".",
"com",
"/",
"#orders",
">",
"_"
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L135-L155 | valid |
agsimeonov/cbexchange | cbexchange/private.py | PrivateClient.place_market_order | def place_market_order(self,
side,
product_id='BTC-USD',
size=None,
funds=None,
client_oid=None,
stp=None):
"""`<https://docs.exchange.coinbase.com/#orders>`_"""
return self._place_order(type='market',
side=side,
product_id=product_id,
size=size,
funds=funds,
client_oid=client_oid,
stp=stp) | python | def place_market_order(self,
side,
product_id='BTC-USD',
size=None,
funds=None,
client_oid=None,
stp=None):
"""`<https://docs.exchange.coinbase.com/#orders>`_"""
return self._place_order(type='market',
side=side,
product_id=product_id,
size=size,
funds=funds,
client_oid=client_oid,
stp=stp) | [
"def",
"place_market_order",
"(",
"self",
",",
"side",
",",
"product_id",
"=",
"'BTC-USD'",
",",
"size",
"=",
"None",
",",
"funds",
"=",
"None",
",",
"client_oid",
"=",
"None",
",",
"stp",
"=",
"None",
")",
":",
"return",
"self",
".",
"_place_order",
"(",
"type",
"=",
"'market'",
",",
"side",
"=",
"size",
",",
"product_id",
"=",
"product_id",
",",
"size",
"=",
"size",
",",
"funds",
"=",
"funds",
",",
"client_oid",
"=",
"client_oid",
",",
"stp",
"=",
"stp",
")"
] | `<https://docs.exchange.coinbase.com/#orders>`_ | [
"<https",
":",
"//",
"docs",
".",
"exchange",
".",
"coinbase",
".",
"com",
"/",
"#orders",
">",
"_"
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L157-L171 | valid |
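Per the linked docs, a market order takes exactly one of size or funds; a small standalone guard illustrating that constraint (the helper is hypothetical, not part of cbexchange):

def check_market_order_args(size=None, funds=None):
    """Market orders need exactly one of size / funds."""
    if (size is None) == (funds is None):
        raise ValueError('specify exactly one of size or funds')

check_market_order_args(size='0.5')  # ok
# check_market_order_args() or check_market_order_args(size='0.5', funds='100')
# would both raise ValueError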
agsimeonov/cbexchange | cbexchange/private.py | PrivateClient._deposit_withdraw | def _deposit_withdraw(self, type, amount, coinbase_account_id):
"""`<https://docs.exchange.coinbase.com/#depositwithdraw>`_"""
data = {
'type':type,
'amount':amount,
'coinbase_account_id':coinbase_account_id
}
return self._post('transfers', data=data) | python | def _deposit_withdraw(self, type, amount, coinbase_account_id):
"""`<https://docs.exchange.coinbase.com/#depositwithdraw>`_"""
data = {
'type':type,
'amount':amount,
'coinbase_account_id':coinbase_account_id
}
return self._post('transfers', data=data) | [
"def",
"_deposit_withdraw",
"(",
"self",
",",
"type",
",",
"amount",
",",
"coinbase_account_id",
")",
":",
"data",
"=",
"{",
"'type'",
":",
"type",
",",
"'amount'",
":",
"amount",
",",
"'coinbase_account_id'",
":",
"coinbase_account_id",
"}",
"return",
"self",
".",
"_post",
"(",
"'transfers'",
",",
"data",
"=",
"data",
")"
] | `<https://docs.exchange.coinbase.com/#depositwithdraw>`_ | [
"<https",
":",
"//",
"docs",
".",
"exchange",
".",
"coinbase",
".",
"com",
"/",
"#depositwithdraw",
">",
"_"
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L193-L200 | valid |
agsimeonov/cbexchange | cbexchange/private.py | PrivateClient._new_report | def _new_report(self,
type,
start_date,
end_date,
product_id='BTC-USD',
account_id=None,
format=None,
email=None):
"""`<https://docs.exchange.coinbase.com/#create-a-new-report>`_"""
data = {
'type':type,
'start_date':self._format_iso_time(start_date),
'end_date':self._format_iso_time(end_date),
'product_id':product_id,
'account_id':account_id,
'format':format,
'email':email
}
return self._post('reports', data=data) | python | def _new_report(self,
type,
start_date,
end_date,
product_id='BTC-USD',
account_id=None,
format=None,
email=None):
"""`<https://docs.exchange.coinbase.com/#create-a-new-report>`_"""
data = {
'type':type,
'start_date':self._format_iso_time(start_date),
'end_date':self._format_iso_time(end_date),
'product_id':product_id,
'account_id':account_id,
'format':format,
'email':email
}
return self._post('reports', data=data) | [
"def",
"_new_report",
"(",
"self",
",",
"type",
",",
"start_date",
",",
"end_date",
",",
"product_id",
"=",
"'BTC-USD'",
",",
"account_id",
"=",
"None",
",",
"format",
"=",
"None",
",",
"email",
"=",
"None",
")",
":",
"data",
"=",
"{",
"'type'",
":",
"type",
",",
"'start_date'",
":",
"self",
".",
"_format_iso_time",
"(",
"start_date",
")",
",",
"'end_date'",
":",
"self",
".",
"_format_iso_time",
"(",
"end_date",
")",
",",
"'product_id'",
":",
"product_id",
",",
"'account_id'",
":",
"account_id",
",",
"'format'",
":",
"format",
",",
"'email'",
":",
"email",
"}",
"return",
"self",
".",
"_post",
"(",
"'reports'",
",",
"data",
"=",
"data",
")"
] | `<https://docs.exchange.coinbase.com/#create-a-new-report>`_ | [
"<https",
":",
"//",
"docs",
".",
"exchange",
".",
"coinbase",
".",
"com",
"/",
"#create",
"-",
"a",
"-",
"new",
"-",
"report",
">",
"_"
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L210-L228 | valid |
agsimeonov/cbexchange | cbexchange/private.py | PrivateClient.new_fills_report | def new_fills_report(self,
start_date,
end_date,
account_id=None,
product_id='BTC-USD',
format=None,
email=None):
"""`<https://docs.exchange.coinbase.com/#create-a-new-report>`_"""
return self._new_report('fills',
start_date,
end_date,
product_id,
account_id,
format,
email) | python | def new_fills_report(self,
start_date,
end_date,
account_id=None,
product_id='BTC-USD',
format=None,
email=None):
"""`<https://docs.exchange.coinbase.com/#create-a-new-report>`_"""
return self._new_report('fills',
start_date,
end_date,
product_id,
account_id,
format,
email) | [
"def",
"new_fills_report",
"(",
"self",
",",
"start_date",
",",
"end_date",
",",
"account_id",
"=",
"None",
",",
"product_id",
"=",
"'BTC-USD'",
",",
"format",
"=",
"None",
",",
"email",
"=",
"None",
")",
":",
"return",
"self",
".",
"_new_report",
"(",
"start_date",
",",
"'fills'",
",",
"end_date",
",",
"account_id",
",",
"product_id",
",",
"format",
",",
"email",
")"
] | `<https://docs.exchange.coinbase.com/#create-a-new-report>`_ | [
"<https",
":",
"//",
"docs",
".",
"exchange",
".",
"coinbase",
".",
"com",
"/",
"#create",
"-",
"a",
"-",
"new",
"-",
"report",
">",
"_"
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L230-L244 | valid |
agsimeonov/cbexchange | cbexchange/private.py | PrivatePaginationClient._request | def _request(self, method, *relative_path_parts, **kwargs):
"""Sends an HTTP request to the REST API and receives the requested data.
Additionally sets up pagination cursors.
:param str method: HTTP method name
:param relative_path_parts: the relative paths for the request URI
:param kwargs: argument keywords
:returns: requested data
:raises APIError: for non-2xx responses
"""
uri = self._create_api_uri(*relative_path_parts)
if method == 'get':
response = get(uri, auth=self.auth, params=kwargs.get('params', None))
elif method == 'post':
response = post(uri, auth=self.auth, json=kwargs.get('data', None))
else:
response = delete(uri, auth=self.auth, json=kwargs.get('data', None))
self.is_initial = False
self.before_cursor = response.headers.get('cb-before', None)
self.after_cursor = response.headers.get('cb-after', None)
return self._handle_response(response).json() | python | def _request(self, method, *relative_path_parts, **kwargs):
"""Sends an HTTP request to the REST API and receives the requested data.
Additionally sets up pagination cursors.
:param str method: HTTP method name
:param relative_path_parts: the relative paths for the request URI
:param kwargs: argument keywords
:returns: requested data
:raises APIError: for non-2xx responses
"""
uri = self._create_api_uri(*relative_path_parts)
if method == 'get':
response = get(uri, auth=self.auth, params=kwargs.get('params', None))
elif method == 'post':
response = post(uri, auth=self.auth, json=kwargs.get('data', None))
else:
response = delete(uri, auth=self.auth, json=kwargs.get('data', None))
self.is_initial = False
self.before_cursor = response.headers.get('cb-before', None)
self.after_cursor = response.headers.get('cb-after', None)
return self._handle_response(response).json() | [
"def",
"_request",
"(",
"self",
",",
"method",
",",
"*",
"relative_path_parts",
",",
"*",
"*",
"kwargs",
")",
":",
"uri",
"=",
"self",
".",
"_create_api_uri",
"(",
"*",
"relative_path_parts",
")",
"if",
"method",
"==",
"'get'",
":",
"response",
"=",
"get",
"(",
"uri",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"params",
"=",
"kwargs",
".",
"get",
"(",
"'params'",
",",
"None",
")",
")",
"elif",
"method",
"==",
"'post'",
":",
"response",
"=",
"post",
"(",
"uri",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"json",
"=",
"kwargs",
".",
"get",
"(",
"'data'",
",",
"None",
")",
")",
"else",
":",
"response",
"=",
"delete",
"(",
"uri",
",",
"auth",
"=",
"self",
".",
"auth",
",",
"json",
"=",
"kwargs",
".",
"get",
"(",
"'data'",
",",
"None",
")",
")",
"self",
".",
"is_initial",
"=",
"False",
"self",
".",
"before_cursor",
"=",
"response",
".",
"headers",
".",
"get",
"(",
"'cb-before'",
",",
"None",
")",
"self",
".",
"after_cursor",
"=",
"response",
".",
"headers",
".",
"get",
"(",
"'cb-after'",
",",
"None",
")",
"return",
"self",
".",
"_handle_response",
"(",
"response",
")",
".",
"json",
"(",
")"
] | Sends an HTTP request to the REST API and receives the requested data.
Additionally sets up pagination cursors.
:param str method: HTTP method name
:param relative_path_parts: the relative paths for the request URI
:param kwargs: argument keywords
:returns: requested data
:raises APIError: for non-2xx responses | [
"Sends",
"an",
"HTTP",
"request",
"to",
"the",
"REST",
"API",
"and",
"receives",
"the",
"requested",
"data",
".",
"Additionally",
"sets",
"up",
"pagination",
"cursors",
"."
] | e3762f77583f89cf7b4f501ab3c7675fc7d30ab3 | https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/private.py#L274-L295 | valid |
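The cb-before/cb-after headers captured above are what drive pagination. A hedged sketch of a page-draining loop, assuming a _get helper that forwards to _request('get', ...) as in these clients:

def drain(client, *path):
    """Collect every page by chasing the after-cursor set by _request()."""
    items = list(client._get(*path))  # initial request populates the cursors
    while client.after_cursor:
        page = client._get(*path, params={'after': client.after_cursor})
        if not page:
            break
        items.extend(page)
    return items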
charlesthomas/proauth2 | proauth2/data_stores/async_mongo_ds.py | DataStore.fetch | def fetch(self, collection, **kwargs):
'''
return one record from the collection whose parameters match kwargs
---
kwargs should be a dictionary whose keys match column names (in
traditional SQL / fields in NoSQL) and whose values are the values of
those fields.
e.g. kwargs={name='my application name',client_id=12345}
'''
callback = kwargs.pop('callback')
data = yield Op(self.db[collection].find_one, kwargs)
callback(data) | python | def fetch(self, collection, **kwargs):
'''
return one record from the collection whose parameters match kwargs
---
kwargs should be a dictionary whose keys match column names (in
traditional SQL / fields in NoSQL) and whose values are the values of
those fields.
e.g. kwargs={name='my application name',client_id=12345}
'''
callback = kwargs.pop('callback')
data = yield Op(self.db[collection].find_one, kwargs)
callback(data) | [
"def",
"fetch",
"(",
"self",
",",
"collection",
",",
"*",
"*",
"kwargs",
")",
":",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
")",
"data",
"=",
"yield",
"Op",
"(",
"self",
".",
"db",
"[",
"collection",
"]",
".",
"find_one",
",",
"kwargs",
")",
"callback",
"(",
"data",
")"
] | return one record from the collection whose parameters match kwargs
---
kwargs should be a dictionary whose keys match column names (in
traditional SQL / fields in NoSQL) and whose values are the values of
those fields.
e.g. kwargs={name='my application name',client_id=12345} | [
"return",
"one",
"record",
"from",
"the",
"collection",
"whose",
"parameters",
"match",
"kwargs",
"---",
"kwargs",
"should",
"be",
"a",
"dictionary",
"whose",
"keys",
"match",
"column",
"names",
"(",
"in",
"traditional",
"SQL",
"/",
"fields",
"in",
"NoSQL",
")",
"and",
"whose",
"values",
"are",
"the",
"values",
"of",
"those",
"fields",
".",
"e",
".",
"g",
".",
"kwargs",
"=",
"{",
"name",
"=",
"my",
"application",
"name",
"client_id",
"=",
"12345",
"}"
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/data_stores/async_mongo_ds.py#L31-L42 | valid |
charlesthomas/proauth2 | proauth2/data_stores/async_mongo_ds.py | DataStore.remove | def remove(self, collection, **kwargs):
'''
remove records from collection whose parameters match kwargs
'''
callback = kwargs.pop('callback')
yield Op(self.db[collection].remove, kwargs)
callback() | python | def remove(self, collection, **kwargs):
'''
remove records from collection whose parameters match kwargs
'''
callback = kwargs.pop('callback')
yield Op(self.db[collection].remove, kwargs)
callback() | [
"def",
"remove",
"(",
"self",
",",
"collection",
",",
"*",
"*",
"kwargs",
")",
":",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
")",
"yield",
"Op",
"(",
"self",
".",
"db",
"[",
"collection",
"]",
".",
"remove",
",",
"kwargs",
")",
"callback",
"(",
")"
] | remove records from collection whose parameters match kwargs | [
"remove",
"records",
"from",
"collection",
"whose",
"parameters",
"match",
"kwargs"
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/data_stores/async_mongo_ds.py#L45-L51 | valid |
charlesthomas/proauth2 | proauth2/data_stores/async_mongo_ds.py | DataStore.store | def store(self, collection, **kwargs):
'''
validate the passed values in kwargs based on the collection,
store them in the mongodb collection
'''
callback = kwargs.pop('callback')
key = validate(collection, **kwargs)
data = yield Task(self.fetch, collection, **{key: kwargs[key]})
if data is not None:
raise Proauth2Error('duplicate_key')
yield Op(self.db[collection].insert, kwargs)
callback() | python | def store(self, collection, **kwargs):
'''
validate the passed values in kwargs based on the collection,
store them in the mongodb collection
'''
callback = kwargs.pop('callback')
key = validate(collection, **kwargs)
data = yield Task(self.fetch, collection, **{key: kwargs[key]})
if data is not None:
raise Proauth2Error('duplicate_key')
yield Op(self.db[collection].insert, kwargs)
callback() | [
"def",
"store",
"(",
"self",
",",
"collection",
",",
"*",
"*",
"kwargs",
")",
":",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
")",
"key",
"=",
"validate",
"(",
"collection",
",",
"*",
"*",
"kwargs",
")",
"data",
"=",
"yield",
"Task",
"(",
"self",
".",
"fetch",
",",
"collection",
",",
"*",
"*",
"{",
"key",
":",
"kwargs",
"[",
"key",
"]",
"}",
")",
"if",
"data",
"is",
"not",
"None",
":",
"raise",
"Proauth2Error",
"(",
"'duplicate_key'",
")",
"yield",
"Op",
"(",
"self",
".",
"db",
"[",
"collection",
"]",
".",
"insert",
",",
"kwargs",
")",
"callback",
"(",
")"
] | validate the passed values in kwargs based on the collection,
store them in the mongodb collection | [
"validate",
"the",
"passed",
"values",
"in",
"kwargs",
"based",
"on",
"the",
"collection",
"store",
"them",
"in",
"the",
"mongodb",
"collection"
] | f88c8df966a1802414047ed304d02df1dd520097 | https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/data_stores/async_mongo_ds.py#L54-L65 | valid |
nilp0inter/trelloapi | trelloapi/api.py | generate_api | def generate_api(version):
"""
Generates a factory function to instantiate the API with the given
version.
"""
def get_partial_api(key, token=None):
return TrelloAPI(ENDPOINTS[version], version, key, token=token)
get_partial_api.__doc__ = \
"""Interfaz REST con Trello. Versión {}""".format(version)
return get_partial_api | python | def generate_api(version):
"""
Generates a factory function to instantiate the API with the given
version.
"""
def get_partial_api(key, token=None):
return TrelloAPI(ENDPOINTS[version], version, key, token=token)
get_partial_api.__doc__ = \
"""Interfaz REST con Trello. Versión {}""".format(version)
return get_partial_api | [
"def",
"generate_api",
"(",
"version",
")",
":",
"def",
"get_partial_api",
"(",
"key",
",",
"token",
"=",
"None",
")",
":",
"return",
"TrelloAPI",
"(",
"ENDPOINTS",
"[",
"version",
"]",
",",
"version",
",",
"key",
",",
"token",
"=",
"token",
")",
"get_partial_api",
".",
"__doc__",
"=",
"\"\"\"Interfaz REST con Trello. Versión {}\"\"\".",
"f",
"ormat(",
"v",
"ersion)",
"",
"return",
"get_partial_api"
] | Generates a factory function to instantiate the API with the given
version. | [
"Generates",
"a",
"factory",
"function",
"to",
"instantiate",
"the",
"API",
"with",
"the",
"given",
"version",
"."
] | 88f4135832548ea71598d50a73943890e1cf9e20 | https://github.com/nilp0inter/trelloapi/blob/88f4135832548ea71598d50a73943890e1cf9e20/trelloapi/api.py#L131-L143 | valid |
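Hypothetical instantiation of the factory above, matching the doctests in the next record (the API key is a placeholder):

TrelloAPIV1 = generate_api('1')  # assumes ENDPOINTS carries a '1' entry
trello = TrelloAPIV1('APIKEY')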
nilp0inter/trelloapi | trelloapi/api.py | TrelloAPI._url | def _url(self):
"""
Resolve the URL to this point.
>>> trello = TrelloAPIV1('APIKEY')
>>> trello.batch._url
'1/batch'
>>> trello.boards(board_id='BOARD_ID')._url
'1/boards/BOARD_ID'
>>> trello.boards(board_id='BOARD_ID')(field='FIELD')._url
'1/boards/BOARD_ID/FIELD'
>>> trello.boards(board_id='BOARD_ID').cards(filter='FILTER')._url
'1/boards/BOARD_ID/cards/FILTER'
"""
if self._api_arg:
mypart = str(self._api_arg)
else:
mypart = self._name
if self._parent:
return '/'.join(filter(None, [self._parent._url, mypart]))
else:
return mypart | python | def _url(self):
"""
Resolve the URL to this point.
>>> trello = TrelloAPIV1('APIKEY')
>>> trello.batch._url
'1/batch'
>>> trello.boards(board_id='BOARD_ID')._url
'1/boards/BOARD_ID'
>>> trello.boards(board_id='BOARD_ID')(field='FIELD')._url
'1/boards/BOARD_ID/FIELD'
>>> trello.boards(board_id='BOARD_ID').cards(filter='FILTER')._url
'1/boards/BOARD_ID/cards/FILTER'
"""
if self._api_arg:
mypart = str(self._api_arg)
else:
mypart = self._name
if self._parent:
return '/'.join(filter(None, [self._parent._url, mypart]))
else:
return mypart | [
"def",
"_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"_api_arg",
":",
"mypart",
"=",
"str",
"(",
"self",
".",
"_api_arg",
")",
"else",
":",
"mypart",
"=",
"self",
".",
"_name",
"if",
"self",
".",
"_parent",
":",
"return",
"'/'",
".",
"join",
"(",
"filter",
"(",
"None",
",",
"[",
"self",
".",
"_parent",
".",
"_url",
",",
"mypart",
"]",
")",
")",
"else",
":",
"return",
"mypart"
] | Resolve the URL to this point.
>>> trello = TrelloAPIV1('APIKEY')
>>> trello.batch._url
'1/batch'
>>> trello.boards(board_id='BOARD_ID')._url
'1/boards/BOARD_ID'
>>> trello.boards(board_id='BOARD_ID')(field='FIELD')._url
'1/boards/BOARD_ID/FIELD'
>>> trello.boards(board_id='BOARD_ID').cards(filter='FILTER')._url
'1/boards/BOARD_ID/cards/FILTER' | [
"Resolve",
"the",
"URL",
"to",
"this",
"point",
"."
] | 88f4135832548ea71598d50a73943890e1cf9e20 | https://github.com/nilp0inter/trelloapi/blob/88f4135832548ea71598d50a73943890e1cf9e20/trelloapi/api.py#L67-L90 | valid |
nilp0inter/trelloapi | trelloapi/api.py | TrelloAPI._api_call | def _api_call(self, method_name, *args, **kwargs):
"""
Makes the HTTP request.
"""
params = kwargs.setdefault('params', {})
params.update({'key': self._apikey})
if self._token is not None:
params.update({'token': self._token})
http_method = getattr(requests, method_name)
return http_method(TRELLO_URL + self._url, *args, **kwargs) | python | def _api_call(self, method_name, *args, **kwargs):
"""
Makes the HTTP request.
"""
params = kwargs.setdefault('params', {})
params.update({'key': self._apikey})
if self._token is not None:
params.update({'token': self._token})
http_method = getattr(requests, method_name)
return http_method(TRELLO_URL + self._url, *args, **kwargs) | [
"def",
"_api_call",
"(",
"self",
",",
"method_name",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"params",
"=",
"kwargs",
".",
"setdefault",
"(",
"'params'",
",",
"{",
"}",
")",
"params",
".",
"update",
"(",
"{",
"'key'",
":",
"self",
".",
"_apikey",
"}",
")",
"if",
"self",
".",
"_token",
"is",
"not",
"None",
":",
"params",
".",
"update",
"(",
"{",
"'token'",
":",
"self",
".",
"_token",
"}",
")",
"http_method",
"=",
"getattr",
"(",
"requests",
",",
"method_name",
")",
"return",
"http_method",
"(",
"TRELLO_URL",
"+",
"self",
".",
"_url",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Makes the HTTP request. | [
"Makes",
"the",
"HTTP",
"request",
"."
] | 88f4135832548ea71598d50a73943890e1cf9e20 | https://github.com/nilp0inter/trelloapi/blob/88f4135832548ea71598d50a73943890e1cf9e20/trelloapi/api.py#L92-L103 | valid |
ibelie/typy | typy/google/protobuf/text_format.py | Merge | def Merge(text, message, allow_unknown_extension=False,
allow_field_number=False):
"""Parses an text representation of a protocol message into a message.
Like Parse(), but allows repeated values for a non-repeated field, and uses
the last one.
Args:
text: Message text representation.
message: A protocol buffer message to merge into.
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
Returns:
The same message passed as argument.
Raises:
ParseError: On text parsing problems.
"""
return MergeLines(text.split('\n'), message, allow_unknown_extension,
allow_field_number) | python | def Merge(text, message, allow_unknown_extension=False,
allow_field_number=False):
"""Parses an text representation of a protocol message into a message.
Like Parse(), but allows repeated values for a non-repeated field, and uses
the last one.
Args:
text: Message text representation.
message: A protocol buffer message to merge into.
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
Returns:
The same message passed as argument.
Raises:
ParseError: On text parsing problems.
"""
return MergeLines(text.split('\n'), message, allow_unknown_extension,
allow_field_number) | [
"def",
"Merge",
"(",
"text",
",",
"message",
",",
"allow_unknown_extension",
"=",
"False",
",",
"allow_field_number",
"=",
"False",
")",
":",
"return",
"MergeLines",
"(",
"text",
".",
"split",
"(",
"'\\n'",
")",
",",
"message",
",",
"allow_unknown_extension",
",",
"allow_field_number",
")"
] | Parses a text representation of a protocol message into a message.
Like Parse(), but allows repeated values for a non-repeated field, and uses
the last one.
Args:
text: Message text representation.
message: A protocol buffer message to merge into.
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
Returns:
The same message passed as argument.
Raises:
ParseError: On text parsing problems. | [
"Parses",
"an",
"text",
"representation",
"of",
"a",
"protocol",
"message",
"into",
"a",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L348-L369 | valid |
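Typical usage of Merge with a generated message class, here via the upstream google.protobuf package that this module vendors; Timestamp is chosen purely for availability:

from google.protobuf import text_format
from google.protobuf.timestamp_pb2 import Timestamp

msg = Timestamp()
# unlike Parse(), Merge() accepts a repeated scalar assignment; the last one wins
text_format.Merge('seconds: 1 seconds: 2 nanos: 500', msg)
assert (msg.seconds, msg.nanos) == (2, 500)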
ibelie/typy | typy/google/protobuf/text_format.py | ParseLines | def ParseLines(lines, message, allow_unknown_extension=False,
allow_field_number=False):
"""Parses an text representation of a protocol message into a message.
Args:
lines: An iterable of lines of a message's text representation.
message: A protocol buffer message to merge into.
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
Returns:
The same message passed as argument.
Raises:
ParseError: On text parsing problems.
"""
parser = _Parser(allow_unknown_extension, allow_field_number)
return parser.ParseLines(lines, message) | python | def ParseLines(lines, message, allow_unknown_extension=False,
allow_field_number=False):
"""Parses an text representation of a protocol message into a message.
Args:
lines: An iterable of lines of a message's text representation.
message: A protocol buffer message to merge into.
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
Returns:
The same message passed as argument.
Raises:
ParseError: On text parsing problems.
"""
parser = _Parser(allow_unknown_extension, allow_field_number)
return parser.ParseLines(lines, message) | [
"def",
"ParseLines",
"(",
"lines",
",",
"message",
",",
"allow_unknown_extension",
"=",
"False",
",",
"allow_field_number",
"=",
"False",
")",
":",
"parser",
"=",
"_Parser",
"(",
"allow_unknown_extension",
",",
"allow_field_number",
")",
"return",
"parser",
".",
"ParseLines",
"(",
"lines",
",",
"message",
")"
] | Parses a text representation of a protocol message into a message.
Args:
lines: An iterable of lines of a message's text representation.
message: A protocol buffer message to merge into.
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
Returns:
The same message passed as argument.
Raises:
ParseError: On text parsing problems. | [
"Parses",
"an",
"text",
"representation",
"of",
"a",
"protocol",
"message",
"into",
"a",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L372-L390 | valid |
ibelie/typy | typy/google/protobuf/text_format.py | _SkipFieldValue | def _SkipFieldValue(tokenizer):
"""Skips over a field value.
Args:
tokenizer: A tokenizer to parse the field name and values.
Raises:
ParseError: In case an invalid field value is found.
"""
# String/bytes tokens can come in multiple adjacent string literals.
# If we can consume one, consume as many as we can.
if tokenizer.TryConsumeByteString():
while tokenizer.TryConsumeByteString():
pass
return
if (not tokenizer.TryConsumeIdentifier() and
not tokenizer.TryConsumeInt64() and
not tokenizer.TryConsumeUint64() and
not tokenizer.TryConsumeFloat()):
raise ParseError('Invalid field value: ' + tokenizer.token) | python | def _SkipFieldValue(tokenizer):
"""Skips over a field value.
Args:
tokenizer: A tokenizer to parse the field name and values.
Raises:
ParseError: In case an invalid field value is found.
"""
# String/bytes tokens can come in multiple adjacent string literals.
# If we can consume one, consume as many as we can.
if tokenizer.TryConsumeByteString():
while tokenizer.TryConsumeByteString():
pass
return
if (not tokenizer.TryConsumeIdentifier() and
not tokenizer.TryConsumeInt64() and
not tokenizer.TryConsumeUint64() and
not tokenizer.TryConsumeFloat()):
raise ParseError('Invalid field value: ' + tokenizer.token) | [
"def",
"_SkipFieldValue",
"(",
"tokenizer",
")",
":",
"# String/bytes tokens can come in multiple adjacent string literals.",
"# If we can consume one, consume as many as we can.",
"if",
"tokenizer",
".",
"TryConsumeByteString",
"(",
")",
":",
"while",
"tokenizer",
".",
"TryConsumeByteString",
"(",
")",
":",
"pass",
"return",
"if",
"(",
"not",
"tokenizer",
".",
"TryConsumeIdentifier",
"(",
")",
"and",
"not",
"tokenizer",
".",
"TryConsumeInt64",
"(",
")",
"and",
"not",
"tokenizer",
".",
"TryConsumeUint64",
"(",
")",
"and",
"not",
"tokenizer",
".",
"TryConsumeFloat",
"(",
")",
")",
":",
"raise",
"ParseError",
"(",
"'Invalid field value: '",
"+",
"tokenizer",
".",
"token",
")"
] | Skips over a field value.
Args:
tokenizer: A tokenizer to parse the field name and values.
Raises:
ParseError: In case an invalid field value is found. | [
"Skips",
"over",
"a",
"field",
"value",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L739-L759 | valid |
ibelie/typy | typy/google/protobuf/text_format.py | ParseInteger | def ParseInteger(text, is_signed=False, is_long=False):
"""Parses an integer.
Args:
text: The text to parse.
is_signed: True if a signed integer must be parsed.
is_long: True if a long integer must be parsed.
Returns:
The integer value.
Raises:
ValueError: Thrown iff the text is not a valid integer.
"""
# Do the actual parsing. Exception handling is propagated to caller.
try:
# We force 32-bit values to int and 64-bit values to long to make
# alternate implementations where the distinction is more significant
# (e.g. the C++ implementation) simpler.
if is_long:
result = long(text, 0)
else:
result = int(text, 0)
except ValueError:
raise ValueError('Couldn\'t parse integer: %s' % text)
# Check if the integer is sane. Exceptions handled by callers.
checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
checker.CheckValue(result)
return result | python | def ParseInteger(text, is_signed=False, is_long=False):
"""Parses an integer.
Args:
text: The text to parse.
is_signed: True if a signed integer must be parsed.
is_long: True if a long integer must be parsed.
Returns:
The integer value.
Raises:
ValueError: Thrown iff the text is not a valid integer.
"""
# Do the actual parsing. Exception handling is propagated to caller.
try:
# We force 32-bit values to int and 64-bit values to long to make
# alternate implementations where the distinction is more significant
# (e.g. the C++ implementation) simpler.
if is_long:
result = long(text, 0)
else:
result = int(text, 0)
except ValueError:
raise ValueError('Couldn\'t parse integer: %s' % text)
# Check if the integer is sane. Exceptions handled by callers.
checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
checker.CheckValue(result)
return result | [
"def",
"ParseInteger",
"(",
"text",
",",
"is_signed",
"=",
"False",
",",
"is_long",
"=",
"False",
")",
":",
"# Do the actual parsing. Exception handling is propagated to caller.",
"try",
":",
"# We force 32-bit values to int and 64-bit values to long to make",
"# alternate implementations where the distinction is more significant",
"# (e.g. the C++ implementation) simpler.",
"if",
"is_long",
":",
"result",
"=",
"long",
"(",
"text",
",",
"0",
")",
"else",
":",
"result",
"=",
"int",
"(",
"text",
",",
"0",
")",
"except",
"ValueError",
":",
"raise",
"ValueError",
"(",
"'Couldn\\'t parse integer: %s'",
"%",
"text",
")",
"# Check if the integer is sane. Exceptions handled by callers.",
"checker",
"=",
"_INTEGER_CHECKERS",
"[",
"2",
"*",
"int",
"(",
"is_long",
")",
"+",
"int",
"(",
"is_signed",
")",
"]",
"checker",
".",
"CheckValue",
"(",
"result",
")",
"return",
"result"
] | Parses an integer.
Args:
text: The text to parse.
is_signed: True if a signed integer must be parsed.
is_long: True if a long integer must be parsed.
Returns:
The integer value.
Raises:
ValueError: Thrown iff the text is not a valid integer. | [
"Parses",
"an",
"integer",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L1102-L1131 | valid |
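The checker lookup above packs (is_signed, is_long) into a two-bit index; a minimal sketch of that mapping follows (the width/sign labels in the comment are descriptive, not the module's actual identifiers). Note that base 0 in int(text, 0) also accepts 0x/0o prefixed literals, and the bare long() call ties this code to Python 2.
for is_long in (False, True):
    for is_signed in (False, True):
        idx = 2 * int(is_long) + int(is_signed)
        # idx 0: unsigned 32-bit, 1: signed 32-bit,
        # idx 2: unsigned 64-bit, 3: signed 64-bit
        print(is_signed, is_long, '-> checker', idx)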
ibelie/typy | typy/google/protobuf/text_format.py | _Printer.PrintMessage | def PrintMessage(self, message):
"""Convert protobuf message to text format.
Args:
message: The protocol buffers message.
"""
fields = message.ListFields()
if self.use_index_order:
fields.sort(key=lambda x: x[0].index)
for field, value in fields:
if _IsMapEntry(field):
for key in sorted(value):
# This is slow for maps with submessage entries because it copies the
# entire tree. Unfortunately this would take significant refactoring
# of this file to work around.
#
# TODO(haberman): refactor and optimize if this becomes an issue.
entry_submsg = field.message_type._concrete_class(
key=key, value=value[key])
self.PrintField(field, entry_submsg)
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
for element in value:
self.PrintField(field, element)
else:
self.PrintField(field, value) | python | def PrintMessage(self, message):
"""Convert protobuf message to text format.
Args:
message: The protocol buffers message.
"""
fields = message.ListFields()
if self.use_index_order:
fields.sort(key=lambda x: x[0].index)
for field, value in fields:
if _IsMapEntry(field):
for key in sorted(value):
# This is slow for maps with submessage entries because it copies the
# entire tree. Unfortunately this would take significant refactoring
# of this file to work around.
#
# TODO(haberman): refactor and optimize if this becomes an issue.
entry_submsg = field.message_type._concrete_class(
key=key, value=value[key])
self.PrintField(field, entry_submsg)
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
for element in value:
self.PrintField(field, element)
else:
self.PrintField(field, value) | [
"def",
"PrintMessage",
"(",
"self",
",",
"message",
")",
":",
"fields",
"=",
"message",
".",
"ListFields",
"(",
")",
"if",
"self",
".",
"use_index_order",
":",
"fields",
".",
"sort",
"(",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
".",
"index",
")",
"for",
"field",
",",
"value",
"in",
"fields",
":",
"if",
"_IsMapEntry",
"(",
"field",
")",
":",
"for",
"key",
"in",
"sorted",
"(",
"value",
")",
":",
"# This is slow for maps with submessage entires because it copies the",
"# entire tree. Unfortunately this would take significant refactoring",
"# of this file to work around.",
"#",
"# TODO(haberman): refactor and optimize if this becomes an issue.",
"entry_submsg",
"=",
"field",
".",
"message_type",
".",
"_concrete_class",
"(",
"key",
"=",
"key",
",",
"value",
"=",
"value",
"[",
"key",
"]",
")",
"self",
".",
"PrintField",
"(",
"field",
",",
"entry_submsg",
")",
"elif",
"field",
".",
"label",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"LABEL_REPEATED",
":",
"for",
"element",
"in",
"value",
":",
"self",
".",
"PrintField",
"(",
"field",
",",
"element",
")",
"else",
":",
"self",
".",
"PrintField",
"(",
"field",
",",
"value",
")"
] | Convert protobuf message to text format.
Args:
message: The protocol buffers message. | [
"Convert",
"protobuf",
"message",
"to",
"text",
"format",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L208-L232 | valid |
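A hedged illustration of the map branch above, assuming a hypothetical message that declares map<string, int32> counts = 1: each map item is rendered as a synthetic entry submessage, iterated in sorted key order.
# Expected text-format output for counts == {"a": 1, "b": 2}:
#   counts {
#     key: "a"
#     value: 1
#   }
#   counts {
#     key: "b"
#     value: 2
#   }
from google.protobuf import text_format

def dump(msg):
    # MessageToString drives _Printer.PrintMessage internally.
    return text_format.MessageToString(msg)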
ibelie/typy | typy/google/protobuf/text_format.py | _Printer.PrintFieldValue | def PrintFieldValue(self, field, value):
"""Print a single field value (not including name).
For repeated fields, the value should be a single element.
Args:
field: The descriptor of the field to be printed.
value: The value of the field.
"""
out = self.out
if self.pointy_brackets:
openb = '<'
closeb = '>'
else:
openb = '{'
closeb = '}'
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
if self.as_one_line:
out.write(' %s ' % openb)
self.PrintMessage(value)
out.write(closeb)
else:
out.write(' %s\n' % openb)
self.indent += 2
self.PrintMessage(value)
self.indent -= 2
out.write(' ' * self.indent + closeb)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
enum_value = field.enum_type.values_by_number.get(value, None)
if enum_value is not None:
out.write(enum_value.name)
else:
out.write(str(value))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
out.write('\"')
if isinstance(value, six.text_type):
out_value = value.encode('utf-8')
else:
out_value = value
if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
# We need to escape non-UTF8 chars in TYPE_BYTES field.
out_as_utf8 = False
else:
out_as_utf8 = self.as_utf8
out.write(text_encoding.CEscape(out_value, out_as_utf8))
out.write('\"')
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
if value:
out.write('true')
else:
out.write('false')
elif field.cpp_type in _FLOAT_TYPES and self.float_format is not None:
out.write('{1:{0}}'.format(self.float_format, value))
else:
out.write(str(value)) | python | def PrintFieldValue(self, field, value):
"""Print a single field value (not including name).
For repeated fields, the value should be a single element.
Args:
field: The descriptor of the field to be printed.
value: The value of the field.
"""
out = self.out
if self.pointy_brackets:
openb = '<'
closeb = '>'
else:
openb = '{'
closeb = '}'
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
if self.as_one_line:
out.write(' %s ' % openb)
self.PrintMessage(value)
out.write(closeb)
else:
out.write(' %s\n' % openb)
self.indent += 2
self.PrintMessage(value)
self.indent -= 2
out.write(' ' * self.indent + closeb)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
enum_value = field.enum_type.values_by_number.get(value, None)
if enum_value is not None:
out.write(enum_value.name)
else:
out.write(str(value))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
out.write('\"')
if isinstance(value, six.text_type):
out_value = value.encode('utf-8')
else:
out_value = value
if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
# We need to escape non-UTF8 chars in TYPE_BYTES field.
out_as_utf8 = False
else:
out_as_utf8 = self.as_utf8
out.write(text_encoding.CEscape(out_value, out_as_utf8))
out.write('\"')
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
if value:
out.write('true')
else:
out.write('false')
elif field.cpp_type in _FLOAT_TYPES and self.float_format is not None:
out.write('{1:{0}}'.format(self.float_format, value))
else:
out.write(str(value)) | [
"def",
"PrintFieldValue",
"(",
"self",
",",
"field",
",",
"value",
")",
":",
"out",
"=",
"self",
".",
"out",
"if",
"self",
".",
"pointy_brackets",
":",
"openb",
"=",
"'<'",
"closeb",
"=",
"'>'",
"else",
":",
"openb",
"=",
"'{'",
"closeb",
"=",
"'}'",
"if",
"field",
".",
"cpp_type",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
":",
"if",
"self",
".",
"as_one_line",
":",
"out",
".",
"write",
"(",
"' %s '",
"%",
"openb",
")",
"self",
".",
"PrintMessage",
"(",
"value",
")",
"out",
".",
"write",
"(",
"closeb",
")",
"else",
":",
"out",
".",
"write",
"(",
"' %s\\n'",
"%",
"openb",
")",
"self",
".",
"indent",
"+=",
"2",
"self",
".",
"PrintMessage",
"(",
"value",
")",
"self",
".",
"indent",
"-=",
"2",
"out",
".",
"write",
"(",
"' '",
"*",
"self",
".",
"indent",
"+",
"closeb",
")",
"elif",
"field",
".",
"cpp_type",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"CPPTYPE_ENUM",
":",
"enum_value",
"=",
"field",
".",
"enum_type",
".",
"values_by_number",
".",
"get",
"(",
"value",
",",
"None",
")",
"if",
"enum_value",
"is",
"not",
"None",
":",
"out",
".",
"write",
"(",
"enum_value",
".",
"name",
")",
"else",
":",
"out",
".",
"write",
"(",
"str",
"(",
"value",
")",
")",
"elif",
"field",
".",
"cpp_type",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"CPPTYPE_STRING",
":",
"out",
".",
"write",
"(",
"'\\\"'",
")",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"text_type",
")",
":",
"out_value",
"=",
"value",
".",
"encode",
"(",
"'utf-8'",
")",
"else",
":",
"out_value",
"=",
"value",
"if",
"field",
".",
"type",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"TYPE_BYTES",
":",
"# We need to escape non-UTF8 chars in TYPE_BYTES field.",
"out_as_utf8",
"=",
"False",
"else",
":",
"out_as_utf8",
"=",
"self",
".",
"as_utf8",
"out",
".",
"write",
"(",
"text_encoding",
".",
"CEscape",
"(",
"out_value",
",",
"out_as_utf8",
")",
")",
"out",
".",
"write",
"(",
"'\\\"'",
")",
"elif",
"field",
".",
"cpp_type",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"CPPTYPE_BOOL",
":",
"if",
"value",
":",
"out",
".",
"write",
"(",
"'true'",
")",
"else",
":",
"out",
".",
"write",
"(",
"'false'",
")",
"elif",
"field",
".",
"cpp_type",
"in",
"_FLOAT_TYPES",
"and",
"self",
".",
"float_format",
"is",
"not",
"None",
":",
"out",
".",
"write",
"(",
"'{1:{0}}'",
".",
"format",
"(",
"self",
".",
"float_format",
",",
"value",
")",
")",
"else",
":",
"out",
".",
"write",
"(",
"str",
"(",
"value",
")",
")"
] | Print a single field value (not including name).
For repeated fields, the value should be a single element.
Args:
field: The descriptor of the field to be printed.
value: The value of the field. | [
"Print",
"a",
"single",
"field",
"value",
"(",
"not",
"including",
"name",
")",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L267-L322 | valid |
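The float branch above relies on a nested format spec: '{1:{0}}' substitutes float_format as the spec applied to the value. A self-contained check of that trick:
assert '{1:{0}}'.format('.3g', 0.123456) == '0.123'
assert '{1:{0}}'.format('.2f', 3.14159) == '3.14'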
ibelie/typy | typy/google/protobuf/text_format.py | _Parser._ParseOrMerge | def _ParseOrMerge(self, lines, message):
"""Converts an text representation of a protocol message into a message.
Args:
lines: Lines of a message's text representation.
message: A protocol buffer message to merge into.
Raises:
ParseError: On text parsing problems.
"""
tokenizer = _Tokenizer(lines)
while not tokenizer.AtEnd():
self._MergeField(tokenizer, message) | python | def _ParseOrMerge(self, lines, message):
"""Converts an text representation of a protocol message into a message.
Args:
lines: Lines of a message's text representation.
message: A protocol buffer message to merge into.
Raises:
ParseError: On text parsing problems.
"""
tokenizer = _Tokenizer(lines)
while not tokenizer.AtEnd():
self._MergeField(tokenizer, message) | [
"def",
"_ParseOrMerge",
"(",
"self",
",",
"lines",
",",
"message",
")",
":",
"tokenizer",
"=",
"_Tokenizer",
"(",
"lines",
")",
"while",
"not",
"tokenizer",
".",
"AtEnd",
"(",
")",
":",
"self",
".",
"_MergeField",
"(",
"tokenizer",
",",
"message",
")"
] | Converts a text representation of a protocol message into a message.
Args:
lines: Lines of a message's text representation.
message: A protocol buffer message to merge into.
Raises:
ParseError: On text parsing problems. | [
"Converts",
"an",
"text",
"representation",
"of",
"a",
"protocol",
"message",
"into",
"a",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L443-L455 | valid |
ibelie/typy | typy/google/protobuf/text_format.py | _Parser._MergeMessageField | def _MergeMessageField(self, tokenizer, message, field):
"""Merges a single scalar field into a message.
Args:
tokenizer: A tokenizer to parse the field value.
message: The message of which field is a member.
field: The descriptor of the field to be merged.
Raises:
ParseError: In case of text parsing problems.
"""
is_map_entry = _IsMapEntry(field)
if tokenizer.TryConsume('<'):
end_token = '>'
else:
tokenizer.Consume('{')
end_token = '}'
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
if field.is_extension:
sub_message = message.Extensions[field].add()
elif is_map_entry:
# pylint: disable=protected-access
sub_message = field.message_type._concrete_class()
else:
sub_message = getattr(message, field.name).add()
else:
if field.is_extension:
sub_message = message.Extensions[field]
else:
sub_message = getattr(message, field.name)
sub_message.SetInParent()
while not tokenizer.TryConsume(end_token):
if tokenizer.AtEnd():
raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
self._MergeField(tokenizer, sub_message)
if is_map_entry:
value_cpptype = field.message_type.fields_by_name['value'].cpp_type
if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
value = getattr(message, field.name)[sub_message.key]
value.MergeFrom(sub_message.value)
else:
getattr(message, field.name)[sub_message.key] = sub_message.value | python | def _MergeMessageField(self, tokenizer, message, field):
"""Merges a single scalar field into a message.
Args:
tokenizer: A tokenizer to parse the field value.
message: The message of which field is a member.
field: The descriptor of the field to be merged.
Raises:
ParseError: In case of text parsing problems.
"""
is_map_entry = _IsMapEntry(field)
if tokenizer.TryConsume('<'):
end_token = '>'
else:
tokenizer.Consume('{')
end_token = '}'
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
if field.is_extension:
sub_message = message.Extensions[field].add()
elif is_map_entry:
# pylint: disable=protected-access
sub_message = field.message_type._concrete_class()
else:
sub_message = getattr(message, field.name).add()
else:
if field.is_extension:
sub_message = message.Extensions[field]
else:
sub_message = getattr(message, field.name)
sub_message.SetInParent()
while not tokenizer.TryConsume(end_token):
if tokenizer.AtEnd():
raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
self._MergeField(tokenizer, sub_message)
if is_map_entry:
value_cpptype = field.message_type.fields_by_name['value'].cpp_type
if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
value = getattr(message, field.name)[sub_message.key]
value.MergeFrom(sub_message.value)
else:
getattr(message, field.name)[sub_message.key] = sub_message.value | [
"def",
"_MergeMessageField",
"(",
"self",
",",
"tokenizer",
",",
"message",
",",
"field",
")",
":",
"is_map_entry",
"=",
"_IsMapEntry",
"(",
"field",
")",
"if",
"tokenizer",
".",
"TryConsume",
"(",
"'<'",
")",
":",
"end_token",
"=",
"'>'",
"else",
":",
"tokenizer",
".",
"Consume",
"(",
"'{'",
")",
"end_token",
"=",
"'}'",
"if",
"field",
".",
"label",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"LABEL_REPEATED",
":",
"if",
"field",
".",
"is_extension",
":",
"sub_message",
"=",
"message",
".",
"Extensions",
"[",
"field",
"]",
".",
"add",
"(",
")",
"elif",
"is_map_entry",
":",
"# pylint: disable=protected-access",
"sub_message",
"=",
"field",
".",
"message_type",
".",
"_concrete_class",
"(",
")",
"else",
":",
"sub_message",
"=",
"getattr",
"(",
"message",
",",
"field",
".",
"name",
")",
".",
"add",
"(",
")",
"else",
":",
"if",
"field",
".",
"is_extension",
":",
"sub_message",
"=",
"message",
".",
"Extensions",
"[",
"field",
"]",
"else",
":",
"sub_message",
"=",
"getattr",
"(",
"message",
",",
"field",
".",
"name",
")",
"sub_message",
".",
"SetInParent",
"(",
")",
"while",
"not",
"tokenizer",
".",
"TryConsume",
"(",
"end_token",
")",
":",
"if",
"tokenizer",
".",
"AtEnd",
"(",
")",
":",
"raise",
"tokenizer",
".",
"ParseErrorPreviousToken",
"(",
"'Expected \"%s\".'",
"%",
"(",
"end_token",
",",
")",
")",
"self",
".",
"_MergeField",
"(",
"tokenizer",
",",
"sub_message",
")",
"if",
"is_map_entry",
":",
"value_cpptype",
"=",
"field",
".",
"message_type",
".",
"fields_by_name",
"[",
"'value'",
"]",
".",
"cpp_type",
"if",
"value_cpptype",
"==",
"descriptor",
".",
"FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
":",
"value",
"=",
"getattr",
"(",
"message",
",",
"field",
".",
"name",
")",
"[",
"sub_message",
".",
"key",
"]",
"value",
".",
"MergeFrom",
"(",
"sub_message",
".",
"value",
")",
"else",
":",
"getattr",
"(",
"message",
",",
"field",
".",
"name",
")",
"[",
"sub_message",
".",
"key",
"]",
"=",
"sub_message",
".",
"value"
] | Merges a single message field into a message.
Args:
tokenizer: A tokenizer to parse the field value.
message: The message of which field is a member.
field: The descriptor of the field to be merged.
Raises:
ParseError: In case of text parsing problems. | [
"Merges",
"a",
"single",
"scalar",
"field",
"into",
"a",
"message",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L566-L611 | valid |
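A sketch of the map-entry semantics in the tail of this method, assuming a hypothetical message with map<string, int32> counts = 1: later scalar values overwrite earlier ones for the same key, whereas message-typed values would be combined via MergeFrom.
from google.protobuf import text_format

def map_merge_demo(MsgWithMap):  # hypothetical class with map<string, int32> counts
    m = MsgWithMap()
    text_format.Merge('counts { key: "a" value: 1 }\n'
                      'counts { key: "a" value: 2 }', m)
    assert m.counts["a"] == 2  # the last scalar wins for a repeated key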
ibelie/typy | typy/google/protobuf/text_format.py | _Tokenizer.ConsumeIdentifier | def ConsumeIdentifier(self):
"""Consumes protocol message field identifier.
Returns:
Identifier string.
Raises:
ParseError: If an identifier couldn't be consumed.
"""
result = self.token
if not self._IDENTIFIER.match(result):
raise self._ParseError('Expected identifier.')
self.NextToken()
return result | python | def ConsumeIdentifier(self):
"""Consumes protocol message field identifier.
Returns:
Identifier string.
Raises:
ParseError: If an identifier couldn't be consumed.
"""
result = self.token
if not self._IDENTIFIER.match(result):
raise self._ParseError('Expected identifier.')
self.NextToken()
return result | [
"def",
"ConsumeIdentifier",
"(",
"self",
")",
":",
"result",
"=",
"self",
".",
"token",
"if",
"not",
"self",
".",
"_IDENTIFIER",
".",
"match",
"(",
"result",
")",
":",
"raise",
"self",
".",
"_ParseError",
"(",
"'Expected identifier.'",
")",
"self",
".",
"NextToken",
"(",
")",
"return",
"result"
] | Consumes protocol message field identifier.
Returns:
Identifier string.
Raises:
ParseError: If an identifier couldn't be consumed. | [
"Consumes",
"protocol",
"message",
"field",
"identifier",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L860-L873 | valid |
ibelie/typy | typy/google/protobuf/text_format.py | _Tokenizer.ConsumeInt32 | def ConsumeInt32(self):
"""Consumes a signed 32bit integer number.
Returns:
The integer parsed.
Raises:
ParseError: If a signed 32bit integer couldn't be consumed.
"""
try:
result = ParseInteger(self.token, is_signed=True, is_long=False)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | python | def ConsumeInt32(self):
"""Consumes a signed 32bit integer number.
Returns:
The integer parsed.
Raises:
ParseError: If a signed 32bit integer couldn't be consumed.
"""
try:
result = ParseInteger(self.token, is_signed=True, is_long=False)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | [
"def",
"ConsumeInt32",
"(",
"self",
")",
":",
"try",
":",
"result",
"=",
"ParseInteger",
"(",
"self",
".",
"token",
",",
"is_signed",
"=",
"True",
",",
"is_long",
"=",
"False",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"self",
".",
"_ParseError",
"(",
"str",
"(",
"e",
")",
")",
"self",
".",
"NextToken",
"(",
")",
"return",
"result"
] | Consumes a signed 32bit integer number.
Returns:
The integer parsed.
Raises:
ParseError: If a signed 32bit integer couldn't be consumed. | [
"Consumes",
"a",
"signed",
"32bit",
"integer",
"number",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L875-L889 | valid |
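ConsumeInt32 and the other Consume* methods below all share one shape; a minimal sketch of the pattern (the helper name _consume is invented):
def _consume(self, parse_fn):
    try:
        result = parse_fn(self.token)   # e.g. ParseInteger, ParseFloat, ParseBool
    except ValueError as e:
        raise self._ParseError(str(e))  # rewrapped with line/column context
    self.NextToken()                    # the token is consumed only on success
    return result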
ibelie/typy | typy/google/protobuf/text_format.py | _Tokenizer.ConsumeFloat | def ConsumeFloat(self):
"""Consumes an floating point number.
Returns:
The number parsed.
Raises:
ParseError: If a floating point number couldn't be consumed.
"""
try:
result = ParseFloat(self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | python | def ConsumeFloat(self):
"""Consumes an floating point number.
Returns:
The number parsed.
Raises:
ParseError: If a floating point number couldn't be consumed.
"""
try:
result = ParseFloat(self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | [
"def",
"ConsumeFloat",
"(",
"self",
")",
":",
"try",
":",
"result",
"=",
"ParseFloat",
"(",
"self",
".",
"token",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"self",
".",
"_ParseError",
"(",
"str",
"(",
"e",
")",
")",
"self",
".",
"NextToken",
"(",
")",
"return",
"result"
] | Consumes a floating point number.
Returns:
The number parsed.
Raises:
ParseError: If a floating point number couldn't be consumed. | [
"Consumes",
"an",
"floating",
"point",
"number",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L960-L974 | valid |
ibelie/typy | typy/google/protobuf/text_format.py | _Tokenizer.ConsumeBool | def ConsumeBool(self):
"""Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
"""
try:
result = ParseBool(self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | python | def ConsumeBool(self):
"""Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
"""
try:
result = ParseBool(self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | [
"def",
"ConsumeBool",
"(",
"self",
")",
":",
"try",
":",
"result",
"=",
"ParseBool",
"(",
"self",
".",
"token",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"self",
".",
"_ParseError",
"(",
"str",
"(",
"e",
")",
")",
"self",
".",
"NextToken",
"(",
")",
"return",
"result"
] | Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed. | [
"Consumes",
"a",
"boolean",
"value",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L976-L990 | valid |
ibelie/typy | typy/google/protobuf/text_format.py | _Tokenizer._ConsumeSingleByteString | def _ConsumeSingleByteString(self):
"""Consume one token of a string literal.
String literals (whether bytes or text) can come in multiple adjacent
tokens which are automatically concatenated, like in C or Python. This
method only consumes one token.
Returns:
The token parsed.
Raises:
ParseError: When the wrong format data is found.
"""
text = self.token
if len(text) < 1 or text[0] not in _QUOTES:
raise self._ParseError('Expected string but found: %r' % (text,))
if len(text) < 2 or text[-1] != text[0]:
raise self._ParseError('String missing ending quote: %r' % (text,))
try:
result = text_encoding.CUnescape(text[1:-1])
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | python | def _ConsumeSingleByteString(self):
"""Consume one token of a string literal.
String literals (whether bytes or text) can come in multiple adjacent
tokens which are automatically concatenated, like in C or Python. This
method only consumes one token.
Returns:
The token parsed.
Raises:
ParseError: When the wrong format data is found.
"""
text = self.token
if len(text) < 1 or text[0] not in _QUOTES:
raise self._ParseError('Expected string but found: %r' % (text,))
if len(text) < 2 or text[-1] != text[0]:
raise self._ParseError('String missing ending quote: %r' % (text,))
try:
result = text_encoding.CUnescape(text[1:-1])
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result | [
"def",
"_ConsumeSingleByteString",
"(",
"self",
")",
":",
"text",
"=",
"self",
".",
"token",
"if",
"len",
"(",
"text",
")",
"<",
"1",
"or",
"text",
"[",
"0",
"]",
"not",
"in",
"_QUOTES",
":",
"raise",
"self",
".",
"_ParseError",
"(",
"'Expected string but found: %r'",
"%",
"(",
"text",
",",
")",
")",
"if",
"len",
"(",
"text",
")",
"<",
"2",
"or",
"text",
"[",
"-",
"1",
"]",
"!=",
"text",
"[",
"0",
"]",
":",
"raise",
"self",
".",
"_ParseError",
"(",
"'String missing ending quote: %r'",
"%",
"(",
"text",
",",
")",
")",
"try",
":",
"result",
"=",
"text_encoding",
".",
"CUnescape",
"(",
"text",
"[",
"1",
":",
"-",
"1",
"]",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"self",
".",
"_ParseError",
"(",
"str",
"(",
"e",
")",
")",
"self",
".",
"NextToken",
"(",
")",
"return",
"result"
] | Consume one token of a string literal.
String literals (whether bytes or text) can come in multiple adjacent
tokens which are automatically concatenated, like in C or Python. This
method only consumes one token.
Returns:
The token parsed.
Raises:
ParseError: When the wrong format data is found. | [
"Consume",
"one",
"token",
"of",
"a",
"string",
"literal",
"."
] | 3616845fb91459aacd8df6bf82c5d91f4542bee7 | https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/text_format.py#L1028-L1052 | valid |
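The adjacent-literal concatenation this method enables, exercised end to end; Person is again a hypothetical message class with a string field name.
from google.protobuf import text_format

def concat_demo(Person):
    a, b = Person(), Person()
    text_format.Merge('name: "foo" "bar"', a)  # two adjacent literals
    text_format.Merge('name: "foobar"', b)     # one literal
    assert a.name == b.name == "foobar"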
BlockHub/blockhubdpostools | dpostools/utils.py | timestamp | def timestamp(t = None, forfilename=False):
"""Returns a human-readable timestamp given a Unix timestamp 't' or
for the current time. The Unix timestamp is the number of seconds since
start of epoch (1970-01-01 00:00:00).
When forfilename is True, then spaces and colons are replaced with
hyphens. The returned string is usable as a (part of a) filename. """
datetimesep = ' '
timesep = ':'
if forfilename:
datetimesep = '-'
timesep = '-'
return time.strftime('%Y-%m-%d' + datetimesep +
'%H' + timesep + '%M' + timesep + '%S',
time.localtime(t)) | python | def timestamp(t = None, forfilename=False):
"""Returns a human-readable timestamp given a Unix timestamp 't' or
for the current time. The Unix timestamp is the number of seconds since
start of epoch (1970-01-01 00:00:00).
When forfilename is True, then spaces and colons are replaced with
hyphens. The returned string is usable as a (part of a) filename. """
datetimesep = ' '
timesep = ':'
if forfilename:
datetimesep = '-'
timesep = '-'
return time.strftime('%Y-%m-%d' + datetimesep +
'%H' + timesep + '%M' + timesep + '%S',
time.localtime(t)) | [
"def",
"timestamp",
"(",
"t",
"=",
"None",
",",
"forfilename",
"=",
"False",
")",
":",
"datetimesep",
"=",
"' '",
"timesep",
"=",
"':'",
"if",
"forfilename",
":",
"datetimesep",
"=",
"'-'",
"timesep",
"=",
"'-'",
"return",
"time",
".",
"strftime",
"(",
"'%Y-%m-%d'",
"+",
"datetimesep",
"+",
"'%H'",
"+",
"timesep",
"+",
"'%M'",
"+",
"timesep",
"+",
"'%S'",
",",
"time",
".",
"localtime",
"(",
"t",
")",
")"
] | Returns a human-readable timestamp given a Unix timestamp 't' or
for the current time. The Unix timestamp is the number of seconds since
start of epoch (1970-01-01 00:00:00).
When forfilename is True, then spaces and colons are replaced with
hyphens. The returned string is usable as a (part of a) filename. | [
"Returns",
"a",
"human",
"-",
"readable",
"timestamp",
"given",
"a",
"Unix",
"timestamp",
"t",
"or",
"for",
"the",
"current",
"time",
".",
"The",
"Unix",
"timestamp",
"is",
"the",
"number",
"of",
"seconds",
"since",
"start",
"of",
"epoch",
"(",
"1970",
"-",
"01",
"-",
"01",
"00",
":",
"00",
":",
"00",
")",
".",
"When",
"forfilename",
"is",
"True",
"then",
"spaces",
"and",
"semicolons",
"are",
"replace",
"with",
"hyphens",
".",
"The",
"returned",
"string",
"is",
"usable",
"as",
"a",
"(",
"part",
"of",
"a",
")",
"filename",
"."
] | 27712cd97cd3658ee54a4330ff3135b51a01d7d1 | https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/utils.py#L33-L48 | valid |
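Both output shapes, side by side; the function is assumed to be in scope, and the t=0 values shown hold on a UTC machine since formatting uses local time.
print(timestamp(0))                    # e.g. '1970-01-01 00:00:00'
print(timestamp(0, forfilename=True))  # e.g. '1970-01-01-00-00-00'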
BlockHub/blockhubdpostools | dpostools/utils.py | arktimestamp | def arktimestamp(arkt, forfilename=False):
"""Returns a human-readable timestamp given an Ark timestamp 'arct'.
An Ark timestamp is the number of seconds since Genesis block,
2017:03:21 15:55:44."""
t = arkt + time.mktime((2017, 3, 21, 15, 55, 44, 0, 0, 0))
return '%d %s' % (arkt, timestamp(t)) | python | def arktimestamp(arkt, forfilename=False):
"""Returns a human-readable timestamp given an Ark timestamp 'arct'.
An Ark timestamp is the number of seconds since Genesis block,
2017:03:21 15:55:44."""
t = arkt + time.mktime((2017, 3, 21, 15, 55, 44, 0, 0, 0))
return '%d %s' % (arkt, timestamp(t)) | [
"def",
"arktimestamp",
"(",
"arkt",
",",
"forfilename",
"=",
"False",
")",
":",
"t",
"=",
"arkt",
"+",
"time",
".",
"mktime",
"(",
"(",
"2017",
",",
"3",
",",
"21",
",",
"15",
",",
"55",
",",
"44",
",",
"0",
",",
"0",
",",
"0",
")",
")",
"return",
"'%d %s'",
"%",
"(",
"arkt",
",",
"timestamp",
"(",
"t",
")",
")"
] | Returns a human-readable timestamp given an Ark timestamp 'arkt'.
An Ark timestamp is the number of seconds since Genesis block,
2017:03:21 15:55:44. | [
"Returns",
"a",
"human",
"-",
"readable",
"timestamp",
"given",
"an",
"Ark",
"timestamp",
"arct",
".",
"An",
"Ark",
"timestamp",
"is",
"the",
"number",
"of",
"seconds",
"since",
"Genesis",
"block",
"2017",
":",
"03",
":",
"21",
"15",
":",
"55",
":",
"44",
"."
] | 27712cd97cd3658ee54a4330ff3135b51a01d7d1 | https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/utils.py#L51-L57 | valid |
BlockHub/blockhubdpostools | dpostools/utils.py | arkt_to_unixt | def arkt_to_unixt(ark_timestamp):
""" convert ark timestamp to unix timestamp"""
res = datetime.datetime(2017, 3, 21, 15, 55, 44) + datetime.timedelta(seconds=ark_timestamp)
return res.timestamp() | python | def arkt_to_unixt(ark_timestamp):
""" convert ark timestamp to unix timestamp"""
res = datetime.datetime(2017, 3, 21, 15, 55, 44) + datetime.timedelta(seconds=ark_timestamp)
return res.timestamp() | [
"def",
"arkt_to_unixt",
"(",
"ark_timestamp",
")",
":",
"res",
"=",
"datetime",
".",
"datetime",
"(",
"2017",
",",
"3",
",",
"21",
",",
"15",
",",
"55",
",",
"44",
")",
"+",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"ark_timestamp",
")",
"return",
"res",
".",
"timestamp",
"(",
")"
] | convert ark timestamp to unix timestamp | [
"convert",
"ark",
"timestamp",
"to",
"unix",
"timestamp"
] | 27712cd97cd3658ee54a4330ff3135b51a01d7d1 | https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/utils.py#L65-L68 | valid |
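A round-trip sanity check tying arkt_to_unixt back to the genesis moment used throughout this module; the naive datetime is interpreted in local time, matching the implementation.
import datetime

genesis_unix = arkt_to_unixt(0)
assert datetime.datetime.fromtimestamp(genesis_unix) == \
    datetime.datetime(2017, 3, 21, 15, 55, 44)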
crazy-canux/arguspy | arguspy/mssql_pymssql.py | Mssql.close | def close(self):
"""Close the connection."""
try:
self.conn.close()
self.logger.debug("Close connect succeed.")
except pymssql.Error as e:
self.unknown("Close connect error: %s" % e) | python | def close(self):
"""Close the connection."""
try:
self.conn.close()
self.logger.debug("Close connect succeed.")
except pymssql.Error as e:
self.unknown("Close connect error: %s" % e) | [
"def",
"close",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"conn",
".",
"close",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Close connect succeed.\"",
")",
"except",
"pymssql",
".",
"Error",
"as",
"e",
":",
"self",
".",
"unknown",
"(",
"\"Close connect error: %s\"",
"%",
"e",
")"
] | Close the connection. | [
"Close",
"the",
"connection",
"."
] | e9486b5df61978a990d56bf43de35f3a4cdefcc3 | https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/arguspy/mssql_pymssql.py#L67-L73 | valid |
suryakencana007/baka_model | setup.py | get_version | def get_version():
"""Extract package __version__"""
with open(VERSION_FILE, encoding='utf-8') as fp:
content = fp.read()
match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', content, re.M)
if match:
return match.group(1)
raise RuntimeError("Could not extract package __version__") | python | def get_version():
"""Extract package __version__"""
with open(VERSION_FILE, encoding='utf-8') as fp:
content = fp.read()
match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', content, re.M)
if match:
return match.group(1)
raise RuntimeError("Could not extract package __version__") | [
"def",
"get_version",
"(",
")",
":",
"with",
"open",
"(",
"VERSION_FILE",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"fp",
":",
"content",
"=",
"fp",
".",
"read",
"(",
")",
"match",
"=",
"re",
".",
"search",
"(",
"r'^__version__ = [\\'\"]([^\\'\"]*)[\\'\"]'",
",",
"content",
",",
"re",
".",
"M",
")",
"if",
"match",
":",
"return",
"match",
".",
"group",
"(",
"1",
")",
"raise",
"RuntimeError",
"(",
"\"Could not extract package __version__\"",
")"
] | Extract package __version__ | [
"Extract",
"package",
"__version__"
] | 915c2da9920e973302f5764ae63799acd5ecf0b7 | https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/setup.py#L89-L96 | valid |
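The extraction regex in isolation, run against the kind of line VERSION_FILE is expected to contain (the sample content is made up):
import re

content = '"""Package docstring."""\n__version__ = "1.2.3"\n'
match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', content, re.M)
assert match and match.group(1) == '1.2.3'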
JohannesBuchner/jbopt | jbopt/de.py | de | def de(output_basename, parameter_names, transform, loglikelihood, prior, nsteps=40000, vizfunc=None, printfunc=None, **problem):
"""
**Differential evolution**
via `inspyred <http://inspyred.github.io/>`_
specially tuned. steady state replacement, n-point crossover,
pop size 20, gaussian mutation noise 0.01 & 1e-6.
stores intermediate results (can be used for resume, see seeds)
:param start: start point
:param seeds: list of start points
:param vizfunc: callback to do visualization of current best solution
:param printfunc: callback to summarize current best solution
:param seed: RNG initialization (if set)
"""
import json
import inspyred
import random
prng = random.Random()
if 'seed' in problem:
prng.seed(problem['seed'])
n_params = len(parameter_names)
seeds = problem.get('seeds', [])
if 'start' in problem:
seeds.append(problem['start'])
prefix = output_basename
def viz(candidate, args):
if vizfunc is not None:
vizfunc(candidate)
def print_candidate(candidate, l, args):
if printfunc is not None:
printfunc(cube=candidate, loglikelihood=l)
else:
print l, candidate
def eval_candidate(candidate):
params = transform(candidate)
l = loglikelihood(params)
p = prior(params)
if numpy.isinf(p) and p < 0:
print ' prior rejection'
return -1e300, 0
if numpy.isnan(l):
return -1e300, 0
return l, p
@inspyred.ec.utilities.memoize
@inspyred.ec.evaluators.evaluator
def fitness(candidate, args):
l, p = eval_candidate(candidate)
#print_candidate(candidate, (l + p), args)
return (l + p)
cutoff_store = 10
def solution_archiver(random, population, archive, args):
psize = len(population)
population.sort(reverse=True)
best = population[0].fitness
#print 'BEST: ', best,
all_candidates = sorted(population + archive, reverse=True)
all_fitness = numpy.array([c.fitness for c in all_candidates])
mask = best - all_fitness > cutoff_store / 3
if mask.sum() < 20:
mask = best - all_fitness > cutoff_store
newarchive = [c for i, c in enumerate(all_candidates) if i == 0 or all_fitness[i - 1] != c.fitness]
print 'ARCHIVE: ', len(archive), len(newarchive)
json.dump([{'candidate': [float(f) for f in c.candidate], 'fitness':c.fitness} for c in newarchive],
open(prefix + '_values.json', 'w'), indent=4)
return newarchive
def observer(population, num_generations, num_evaluations, args):
population.sort(reverse=True)
candidate = population[0]
print ('{0} evaluations'.format(num_evaluations)), ' best:',
print_candidate(candidate.candidate, candidate.fitness, args)
if num_evaluations % len(population) == 0 or num_evaluations < len(population) or args.get('force_viz', False):
# for each turnaround of a full generation
viz(candidate.candidate, args)
def generator(random, args):
u = [random.uniform(0, 1) for _ in range(n_params)]
u = [random.gauss(0.5, 0.1) for _ in range(n_params)]
return bounder(u, args)
ea = inspyred.ec.DEA(prng)
ea.terminator = inspyred.ec.terminators.evaluation_termination
ea.archiver = solution_archiver
bounder = inspyred.ec.Bounder(lower_bound=1e-10, upper_bound=1-1e-10)
#bounder = inspyred.ec.Bounder(lower_bound=-20, upper_bound=20)
import copy
from math import log
@inspyred.ec.variators.mutator
def double_exponential_mutation(random, candidate, args):
mut_rate = args.setdefault('mutation_rate', 0.1)
mean = args.setdefault('gaussian_mean', 0.0)
stdev = args.setdefault('gaussian_stdev', 1.0)
scale = log(0.5) / - (stdev)
bounder = args['_ec'].bounder
mutant = copy.copy(candidate)
for i, m in enumerate(mutant):
dice = random.random()
if dice < mut_rate:
sign = (dice < mut_rate / 2) * 2 - 1
delta = -log(random.random()) / scale
mutant[i] += delta * sign
mutant = bounder(mutant, args)
return mutant
def minute_gaussian_mutation(random, candidates, args):
args = dict(args)
args['mutation_rate'] = 1
args['gaussian_stdev'] = 1e-6
return inspyred.ec.variators.gaussian_mutation(random, candidates, args)
ea.variator = [inspyred.ec.variators.n_point_crossover, inspyred.ec.variators.gaussian_mutation, minute_gaussian_mutation]
#ea.variator = [inspyred.ec.variators.n_point_crossover, double_exponential_mutation]
ea.replacer = inspyred.ec.replacers.steady_state_replacement
ea.observer = observer
pop_size = 20
final_pop = ea.evolve(pop_size=pop_size,
max_evaluations=nsteps, maximize=True, seeds=seeds,
gaussian_stdev=0.01, #mutation_rate=0.3,
bounder=bounder, generator=generator, evaluator=fitness,
)
best = max(final_pop)
seeds = [c.candidate for c in ea.archive]
print 'final candidate:', best
return {'start': best.candidate, 'value': best.fitness,
'seeds': seeds, 'method': 'DE'} | python | def de(output_basename, parameter_names, transform, loglikelihood, prior, nsteps=40000, vizfunc=None, printfunc=None, **problem):
"""
**Differential evolution**
via `inspyred <http://inspyred.github.io/>`_
specially tuned. steady state replacement, n-point crossover,
pop size 20, gaussian mutation noise 0.01 & 1e-6.
stores intermediate results (can be used for resume, see seeds)
:param start: start point
:param seeds: list of start points
:param vizfunc: callback to do visualization of current best solution
:param printfunc: callback to summarize current best solution
:param seed: RNG initialization (if set)
"""
import json
import inspyred
import random
prng = random.Random()
if 'seed' in problem:
prng.seed(problem['seed'])
n_params = len(parameter_names)
seeds = problem.get('seeds', [])
if 'start' in problem:
seeds.append(problem['start'])
prefix = output_basename
def viz(candidate, args):
if vizfunc is not None:
vizfunc(candidate)
def print_candidate(candidate, l, args):
if printfunc is not None:
printfunc(cube=candidate, loglikelihood=l)
else:
print l, candidate
def eval_candidate(candidate):
params = transform(candidate)
l = loglikelihood(params)
p = prior(params)
if numpy.isinf(p) and p < 0:
print ' prior rejection'
return -1e300, 0
if numpy.isnan(l):
return -1e300, 0
return l, p
@inspyred.ec.utilities.memoize
@inspyred.ec.evaluators.evaluator
def fitness(candidate, args):
l, p = eval_candidate(candidate)
#print_candidate(candidate, (l + p), args)
return (l + p)
cutoff_store = 10
def solution_archiver(random, population, archive, args):
psize = len(population)
population.sort(reverse=True)
best = population[0].fitness
#print 'BEST: ', best,
all_candidates = sorted(population + archive, reverse=True)
all_fitness = numpy.array([c.fitness for c in all_candidates])
mask = best - all_fitness > cutoff_store / 3
if mask.sum() < 20:
mask = best - all_fitness > cutoff_store
newarchive = [c for i, c in enumerate(all_candidates) if i == 0 or all_fitness[i - 1] != c.fitness]
print 'ARCHIVE: ', len(archive), len(newarchive)
json.dump([{'candidate': [float(f) for f in c.candidate], 'fitness':c.fitness} for c in newarchive],
open(prefix + '_values.json', 'w'), indent=4)
return newarchive
def observer(population, num_generations, num_evaluations, args):
population.sort(reverse=True)
candidate = population[0]
print ('{0} evaluations'.format(num_evaluations)), ' best:',
print_candidate(candidate.candidate, candidate.fitness, args)
if num_evaluations % len(population) == 0 or num_evaluations < len(population) or args.get('force_viz', False):
# for each turnaround of a full generation
viz(candidate.candidate, args)
def generator(random, args):
u = [random.uniform(0, 1) for _ in range(n_params)]
u = [random.gauss(0.5, 0.1) for _ in range(n_params)]
return bounder(u, args)
ea = inspyred.ec.DEA(prng)
ea.terminator = inspyred.ec.terminators.evaluation_termination
ea.archiver = solution_archiver
bounder = inspyred.ec.Bounder(lower_bound=1e-10, upper_bound=1-1e-10)
#bounder = inspyred.ec.Bounder(lower_bound=-20, upper_bound=20)
import copy
from math import log
@inspyred.ec.variators.mutator
def double_exponential_mutation(random, candidate, args):
mut_rate = args.setdefault('mutation_rate', 0.1)
mean = args.setdefault('gaussian_mean', 0.0)
stdev = args.setdefault('gaussian_stdev', 1.0)
scale = log(0.5) / - (stdev)
bounder = args['_ec'].bounder
mutant = copy.copy(candidate)
for i, m in enumerate(mutant):
dice = random.random()
if dice < mut_rate:
sign = (dice < mut_rate / 2) * 2 - 1
delta = -log(random.random()) / scale
mutant[i] += delta * sign
mutant = bounder(mutant, args)
return mutant
def minute_gaussian_mutation(random, candidates, args):
args = dict(args)
args['mutation_rate'] = 1
args['gaussian_stdev'] = 1e-6
return inspyred.ec.variators.gaussian_mutation(random, candidates, args)
ea.variator = [inspyred.ec.variators.n_point_crossover, inspyred.ec.variators.gaussian_mutation, minute_gaussian_mutation]
#ea.variator = [inspyred.ec.variators.n_point_crossover, double_exponential_mutation]
ea.replacer = inspyred.ec.replacers.steady_state_replacement
ea.observer = observer
pop_size = 20
final_pop = ea.evolve(pop_size=pop_size,
max_evaluations=nsteps, maximize=True, seeds=seeds,
gaussian_stdev=0.01, #mutation_rate=0.3,
bounder=bounder, generator=generator, evaluator=fitness,
)
best = max(final_pop)
seeds = [c.candidate for c in ea.archive]
print 'final candidate:', best
return {'start': best.candidate, 'value': best.fitness,
'seeds': seeds, 'method': 'DE'} | [
"def",
"de",
"(",
"output_basename",
",",
"parameter_names",
",",
"transform",
",",
"loglikelihood",
",",
"prior",
",",
"nsteps",
"=",
"40000",
",",
"vizfunc",
"=",
"None",
",",
"printfunc",
"=",
"None",
",",
"*",
"*",
"problem",
")",
":",
"import",
"json",
"import",
"inspyred",
"import",
"random",
"prng",
"=",
"random",
".",
"Random",
"(",
")",
"if",
"'seed'",
"in",
"problem",
":",
"prng",
".",
"seed",
"(",
"problem",
"[",
"'seed'",
"]",
")",
"n_params",
"=",
"len",
"(",
"parameter_names",
")",
"seeds",
"=",
"problem",
".",
"get",
"(",
"'seeds'",
",",
"[",
"]",
")",
"if",
"'start'",
"in",
"problem",
":",
"seeds",
".",
"append",
"(",
"problem",
"[",
"'start'",
"]",
")",
"prefix",
"=",
"output_basename",
"def",
"viz",
"(",
"candidate",
",",
"args",
")",
":",
"if",
"vizfunc",
"is",
"not",
"None",
":",
"vizfunc",
"(",
"candidate",
")",
"def",
"print_candidate",
"(",
"candidate",
",",
"l",
",",
"args",
")",
":",
"if",
"printfunc",
"is",
"not",
"None",
":",
"printfunc",
"(",
"cube",
"=",
"candidate",
",",
"loglikelihood",
"=",
"l",
")",
"else",
":",
"print",
"l",
",",
"candidate",
"def",
"eval_candidate",
"(",
"candidate",
")",
":",
"params",
"=",
"transform",
"(",
"candidate",
")",
"l",
"=",
"loglikelihood",
"(",
"params",
")",
"p",
"=",
"prior",
"(",
"params",
")",
"if",
"numpy",
".",
"isinf",
"(",
"p",
")",
"and",
"p",
"<",
"0",
":",
"print",
"' prior rejection'",
"return",
"-",
"1e300",
"if",
"numpy",
".",
"isnan",
"(",
"l",
")",
":",
"return",
"-",
"1e300",
"return",
"l",
",",
"p",
"@",
"inspyred",
".",
"ec",
".",
"utilities",
".",
"memoize",
"@",
"inspyred",
".",
"ec",
".",
"evaluators",
".",
"evaluator",
"def",
"fitness",
"(",
"candidate",
",",
"args",
")",
":",
"l",
",",
"p",
"=",
"eval_candidate",
"(",
"candidate",
")",
"#print_candidate(candidate, (l + p), args)",
"return",
"(",
"l",
"+",
"p",
")",
"cutoff_store",
"=",
"10",
"def",
"solution_archiver",
"(",
"random",
",",
"population",
",",
"archive",
",",
"args",
")",
":",
"psize",
"=",
"len",
"(",
"population",
")",
"population",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"best",
"=",
"population",
"[",
"0",
"]",
".",
"fitness",
"#print 'BEST: ', best, ",
"all_candidates",
"=",
"sorted",
"(",
"population",
"+",
"archive",
",",
"reverse",
"=",
"True",
")",
"all_fitness",
"=",
"numpy",
".",
"array",
"(",
"[",
"c",
".",
"fitness",
"for",
"c",
"in",
"all_candidates",
"]",
")",
"mask",
"=",
"best",
"-",
"all_fitness",
">",
"cutoff_store",
"/",
"3",
"if",
"mask",
".",
"sum",
"(",
")",
"<",
"20",
":",
"mask",
"=",
"best",
"-",
"all_fitness",
">",
"cutoff_store",
"newarchive",
"=",
"[",
"c",
"for",
"i",
",",
"c",
"in",
"enumerate",
"(",
"all_candidates",
")",
"if",
"i",
"==",
"0",
"or",
"all_fitness",
"[",
"i",
"-",
"1",
"]",
"!=",
"c",
".",
"fitness",
"]",
"print",
"'ARCHIVE: '",
",",
"len",
"(",
"archive",
")",
",",
"len",
"(",
"newarchive",
")",
"json",
".",
"dump",
"(",
"[",
"{",
"'candidate'",
":",
"[",
"float",
"(",
"f",
")",
"for",
"f",
"in",
"c",
".",
"candidate",
"]",
",",
"'fitness'",
":",
"c",
".",
"fitness",
"}",
"for",
"c",
"in",
"newarchive",
"]",
",",
"open",
"(",
"prefix",
"+",
"'_values.json'",
",",
"'w'",
")",
",",
"indent",
"=",
"4",
")",
"return",
"newarchive",
"def",
"observer",
"(",
"population",
",",
"num_generations",
",",
"num_evaluations",
",",
"args",
")",
":",
"population",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"candidate",
"=",
"population",
"[",
"0",
"]",
"print",
"(",
"'{0} evaluations'",
".",
"format",
"(",
"num_evaluations",
")",
")",
",",
"' best:'",
",",
"print_candidate",
"(",
"candidate",
".",
"candidate",
",",
"candidate",
".",
"fitness",
",",
"args",
")",
"if",
"num_evaluations",
"%",
"len",
"(",
"population",
")",
"==",
"0",
"or",
"num_evaluations",
"<",
"len",
"(",
"population",
")",
"or",
"args",
".",
"get",
"(",
"'force_viz'",
",",
"False",
")",
":",
"# for each turnaround of a full generation",
"viz",
"(",
"candidate",
".",
"candidate",
",",
"args",
")",
"def",
"generator",
"(",
"random",
",",
"args",
")",
":",
"u",
"=",
"[",
"random",
".",
"uniform",
"(",
"0",
",",
"1",
")",
"for",
"_",
"in",
"range",
"(",
"n_params",
")",
"]",
"u",
"=",
"[",
"random",
".",
"gauss",
"(",
"0.5",
",",
"0.1",
")",
"for",
"_",
"in",
"range",
"(",
"n_params",
")",
"]",
"return",
"bounder",
"(",
"u",
",",
"args",
")",
"ea",
"=",
"inspyred",
".",
"ec",
".",
"DEA",
"(",
"prng",
")",
"ea",
".",
"terminator",
"=",
"inspyred",
".",
"ec",
".",
"terminators",
".",
"evaluation_termination",
"ea",
".",
"archiver",
"=",
"solution_archiver",
"bounder",
"=",
"inspyred",
".",
"ec",
".",
"Bounder",
"(",
"lower_bound",
"=",
"1e-10",
",",
"upper_bound",
"=",
"1",
"-",
"1e-10",
")",
"#bounder = inspyred.ec.Bounder(lower_bound=-20, upper_bound=20)",
"import",
"copy",
"from",
"math",
"import",
"log",
"@",
"inspyred",
".",
"ec",
".",
"variators",
".",
"mutator",
"def",
"double_exponential_mutation",
"(",
"random",
",",
"candidate",
",",
"args",
")",
":",
"mut_rate",
"=",
"args",
".",
"setdefault",
"(",
"'mutation_rate'",
",",
"0.1",
")",
"mean",
"=",
"args",
".",
"setdefault",
"(",
"'gaussian_mean'",
",",
"0.0",
")",
"stdev",
"=",
"args",
".",
"setdefault",
"(",
"'gaussian_stdev'",
",",
"1.0",
")",
"scale",
"=",
"log",
"(",
"0.5",
")",
"/",
"-",
"(",
"stdev",
")",
"bounder",
"=",
"args",
"[",
"'_ec'",
"]",
".",
"bounder",
"mutant",
"=",
"copy",
".",
"copy",
"(",
"candidate",
")",
"for",
"i",
",",
"m",
"in",
"enumerate",
"(",
"mutant",
")",
":",
"dice",
"=",
"random",
".",
"random",
"(",
")",
"if",
"dice",
"<",
"mut_rate",
":",
"sign",
"=",
"(",
"dice",
"<",
"mut_rate",
"/",
"2",
")",
"*",
"2",
"-",
"1",
"delta",
"=",
"-",
"log",
"(",
"random",
".",
"random",
"(",
")",
")",
"/",
"scale",
"mutant",
"[",
"i",
"]",
"+=",
"delta",
"*",
"sign",
"mutant",
"=",
"bounder",
"(",
"mutant",
",",
"args",
")",
"return",
"mutant",
"def",
"minute_gaussian_mutation",
"(",
"random",
",",
"candidates",
",",
"args",
")",
":",
"args",
"=",
"dict",
"(",
"args",
")",
"args",
"[",
"'mutation_rate'",
"]",
"=",
"1",
"args",
"[",
"'gaussian_stdev'",
"]",
"=",
"1e-6",
"return",
"inspyred",
".",
"ec",
".",
"variators",
".",
"gaussian_mutation",
"(",
"random",
",",
"candidates",
",",
"args",
")",
"ea",
".",
"variator",
"=",
"[",
"inspyred",
".",
"ec",
".",
"variators",
".",
"n_point_crossover",
",",
"inspyred",
".",
"ec",
".",
"variators",
".",
"gaussian_mutation",
",",
"minute_gaussian_mutation",
"]",
"#ea.variator = [inspyred.ec.variators.n_point_crossover, double_exponential_mutation]",
"ea",
".",
"replacer",
"=",
"inspyred",
".",
"ec",
".",
"replacers",
".",
"steady_state_replacement",
"ea",
".",
"observer",
"=",
"observer",
"pop_size",
"=",
"20",
"final_pop",
"=",
"ea",
".",
"evolve",
"(",
"pop_size",
"=",
"pop_size",
",",
"max_evaluations",
"=",
"nsteps",
",",
"maximize",
"=",
"True",
",",
"seeds",
"=",
"seeds",
",",
"gaussian_stdev",
"=",
"0.01",
",",
"#mutation_rate=0.3,",
"bounder",
"=",
"bounder",
",",
"generator",
"=",
"generator",
",",
"evaluator",
"=",
"fitness",
",",
")",
"best",
"=",
"max",
"(",
"final_pop",
")",
"seeds",
"=",
"[",
"c",
".",
"candidate",
"for",
"c",
"in",
"ea",
".",
"archive",
"]",
"print",
"'final candidate:'",
",",
"best",
"return",
"{",
"'start'",
":",
"best",
".",
"candidate",
",",
"'value'",
":",
"best",
".",
"fitness",
",",
"'seeds'",
":",
"seeds",
",",
"'method'",
":",
"'DE'",
"}"
] | **Differential evolution**
via `inspyred <http://inspyred.github.io/>`_
specially tuned. steady state replacement, n-point crossover,
pop size 20, gaussian mutation noise 0.01 & 1e-6.
stores intermediate results (can be used for resume, see seeds)
:param start: start point
:param seeds: list of start points
:param vizfunc: callback to do visualization of current best solution
:param printfunc: callback to summarize current best solution
:param seed: RNG initialization (if set) | [
"**",
"Differential",
"evolution",
"**",
"via",
"inspyred",
"<http",
":",
"//",
"inspyred",
".",
"github",
".",
"io",
"/",
">",
"_",
"specially",
"tuned",
".",
"steady",
"state",
"replacement",
"n",
"-",
"point",
"crossover",
"pop",
"size",
"20",
"gaussian",
"mutation",
"noise",
"0",
".",
"01",
"&",
"1e",
"-",
"6",
".",
"stores",
"intermediate",
"results",
"(",
"can",
"be",
"used",
"for",
"resume",
"see",
"seeds",
")",
":",
"param",
"start",
":",
"start",
"point",
":",
"param",
"seeds",
":",
"list",
"of",
"start",
"points",
":",
"param",
"vizfunc",
":",
"callback",
"to",
"do",
"visualization",
"of",
"current",
"best",
"solution",
":",
"param",
"printfunc",
":",
"callback",
"to",
"summarize",
"current",
"best",
"solution",
":",
"param",
"seed",
":",
"RNG",
"initialization",
"(",
"if",
"set",
")"
] | 11b721ea001625ad7820f71ff684723c71216646 | https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/de.py#L6-L141 | valid |
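A hedged usage sketch for de(); the toy problem (a 2-d Gaussian log-likelihood peaking at (1, 2) with a flat prior over the unit cube) is invented for illustration, inspyred must be installed, and like the module itself the call assumes a Python 2 runtime.
def transform(cube):        # unit cube -> parameter space
    return [20 * u - 10 for u in cube]

def loglikelihood(params):  # peak at (1, 2)
    return -0.5 * ((params[0] - 1)**2 + (params[1] - 2)**2)

def prior(params):          # flat prior, never rejects
    return 0.0

result = de('deopt', ['a', 'b'], transform, loglikelihood, prior,
            nsteps=2000, seed=42)
print(result['start'])  # best unit-cube point found
print(result['value'])  # its log-posterior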