| Column | Type |
|---|---|
| nwo | string, 5–58 chars |
| sha | string, 40 chars |
| path | string, 5–172 chars |
| language | string, 1 distinct value (`python`) |
| identifier | string, 1–100 chars |
| parameters | string, 2–3.5k chars |
| argument_list | string, 1 distinct value |
| return_statement | string, 0–21.5k chars |
| docstring | string, 2–17k chars |
| docstring_summary | string, 0–6.58k chars |
| docstring_tokens | sequence |
| function | string, 35–55.6k chars |
| function_tokens | sequence |
| url | string, 89–269 chars |

nwo | sha | path | language | identifier | parameters | argument_list | return_statement | docstring | docstring_summary | docstring_tokens | function | function_tokens | url
---|---|---|---|---|---|---|---|---|---|---|---|---|---
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5/bootstrap/erp5_core/DocumentTemplateItem/portal_components/document.erp5.BusinessProcess.py | python | BusinessProcess.getLatestCompletedTradeStateList | (self, explanation) | return result | Returns the list of completed trade states which predecessor
states are completed and for which no successor state
is completed in the context of given explanation.
explanation -- an Order, Order Line, Delivery or Delivery Line or
Applied Rule which implicitely defines a simulation subtree | Returns the list of completed trade states which predecessor
states are completed and for which no successor state
is completed in the context of given explanation. | [
"Returns",
"the",
"list",
"of",
"completed",
"trade",
"states",
"which",
"predecessor",
"states",
"are",
"completed",
"and",
"for",
"which",
"no",
"successor",
"state",
"is",
"completed",
"in",
"the",
"context",
"of",
"given",
"explanation",
"."
] | def getLatestCompletedTradeStateList(self, explanation):
"""Returns the list of completed trade states which predecessor
states are completed and for which no successor state
is completed in the context of given explanation.
explanation -- an Order, Order Line, Delivery or Delivery Line or
Applied Rule which implicitely defines a simulation subtree
"""
result = set()
for state in self.getCompletedTradeStateList(explanation):
for business_link in state.getPredecessorRelatedValueList():
if not self.isBusinessLinkCompleted(explanation, business_link):
result.add(state)
return result | [
"def",
"getLatestCompletedTradeStateList",
"(",
"self",
",",
"explanation",
")",
":",
"result",
"=",
"set",
"(",
")",
"for",
"state",
"in",
"self",
".",
"getCompletedTradeStateList",
"(",
"explanation",
")",
":",
"for",
"business_link",
"in",
"state",
".",
"getPredecessorRelatedValueList",
"(",
")",
":",
"if",
"not",
"self",
".",
"isBusinessLinkCompleted",
"(",
"explanation",
",",
"business_link",
")",
":",
"result",
".",
"add",
"(",
"state",
")",
"return",
"result"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5/bootstrap/erp5_core/DocumentTemplateItem/portal_components/document.erp5.BusinessProcess.py#L478-L491 |
|
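
A record like the one above can be inspected programmatically. The sketch below is only illustrative: it assumes the records have been exported to a JSON Lines file named `functions.jsonl`; that file name and format are assumptions, not something stated on this page.

```python
# Sketch only: "functions.jsonl" and the JSON Lines format are assumptions
# about how these records might be exported, not part of the dataset itself.
import json

with open("functions.jsonl") as fh:
    for line in fh:
        rec = json.loads(line)
        # Each record carries the columns listed in the schema above.
        print(rec["nwo"], rec["identifier"])
        print(rec["docstring_summary"])
        print(rec["url"])
        break  # inspect just the first record
```
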
robplatek/Massive-Coupon---Open-source-groupon-clone | 1d725356eeb41c6328605cef6e8779ab472db2b4 | facebook/__init__.py | python | Facebook.validate_signature | (self, post, prefix='fb_sig', timeout=None) | Validate parameters passed to an internal Facebook app from Facebook. | Validate parameters passed to an internal Facebook app from Facebook. | [
"Validate",
"parameters",
"passed",
"to",
"an",
"internal",
"Facebook",
"app",
"from",
"Facebook",
"."
] | def validate_signature(self, post, prefix='fb_sig', timeout=None):
"""
Validate parameters passed to an internal Facebook app from Facebook.
"""
args = post.copy()
if prefix not in args:
return None
del args[prefix]
if timeout and '%s_time' % prefix in post and time.time() - float(post['%s_time' % prefix]) > timeout:
return None
args = dict([(key[len(prefix + '_'):], value) for key, value in args.items() if key.startswith(prefix)])
hash = self._hash_args(args)
if hash == post[prefix]:
return args
else:
return None | [
"def",
"validate_signature",
"(",
"self",
",",
"post",
",",
"prefix",
"=",
"'fb_sig'",
",",
"timeout",
"=",
"None",
")",
":",
"args",
"=",
"post",
".",
"copy",
"(",
")",
"if",
"prefix",
"not",
"in",
"args",
":",
"return",
"None",
"del",
"args",
"[",
"prefix",
"]",
"if",
"timeout",
"and",
"'%s_time'",
"%",
"prefix",
"in",
"post",
"and",
"time",
".",
"time",
"(",
")",
"-",
"float",
"(",
"post",
"[",
"'%s_time'",
"%",
"prefix",
"]",
")",
">",
"timeout",
":",
"return",
"None",
"args",
"=",
"dict",
"(",
"[",
"(",
"key",
"[",
"len",
"(",
"prefix",
"+",
"'_'",
")",
":",
"]",
",",
"value",
")",
"for",
"key",
",",
"value",
"in",
"args",
".",
"items",
"(",
")",
"if",
"key",
".",
"startswith",
"(",
"prefix",
")",
"]",
")",
"hash",
"=",
"self",
".",
"_hash_args",
"(",
"args",
")",
"if",
"hash",
"==",
"post",
"[",
"prefix",
"]",
":",
"return",
"args",
"else",
":",
"return",
"None"
] | https://github.com/robplatek/Massive-Coupon---Open-source-groupon-clone/blob/1d725356eeb41c6328605cef6e8779ab472db2b4/facebook/__init__.py#L1337-L1359 |
||
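
A hedged usage sketch for the `validate_signature` record above. The key, secret, and posted parameters are made up for illustration, and the constructor arguments are assumed from the module in the record rather than stated here.

```python
# Illustrative only: the key, secret, and posted values below are placeholders.
from facebook import Facebook  # assumes the facebook/__init__.py from the record is importable

fb = Facebook("MY_API_KEY", "MY_SECRET")
post = {"fb_sig": "not-a-real-signature",
        "fb_sig_user": "1234",
        "fb_sig_time": "1700000000.0"}
args = fb.validate_signature(post, prefix="fb_sig", timeout=3600)
# args is None when the prefix is missing, the timestamp is older than the
# timeout, or the signature does not match; otherwise it returns the fb_sig_*
# parameters with the prefix stripped, e.g. {"user": "1234", "time": "..."}.
```
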
opentraveldata/geobases | e9ef3708155cb320684aa710a11d5a228a7d80c0 | GeoBases/GeoBaseModule.py | python | GeoBase.syncFields | (self, mode='all', sort=True) | Iterate through the collection to look for all available fields.
Then affect the result to ``self.fields``.
If you execute this method, be aware that fields order may
change depending on how dictionaries return their keys.
To have better consistency, we automatically sort the found
fields. You can change this behavior with the ``sort`` parameter.
:param mode: ``'all'`` or ``'any'``, ``'all'`` will look for \
fields shared by all keys, ``'any'`` will look for all \
fields from all keys
:param sort: sort the fields found
:returns: ``None``
>>> from pprint import pprint
>>> pprint(geo_t.fields)
['__key__',
'__dup__',
'__par__',
'__lno__',
'code',
'lines@raw',
'lines',
'name',
'info',
'lat',
'lng',
'__gar__']
Fields synchronisation, common fields for all keys.
>>> geo_t.set('frnic', new_field='Nice Gare SNCF')
>>> geo_t.syncFields(mode='all')
>>> pprint(geo_t.fields) # did not change, except order
['__dup__',
'__gar__',
'__key__',
'__lno__',
'__par__',
'code',
'info',
'lat',
'lines',
'lines@raw',
'lng',
'name']
Fields synchronisation, all fields for all keys.
>>> geo_t.syncFields(mode='any')
>>> pprint(geo_t.fields) # notice the new field 'new_field'
['__dup__',
'__gar__',
'__key__',
'__lno__',
'__par__',
'code',
'info',
'lat',
'lines',
'lines@raw',
'lng',
'name',
'new_field']
Restore previous state, drop new field and synchronize fields again.
>>> geo_t.delete('frnic', 'new_field')
>>> geo_t.syncFields()
>>> pprint(geo_t.fields)
['__dup__',
'__gar__',
'__key__',
'__lno__',
'__par__',
'code',
'info',
'lat',
'lines',
'lines@raw',
'lng',
'name'] | Iterate through the collection to look for all available fields.
Then affect the result to ``self.fields``. | [
"Iterate",
"through",
"the",
"collection",
"to",
"look",
"for",
"all",
"available",
"fields",
".",
"Then",
"affect",
"the",
"result",
"to",
"self",
".",
"fields",
"."
] | def syncFields(self, mode='all', sort=True):
"""
Iterate through the collection to look for all available fields.
Then affect the result to ``self.fields``.
If you execute this method, be aware that fields order may
change depending on how dictionaries return their keys.
To have better consistency, we automatically sort the found
fields. You can change this behavior with the ``sort`` parameter.
:param mode: ``'all'`` or ``'any'``, ``'all'`` will look for \
fields shared by all keys, ``'any'`` will look for all \
fields from all keys
:param sort: sort the fields found
:returns: ``None``
>>> from pprint import pprint
>>> pprint(geo_t.fields)
['__key__',
'__dup__',
'__par__',
'__lno__',
'code',
'lines@raw',
'lines',
'name',
'info',
'lat',
'lng',
'__gar__']
Fields synchronisation, common fields for all keys.
>>> geo_t.set('frnic', new_field='Nice Gare SNCF')
>>> geo_t.syncFields(mode='all')
>>> pprint(geo_t.fields) # did not change, except order
['__dup__',
'__gar__',
'__key__',
'__lno__',
'__par__',
'code',
'info',
'lat',
'lines',
'lines@raw',
'lng',
'name']
Fields synchronisation, all fields for all keys.
>>> geo_t.syncFields(mode='any')
>>> pprint(geo_t.fields) # notice the new field 'new_field'
['__dup__',
'__gar__',
'__key__',
'__lno__',
'__par__',
'code',
'info',
'lat',
'lines',
'lines@raw',
'lng',
'name',
'new_field']
Restore previous state, drop new field and synchronize fields again.
>>> geo_t.delete('frnic', 'new_field')
>>> geo_t.syncFields()
>>> pprint(geo_t.fields)
['__dup__',
'__gar__',
'__key__',
'__lno__',
'__par__',
'code',
'info',
'lat',
'lines',
'lines@raw',
'lng',
'name']
"""
if mode not in ('all', 'any'):
raise ValueError('mode shoud be in %s, was "%s".' % \
(str(('all', 'any')), mode))
if mode == 'any':
found = set()
for key in self:
found = found | set(self.get(key).keys())
else:
# Fetching first
for key in self:
found = set(self.get(key).keys())
break
else:
found = set()
for key in self:
found = found & set(self.get(key).keys())
if sort:
self.fields = sorted(found)
else:
self.fields = list(found) | [
"def",
"syncFields",
"(",
"self",
",",
"mode",
"=",
"'all'",
",",
"sort",
"=",
"True",
")",
":",
"if",
"mode",
"not",
"in",
"(",
"'all'",
",",
"'any'",
")",
":",
"raise",
"ValueError",
"(",
"'mode shoud be in %s, was \"%s\".'",
"%",
"(",
"str",
"(",
"(",
"'all'",
",",
"'any'",
")",
")",
",",
"mode",
")",
")",
"if",
"mode",
"==",
"'any'",
":",
"found",
"=",
"set",
"(",
")",
"for",
"key",
"in",
"self",
":",
"found",
"=",
"found",
"|",
"set",
"(",
"self",
".",
"get",
"(",
"key",
")",
".",
"keys",
"(",
")",
")",
"else",
":",
"# Fetching first",
"for",
"key",
"in",
"self",
":",
"found",
"=",
"set",
"(",
"self",
".",
"get",
"(",
"key",
")",
".",
"keys",
"(",
")",
")",
"break",
"else",
":",
"found",
"=",
"set",
"(",
")",
"for",
"key",
"in",
"self",
":",
"found",
"=",
"found",
"&",
"set",
"(",
"self",
".",
"get",
"(",
"key",
")",
".",
"keys",
"(",
")",
")",
"if",
"sort",
":",
"self",
".",
"fields",
"=",
"sorted",
"(",
"found",
")",
"else",
":",
"self",
".",
"fields",
"=",
"list",
"(",
"found",
")"
] | https://github.com/opentraveldata/geobases/blob/e9ef3708155cb320684aa710a11d5a228a7d80c0/GeoBases/GeoBaseModule.py#L2414-L2522 |
||
nodejs/http2 | 734ad72e3939e62bcff0f686b8ec426b8aaa22e3 | deps/v8/tools/sanitizers/sancov_formatter.py | python | executables | () | Iterates over executable files in the build directory. | Iterates over executable files in the build directory. | [
"Iterates",
"over",
"executable",
"files",
"in",
"the",
"build",
"directory",
"."
] | def executables():
"""Iterates over executable files in the build directory."""
for f in os.listdir(BUILD_DIR):
file_path = os.path.join(BUILD_DIR, f)
if (os.path.isfile(file_path) and
os.access(file_path, os.X_OK) and
f not in EXE_BLACKLIST):
yield file_path | [
"def",
"executables",
"(",
")",
":",
"for",
"f",
"in",
"os",
".",
"listdir",
"(",
"BUILD_DIR",
")",
":",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"BUILD_DIR",
",",
"f",
")",
"if",
"(",
"os",
".",
"path",
".",
"isfile",
"(",
"file_path",
")",
"and",
"os",
".",
"access",
"(",
"file_path",
",",
"os",
".",
"X_OK",
")",
"and",
"f",
"not",
"in",
"EXE_BLACKLIST",
")",
":",
"yield",
"file_path"
] | https://github.com/nodejs/http2/blob/734ad72e3939e62bcff0f686b8ec426b8aaa22e3/deps/v8/tools/sanitizers/sancov_formatter.py#L108-L115 |
||
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5Type/Tool/ComponentTool.py | python | ComponentTool._isBootstrapRequired | (self) | return False | Required by synchronizeDynamicModules() to bootstrap an empty site and
thus create portal_components
XXX-arnau: Only bt5 items for now | Required by synchronizeDynamicModules() to bootstrap an empty site and
thus create portal_components | [
"Required",
"by",
"synchronizeDynamicModules",
"()",
"to",
"bootstrap",
"an",
"empty",
"site",
"and",
"thus",
"create",
"portal_components"
] | def _isBootstrapRequired(self):
"""
Required by synchronizeDynamicModules() to bootstrap an empty site and
thus create portal_components
XXX-arnau: Only bt5 items for now
"""
return False | [
"def",
"_isBootstrapRequired",
"(",
"self",
")",
":",
"return",
"False"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Type/Tool/ComponentTool.py#L94-L101 |
|
DFIRKuiper/Kuiper | c5b4cb3d535287c360b239b7596e82731954fc77 | kuiper/app/parsers/vol_Parser/volatility/framework/plugins/mac/kevents.py | python | Kevents.list_kernel_events | (cls,
context: interfaces.context.ContextInterface,
layer_name: str,
darwin_symbols: str,
filter_func: Callable[[int], bool] = lambda _: False) | Returns the kernel event filters registered
Return values:
A tuple of 3 elements:
1) The name of the process that registered the filter
2) The process ID of the process that registered the filter
3) The object of the associated kernel event filter | Returns the kernel event filters registered | [
"Returns",
"the",
"kernel",
"event",
"filters",
"registered"
] | def list_kernel_events(cls,
context: interfaces.context.ContextInterface,
layer_name: str,
darwin_symbols: str,
filter_func: Callable[[int], bool] = lambda _: False) -> \
Iterable[Tuple[interfaces.objects.ObjectInterface,
interfaces.objects.ObjectInterface,
interfaces.objects.ObjectInterface]]:
"""
Returns the kernel event filters registered
Return values:
A tuple of 3 elements:
1) The name of the process that registered the filter
2) The process ID of the process that registered the filter
3) The object of the associated kernel event filter
"""
kernel = contexts.Module(context, darwin_symbols, layer_name, 0)
list_tasks = pslist.PsList.get_list_tasks(pslist.PsList.pslist_methods[0])
for task in list_tasks(context, layer_name, darwin_symbols, filter_func):
task_name = utility.array_to_string(task.p_comm)
pid = task.p_pid
for kn in cls._get_task_kevents(kernel, task):
yield task_name, pid, kn | [
"def",
"list_kernel_events",
"(",
"cls",
",",
"context",
":",
"interfaces",
".",
"context",
".",
"ContextInterface",
",",
"layer_name",
":",
"str",
",",
"darwin_symbols",
":",
"str",
",",
"filter_func",
":",
"Callable",
"[",
"[",
"int",
"]",
",",
"bool",
"]",
"=",
"lambda",
"_",
":",
"False",
")",
"->",
"Iterable",
"[",
"Tuple",
"[",
"interfaces",
".",
"objects",
".",
"ObjectInterface",
",",
"interfaces",
".",
"objects",
".",
"ObjectInterface",
",",
"interfaces",
".",
"objects",
".",
"ObjectInterface",
"]",
"]",
":",
"kernel",
"=",
"contexts",
".",
"Module",
"(",
"context",
",",
"darwin_symbols",
",",
"layer_name",
",",
"0",
")",
"list_tasks",
"=",
"pslist",
".",
"PsList",
".",
"get_list_tasks",
"(",
"pslist",
".",
"PsList",
".",
"pslist_methods",
"[",
"0",
"]",
")",
"for",
"task",
"in",
"list_tasks",
"(",
"context",
",",
"layer_name",
",",
"darwin_symbols",
",",
"filter_func",
")",
":",
"task_name",
"=",
"utility",
".",
"array_to_string",
"(",
"task",
".",
"p_comm",
")",
"pid",
"=",
"task",
".",
"p_pid",
"for",
"kn",
"in",
"cls",
".",
"_get_task_kevents",
"(",
"kernel",
",",
"task",
")",
":",
"yield",
"task_name",
",",
"pid",
",",
"kn"
] | https://github.com/DFIRKuiper/Kuiper/blob/c5b4cb3d535287c360b239b7596e82731954fc77/kuiper/app/parsers/vol_Parser/volatility/framework/plugins/mac/kevents.py#L121-L147 |
||
OWASP/SecureTea-Project | ae55082d4a342f10099db4dead23267a517e1a66 | securetea/modes/iot_mode.py | python | IoTMode.start_iot_mode | (self) | Start SecureTea in IoT mode.
Args:
None
Raises:
None
Returns:
None | Start SecureTea in IoT mode. | [
"Start",
"SecureTea",
"in",
"IoT",
"mode",
"."
] | def start_iot_mode(self):
"""
Start SecureTea in IoT mode.
Args:
None
Raises:
None
Returns:
None
"""
# Create / initialize required objects
self.create_objects()
# Create process for the objects
self.create_process()
# Start the process
self.start_process() | [
"def",
"start_iot_mode",
"(",
"self",
")",
":",
"# Create / initialize required objects",
"self",
".",
"create_objects",
"(",
")",
"# Create process for the objects",
"self",
".",
"create_process",
"(",
")",
"# Start the process",
"self",
".",
"start_process",
"(",
")"
] | https://github.com/OWASP/SecureTea-Project/blob/ae55082d4a342f10099db4dead23267a517e1a66/securetea/modes/iot_mode.py#L207-L225 |
||
alexa/alexa-cookbook | bd894e3f3e4d1a20fcb4557bc4c9699118117ce3 | tools/LocalDebugger/python/local_debugger.py | python | _validate_port | (port_number) | return None | Validates the user provided port number.
Verifies port number is within the legal range
- [0, 65535]
:param port_number: Port Number where the socket
connection will be established.
:type port_number: int
:return: None
:raises: ValueError when port is not in legal range [0, 65535] | Validates the user provided port number. | [
"Validates",
"the",
"user",
"provided",
"port",
"number",
"."
] | def _validate_port(port_number):
# type: (int) -> None
"""
Validates the user provided port number.
Verifies port number is within the legal range
- [0, 65535]
:param port_number: Port Number where the socket
connection will be established.
:type port_number: int
:return: None
:raises: ValueError when port is not in legal range [0, 65535]
"""
if(port_number < 0 or port_number > 65535):
raise ValueError('Port out of legal range: {0}. The port number '
'should be in the range [0, 65535]'
.format(port_number))
if(port_number == 0):
print('The TCP server will listen on a port that is free. Check logs '
'to find out what port number is being used')
return None | [
"def",
"_validate_port",
"(",
"port_number",
")",
":",
"# type: (int) -> None",
"if",
"(",
"port_number",
"<",
"0",
"or",
"port_number",
">",
"65535",
")",
":",
"raise",
"ValueError",
"(",
"'Port out of legal range: {0}. The port number '",
"'should be in the range [0, 65535]'",
".",
"format",
"(",
"port_number",
")",
")",
"if",
"(",
"port_number",
"==",
"0",
")",
":",
"print",
"(",
"'The TCP server will listen on a port that is free. Check logs '",
"'to find out what port number is being used'",
")",
"return",
"None"
] | https://github.com/alexa/alexa-cookbook/blob/bd894e3f3e4d1a20fcb4557bc4c9699118117ce3/tools/LocalDebugger/python/local_debugger.py#L46-L67 |
|
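
A brief usage sketch for the `_validate_port` helper above, matching the behaviour described in its docstring; the port values are arbitrary examples.

```python
# Assumes _validate_port from the local_debugger.py record above is importable.
_validate_port(3001)    # legal port, returns None
_validate_port(0)       # prints a note that a free port will be chosen, returns None
_validate_port(70000)   # raises ValueError: port out of the legal range [0, 65535]
```
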
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | deps/spidershim/spidermonkey/python/mozbuild/mozpack/packager/unpack.py | python | UnpackFinder._unjarize | (self, entry, relpath) | return mozpath.join(base, jar), entry | Transform a manifest entry pointing to chrome data in a jar in one
pointing to the corresponding unpacked path. Return the jar path and
the new entry. | Transform a manifest entry pointing to chrome data in a jar in one
pointing to the corresponding unpacked path. Return the jar path and
the new entry. | [
"Transform",
"a",
"manifest",
"entry",
"pointing",
"to",
"chrome",
"data",
"in",
"a",
"jar",
"in",
"one",
"pointing",
"to",
"the",
"corresponding",
"unpacked",
"path",
".",
"Return",
"the",
"jar",
"path",
"and",
"the",
"new",
"entry",
"."
] | def _unjarize(self, entry, relpath):
'''
Transform a manifest entry pointing to chrome data in a jar in one
pointing to the corresponding unpacked path. Return the jar path and
the new entry.
'''
base = entry.base
jar, relpath = urlparse(relpath).path.split('!', 1)
entry = entry.rebase(mozpath.join(base, 'jar:%s!' % jar)) \
.move(mozpath.join(base, mozpath.splitext(jar)[0])) \
.rebase(base)
return mozpath.join(base, jar), entry | [
"def",
"_unjarize",
"(",
"self",
",",
"entry",
",",
"relpath",
")",
":",
"base",
"=",
"entry",
".",
"base",
"jar",
",",
"relpath",
"=",
"urlparse",
"(",
"relpath",
")",
".",
"path",
".",
"split",
"(",
"'!'",
",",
"1",
")",
"entry",
"=",
"entry",
".",
"rebase",
"(",
"mozpath",
".",
"join",
"(",
"base",
",",
"'jar:%s!'",
"%",
"jar",
")",
")",
".",
"move",
"(",
"mozpath",
".",
"join",
"(",
"base",
",",
"mozpath",
".",
"splitext",
"(",
"jar",
")",
"[",
"0",
"]",
")",
")",
".",
"rebase",
"(",
"base",
")",
"return",
"mozpath",
".",
"join",
"(",
"base",
",",
"jar",
")",
",",
"entry"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/spidershim/spidermonkey/python/mozbuild/mozpack/packager/unpack.py#L165-L176 |
|
wotermelon/toJump | 3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f | lib/mac/systrace/catapult/devil/devil/android/sdk/shared_prefs.py | python | SharedPrefs.SetLong | (self, key, value) | Set a long property. | Set a long property. | [
"Set",
"a",
"long",
"property",
"."
] | def SetLong(self, key, value):
"""Set a long property."""
self._SetPrefValue(key, value, LongPref) | [
"def",
"SetLong",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"self",
".",
"_SetPrefValue",
"(",
"key",
",",
"value",
",",
"LongPref",
")"
] | https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/mac/systrace/catapult/devil/devil/android/sdk/shared_prefs.py#L334-L336 |
||
chris-barry/darkweb-everywhere | be6ce01b26028c8bec99c9d0307f489a6abe31be | deprecated-extension/utils/trivial-validate.py | python | test_unencrypted_to | (tree, filename, from_attrib, to) | return True | Rule redirects to something other than https. | Rule redirects to something other than https. | [
"Rule",
"redirects",
"to",
"something",
"other",
"than",
"https",
"."
] | def test_unencrypted_to(tree, filename, from_attrib, to):
# Rules that redirect to something other than https or http.
# This used to test for http: but testing for lack of https: will
# catch more kinds of mistakes.
# Now warn if the rule author indicates they intended it, with the
# downgrade attribute. Error if this attribute is not present.
"""Rule redirects to something other than https."""
for rule in xpath_rule(tree):
to, downgrade = rule.get("to"), rule.get("downgrade")
if to[:6] != "https:" and to[:5] != "http:":
return False
elif to[:5] == "http:" and downgrade:
if filename in downgrade_allowed_list:
warn("whitelisted downgrade rule in %s redirects to http." % filename)
else:
fail("non-whitelisted downgrade rule in %s redirects to http." % filename)
return False
elif to[:5] == "http:":
fail("non-downgrade rule in %s redirects to http." % filename)
return False
return True | [
"def",
"test_unencrypted_to",
"(",
"tree",
",",
"filename",
",",
"from_attrib",
",",
"to",
")",
":",
"# Rules that redirect to something other than https or http.",
"# This used to test for http: but testing for lack of https: will",
"# catch more kinds of mistakes.",
"# Now warn if the rule author indicates they intended it, with the",
"# downgrade attribute. Error if this attribute is not present.",
"for",
"rule",
"in",
"xpath_rule",
"(",
"tree",
")",
":",
"to",
",",
"downgrade",
"=",
"rule",
".",
"get",
"(",
"\"to\"",
")",
",",
"rule",
".",
"get",
"(",
"\"downgrade\"",
")",
"if",
"to",
"[",
":",
"6",
"]",
"!=",
"\"https:\"",
"and",
"to",
"[",
":",
"5",
"]",
"!=",
"\"http:\"",
":",
"return",
"False",
"elif",
"to",
"[",
":",
"5",
"]",
"==",
"\"http:\"",
"and",
"downgrade",
":",
"if",
"filename",
"in",
"downgrade_allowed_list",
":",
"warn",
"(",
"\"whitelisted downgrade rule in %s redirects to http.\"",
"%",
"filename",
")",
"else",
":",
"fail",
"(",
"\"non-whitelisted downgrade rule in %s redirects to http.\"",
"%",
"filename",
")",
"return",
"False",
"elif",
"to",
"[",
":",
"5",
"]",
"==",
"\"http:\"",
":",
"fail",
"(",
"\"non-downgrade rule in %s redirects to http.\"",
"%",
"filename",
")",
"return",
"False",
"return",
"True"
] | https://github.com/chris-barry/darkweb-everywhere/blob/be6ce01b26028c8bec99c9d0307f489a6abe31be/deprecated-extension/utils/trivial-validate.py#L87-L107 |
|
xixiaoyao/CS224n-winter-together | f1fbcd4db284a804cb9dfc24b65481ba66e7d32c | Assignments/assignment4/geekhch/vocab.py | python | VocabEntry.__init__ | (self, word2id=None) | Init VocabEntry Instance.
@param word2id (dict): dictionary mapping words 2 indices | Init VocabEntry Instance. | [
"Init",
"VocabEntry",
"Instance",
"."
] | def __init__(self, word2id=None):
""" Init VocabEntry Instance.
@param word2id (dict): dictionary mapping words 2 indices
"""
if word2id:
self.word2id = word2id
else:
self.word2id = dict()
self.word2id['<pad>'] = 0 # Pad Token
self.word2id['<s>'] = 1 # Start Token
self.word2id['</s>'] = 2 # End Token
self.word2id['<unk>'] = 3 # Unknown Token
self.unk_id = self.word2id['<unk>']
self.id2word = {v: k for k, v in self.word2id.items()} | [
"def",
"__init__",
"(",
"self",
",",
"word2id",
"=",
"None",
")",
":",
"if",
"word2id",
":",
"self",
".",
"word2id",
"=",
"word2id",
"else",
":",
"self",
".",
"word2id",
"=",
"dict",
"(",
")",
"self",
".",
"word2id",
"[",
"'<pad>'",
"]",
"=",
"0",
"# Pad Token",
"self",
".",
"word2id",
"[",
"'<s>'",
"]",
"=",
"1",
"# Start Token",
"self",
".",
"word2id",
"[",
"'</s>'",
"]",
"=",
"2",
"# End Token",
"self",
".",
"word2id",
"[",
"'<unk>'",
"]",
"=",
"3",
"# Unknown Token",
"self",
".",
"unk_id",
"=",
"self",
".",
"word2id",
"[",
"'<unk>'",
"]",
"self",
".",
"id2word",
"=",
"{",
"v",
":",
"k",
"for",
"k",
",",
"v",
"in",
"self",
".",
"word2id",
".",
"items",
"(",
")",
"}"
] | https://github.com/xixiaoyao/CS224n-winter-together/blob/f1fbcd4db284a804cb9dfc24b65481ba66e7d32c/Assignments/assignment4/geekhch/vocab.py#L35-L48 |
||
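
A short usage sketch for the `VocabEntry` constructor above, showing the special tokens it installs when no `word2id` mapping is supplied.

```python
# Assumes VocabEntry from the vocab.py record above is importable.
from vocab import VocabEntry

vocab = VocabEntry()
print(vocab.word2id['<pad>'])   # 0
print(vocab.word2id['</s>'])    # 2
print(vocab.unk_id)             # 3
print(vocab.id2word[1])         # '<s>'
```
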
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ZSQLCatalog/interfaces/search_key.py | python | ISearchKey.buildQuery | (search_value, group=None, logical_operator=None, comparison_operator=None) | Generate queries from given search_value.
logical_operator ('and', 'or', 'not', None)
If no logical operator can be found in search_value, it will use given
default operator.
comparison_operator (string, None)
If given, expresses the comparison between column and value. | Generate queries from given search_value.
logical_operator ('and', 'or', 'not', None)
If no logical operator can be found in search_value, it will use given
default operator.
comparison_operator (string, None)
If given, expresses the comparison between column and value. | [
"Generate",
"queries",
"from",
"given",
"search_value",
".",
"logical_operator",
"(",
"and",
"or",
"not",
"None",
")",
"If",
"no",
"logical",
"operator",
"can",
"be",
"found",
"in",
"search_value",
"it",
"will",
"use",
"given",
"default",
"operator",
".",
"comparison_operator",
"(",
"string",
"None",
")",
"If",
"given",
"expresses",
"the",
"comparison",
"between",
"column",
"and",
"value",
"."
] | def buildQuery(search_value, group=None, logical_operator=None, comparison_operator=None):
"""
Generate queries from given search_value.
logical_operator ('and', 'or', 'not', None)
If no logical operator can be found in search_value, it will use given
default operator.
comparison_operator (string, None)
If given, expresses the comparison between column and value.
""" | [
"def",
"buildQuery",
"(",
"search_value",
",",
"group",
"=",
"None",
",",
"logical_operator",
"=",
"None",
",",
"comparison_operator",
"=",
"None",
")",
":"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ZSQLCatalog/interfaces/search_key.py#L102-L110 |
||
loatheb/leetcode-javascript | 7b4e65cb24b8b3e2812b6d4d3700d5719d87d053 | 016-3sum-closest/3sum-closest.py | python | Solution.threeSumClosest | (self, nums, target) | return result | :type nums: List[int]
:type target: int
:rtype: int | :type nums: List[int]
:type target: int
:rtype: int | [
":",
"type",
"nums",
":",
"List",
"[",
"int",
"]",
":",
"type",
"target",
":",
"int",
":",
"rtype",
":",
"int"
] | def threeSumClosest(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
nums.sort()
result = sum(nums[:3])
for i in range(len(nums) - 2):
j, k = i+1, len(nums) - 1
while j < k:
s = nums[i] + nums[j] + nums[k]
if s == target:
return s
if abs(s - target) < abs(result - target):
result = s
if s < target:
j += 1
elif s > target:
k -= 1
return result | [
"def",
"threeSumClosest",
"(",
"self",
",",
"nums",
",",
"target",
")",
":",
"nums",
".",
"sort",
"(",
")",
"result",
"=",
"sum",
"(",
"nums",
"[",
":",
"3",
"]",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"nums",
")",
"-",
"2",
")",
":",
"j",
",",
"k",
"=",
"i",
"+",
"1",
",",
"len",
"(",
"nums",
")",
"-",
"1",
"while",
"j",
"<",
"k",
":",
"s",
"=",
"nums",
"[",
"i",
"]",
"+",
"nums",
"[",
"j",
"]",
"+",
"nums",
"[",
"k",
"]",
"if",
"s",
"==",
"target",
":",
"return",
"s",
"if",
"abs",
"(",
"s",
"-",
"target",
")",
"<",
"abs",
"(",
"result",
"-",
"target",
")",
":",
"result",
"=",
"s",
"if",
"s",
"<",
"target",
":",
"j",
"+=",
"1",
"elif",
"s",
">",
"target",
":",
"k",
"-=",
"1",
"return",
"result"
] | https://github.com/loatheb/leetcode-javascript/blob/7b4e65cb24b8b3e2812b6d4d3700d5719d87d053/016-3sum-closest/3sum-closest.py#L13-L35 |
|
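
A usage sketch for the `threeSumClosest` record above, using the classic example input.

```python
# Assumes the Solution class from the record above is defined or importable.
sol = Solution()
print(sol.threeSumClosest([-1, 2, 1, -4], 1))   # 2  (closest sum: -1 + 2 + 1)
print(sol.threeSumClosest([0, 0, 0], 1))        # 0
```
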
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py | python | DebugProperty.deleter | (self, fdel) | return self | Overriding deleter decorator for the property | Overriding deleter decorator for the property | [
"Overriding",
"deleter",
"decorator",
"for",
"the",
"property"
] | def deleter(self, fdel):
"""Overriding deleter decorator for the property
"""
self.fdel = fdel
return self | [
"def",
"deleter",
"(",
"self",
",",
"fdel",
")",
":",
"self",
".",
"fdel",
"=",
"fdel",
"return",
"self"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py#L104-L108 |
|
prometheus-ar/vot.ar | 72d8fa1ea08fe417b64340b98dff68df8364afdf | msa/voto/controllers/voto.py | python | ControllerVoto._get_partidos | (self) | return partidos | Devuelve las partidos. | Devuelve las partidos. | [
"Devuelve",
"las",
"partidos",
"."
] | def _get_partidos(self):
"""Devuelve las partidos."""
partidos = [agr.full_dict(_image_name) for agr in self.agrupador.all()
if not agr.es_blanco()]
if MEZCLAR_INTERNAS:
shuffle(partidos)
return partidos | [
"def",
"_get_partidos",
"(",
"self",
")",
":",
"partidos",
"=",
"[",
"agr",
".",
"full_dict",
"(",
"_image_name",
")",
"for",
"agr",
"in",
"self",
".",
"agrupador",
".",
"all",
"(",
")",
"if",
"not",
"agr",
".",
"es_blanco",
"(",
")",
"]",
"if",
"MEZCLAR_INTERNAS",
":",
"shuffle",
"(",
"partidos",
")",
"return",
"partidos"
] | https://github.com/prometheus-ar/vot.ar/blob/72d8fa1ea08fe417b64340b98dff68df8364afdf/msa/voto/controllers/voto.py#L548-L555 |
|
square/connect-api-examples | 792666376afbdba2752ccd0bb9caa8f1cd12f69b | connect-examples/oauth/python-aws-chalice/chalicelib/oauthDB.py | python | remove_oauth_record | (merchant_id) | Remove the OAuth record for the specified merchant.
This method should be called after the OAuth tokens of the merchant is revoked.
Parameters
----------
merchant_id : str
The id of the merchant whose OAuth tokens are to be removed. | Remove the OAuth record for the specified merchant. | [
"Remove",
"the",
"OAuth",
"record",
"for",
"the",
"specified",
"merchant",
"."
] | def remove_oauth_record(merchant_id):
'''Remove the OAuth record for the specified merchant.
This method should be called after the OAuth tokens of the merchant is revoked.
Parameters
----------
merchant_id : str
The id of the merchant whose OAuth tokens are to be removed.
'''
delete_item_response = dynamodb_client.delete_item(
TableName = oauth_table_name,
Key = { 'MerchantId': { 'S': merchant_id } },
)
pass | [
"def",
"remove_oauth_record",
"(",
"merchant_id",
")",
":",
"delete_item_response",
"=",
"dynamodb_client",
".",
"delete_item",
"(",
"TableName",
"=",
"oauth_table_name",
",",
"Key",
"=",
"{",
"'MerchantId'",
":",
"{",
"'S'",
":",
"merchant_id",
"}",
"}",
",",
")",
"pass"
] | https://github.com/square/connect-api-examples/blob/792666376afbdba2752ccd0bb9caa8f1cd12f69b/connect-examples/oauth/python-aws-chalice/chalicelib/oauthDB.py#L162-L176 |
||
pinterest/pinball | c54a206cf6e3dbadb056c189f741d75828c02f98 | pinball/ui/data_builder.py | python | DataBuilder.get_token_paths | (self, path) | return result | Get token paths data from the store.
Args:
path: The path is the name prefix of the parent whose direct
children should be returned.
Returns:
List of direct path descendants of the parent. | Get token paths data from the store. | [
"Get",
"token",
"paths",
"data",
"from",
"the",
"store",
"."
] | def get_token_paths(self, path):
"""Get token paths data from the store.
Args:
path: The path is the name prefix of the parent whose direct
children should be returned.
Returns:
List of direct path descendants of the parent.
"""
if not path.startswith(Name.DELIMITER):
raise PinballException('incorrectly formatted path %s' % path)
# TODO(pawel): this is a bit inefficient as it may load names of quite
# a few tokens into the memory.
token_names = self._store.read_token_names(name_prefix=path)
counts = collections.defaultdict(int)
path_len = len(path)
for token_name in token_names:
index = token_name.find(Name.DELIMITER, path_len)
if index == -1:
index = len(token_name)
else:
index += 1
group = token_name[:index]
counts[group] += 1
result = []
for path, count in counts.items():
result.append(TokenPathData(path, count))
return result | [
"def",
"get_token_paths",
"(",
"self",
",",
"path",
")",
":",
"if",
"not",
"path",
".",
"startswith",
"(",
"Name",
".",
"DELIMITER",
")",
":",
"raise",
"PinballException",
"(",
"'incorrectly formatted path %s'",
"%",
"path",
")",
"# TODO(pawel): this is a bit inefficient as it may load names of quite",
"# a few tokens into the memory.",
"token_names",
"=",
"self",
".",
"_store",
".",
"read_token_names",
"(",
"name_prefix",
"=",
"path",
")",
"counts",
"=",
"collections",
".",
"defaultdict",
"(",
"int",
")",
"path_len",
"=",
"len",
"(",
"path",
")",
"for",
"token_name",
"in",
"token_names",
":",
"index",
"=",
"token_name",
".",
"find",
"(",
"Name",
".",
"DELIMITER",
",",
"path_len",
")",
"if",
"index",
"==",
"-",
"1",
":",
"index",
"=",
"len",
"(",
"token_name",
")",
"else",
":",
"index",
"+=",
"1",
"group",
"=",
"token_name",
"[",
":",
"index",
"]",
"counts",
"[",
"group",
"]",
"+=",
"1",
"result",
"=",
"[",
"]",
"for",
"path",
",",
"count",
"in",
"counts",
".",
"items",
"(",
")",
":",
"result",
".",
"append",
"(",
"TokenPathData",
"(",
"path",
",",
"count",
")",
")",
"return",
"result"
] | https://github.com/pinterest/pinball/blob/c54a206cf6e3dbadb056c189f741d75828c02f98/pinball/ui/data_builder.py#L914-L941 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/unclosured/lib/python2.7/telnetlib.py | python | Telnet.read_eager | (self) | return self.read_very_lazy() | Read readily available data.
Raise EOFError if connection closed and no cooked data
available. Return '' if no cooked data available otherwise.
Don't block unless in the midst of an IAC sequence. | Read readily available data. | [
"Read",
"readily",
"available",
"data",
"."
] | def read_eager(self):
"""Read readily available data.
Raise EOFError if connection closed and no cooked data
available. Return '' if no cooked data available otherwise.
Don't block unless in the midst of an IAC sequence.
"""
self.process_rawq()
while not self.cookedq and not self.eof and self.sock_avail():
self.fill_rawq()
self.process_rawq()
return self.read_very_lazy() | [
"def",
"read_eager",
"(",
"self",
")",
":",
"self",
".",
"process_rawq",
"(",
")",
"while",
"not",
"self",
".",
"cookedq",
"and",
"not",
"self",
".",
"eof",
"and",
"self",
".",
"sock_avail",
"(",
")",
":",
"self",
".",
"fill_rawq",
"(",
")",
"self",
".",
"process_rawq",
"(",
")",
"return",
"self",
".",
"read_very_lazy",
"(",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/unclosured/lib/python2.7/telnetlib.py#L360-L372 |
|
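
A hedged sketch of how `Telnet.read_eager` above might be used; the host address is a documentation-range placeholder, and in this Python 2.7 vendored module the method returns a `str`.

```python
# Illustrative only: 192.0.2.10 is a placeholder host, not a real service.
import telnetlib

tn = telnetlib.Telnet("192.0.2.10", 23, timeout=5)
chunk = tn.read_eager()   # '' if no cooked data is readily available;
                          # raises EOFError once the connection is closed and drained
tn.close()
```
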
cBioPortal/cbioportal | e7a279de809e2fd3af02afeea2317135ec81ffcf | core/src/main/scripts/importer/validateData.py | python | GisticGenesValidator.__init__ | (self, *args, **kwargs) | Initialize a GisticGenesValidator with the given parameters. | Initialize a GisticGenesValidator with the given parameters. | [
"Initialize",
"a",
"GisticGenesValidator",
"with",
"the",
"given",
"parameters",
"."
] | def __init__(self, *args, **kwargs):
"""Initialize a GisticGenesValidator with the given parameters."""
super(GisticGenesValidator, self).__init__(*args, **kwargs)
# checkLine() expects particular values here, for the 'amp' column
if not self.meta_dict['reference_genome_id'].startswith('hg'):
if not self.meta_dict['reference_genome_id'].startswith('mm'):
raise RuntimeError(
"GisticGenesValidator requires the metadata field "
"reference_genome_id to start with 'hg' or 'mm'")
if self.meta_dict['genetic_alteration_type'] not in (
'GISTIC_GENES_AMP', 'GISTIC_GENES_DEL'):
raise RuntimeError(
"Genetic alteration type '{}' not supported by "
"GisticGenesValidator.".format(
self.meta_dict['genetic_alteration_type']))
self.chromosome_lengths = self.load_chromosome_lengths(
self.meta_dict['reference_genome_id'],
self.logger.logger
) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"GisticGenesValidator",
",",
"self",
")",
".",
"__init__",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# checkLine() expects particular values here, for the 'amp' column",
"if",
"not",
"self",
".",
"meta_dict",
"[",
"'reference_genome_id'",
"]",
".",
"startswith",
"(",
"'hg'",
")",
":",
"if",
"not",
"self",
".",
"meta_dict",
"[",
"'reference_genome_id'",
"]",
".",
"startswith",
"(",
"'mm'",
")",
":",
"raise",
"RuntimeError",
"(",
"\"GisticGenesValidator requires the metadata field \"",
"\"reference_genome_id to start with 'hg' or 'mm'\"",
")",
"if",
"self",
".",
"meta_dict",
"[",
"'genetic_alteration_type'",
"]",
"not",
"in",
"(",
"'GISTIC_GENES_AMP'",
",",
"'GISTIC_GENES_DEL'",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Genetic alteration type '{}' not supported by \"",
"\"GisticGenesValidator.\"",
".",
"format",
"(",
"self",
".",
"meta_dict",
"[",
"'genetic_alteration_type'",
"]",
")",
")",
"self",
".",
"chromosome_lengths",
"=",
"self",
".",
"load_chromosome_lengths",
"(",
"self",
".",
"meta_dict",
"[",
"'reference_genome_id'",
"]",
",",
"self",
".",
"logger",
".",
"logger",
")"
] | https://github.com/cBioPortal/cbioportal/blob/e7a279de809e2fd3af02afeea2317135ec81ffcf/core/src/main/scripts/importer/validateData.py#L4005-L4023 |
||
liyaopinner/mxonline_resources | 47f7a752349a4d38c18656486d140db42ea7de33 | DjangoUeditor/views.py | python | list_files | (request) | return HttpResponse(json.dumps(return_info), content_type="application/javascript") | 列出文件 | 列出文件 | [
"列出文件"
] | def list_files(request):
"""列出文件"""
if request.method != "GET":
return HttpResponse(json.dumps(u"{'state:'ERROR'}"), content_type="application/javascript")
# 取得动作
action = request.GET.get("action", "listimage")
allowFiles = {
"listfile": USettings.UEditorUploadSettings.get("fileManagerAllowFiles", []),
"listimage": USettings.UEditorUploadSettings.get("imageManagerAllowFiles", [])
}
listSize = {
"listfile": USettings.UEditorUploadSettings.get("fileManagerListSize", ""),
"listimage": USettings.UEditorUploadSettings.get("imageManagerListSize", "")
}
listpath = {
"listfile": USettings.UEditorUploadSettings.get("fileManagerListPath", ""),
"listimage": USettings.UEditorUploadSettings.get("imageManagerListPath", "")
}
# 取得参数
list_size = int(request.GET.get("size", listSize[action]))
list_start = int(request.GET.get("start", 0))
files = []
root_path = os.path.join(USettings.gSettings.MEDIA_ROOT, listpath[action]).replace("\\", "/")
files = get_files(root_path, root_path, allowFiles[action])
if (len(files) == 0):
return_info = {
"state": u"未找到匹配文件!",
"list": [],
"start": list_start,
"total": 0
}
else:
return_info = {
"state": "SUCCESS",
"list": files[list_start:list_start + list_size],
"start": list_start,
"total": len(files)
}
return HttpResponse(json.dumps(return_info), content_type="application/javascript") | [
"def",
"list_files",
"(",
"request",
")",
":",
"if",
"request",
".",
"method",
"!=",
"\"GET\"",
":",
"return",
"HttpResponse",
"(",
"json",
".",
"dumps",
"(",
"u\"{'state:'ERROR'}\"",
")",
",",
"content_type",
"=",
"\"application/javascript\"",
")",
"# 取得动作",
"action",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"\"action\"",
",",
"\"listimage\"",
")",
"allowFiles",
"=",
"{",
"\"listfile\"",
":",
"USettings",
".",
"UEditorUploadSettings",
".",
"get",
"(",
"\"fileManagerAllowFiles\"",
",",
"[",
"]",
")",
",",
"\"listimage\"",
":",
"USettings",
".",
"UEditorUploadSettings",
".",
"get",
"(",
"\"imageManagerAllowFiles\"",
",",
"[",
"]",
")",
"}",
"listSize",
"=",
"{",
"\"listfile\"",
":",
"USettings",
".",
"UEditorUploadSettings",
".",
"get",
"(",
"\"fileManagerListSize\"",
",",
"\"\"",
")",
",",
"\"listimage\"",
":",
"USettings",
".",
"UEditorUploadSettings",
".",
"get",
"(",
"\"imageManagerListSize\"",
",",
"\"\"",
")",
"}",
"listpath",
"=",
"{",
"\"listfile\"",
":",
"USettings",
".",
"UEditorUploadSettings",
".",
"get",
"(",
"\"fileManagerListPath\"",
",",
"\"\"",
")",
",",
"\"listimage\"",
":",
"USettings",
".",
"UEditorUploadSettings",
".",
"get",
"(",
"\"imageManagerListPath\"",
",",
"\"\"",
")",
"}",
"# 取得参数",
"list_size",
"=",
"int",
"(",
"request",
".",
"GET",
".",
"get",
"(",
"\"size\"",
",",
"listSize",
"[",
"action",
"]",
")",
")",
"list_start",
"=",
"int",
"(",
"request",
".",
"GET",
".",
"get",
"(",
"\"start\"",
",",
"0",
")",
")",
"files",
"=",
"[",
"]",
"root_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"USettings",
".",
"gSettings",
".",
"MEDIA_ROOT",
",",
"listpath",
"[",
"action",
"]",
")",
".",
"replace",
"(",
"\"\\\\\"",
",",
"\"/\"",
")",
"files",
"=",
"get_files",
"(",
"root_path",
",",
"root_path",
",",
"allowFiles",
"[",
"action",
"]",
")",
"if",
"(",
"len",
"(",
"files",
")",
"==",
"0",
")",
":",
"return_info",
"=",
"{",
"\"state\"",
":",
"u\"未找到匹配文件!\",",
"",
"\"list\"",
":",
"[",
"]",
",",
"\"start\"",
":",
"list_start",
",",
"\"total\"",
":",
"0",
"}",
"else",
":",
"return_info",
"=",
"{",
"\"state\"",
":",
"\"SUCCESS\"",
",",
"\"list\"",
":",
"files",
"[",
"list_start",
":",
"list_start",
"+",
"list_size",
"]",
",",
"\"start\"",
":",
"list_start",
",",
"\"total\"",
":",
"len",
"(",
"files",
")",
"}",
"return",
"HttpResponse",
"(",
"json",
".",
"dumps",
"(",
"return_info",
")",
",",
"content_type",
"=",
"\"application/javascript\"",
")"
] | https://github.com/liyaopinner/mxonline_resources/blob/47f7a752349a4d38c18656486d140db42ea7de33/DjangoUeditor/views.py#L67-L109 |
|
catmaid/CATMAID | 9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf | django/applications/catmaid/control/point.py | python | PointDetail.post | (request:Request, project_id, point_id) | return Response(serializer.data) | Update one particular point.
Requires at least one field to change.
---
parameters:
- name: project_id
description: Project point is part of
type: integer
paramType: path
required: true
- name: point_id
description: ID of point
type: integer
paramType: path
required: true
- name: location_x
description: X coordinate
type: float
paramType: form
required: false
- name: location_y
description: Y coordinate
type: float
paramType: form
required: false
- name: location_z
description: Z coordinate
type: float
paramType: form
required: false
- name: radius
description: Optional radius
type: float
paramType: form
required: false
- name: confidence
description: Optional confidence in [0,5]
type: integer
paramType: form
required: false | Update one particular point. | [
"Update",
"one",
"particular",
"point",
"."
] | def post(request:Request, project_id, point_id) -> Response:
"""Update one particular point.
Requires at least one field to change.
---
parameters:
- name: project_id
description: Project point is part of
type: integer
paramType: path
required: true
- name: point_id
description: ID of point
type: integer
paramType: path
required: true
- name: location_x
description: X coordinate
type: float
paramType: form
required: false
- name: location_y
description: Y coordinate
type: float
paramType: form
required: false
- name: location_z
description: Z coordinate
type: float
paramType: form
required: false
- name: radius
description: Optional radius
type: float
paramType: form
required: false
- name: confidence
description: Optional confidence in [0,5]
type: integer
paramType: form
required: false
"""
can_edit_or_fail(request.user, point_id, 'point')
updated_fields = {}
if request.POST.has('x'):
updated_fields['location_x'] = float(request.POST.get('x'))
if request.POST.has('y'):
updated_fields['location_y'] = float(request.POST.get('y'))
if request.POST.has('z'):
updated_fields['location_z'] = float(request.POST.get('z'))
if request.POST.has('radius'):
updated_fields['radius'] = float(request.POST.get('radius'))
if request.POST.has('confidence'):
confidence = max(min(int(request.POST.get('confidence')), 5), 0)
updated_fields['confidence'] = confidence
if not updated_fields:
raise ValueError('No field to modify provided')
point = get_object_or_404(Point, pk=point_id, project_id=project_id)
point.update(**updated_fields)
point.save()
serializer = PointSerializer(point)
return Response(serializer.data) | [
"def",
"post",
"(",
"request",
":",
"Request",
",",
"project_id",
",",
"point_id",
")",
"->",
"Response",
":",
"can_edit_or_fail",
"(",
"request",
".",
"user",
",",
"point_id",
",",
"'point'",
")",
"updated_fields",
"=",
"{",
"}",
"if",
"request",
".",
"POST",
".",
"has",
"(",
"'x'",
")",
":",
"updated_fields",
"[",
"'location_x'",
"]",
"=",
"float",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"'x'",
")",
")",
"if",
"request",
".",
"POST",
".",
"has",
"(",
"'y'",
")",
":",
"updated_fields",
"[",
"'location_y'",
"]",
"=",
"float",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"'y'",
")",
")",
"if",
"request",
".",
"POST",
".",
"has",
"(",
"'z'",
")",
":",
"updated_fields",
"[",
"'location_z'",
"]",
"=",
"float",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"'z'",
")",
")",
"if",
"request",
".",
"POST",
".",
"has",
"(",
"'radius'",
")",
":",
"updated_fields",
"[",
"'radius'",
"]",
"=",
"float",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"'radius'",
")",
")",
"if",
"request",
".",
"POST",
".",
"has",
"(",
"'confidence'",
")",
":",
"confidence",
"=",
"max",
"(",
"min",
"(",
"int",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"'confidence'",
")",
")",
",",
"5",
")",
",",
"0",
")",
"updated_fields",
"[",
"'confidence'",
"]",
"=",
"confidence",
"if",
"not",
"updated_fields",
":",
"raise",
"ValueError",
"(",
"'No field to modify provided'",
")",
"point",
"=",
"get_object_or_404",
"(",
"Point",
",",
"pk",
"=",
"point_id",
",",
"project_id",
"=",
"project_id",
")",
"point",
".",
"update",
"(",
"*",
"*",
"updated_fields",
")",
"point",
".",
"save",
"(",
")",
"serializer",
"=",
"PointSerializer",
"(",
"point",
")",
"return",
"Response",
"(",
"serializer",
".",
"data",
")"
] | https://github.com/catmaid/CATMAID/blob/9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf/django/applications/catmaid/control/point.py#L105-L170 |
|
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | addons/crm/models/crm_lead.py | python | Lead._prepare_customer_values | (self, partner_name, is_company=False, parent_id=False) | return res | Extract data from lead to create a partner.
:param name : furtur name of the partner
:param is_company : True if the partner is a company
:param parent_id : id of the parent partner (False if no parent)
:return: dictionary of values to give at res_partner.create() | Extract data from lead to create a partner. | [
"Extract",
"data",
"from",
"lead",
"to",
"create",
"a",
"partner",
"."
] | def _prepare_customer_values(self, partner_name, is_company=False, parent_id=False):
""" Extract data from lead to create a partner.
:param name : furtur name of the partner
:param is_company : True if the partner is a company
:param parent_id : id of the parent partner (False if no parent)
:return: dictionary of values to give at res_partner.create()
"""
email_parts = tools.email_split(self.email_from)
res = {
'name': partner_name,
'user_id': self.env.context.get('default_user_id') or self.user_id.id,
'comment': self.description,
'team_id': self.team_id.id,
'parent_id': parent_id,
'phone': self.phone,
'mobile': self.mobile,
'email': email_parts[0] if email_parts else False,
'title': self.title.id,
'function': self.function,
'street': self.street,
'street2': self.street2,
'zip': self.zip,
'city': self.city,
'country_id': self.country_id.id,
'state_id': self.state_id.id,
'website': self.website,
'is_company': is_company,
'type': 'contact'
}
if self.lang_id:
res['lang'] = self.lang_id.code
return res | [
"def",
"_prepare_customer_values",
"(",
"self",
",",
"partner_name",
",",
"is_company",
"=",
"False",
",",
"parent_id",
"=",
"False",
")",
":",
"email_parts",
"=",
"tools",
".",
"email_split",
"(",
"self",
".",
"email_from",
")",
"res",
"=",
"{",
"'name'",
":",
"partner_name",
",",
"'user_id'",
":",
"self",
".",
"env",
".",
"context",
".",
"get",
"(",
"'default_user_id'",
")",
"or",
"self",
".",
"user_id",
".",
"id",
",",
"'comment'",
":",
"self",
".",
"description",
",",
"'team_id'",
":",
"self",
".",
"team_id",
".",
"id",
",",
"'parent_id'",
":",
"parent_id",
",",
"'phone'",
":",
"self",
".",
"phone",
",",
"'mobile'",
":",
"self",
".",
"mobile",
",",
"'email'",
":",
"email_parts",
"[",
"0",
"]",
"if",
"email_parts",
"else",
"False",
",",
"'title'",
":",
"self",
".",
"title",
".",
"id",
",",
"'function'",
":",
"self",
".",
"function",
",",
"'street'",
":",
"self",
".",
"street",
",",
"'street2'",
":",
"self",
".",
"street2",
",",
"'zip'",
":",
"self",
".",
"zip",
",",
"'city'",
":",
"self",
".",
"city",
",",
"'country_id'",
":",
"self",
".",
"country_id",
".",
"id",
",",
"'state_id'",
":",
"self",
".",
"state_id",
".",
"id",
",",
"'website'",
":",
"self",
".",
"website",
",",
"'is_company'",
":",
"is_company",
",",
"'type'",
":",
"'contact'",
"}",
"if",
"self",
".",
"lang_id",
":",
"res",
"[",
"'lang'",
"]",
"=",
"self",
".",
"lang_id",
".",
"code",
"return",
"res"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/crm/models/crm_lead.py#L1635-L1668 |
|
jam-py/jam-py | 0821492cdff8665928e0f093a4435aa64285a45c | jam/third_party/sqlalchemy/engine/result.py | python | ResultProxy.fetchmany | (self, size=None) | Fetch many rows, just like DB-API
``cursor.fetchmany(size=cursor.arraysize)``.
After all rows have been exhausted, the underlying DBAPI
cursor resource is released, and the object may be safely
discarded.
Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
exhausted will return
an empty list. After the :meth:`.ResultProxy.close` method is
called, the method will raise :class:`.ResourceClosedError`.
:return: a list of :class:`.Row` objects | Fetch many rows, just like DB-API
``cursor.fetchmany(size=cursor.arraysize)``. | [
"Fetch",
"many",
"rows",
"just",
"like",
"DB",
"-",
"API",
"cursor",
".",
"fetchmany",
"(",
"size",
"=",
"cursor",
".",
"arraysize",
")",
"."
] | def fetchmany(self, size=None):
"""Fetch many rows, just like DB-API
``cursor.fetchmany(size=cursor.arraysize)``.
After all rows have been exhausted, the underlying DBAPI
cursor resource is released, and the object may be safely
discarded.
Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
exhausted will return
an empty list. After the :meth:`.ResultProxy.close` method is
called, the method will raise :class:`.ResourceClosedError`.
:return: a list of :class:`.Row` objects
"""
try:
l = self.process_rows(self.cursor_strategy.fetchmany(size))
if len(l) == 0:
self._soft_close()
return l
except BaseException as e:
self.connection._handle_dbapi_exception(
e, None, None, self.cursor, self.context
) | [
"def",
"fetchmany",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"try",
":",
"l",
"=",
"self",
".",
"process_rows",
"(",
"self",
".",
"cursor_strategy",
".",
"fetchmany",
"(",
"size",
")",
")",
"if",
"len",
"(",
"l",
")",
"==",
"0",
":",
"self",
".",
"_soft_close",
"(",
")",
"return",
"l",
"except",
"BaseException",
"as",
"e",
":",
"self",
".",
"connection",
".",
"_handle_dbapi_exception",
"(",
"e",
",",
"None",
",",
"None",
",",
"self",
".",
"cursor",
",",
"self",
".",
"context",
")"
] | https://github.com/jam-py/jam-py/blob/0821492cdff8665928e0f093a4435aa64285a45c/jam/third_party/sqlalchemy/engine/result.py#L1611-L1636 |
||
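
A hedged sketch of batched iteration with `ResultProxy.fetchmany` above. The database URL and query are placeholders, and the 1.x-style `engine.execute` call is assumed from the vendored SQLAlchemy version in this record.

```python
# Illustrative only: the database URL and query are placeholders.
from sqlalchemy import create_engine

engine = create_engine("sqlite:///example.db")
result = engine.execute("SELECT id, name FROM users")
while True:
    batch = result.fetchmany(500)   # empty list once all rows are exhausted
    if not batch:
        break
    for row in batch:
        pass  # process each Row
```
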
scottrogowski/code2flow | 37e45ca4340289f8ceec79b3fe5131c401387c58 | code2flow/ruby.py | python | make_local_variables | (tree_el, parent) | return variables | Given an ast of all the lines in a function, generate a list of
variables in that function. Variables are tokens and what they link to.
In this case, what it links to is just a string. However, that is resolved
later.
Also return variables for the outer scope parent
:param tree_el ast:
:param parent Group:
:rtype: list[Variable] | Given an ast of all the lines in a function, generate a list of
variables in that function. Variables are tokens and what they link to.
In this case, what it links to is just a string. However, that is resolved
later. | [
"Given",
"an",
"ast",
"of",
"all",
"the",
"lines",
"in",
"a",
"function",
"generate",
"a",
"list",
"of",
"variables",
"in",
"that",
"function",
".",
"Variables",
"are",
"tokens",
"and",
"what",
"they",
"link",
"to",
".",
"In",
"this",
"case",
"what",
"it",
"links",
"to",
"is",
"just",
"a",
"string",
".",
"However",
"that",
"is",
"resolved",
"later",
"."
] | def make_local_variables(tree_el, parent):
"""
Given an ast of all the lines in a function, generate a list of
variables in that function. Variables are tokens and what they link to.
In this case, what it links to is just a string. However, that is resolved
later.
Also return variables for the outer scope parent
:param tree_el ast:
:param parent Group:
:rtype: list[Variable]
"""
variables = []
for el in tree_el:
if el[0] == 'lvasgn':
variables.append(process_assign(el))
# Make a 'self' variable for use anywhere we need it that points to the class
if isinstance(parent, Group) and parent.group_type == GROUP_TYPE.CLASS:
variables.append(Variable('self', parent))
variables = list(filter(None, variables))
return variables | [
"def",
"make_local_variables",
"(",
"tree_el",
",",
"parent",
")",
":",
"variables",
"=",
"[",
"]",
"for",
"el",
"in",
"tree_el",
":",
"if",
"el",
"[",
"0",
"]",
"==",
"'lvasgn'",
":",
"variables",
".",
"append",
"(",
"process_assign",
"(",
"el",
")",
")",
"# Make a 'self' variable for use anywhere we need it that points to the class",
"if",
"isinstance",
"(",
"parent",
",",
"Group",
")",
"and",
"parent",
".",
"group_type",
"==",
"GROUP_TYPE",
".",
"CLASS",
":",
"variables",
".",
"append",
"(",
"Variable",
"(",
"'self'",
",",
"parent",
")",
")",
"variables",
"=",
"list",
"(",
"filter",
"(",
"None",
",",
"variables",
")",
")",
"return",
"variables"
] | https://github.com/scottrogowski/code2flow/blob/37e45ca4340289f8ceec79b3fe5131c401387c58/code2flow/ruby.py#L108-L131 |
|
nodejs/node | ac3c33c1646bf46104c15ae035982c06364da9b8 | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py | python | MacTool.ExecCodeSignBundle | (self, key, entitlements, provisioning, path, preserve) | Code sign a bundle.
This function tries to code sign an iOS bundle, following the same
algorithm as Xcode:
1. pick the provisioning profile that best match the bundle identifier,
and copy it into the bundle as embedded.mobileprovision,
2. copy Entitlements.plist from user or SDK next to the bundle,
3. code sign the bundle. | Code sign a bundle. | [
"Code",
"sign",
"a",
"bundle",
"."
] | def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
"""Code sign a bundle.
This function tries to code sign an iOS bundle, following the same
algorithm as Xcode:
1. pick the provisioning profile that best match the bundle identifier,
and copy it into the bundle as embedded.mobileprovision,
2. copy Entitlements.plist from user or SDK next to the bundle,
3. code sign the bundle.
"""
substitutions, overrides = self._InstallProvisioningProfile(
provisioning, self._GetCFBundleIdentifier()
)
entitlements_path = self._InstallEntitlements(
entitlements, substitutions, overrides
)
args = ["codesign", "--force", "--sign", key]
if preserve == "True":
args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
else:
args.extend(["--entitlements", entitlements_path])
args.extend(["--timestamp=none", path])
subprocess.check_call(args) | [
"def",
"ExecCodeSignBundle",
"(",
"self",
",",
"key",
",",
"entitlements",
",",
"provisioning",
",",
"path",
",",
"preserve",
")",
":",
"substitutions",
",",
"overrides",
"=",
"self",
".",
"_InstallProvisioningProfile",
"(",
"provisioning",
",",
"self",
".",
"_GetCFBundleIdentifier",
"(",
")",
")",
"entitlements_path",
"=",
"self",
".",
"_InstallEntitlements",
"(",
"entitlements",
",",
"substitutions",
",",
"overrides",
")",
"args",
"=",
"[",
"\"codesign\"",
",",
"\"--force\"",
",",
"\"--sign\"",
",",
"key",
"]",
"if",
"preserve",
"==",
"\"True\"",
":",
"args",
".",
"extend",
"(",
"[",
"\"--deep\"",
",",
"\"--preserve-metadata=identifier,entitlements\"",
"]",
")",
"else",
":",
"args",
".",
"extend",
"(",
"[",
"\"--entitlements\"",
",",
"entitlements_path",
"]",
")",
"args",
".",
"extend",
"(",
"[",
"\"--timestamp=none\"",
",",
"path",
"]",
")",
"subprocess",
".",
"check_call",
"(",
"args",
")"
] | https://github.com/nodejs/node/blob/ac3c33c1646bf46104c15ae035982c06364da9b8/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py#L442-L465 |
||
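Illustrative note for the ExecCodeSignBundle record above: the method ultimately shells out to Apple's codesign tool. The standalone sketch below re-creates only the argument-list assembly; the signing identity, entitlements path, and bundle path are made-up placeholders, not values from the original project.

key = "iPhone Developer: Jane Doe (ABCD123456)"   # hypothetical signing identity
entitlements_path = "/tmp/Entitlements.xcent"     # hypothetical entitlements file
path = "build/My.app"                             # hypothetical bundle path
preserve = "False"

args = ["codesign", "--force", "--sign", key]
if preserve == "True":
    # re-signing: keep the existing identifier/entitlements metadata
    args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
else:
    # fresh signing against the generated entitlements file
    args.extend(["--entitlements", entitlements_path])
args.extend(["--timestamp=none", path])
print(" ".join(args))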
crits/crits | 6b357daa5c3060cf622d3a3b0c7b41a9ca69c049 | crits/core/templatetags/filters.py | python | user_source | (var, obj) | If var is in obj.
:param var: The value to look for.
:type var: str
:param obj: The object to search through.
:type obj: str/list/tuple
:returns: True, False | If var is in obj. | [
"If",
"var",
"is",
"in",
"obj",
"."
] | def user_source(var, obj):
"""
If var is in obj.
:param var: The value to look for.
:type var: str
:param obj: The object to search through.
:type obj: str/list/tuple
:returns: True, False
"""
if var in obj:
return True
else:
return False | [
"def",
"user_source",
"(",
"var",
",",
"obj",
")",
":",
"if",
"var",
"in",
"obj",
":",
"return",
"True",
"else",
":",
"return",
"False"
] | https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/core/templatetags/filters.py#L34-L48 |
||
lambdamusic/Ontospy | 534e408372edd392590e12839c32a403430aac23 | ontospy/core/entities.py | python | OntoClass.__init__ | (self, uri, rdftype=None, namespaces=None,
ext_model=False, pref_title="qname", pref_lang="en") | ... | ... | [
"..."
] | def __init__(self, uri, rdftype=None, namespaces=None,
ext_model=False, pref_title="qname", pref_lang="en"):
"""
...
"""
super().__init__(uri, rdftype, namespaces, ext_model,
pref_title=pref_title, pref_lang=pref_lang)
self.slug = "class-" + slugify(self.qname)
self.domain_of = []
self.range_of = []
self.domain_of_inferred = []
self.range_of_inferred = []
self.ontology = None
self._instances = False # calc on demand at runtime
self.sparqlHelper = None # the original graph the class derives from
self.shapedProperties = [
] | [
"def",
"__init__",
"(",
"self",
",",
"uri",
",",
"rdftype",
"=",
"None",
",",
"namespaces",
"=",
"None",
",",
"ext_model",
"=",
"False",
",",
"pref_title",
"=",
"\"qname\"",
",",
"pref_lang",
"=",
"\"en\"",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"uri",
",",
"rdftype",
",",
"namespaces",
",",
"ext_model",
",",
"pref_title",
"=",
"pref_title",
",",
"pref_lang",
"=",
"pref_lang",
")",
"self",
".",
"slug",
"=",
"\"class-\"",
"+",
"slugify",
"(",
"self",
".",
"qname",
")",
"self",
".",
"domain_of",
"=",
"[",
"]",
"self",
".",
"range_of",
"=",
"[",
"]",
"self",
".",
"domain_of_inferred",
"=",
"[",
"]",
"self",
".",
"range_of_inferred",
"=",
"[",
"]",
"self",
".",
"ontology",
"=",
"None",
"self",
".",
"_instances",
"=",
"False",
"# calc on demand at runtime",
"self",
".",
"sparqlHelper",
"=",
"None",
"# the original graph the class derives from",
"self",
".",
"shapedProperties",
"=",
"[",
"]"
] | https://github.com/lambdamusic/Ontospy/blob/534e408372edd392590e12839c32a403430aac23/ontospy/core/entities.py#L324-L340 |
||
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/closured/lib/python2.7/inspect.py | python | getargspec | (func) | return ArgSpec(args, varargs, varkw, func.func_defaults) | Get the names and default values of a function's arguments.
A tuple of four things is returned: (args, varargs, varkw, defaults).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'defaults' is an n-tuple of the default values of the last n arguments. | Get the names and default values of a function's arguments. | [
"Get",
"the",
"names",
"and",
"default",
"values",
"of",
"a",
"function",
"s",
"arguments",
"."
] | def getargspec(func):
"""Get the names and default values of a function's arguments.
A tuple of four things is returned: (args, varargs, varkw, defaults).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'defaults' is an n-tuple of the default values of the last n arguments.
"""
if ismethod(func):
func = func.im_func
if not isfunction(func):
raise TypeError('{!r} is not a Python function'.format(func))
args, varargs, varkw = getargs(func.func_code)
return ArgSpec(args, varargs, varkw, func.func_defaults) | [
"def",
"getargspec",
"(",
"func",
")",
":",
"if",
"ismethod",
"(",
"func",
")",
":",
"func",
"=",
"func",
".",
"im_func",
"if",
"not",
"isfunction",
"(",
"func",
")",
":",
"raise",
"TypeError",
"(",
"'{!r} is not a Python function'",
".",
"format",
"(",
"func",
")",
")",
"args",
",",
"varargs",
",",
"varkw",
"=",
"getargs",
"(",
"func",
".",
"func_code",
")",
"return",
"ArgSpec",
"(",
"args",
",",
"varargs",
",",
"varkw",
",",
"func",
".",
"func_defaults",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/inspect.py#L801-L815 |
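Usage sketch for the getargspec above (Python 2.7 semantics; the sample function is arbitrary and assumes this getargspec is in scope):

def example(a, b=1, *args, **kwargs):
    # arbitrary function used only to show the four ArgSpec fields
    pass

print(getargspec(example))
# ArgSpec(args=['a', 'b'], varargs='args', keywords='kwargs', defaults=(1,))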
|
jxcore/jxcore | b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410 | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py | python | Tool._GetSpecification | (self) | return ['Tool', self._attrs] | Creates an element for the tool.
Returns:
A new xml.dom.Element for the tool. | Creates an element for the tool. | [
"Creates",
"an",
"element",
"for",
"the",
"tool",
"."
] | def _GetSpecification(self):
"""Creates an element for the tool.
Returns:
A new xml.dom.Element for the tool.
"""
return ['Tool', self._attrs] | [
"def",
"_GetSpecification",
"(",
"self",
")",
":",
"return",
"[",
"'Tool'",
",",
"self",
".",
"_attrs",
"]"
] | https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py#L26-L32 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/closured/lib/python2.7/mimify.py | python | mimify_part | (ifile, ofile, is_mime) | Convert an 8bit part of a MIME mail message to quoted-printable. | Convert an 8bit part of a MIME mail message to quoted-printable. | [
"Convert",
"an",
"8bit",
"part",
"of",
"a",
"MIME",
"mail",
"message",
"to",
"quoted",
"-",
"printable",
"."
] | def mimify_part(ifile, ofile, is_mime):
"""Convert an 8bit part of a MIME mail message to quoted-printable."""
has_cte = is_qp = is_base64 = 0
multipart = None
must_quote_body = must_quote_header = has_iso_chars = 0
header = []
header_end = ''
message = []
message_end = ''
# read header
hfile = HeaderFile(ifile)
while 1:
line = hfile.readline()
if not line:
break
if not must_quote_header and iso_char.search(line):
must_quote_header = 1
if mv.match(line):
is_mime = 1
if cte.match(line):
has_cte = 1
if qp.match(line):
is_qp = 1
elif base64_re.match(line):
is_base64 = 1
mp_res = mp.match(line)
if mp_res:
multipart = '--' + mp_res.group(1)
if he.match(line):
header_end = line
break
header.append(line)
# read body
while 1:
line = ifile.readline()
if not line:
break
if multipart:
if line == multipart + '--\n':
message_end = line
break
if line == multipart + '\n':
message_end = line
break
if is_base64:
message.append(line)
continue
if is_qp:
while line[-2:] == '=\n':
line = line[:-2]
newline = ifile.readline()
if newline[:len(QUOTE)] == QUOTE:
newline = newline[len(QUOTE):]
line = line + newline
line = mime_decode(line)
message.append(line)
if not has_iso_chars:
if iso_char.search(line):
has_iso_chars = must_quote_body = 1
if not must_quote_body:
if len(line) > MAXLEN:
must_quote_body = 1
# convert and output header and body
for line in header:
if must_quote_header:
line = mime_encode_header(line)
chrset_res = chrset.match(line)
if chrset_res:
if has_iso_chars:
# change us-ascii into iso-8859-1
if chrset_res.group(2).lower() == 'us-ascii':
line = '%s%s%s' % (chrset_res.group(1),
CHARSET,
chrset_res.group(3))
else:
# change iso-8859-* into us-ascii
line = '%sus-ascii%s' % chrset_res.group(1, 3)
if has_cte and cte.match(line):
line = 'Content-Transfer-Encoding: '
if is_base64:
line = line + 'base64\n'
elif must_quote_body:
line = line + 'quoted-printable\n'
else:
line = line + '7bit\n'
ofile.write(line)
if (must_quote_header or must_quote_body) and not is_mime:
ofile.write('Mime-Version: 1.0\n')
ofile.write('Content-Type: text/plain; ')
if has_iso_chars:
ofile.write('charset="%s"\n' % CHARSET)
else:
ofile.write('charset="us-ascii"\n')
if must_quote_body and not has_cte:
ofile.write('Content-Transfer-Encoding: quoted-printable\n')
ofile.write(header_end)
for line in message:
if must_quote_body:
line = mime_encode(line, 0)
ofile.write(line)
ofile.write(message_end)
line = message_end
while multipart:
if line == multipart + '--\n':
# read bit after the end of the last part
while 1:
line = ifile.readline()
if not line:
return
if must_quote_body:
line = mime_encode(line, 0)
ofile.write(line)
if line == multipart + '\n':
nifile = File(ifile, multipart)
mimify_part(nifile, ofile, 1)
line = nifile.peek
if not line:
# premature end of file
break
ofile.write(line)
continue
# unexpectedly no multipart separator--copy rest of file
while 1:
line = ifile.readline()
if not line:
return
if must_quote_body:
line = mime_encode(line, 0)
ofile.write(line) | [
"def",
"mimify_part",
"(",
"ifile",
",",
"ofile",
",",
"is_mime",
")",
":",
"has_cte",
"=",
"is_qp",
"=",
"is_base64",
"=",
"0",
"multipart",
"=",
"None",
"must_quote_body",
"=",
"must_quote_header",
"=",
"has_iso_chars",
"=",
"0",
"header",
"=",
"[",
"]",
"header_end",
"=",
"''",
"message",
"=",
"[",
"]",
"message_end",
"=",
"''",
"# read header",
"hfile",
"=",
"HeaderFile",
"(",
"ifile",
")",
"while",
"1",
":",
"line",
"=",
"hfile",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"break",
"if",
"not",
"must_quote_header",
"and",
"iso_char",
".",
"search",
"(",
"line",
")",
":",
"must_quote_header",
"=",
"1",
"if",
"mv",
".",
"match",
"(",
"line",
")",
":",
"is_mime",
"=",
"1",
"if",
"cte",
".",
"match",
"(",
"line",
")",
":",
"has_cte",
"=",
"1",
"if",
"qp",
".",
"match",
"(",
"line",
")",
":",
"is_qp",
"=",
"1",
"elif",
"base64_re",
".",
"match",
"(",
"line",
")",
":",
"is_base64",
"=",
"1",
"mp_res",
"=",
"mp",
".",
"match",
"(",
"line",
")",
"if",
"mp_res",
":",
"multipart",
"=",
"'--'",
"+",
"mp_res",
".",
"group",
"(",
"1",
")",
"if",
"he",
".",
"match",
"(",
"line",
")",
":",
"header_end",
"=",
"line",
"break",
"header",
".",
"append",
"(",
"line",
")",
"# read body",
"while",
"1",
":",
"line",
"=",
"ifile",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"break",
"if",
"multipart",
":",
"if",
"line",
"==",
"multipart",
"+",
"'--\\n'",
":",
"message_end",
"=",
"line",
"break",
"if",
"line",
"==",
"multipart",
"+",
"'\\n'",
":",
"message_end",
"=",
"line",
"break",
"if",
"is_base64",
":",
"message",
".",
"append",
"(",
"line",
")",
"continue",
"if",
"is_qp",
":",
"while",
"line",
"[",
"-",
"2",
":",
"]",
"==",
"'=\\n'",
":",
"line",
"=",
"line",
"[",
":",
"-",
"2",
"]",
"newline",
"=",
"ifile",
".",
"readline",
"(",
")",
"if",
"newline",
"[",
":",
"len",
"(",
"QUOTE",
")",
"]",
"==",
"QUOTE",
":",
"newline",
"=",
"newline",
"[",
"len",
"(",
"QUOTE",
")",
":",
"]",
"line",
"=",
"line",
"+",
"newline",
"line",
"=",
"mime_decode",
"(",
"line",
")",
"message",
".",
"append",
"(",
"line",
")",
"if",
"not",
"has_iso_chars",
":",
"if",
"iso_char",
".",
"search",
"(",
"line",
")",
":",
"has_iso_chars",
"=",
"must_quote_body",
"=",
"1",
"if",
"not",
"must_quote_body",
":",
"if",
"len",
"(",
"line",
")",
">",
"MAXLEN",
":",
"must_quote_body",
"=",
"1",
"# convert and output header and body",
"for",
"line",
"in",
"header",
":",
"if",
"must_quote_header",
":",
"line",
"=",
"mime_encode_header",
"(",
"line",
")",
"chrset_res",
"=",
"chrset",
".",
"match",
"(",
"line",
")",
"if",
"chrset_res",
":",
"if",
"has_iso_chars",
":",
"# change us-ascii into iso-8859-1",
"if",
"chrset_res",
".",
"group",
"(",
"2",
")",
".",
"lower",
"(",
")",
"==",
"'us-ascii'",
":",
"line",
"=",
"'%s%s%s'",
"%",
"(",
"chrset_res",
".",
"group",
"(",
"1",
")",
",",
"CHARSET",
",",
"chrset_res",
".",
"group",
"(",
"3",
")",
")",
"else",
":",
"# change iso-8859-* into us-ascii",
"line",
"=",
"'%sus-ascii%s'",
"%",
"chrset_res",
".",
"group",
"(",
"1",
",",
"3",
")",
"if",
"has_cte",
"and",
"cte",
".",
"match",
"(",
"line",
")",
":",
"line",
"=",
"'Content-Transfer-Encoding: '",
"if",
"is_base64",
":",
"line",
"=",
"line",
"+",
"'base64\\n'",
"elif",
"must_quote_body",
":",
"line",
"=",
"line",
"+",
"'quoted-printable\\n'",
"else",
":",
"line",
"=",
"line",
"+",
"'7bit\\n'",
"ofile",
".",
"write",
"(",
"line",
")",
"if",
"(",
"must_quote_header",
"or",
"must_quote_body",
")",
"and",
"not",
"is_mime",
":",
"ofile",
".",
"write",
"(",
"'Mime-Version: 1.0\\n'",
")",
"ofile",
".",
"write",
"(",
"'Content-Type: text/plain; '",
")",
"if",
"has_iso_chars",
":",
"ofile",
".",
"write",
"(",
"'charset=\"%s\"\\n'",
"%",
"CHARSET",
")",
"else",
":",
"ofile",
".",
"write",
"(",
"'charset=\"us-ascii\"\\n'",
")",
"if",
"must_quote_body",
"and",
"not",
"has_cte",
":",
"ofile",
".",
"write",
"(",
"'Content-Transfer-Encoding: quoted-printable\\n'",
")",
"ofile",
".",
"write",
"(",
"header_end",
")",
"for",
"line",
"in",
"message",
":",
"if",
"must_quote_body",
":",
"line",
"=",
"mime_encode",
"(",
"line",
",",
"0",
")",
"ofile",
".",
"write",
"(",
"line",
")",
"ofile",
".",
"write",
"(",
"message_end",
")",
"line",
"=",
"message_end",
"while",
"multipart",
":",
"if",
"line",
"==",
"multipart",
"+",
"'--\\n'",
":",
"# read bit after the end of the last part",
"while",
"1",
":",
"line",
"=",
"ifile",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"return",
"if",
"must_quote_body",
":",
"line",
"=",
"mime_encode",
"(",
"line",
",",
"0",
")",
"ofile",
".",
"write",
"(",
"line",
")",
"if",
"line",
"==",
"multipart",
"+",
"'\\n'",
":",
"nifile",
"=",
"File",
"(",
"ifile",
",",
"multipart",
")",
"mimify_part",
"(",
"nifile",
",",
"ofile",
",",
"1",
")",
"line",
"=",
"nifile",
".",
"peek",
"if",
"not",
"line",
":",
"# premature end of file",
"break",
"ofile",
".",
"write",
"(",
"line",
")",
"continue",
"# unexpectedly no multipart separator--copy rest of file",
"while",
"1",
":",
"line",
"=",
"ifile",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"return",
"if",
"must_quote_body",
":",
"line",
"=",
"mime_encode",
"(",
"line",
",",
"0",
")",
"ofile",
".",
"write",
"(",
"line",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/mimify.py#L280-L413 |
||
mceSystems/node-jsc | 90634f3064fab8e89a85b3942f0cc5054acc86fa | tools/gyp/pylib/gyp/MSVSUserFile.py | python | Writer.__init__ | (self, user_file_path, version, name) | Initializes the user file.
Args:
user_file_path: Path to the user file.
version: Version info.
name: Name of the user file. | Initializes the user file. | [
"Initializes",
"the",
"user",
"file",
"."
] | def __init__(self, user_file_path, version, name):
"""Initializes the user file.
Args:
user_file_path: Path to the user file.
version: Version info.
name: Name of the user file.
"""
self.user_file_path = user_file_path
self.version = version
self.name = name
self.configurations = {} | [
"def",
"__init__",
"(",
"self",
",",
"user_file_path",
",",
"version",
",",
"name",
")",
":",
"self",
".",
"user_file_path",
"=",
"user_file_path",
"self",
".",
"version",
"=",
"version",
"self",
".",
"name",
"=",
"name",
"self",
".",
"configurations",
"=",
"{",
"}"
] | https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/tools/gyp/pylib/gyp/MSVSUserFile.py#L57-L68 |
||
lukevink/hass-config-lajv | cc435372da788fdbeb28c370fe10d6b4090d5244 | custom_components/hacs/operational/setup_actions/load_hacs_repository.py | python | async_load_hacs_repository | () | return True | Load HACS repository. | Load HACS repository. | [
"Load",
"HACS",
"repositroy",
"."
] | async def async_load_hacs_repository():
"""Load HACS repositroy."""
hacs = get_hacs()
hacs.log.info("Setup task %s", HacsSetupTask.HACS_REPO)
try:
repository = hacs.get_by_name("hacs/integration")
if repository is None:
await register_repository("hacs/integration", "integration")
repository = hacs.get_by_name("hacs/integration")
if repository is None:
raise HacsException("Unknown error")
repository.data.installed = True
repository.data.installed_version = INTEGRATION_VERSION
repository.data.new = False
hacs.repo = repository.repository_object
hacs.data_repo, _ = await get_repository(
hacs.session, hacs.configuration.token, "hacs/default", None
)
except HacsException as exception:
if "403" in f"{exception}":
hacs.log.critical("GitHub API is ratelimited, or the token is wrong.")
else:
hacs.log.critical(f"[{exception}] - Could not load HACS!")
return False
return True | [
"async",
"def",
"async_load_hacs_repository",
"(",
")",
":",
"hacs",
"=",
"get_hacs",
"(",
")",
"hacs",
".",
"log",
".",
"info",
"(",
"\"Setup task %s\"",
",",
"HacsSetupTask",
".",
"HACS_REPO",
")",
"try",
":",
"repository",
"=",
"hacs",
".",
"get_by_name",
"(",
"\"hacs/integration\"",
")",
"if",
"repository",
"is",
"None",
":",
"await",
"register_repository",
"(",
"\"hacs/integration\"",
",",
"\"integration\"",
")",
"repository",
"=",
"hacs",
".",
"get_by_name",
"(",
"\"hacs/integration\"",
")",
"if",
"repository",
"is",
"None",
":",
"raise",
"HacsException",
"(",
"\"Unknown error\"",
")",
"repository",
".",
"data",
".",
"installed",
"=",
"True",
"repository",
".",
"data",
".",
"installed_version",
"=",
"INTEGRATION_VERSION",
"repository",
".",
"data",
".",
"new",
"=",
"False",
"hacs",
".",
"repo",
"=",
"repository",
".",
"repository_object",
"hacs",
".",
"data_repo",
",",
"_",
"=",
"await",
"get_repository",
"(",
"hacs",
".",
"session",
",",
"hacs",
".",
"configuration",
".",
"token",
",",
"\"hacs/default\"",
",",
"None",
")",
"except",
"HacsException",
"as",
"exception",
":",
"if",
"\"403\"",
"in",
"f\"{exception}\"",
":",
"hacs",
".",
"log",
".",
"critical",
"(",
"\"GitHub API is ratelimited, or the token is wrong.\"",
")",
"else",
":",
"hacs",
".",
"log",
".",
"critical",
"(",
"f\"[{exception}] - Could not load HACS!\"",
")",
"return",
"False",
"return",
"True"
] | https://github.com/lukevink/hass-config-lajv/blob/cc435372da788fdbeb28c370fe10d6b4090d5244/custom_components/hacs/operational/setup_actions/load_hacs_repository.py#L13-L38 |
|
retspen/webvirtcloud | ba06af977511d7874c7ce0208e3a9b6bb496c22b | vrtManager/connection.py | python | wvmConnect.get_hostdev_modes | (self, arch, machine) | return util.get_xml_path(self.get_dom_cap_xml(arch, machine), func=get_hostdev_list) | :param arch: architecture
:param machine:
:return: available nodedev modes | :param arch: architecture
:param machine:
:return: available nodedev modes | [
":",
"param",
"arch",
":",
"architecture",
":",
"param",
"machine",
":",
":",
"return",
".",
"available",
"nodedev",
"modes"
] | def get_hostdev_modes(self, arch, machine):
"""
:param arch: architecture
:param machine:
:return: available nodedev modes
"""
def get_hostdev_list(ctx):
return [v.text for v in ctx.xpath("/domainCapabilities/devices/hostdev/enum[@name='mode']/value")]
return util.get_xml_path(self.get_dom_cap_xml(arch, machine), func=get_hostdev_list) | [
"def",
"get_hostdev_modes",
"(",
"self",
",",
"arch",
",",
"machine",
")",
":",
"def",
"get_hostdev_list",
"(",
"ctx",
")",
":",
"return",
"[",
"v",
".",
"text",
"for",
"v",
"in",
"ctx",
".",
"xpath",
"(",
"\"/domainCapabilities/devices/hostdev/enum[@name='mode']/value\"",
")",
"]",
"return",
"util",
".",
"get_xml_path",
"(",
"self",
".",
"get_dom_cap_xml",
"(",
"arch",
",",
"machine",
")",
",",
"func",
"=",
"get_hostdev_list",
")"
] | https://github.com/retspen/webvirtcloud/blob/ba06af977511d7874c7ce0208e3a9b6bb496c22b/vrtManager/connection.py#L720-L730 |
|
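To make the XPath in get_hostdev_modes concrete, here is a self-contained sketch that evaluates the same expression with lxml against a small, invented domain-capabilities document (the real method obtains this XML from libvirt via get_dom_cap_xml):

from lxml import etree

sample = """<domainCapabilities>
  <devices>
    <hostdev supported='yes'>
      <enum name='mode'>
        <value>subsystem</value>
        <value>capabilities</value>
      </enum>
    </hostdev>
  </devices>
</domainCapabilities>"""

ctx = etree.fromstring(sample)
modes = [v.text for v in ctx.xpath("/domainCapabilities/devices/hostdev/enum[@name='mode']/value")]
print(modes)  # ['subsystem', 'capabilities']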
arschles/go-in-5-minutes | c02918d1def999b2d59c060818e8adb735e24719 | episode24/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | python | XcodeSettings._DefaultSdkRoot | (self) | return '' | Returns the default SDKROOT to use.
Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed. | Returns the default SDKROOT to use. | [
"Returns",
"the",
"default",
"SDKROOT",
"to",
"use",
"."
] | def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
xcode_version, xcode_build = XcodeVersion()
if xcode_version < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
try:
all_sdks = GetStdout(['xcodebuild', '-showsdks'])
except:
# If xcodebuild fails, there will be no valid SDKs
return ''
for line in all_sdks.splitlines():
items = line.split()
if len(items) >= 3 and items[-2] == '-sdk':
sdk_root = items[-1]
sdk_path = self._XcodeSdkPath(sdk_root)
if sdk_path == default_sdk_path:
return sdk_root
return '' | [
"def",
"_DefaultSdkRoot",
"(",
"self",
")",
":",
"xcode_version",
",",
"xcode_build",
"=",
"XcodeVersion",
"(",
")",
"if",
"xcode_version",
"<",
"'0500'",
":",
"return",
"''",
"default_sdk_path",
"=",
"self",
".",
"_XcodeSdkPath",
"(",
"''",
")",
"default_sdk_root",
"=",
"XcodeSettings",
".",
"_sdk_root_cache",
".",
"get",
"(",
"default_sdk_path",
")",
"if",
"default_sdk_root",
":",
"return",
"default_sdk_root",
"try",
":",
"all_sdks",
"=",
"GetStdout",
"(",
"[",
"'xcodebuild'",
",",
"'-showsdks'",
"]",
")",
"except",
":",
"# If xcodebuild fails, there will be no valid SDKs",
"return",
"''",
"for",
"line",
"in",
"all_sdks",
".",
"splitlines",
"(",
")",
":",
"items",
"=",
"line",
".",
"split",
"(",
")",
"if",
"len",
"(",
"items",
")",
">=",
"3",
"and",
"items",
"[",
"-",
"2",
"]",
"==",
"'-sdk'",
":",
"sdk_root",
"=",
"items",
"[",
"-",
"1",
"]",
"sdk_path",
"=",
"self",
".",
"_XcodeSdkPath",
"(",
"sdk_root",
")",
"if",
"sdk_path",
"==",
"default_sdk_path",
":",
"return",
"sdk_root",
"return",
"''"
] | https://github.com/arschles/go-in-5-minutes/blob/c02918d1def999b2d59c060818e8adb735e24719/episode24/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L1105-L1131 |
|
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | addons/hw_escpos/escpos/escpos.py | python | StyleStack.get | (self,style) | return None | what's the value of a style at the current stack level | what's the value of a style at the current stack level | [
"what",
"s",
"the",
"value",
"of",
"a",
"style",
"at",
"the",
"current",
"stack",
"level"
] | def get(self,style):
""" what's the value of a style at the current stack level"""
level = len(self.stack) -1
while level >= 0:
if style in self.stack[level]:
return self.stack[level][style]
else:
level = level - 1
return None | [
"def",
"get",
"(",
"self",
",",
"style",
")",
":",
"level",
"=",
"len",
"(",
"self",
".",
"stack",
")",
"-",
"1",
"while",
"level",
">=",
"0",
":",
"if",
"style",
"in",
"self",
".",
"stack",
"[",
"level",
"]",
":",
"return",
"self",
".",
"stack",
"[",
"level",
"]",
"[",
"style",
"]",
"else",
":",
"level",
"=",
"level",
"-",
"1",
"return",
"None"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/hw_escpos/escpos/escpos.py#L125-L133 |
|
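Minimal standalone illustration of the lookup rule in StyleStack.get: the most recently pushed level wins, lower levels serve as fallbacks, and unknown styles return None. The stack contents below are invented for the example.

stack = [{'bold': 0, 'size': 'normal'}, {'bold': 1}]   # level 0 pushed first, level 1 pushed last

def lookup(stack, style):
    level = len(stack) - 1              # start at the top of the stack
    while level >= 0:
        if style in stack[level]:
            return stack[level][style]
        level = level - 1               # fall back to the previous level
    return None

print(lookup(stack, 'bold'))   # 1       (top level overrides)
print(lookup(stack, 'size'))   # normal  (inherited from a lower level)
print(lookup(stack, 'font'))   # None    (not defined at any level)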
crits/crits | 6b357daa5c3060cf622d3a3b0c7b41a9ca69c049 | crits/raw_data/handlers.py | python | update_raw_data_tool_details | (_id, details, analyst) | Update the RawData tool details.
:param _id: ObjectId of the RawData to update.
:type _id: str
:param details: The detail to set.
:type detail: str
:param analyst: The user updating the details.
:type analyst: str
:returns: None
:raises: ValidationError | Update the RawData tool details. | [
"Update",
"the",
"RawData",
"tool",
"details",
"."
] | def update_raw_data_tool_details(_id, details, analyst):
"""
Update the RawData tool details.
:param _id: ObjectId of the RawData to update.
:type _id: str
:param details: The detail to set.
:type detail: str
:param analyst: The user updating the details.
:type analyst: str
:returns: None
:raises: ValidationError
"""
raw_data = RawData.objects(id=_id).first()
raw_data.tool.details = details
try:
raw_data.save(username=analyst)
return None
except ValidationError, e:
return e | [
"def",
"update_raw_data_tool_details",
"(",
"_id",
",",
"details",
",",
"analyst",
")",
":",
"raw_data",
"=",
"RawData",
".",
"objects",
"(",
"id",
"=",
"_id",
")",
".",
"first",
"(",
")",
"raw_data",
".",
"tool",
".",
"details",
"=",
"details",
"try",
":",
"raw_data",
".",
"save",
"(",
"username",
"=",
"analyst",
")",
"return",
"None",
"except",
"ValidationError",
",",
"e",
":",
"return",
"e"
] | https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/raw_data/handlers.py#L484-L504 |
||
korolr/dotfiles | 8e46933503ecb8d8651739ffeb1d2d4f0f5c6524 | .config/sublime-text-3/Packages/mdpopups/st3/mdpopups/mdx/superfences.py | python | CodeStash.clear_stash | (self) | Clear the stash. | Clear the stash. | [
"Clear",
"the",
"stash",
"."
] | def clear_stash(self):
"""Clear the stash."""
self.stash = {} | [
"def",
"clear_stash",
"(",
"self",
")",
":",
"self",
".",
"stash",
"=",
"{",
"}"
] | https://github.com/korolr/dotfiles/blob/8e46933503ecb8d8651739ffeb1d2d4f0f5c6524/.config/sublime-text-3/Packages/mdpopups/st3/mdpopups/mdx/superfences.py#L110-L113 |
||
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | addons/website_sale/models/sale_order.py | python | SaleOrder._cart_find_product_line | (self, product_id=None, line_id=None, **kwargs) | return self.env['sale.order.line'].sudo().search(domain) | Find the cart line matching the given parameters.
If a product_id is given, the line will match the product only if the
line also has the same special attributes: `no_variant` attributes and
`is_custom` values. | Find the cart line matching the given parameters. | [
"Find",
"the",
"cart",
"line",
"matching",
"the",
"given",
"parameters",
"."
] | def _cart_find_product_line(self, product_id=None, line_id=None, **kwargs):
"""Find the cart line matching the given parameters.
If a product_id is given, the line will match the product only if the
line also has the same special attributes: `no_variant` attributes and
`is_custom` values.
"""
self.ensure_one()
product = self.env['product.product'].browse(product_id)
# split lines with the same product if it has untracked attributes
if product and (product.product_tmpl_id.has_dynamic_attributes() or product.product_tmpl_id._has_no_variant_attributes()) and not line_id:
return self.env['sale.order.line']
domain = [('order_id', '=', self.id), ('product_id', '=', product_id)]
if line_id:
domain += [('id', '=', line_id)]
else:
domain += [('product_custom_attribute_value_ids', '=', False)]
return self.env['sale.order.line'].sudo().search(domain) | [
"def",
"_cart_find_product_line",
"(",
"self",
",",
"product_id",
"=",
"None",
",",
"line_id",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"ensure_one",
"(",
")",
"product",
"=",
"self",
".",
"env",
"[",
"'product.product'",
"]",
".",
"browse",
"(",
"product_id",
")",
"# split lines with the same product if it has untracked attributes",
"if",
"product",
"and",
"(",
"product",
".",
"product_tmpl_id",
".",
"has_dynamic_attributes",
"(",
")",
"or",
"product",
".",
"product_tmpl_id",
".",
"_has_no_variant_attributes",
"(",
")",
")",
"and",
"not",
"line_id",
":",
"return",
"self",
".",
"env",
"[",
"'sale.order.line'",
"]",
"domain",
"=",
"[",
"(",
"'order_id'",
",",
"'='",
",",
"self",
".",
"id",
")",
",",
"(",
"'product_id'",
",",
"'='",
",",
"product_id",
")",
"]",
"if",
"line_id",
":",
"domain",
"+=",
"[",
"(",
"'id'",
",",
"'='",
",",
"line_id",
")",
"]",
"else",
":",
"domain",
"+=",
"[",
"(",
"'product_custom_attribute_value_ids'",
",",
"'='",
",",
"False",
")",
"]",
"return",
"self",
".",
"env",
"[",
"'sale.order.line'",
"]",
".",
"sudo",
"(",
")",
".",
"search",
"(",
"domain",
")"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/website_sale/models/sale_order.py#L83-L103 |
|
Juniper/wistar | 39638acccbb84a673cb55ee85c685867491e27c4 | common/lib/imageUtils.py | python | get_image_list | () | return image_list | Get a list of images from the configured deployment backend
:return: list of image_detail dicts | Get a list of images from the configured deployment backend
:return: list of image_detail dicts | [
"Get",
"a",
"list",
"of",
"images",
"from",
"the",
"configured",
"deployment",
"backend",
":",
"return",
":",
"list",
"of",
"image_detail",
"dicts"
] | def get_image_list():
"""
Get a list of images from the configured deployment backend
:return: list of image_detail dicts
"""
logger.debug('---- imageUtils get_image_list ----')
image_list = list()
if configuration.deployment_backend == "openstack":
if openstackUtils.connect_to_openstack():
images = openstackUtils.get_glance_image_list()
for glance_detail in images:
image_detail = get_image_detail_from_glance_image(glance_detail)
image_list.append(image_detail)
else:
images = Image.objects.all()
for local_detail in images:
image_detail = get_image_detail_from_local_image(local_detail)
image_list.append(image_detail)
return image_list | [
"def",
"get_image_list",
"(",
")",
":",
"logger",
".",
"debug",
"(",
"'---- imageUtils get_image_list ----'",
")",
"image_list",
"=",
"list",
"(",
")",
"if",
"configuration",
".",
"deployment_backend",
"==",
"\"openstack\"",
":",
"if",
"openstackUtils",
".",
"connect_to_openstack",
"(",
")",
":",
"images",
"=",
"openstackUtils",
".",
"get_glance_image_list",
"(",
")",
"for",
"glance_detail",
"in",
"images",
":",
"image_detail",
"=",
"get_image_detail_from_glance_image",
"(",
"glance_detail",
")",
"image_list",
".",
"append",
"(",
"image_detail",
")",
"else",
":",
"images",
"=",
"Image",
".",
"objects",
".",
"all",
"(",
")",
"for",
"local_detail",
"in",
"images",
":",
"image_detail",
"=",
"get_image_detail_from_local_image",
"(",
"local_detail",
")",
"image_list",
".",
"append",
"(",
"image_detail",
")",
"return",
"image_list"
] | https://github.com/Juniper/wistar/blob/39638acccbb84a673cb55ee85c685867491e27c4/common/lib/imageUtils.py#L180-L202 |
|
metajack/profxmpp | 3735b352af090f8ba68cb888926a5c6efde4093a | ch12/attach/boshclient.py | python | BOSHClient.buildBody | (self, child=None) | return body | Build a BOSH body. | Build a BOSH body. | [
"Build",
"a",
"BOSH",
"body",
"."
] | def buildBody(self, child=None):
"""Build a BOSH body.
"""
body = domish.Element(("http://jabber.org/protocol/httpbind", "body"))
body['content'] = 'text/xml; charset=utf-8'
self.rid = self.rid + 1
body['rid'] = str(self.rid)
body['sid'] = str(self.sid)
body['xml:lang'] = 'en'
if child is not None:
body.addChild(child)
return body | [
"def",
"buildBody",
"(",
"self",
",",
"child",
"=",
"None",
")",
":",
"body",
"=",
"domish",
".",
"Element",
"(",
"(",
"\"http://jabber.org/protocol/httpbind\"",
",",
"\"body\"",
")",
")",
"body",
"[",
"'content'",
"]",
"=",
"'text/xml; charset=utf-8'",
"self",
".",
"rid",
"=",
"self",
".",
"rid",
"+",
"1",
"body",
"[",
"'rid'",
"]",
"=",
"str",
"(",
"self",
".",
"rid",
")",
"body",
"[",
"'sid'",
"]",
"=",
"str",
"(",
"self",
".",
"sid",
")",
"body",
"[",
"'xml:lang'",
"]",
"=",
"'en'",
"if",
"child",
"is",
"not",
"None",
":",
"body",
".",
"addChild",
"(",
"child",
")",
"return",
"body"
] | https://github.com/metajack/profxmpp/blob/3735b352af090f8ba68cb888926a5c6efde4093a/ch12/attach/boshclient.py#L31-L45 |
|
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py | python | HexPattern.__new__ | (cls, pattern) | return object.__new__(cls, pattern) | If the pattern is completely static (no wildcards are present) a
L{BytePattern} is created instead. That's because searching for a
fixed byte pattern is faster than searching for a regular expression. | If the pattern is completely static (no wildcards are present) a
L{BytePattern} is created instead. That's because searching for a
fixed byte pattern is faster than searching for a regular expression. | [
"If",
"the",
"pattern",
"is",
"completely",
"static",
"(",
"no",
"wildcards",
"are",
"present",
")",
"a",
"L",
"{",
"BytePattern",
"}",
"is",
"created",
"instead",
".",
"That",
"s",
"because",
"searching",
"for",
"a",
"fixed",
"byte",
"pattern",
"is",
"faster",
"than",
"searching",
"for",
"a",
"regular",
"expression",
"."
] | def __new__(cls, pattern):
"""
If the pattern is completely static (no wildcards are present) a
L{BytePattern} is created instead. That's because searching for a
fixed byte pattern is faster than searching for a regular expression.
"""
if '?' not in pattern:
return BytePattern( HexInput.hexadecimal(pattern) )
return object.__new__(cls, pattern) | [
"def",
"__new__",
"(",
"cls",
",",
"pattern",
")",
":",
"if",
"'?'",
"not",
"in",
"pattern",
":",
"return",
"BytePattern",
"(",
"HexInput",
".",
"hexadecimal",
"(",
"pattern",
")",
")",
"return",
"object",
".",
"__new__",
"(",
"cls",
",",
"pattern",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py#L383-L391 |
|
ErosZy/SecurityWorker | c69a852fdb34eb156dd448090767b924e724d603 | 3rdparty/jerry/targets/curie_bsp/setup.py | python | find_sources | (root_dir, sub_dir) | return matches | Find .c and .S files inside the @root_dir/@sub_dir directory.
Note: the returned paths will be relative to the @root_dir directory. | Find .c and .S files inside the | [
"Find",
".",
"c",
"and",
".",
"S",
"files",
"inside",
"the"
] | def find_sources(root_dir, sub_dir):
"""
Find .c and .S files inside the @root_dir/@sub_dir directory.
Note: the returned paths will be relative to the @root_dir directory.
"""
src_dir = os.path.join(root_dir, sub_dir)
matches = []
for root, dirnames, filenames in os.walk(src_dir):
for filename in fnmatch.filter(filenames, '*.[c|S]'):
file_path = os.path.join(root, filename)
relative_path = os.path.relpath(file_path, root_dir)
matches.append(relative_path)
return matches | [
"def",
"find_sources",
"(",
"root_dir",
",",
"sub_dir",
")",
":",
"src_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root_dir",
",",
"sub_dir",
")",
"matches",
"=",
"[",
"]",
"for",
"root",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"src_dir",
")",
":",
"for",
"filename",
"in",
"fnmatch",
".",
"filter",
"(",
"filenames",
",",
"'*.[c|S]'",
")",
":",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"filename",
")",
"relative_path",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"file_path",
",",
"root_dir",
")",
"matches",
".",
"append",
"(",
"relative_path",
")",
"return",
"matches"
] | https://github.com/ErosZy/SecurityWorker/blob/c69a852fdb34eb156dd448090767b924e724d603/3rdparty/jerry/targets/curie_bsp/setup.py#L53-L67 |
|
demi6od/ChromeFuzzer | 4eaf1084d5f8fd20897706edf8b67bfbbd6380fc | PocSmplify/pydbg-master/pydbg.py | python | pydbg.process_snapshot | (self, mem_only=False) | return self.ret_self() | Take memory / context snapshot of the debuggee. All threads must be suspended before calling this routine.
@raise pdx: An exception is raised on failure.
@rtype: pydbg
@return: Self | Take memory / context snapshot of the debuggee. All threads must be suspended before calling this routine. | [
"Take",
"memory",
"/",
"context",
"snapshot",
"of",
"the",
"debuggee",
".",
"All",
"threads",
"must",
"be",
"suspended",
"before",
"calling",
"this",
"routine",
"."
] | def process_snapshot (self, mem_only=False):
'''
Take memory / context snapshot of the debuggee. All threads must be suspended before calling this routine.
@raise pdx: An exception is raised on failure.
@rtype: pydbg
@return: Self
'''
self._log("taking debuggee snapshot")
do_not_snapshot = [PAGE_READONLY, PAGE_EXECUTE_READ, PAGE_GUARD, PAGE_NOACCESS]
cursor = 0
# reset the internal snapshot data structure lists.
self.memory_snapshot_blocks = []
self.memory_snapshot_contexts = []
if not mem_only:
# enumerate the running threads and save a copy of their contexts.
for thread_id in self.enumerate_threads():
context = self.get_thread_context(None, thread_id)
self.memory_snapshot_contexts.append(memory_snapshot_context(thread_id, context))
self._log("saving thread context of thread id: %08x" % thread_id)
# scan through the entire memory range and save a copy of suitable memory blocks.
while cursor < 0xFFFFFFFF:
save_block = True
try:
mbi = self.virtual_query(cursor)
except:
break
# do not snapshot blocks of memory that match the following characteristics.
# XXX - might want to drop the MEM_IMAGE check to accomodate for self modifying code.
if mbi.State != MEM_COMMIT or mbi.Type == MEM_IMAGE:
save_block = False
for has_protection in do_not_snapshot:
if mbi.Protect & has_protection:
save_block = False
break
if save_block:
self._log("Adding %08x +%d to memory snapsnot." % (mbi.BaseAddress, mbi.RegionSize))
# read the raw bytes from the memory block.
data = self.read_process_memory(mbi.BaseAddress, mbi.RegionSize)
self.memory_snapshot_blocks.append(memory_snapshot_block(mbi, data))
cursor += mbi.RegionSize
return self.ret_self() | [
"def",
"process_snapshot",
"(",
"self",
",",
"mem_only",
"=",
"False",
")",
":",
"self",
".",
"_log",
"(",
"\"taking debuggee snapshot\"",
")",
"do_not_snapshot",
"=",
"[",
"PAGE_READONLY",
",",
"PAGE_EXECUTE_READ",
",",
"PAGE_GUARD",
",",
"PAGE_NOACCESS",
"]",
"cursor",
"=",
"0",
"# reset the internal snapshot data structure lists.",
"self",
".",
"memory_snapshot_blocks",
"=",
"[",
"]",
"self",
".",
"memory_snapshot_contexts",
"=",
"[",
"]",
"if",
"not",
"mem_only",
":",
"# enumerate the running threads and save a copy of their contexts.",
"for",
"thread_id",
"in",
"self",
".",
"enumerate_threads",
"(",
")",
":",
"context",
"=",
"self",
".",
"get_thread_context",
"(",
"None",
",",
"thread_id",
")",
"self",
".",
"memory_snapshot_contexts",
".",
"append",
"(",
"memory_snapshot_context",
"(",
"thread_id",
",",
"context",
")",
")",
"self",
".",
"_log",
"(",
"\"saving thread context of thread id: %08x\"",
"%",
"thread_id",
")",
"# scan through the entire memory range and save a copy of suitable memory blocks.",
"while",
"cursor",
"<",
"0xFFFFFFFF",
":",
"save_block",
"=",
"True",
"try",
":",
"mbi",
"=",
"self",
".",
"virtual_query",
"(",
"cursor",
")",
"except",
":",
"break",
"# do not snapshot blocks of memory that match the following characteristics.",
"# XXX - might want to drop the MEM_IMAGE check to accomodate for self modifying code.",
"if",
"mbi",
".",
"State",
"!=",
"MEM_COMMIT",
"or",
"mbi",
".",
"Type",
"==",
"MEM_IMAGE",
":",
"save_block",
"=",
"False",
"for",
"has_protection",
"in",
"do_not_snapshot",
":",
"if",
"mbi",
".",
"Protect",
"&",
"has_protection",
":",
"save_block",
"=",
"False",
"break",
"if",
"save_block",
":",
"self",
".",
"_log",
"(",
"\"Adding %08x +%d to memory snapsnot.\"",
"%",
"(",
"mbi",
".",
"BaseAddress",
",",
"mbi",
".",
"RegionSize",
")",
")",
"# read the raw bytes from the memory block.",
"data",
"=",
"self",
".",
"read_process_memory",
"(",
"mbi",
".",
"BaseAddress",
",",
"mbi",
".",
"RegionSize",
")",
"self",
".",
"memory_snapshot_blocks",
".",
"append",
"(",
"memory_snapshot_block",
"(",
"mbi",
",",
"data",
")",
")",
"cursor",
"+=",
"mbi",
".",
"RegionSize",
"return",
"self",
".",
"ret_self",
"(",
")"
] | https://github.com/demi6od/ChromeFuzzer/blob/4eaf1084d5f8fd20897706edf8b67bfbbd6380fc/PocSmplify/pydbg-master/pydbg.py#L2686-L2742 |
|
jxcore/jxcore | b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410 | tools/closure_linter/gflags.py | python | DEFINE_flag | (flag, flag_values=FLAGS) | Registers a 'Flag' object with a 'FlagValues' object.
By default, the global FLAGS 'FlagValue' object is used.
Typical users will use one of the more specialized DEFINE_xxx
functions, such as DEFINE_string or DEFINE_integer. But developers
who need to create Flag objects themselves should use this function
to register their flags. | Registers a 'Flag' object with a 'FlagValues' object. | [
"Registers",
"a",
"Flag",
"object",
"with",
"a",
"FlagValues",
"object",
"."
] | def DEFINE_flag(flag, flag_values=FLAGS):
"""Registers a 'Flag' object with a 'FlagValues' object.
By default, the global FLAGS 'FlagValue' object is used.
Typical users will use one of the more specialized DEFINE_xxx
functions, such as DEFINE_string or DEFINE_integer. But developers
who need to create Flag objects themselves should use this function
to register their flags.
"""
# copying the reference to flag_values prevents pychecker warnings
fv = flag_values
fv[flag.name] = flag
# Tell flag_values who's defining the flag.
if isinstance(flag_values, FlagValues):
# Regarding the above isinstance test: some users pass funny
# values of flag_values (e.g., {}) in order to avoid the flag
# registration (in the past, there used to be a flag_values ==
# FLAGS test here) and redefine flags with the same name (e.g.,
# debug). To avoid breaking their code, we perform the
# registration only if flag_values is a real FlagValues object.
flag_values._RegisterFlagByModule(_GetCallingModule(), flag) | [
"def",
"DEFINE_flag",
"(",
"flag",
",",
"flag_values",
"=",
"FLAGS",
")",
":",
"# copying the reference to flag_values prevents pychecker warnings",
"fv",
"=",
"flag_values",
"fv",
"[",
"flag",
".",
"name",
"]",
"=",
"flag",
"# Tell flag_values who's defining the flag.",
"if",
"isinstance",
"(",
"flag_values",
",",
"FlagValues",
")",
":",
"# Regarding the above isinstance test: some users pass funny",
"# values of flag_values (e.g., {}) in order to avoid the flag",
"# registration (in the past, there used to be a flag_values ==",
"# FLAGS test here) and redefine flags with the same name (e.g.,",
"# debug). To avoid breaking their code, we perform the",
"# registration only if flag_values is a real FlagValues object.",
"flag_values",
".",
"_RegisterFlagByModule",
"(",
"_GetCallingModule",
"(",
")",
",",
"flag",
")"
] | https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/tools/closure_linter/gflags.py#L1818-L1839 |
||
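For context on DEFINE_flag: the typed helpers in this module (DEFINE_string, DEFINE_integer, and so on) construct a Flag and hand it to DEFINE_flag. A hedged usage sketch against the classic python-gflags API, assuming the module is importable as gflags and using an arbitrary flag name:

import gflags

FLAGS = gflags.FLAGS
gflags.DEFINE_string('name', 'world', 'Who to greet')   # builds a Flag and registers it via DEFINE_flag

remaining = FLAGS(['prog', '--name=editor'])   # parse a sample argv
print(FLAGS.name)                              # editor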
facebookarchive/nuclide | 2a2a0a642d136768b7d2a6d35a652dc5fb77d70a | modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py | python | Handle.close | (self) | Closes the Win32 handle. | Closes the Win32 handle. | [
"Closes",
"the",
"Win32",
"handle",
"."
] | def close(self):
"""
Closes the Win32 handle.
"""
if self.bOwnership and self.value not in (None, INVALID_HANDLE_VALUE):
if Handle.__bLeakDetection: # XXX DEBUG
print("CLOSE HANDLE (%d) %r" % (self.value, self))
try:
self._close()
finally:
self._value = None | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"bOwnership",
"and",
"self",
".",
"value",
"not",
"in",
"(",
"None",
",",
"INVALID_HANDLE_VALUE",
")",
":",
"if",
"Handle",
".",
"__bLeakDetection",
":",
"# XXX DEBUG",
"print",
"(",
"\"CLOSE HANDLE (%d) %r\"",
"%",
"(",
"self",
".",
"value",
",",
"self",
")",
")",
"try",
":",
"self",
".",
"_close",
"(",
")",
"finally",
":",
"self",
".",
"_value",
"=",
"None"
] | https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py#L683-L693 |
||
retspen/webvirtcloud | ba06af977511d7874c7ce0208e3a9b6bb496c22b | vrtManager/create.py | python | wvmCreate.get_storages_images | (self) | | return images | Function returns all images on all storages | Function returns all images on all storages | [
"Function",
"return",
"all",
"images",
"on",
"all",
"storages"
] | def get_storages_images(self):
"""
Function returns all images on all storages
"""
images = list()
storages = self.get_storages(only_actives=True)
for storage in storages:
stg = self.get_storage(storage)
try:
stg.refresh(0)
except Exception:
pass
for img in stg.listVolumes():
if img.lower().endswith(".iso"):
pass
else:
images.append(img)
return images | [
"def",
"get_storages_images",
"(",
"self",
")",
":",
"images",
"=",
"list",
"(",
")",
"storages",
"=",
"self",
".",
"get_storages",
"(",
"only_actives",
"=",
"True",
")",
"for",
"storage",
"in",
"storages",
":",
"stg",
"=",
"self",
".",
"get_storage",
"(",
"storage",
")",
"try",
":",
"stg",
".",
"refresh",
"(",
"0",
")",
"except",
"Exception",
":",
"pass",
"for",
"img",
"in",
"stg",
".",
"listVolumes",
"(",
")",
":",
"if",
"img",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"\".iso\"",
")",
":",
"pass",
"else",
":",
"images",
".",
"append",
"(",
"img",
")",
"return",
"images"
] | https://github.com/retspen/webvirtcloud/blob/ba06af977511d7874c7ce0208e3a9b6bb496c22b/vrtManager/create.py#L29-L46 |
|
xl7dev/BurpSuite | d1d4bd4981a87f2f4c0c9744ad7c476336c813da | Extender/Sqlmap/lib/core/option.py | python | _setRequestFromFile | () | This function checks if the way to make a HTTP request is through supplied
textual file, parses it and saves the information into the knowledge base. | This function checks if the way to make a HTTP request is through supplied
textual file, parses it and saves the information into the knowledge base. | [
"This",
"function",
"checks",
"if",
"the",
"way",
"to",
"make",
"a",
"HTTP",
"request",
"is",
"through",
"supplied",
"textual",
"file",
"parses",
"it",
"and",
"saves",
"the",
"information",
"into",
"the",
"knowledge",
"base",
"."
] | def _setRequestFromFile():
"""
This function checks if the way to make a HTTP request is through supplied
textual file, parses it and saves the information into the knowledge base.
"""
if not conf.requestFile:
return
addedTargetUrls = set()
conf.requestFile = os.path.expanduser(conf.requestFile)
infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
logger.info(infoMsg)
if not os.path.isfile(conf.requestFile):
errMsg = "the specified HTTP request file "
errMsg += "does not exist"
raise SqlmapFilePathException(errMsg)
_feedTargetsDict(conf.requestFile, addedTargetUrls) | [
"def",
"_setRequestFromFile",
"(",
")",
":",
"if",
"not",
"conf",
".",
"requestFile",
":",
"return",
"addedTargetUrls",
"=",
"set",
"(",
")",
"conf",
".",
"requestFile",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"conf",
".",
"requestFile",
")",
"infoMsg",
"=",
"\"parsing HTTP request from '%s'\"",
"%",
"conf",
".",
"requestFile",
"logger",
".",
"info",
"(",
"infoMsg",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"conf",
".",
"requestFile",
")",
":",
"errMsg",
"=",
"\"the specified HTTP request file \"",
"errMsg",
"+=",
"\"does not exist\"",
"raise",
"SqlmapFilePathException",
"(",
"errMsg",
")",
"_feedTargetsDict",
"(",
"conf",
".",
"requestFile",
",",
"addedTargetUrls",
")"
] | https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/Sqlmap/lib/core/option.py#L487-L508 |
||
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/parso/python/tree.py | python | ImportName.get_defined_names | (self) | return [alias or path[0] for path, alias in self._dotted_as_names()] | Returns a list of `Name` that the import defines. The defined names
is always the first name after `import` or in case an alias - `as` - is
present that name is returned. | Returns a list of `Name` that the import defines. The defined names
is always the first name after `import` or in case an alias - `as` - is
present that name is returned. | [
"Returns",
"the",
"a",
"list",
"of",
"Name",
"that",
"the",
"import",
"defines",
".",
"The",
"defined",
"names",
"is",
"always",
"the",
"first",
"name",
"after",
"import",
"or",
"in",
"case",
"an",
"alias",
"-",
"as",
"-",
"is",
"present",
"that",
"name",
"is",
"returned",
"."
] | def get_defined_names(self):
"""
Returns a list of `Name` that the import defines. The defined names
is always the first name after `import` or in case an alias - `as` - is
present that name is returned.
"""
return [alias or path[0] for path, alias in self._dotted_as_names()] | [
"def",
"get_defined_names",
"(",
"self",
")",
":",
"return",
"[",
"alias",
"or",
"path",
"[",
"0",
"]",
"for",
"path",
",",
"alias",
"in",
"self",
".",
"_dotted_as_names",
"(",
")",
"]"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/parso/python/tree.py#L908-L914 |
|
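Usage sketch for get_defined_names through the public parso API (assumes a reasonably recent parso release; the sample import statement is arbitrary):

import parso

module = parso.parse('import os.path as p, sys\n')
imp = next(module.iter_imports())                          # the ImportName node for the line above
print([name.value for name in imp.get_defined_names()])    # ['p', 'sys']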
ansible/awx | 15c7a3f85b5e948f011c67111c4433a38c4544e9 | tools/scripts/compilemessages.py | python | is_protected_type | (obj) | return isinstance(obj, _PROTECTED_TYPES) | Determine if the object instance is of a protected type.
Objects of protected types are preserved as-is when passed to
force_text(strings_only=True). | Determine if the object instance is of a protected type.
Objects of protected types are preserved as-is when passed to
force_text(strings_only=True). | [
"Determine",
"if",
"the",
"object",
"instance",
"is",
"of",
"a",
"protected",
"type",
".",
"Objects",
"of",
"protected",
"types",
"are",
"preserved",
"as",
"-",
"is",
"when",
"passed",
"to",
"force_text",
"(",
"strings_only",
"=",
"True",
")",
"."
] | def is_protected_type(obj):
"""Determine if the object instance is of a protected type.
Objects of protected types are preserved as-is when passed to
force_text(strings_only=True).
"""
return isinstance(obj, _PROTECTED_TYPES) | [
"def",
"is_protected_type",
"(",
"obj",
")",
":",
"return",
"isinstance",
"(",
"obj",
",",
"_PROTECTED_TYPES",
")"
] | https://github.com/ansible/awx/blob/15c7a3f85b5e948f011c67111c4433a38c4544e9/tools/scripts/compilemessages.py#L72-L77 |
|
Sefaria/Sefaria-Project | 506752f49394fadebae283d525af8276eb2e241e | sefaria/model/text.py | python | Ref.prev_section_ref | (self, vstate=None) | return self._prev | Returns a Ref to the previous section (e.g. Chapter).
If this is the first section, returns ``None``
:return: :class:`Ref` | Returns a Ref to the previous section (e.g. Chapter). | [
"Returns",
"a",
"Ref",
"to",
"the",
"previous",
"section",
"(",
"e",
".",
"g",
".",
"Chapter",
")",
"."
] | def prev_section_ref(self, vstate=None):
"""
Returns a Ref to the previous section (e.g. Chapter).
If this is the first section, returns ``None``
:return: :class:`Ref`
"""
if not self._prev:
if self.index_node.is_virtual:
pl = self.index_node.prev_leaf()
self._prev = pl.ref() if pl else None
return self._prev
self._prev = self._iter_text_section(False, vstate=vstate)
if self._prev is None and not self.index_node.children:
current_leaf = self.index_node
# we now need to iterate over the prev leaves, finding the first available section
while True:
prev_leaf = current_leaf.prev_leaf() # prev schema/JANode
if prev_leaf and prev_leaf.is_virtual:
if prev_leaf.last_child():
return prev_leaf.last_child().ref()
else:
return None
if prev_leaf:
prev_node_ref = prev_leaf.ref() # get a ref so we can do the next lines
potential_prev = prev_node_ref._iter_text_section(forward=False, depth_up=0 if prev_leaf.depth == 1 else 1, vstate=vstate)
if potential_prev:
self._prev = potential_prev
break
current_leaf = prev_leaf
else:
self._prev = None
break
return self._prev | [
"def",
"prev_section_ref",
"(",
"self",
",",
"vstate",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"_prev",
":",
"if",
"self",
".",
"index_node",
".",
"is_virtual",
":",
"pl",
"=",
"self",
".",
"index_node",
".",
"prev_leaf",
"(",
")",
"self",
".",
"_prev",
"=",
"pl",
".",
"ref",
"(",
")",
"if",
"pl",
"else",
"None",
"return",
"self",
".",
"_prev",
"self",
".",
"_prev",
"=",
"self",
".",
"_iter_text_section",
"(",
"False",
",",
"vstate",
"=",
"vstate",
")",
"if",
"self",
".",
"_prev",
"is",
"None",
"and",
"not",
"self",
".",
"index_node",
".",
"children",
":",
"current_leaf",
"=",
"self",
".",
"index_node",
"# we now need to iterate over the prev leaves, finding the first available section",
"while",
"True",
":",
"prev_leaf",
"=",
"current_leaf",
".",
"prev_leaf",
"(",
")",
"# prev schema/JANode",
"if",
"prev_leaf",
"and",
"prev_leaf",
".",
"is_virtual",
":",
"if",
"prev_leaf",
".",
"last_child",
"(",
")",
":",
"return",
"prev_leaf",
".",
"last_child",
"(",
")",
".",
"ref",
"(",
")",
"else",
":",
"return",
"None",
"if",
"prev_leaf",
":",
"prev_node_ref",
"=",
"prev_leaf",
".",
"ref",
"(",
")",
"# get a ref so we can do the next lines",
"potential_prev",
"=",
"prev_node_ref",
".",
"_iter_text_section",
"(",
"forward",
"=",
"False",
",",
"depth_up",
"=",
"0",
"if",
"prev_leaf",
".",
"depth",
"==",
"1",
"else",
"1",
",",
"vstate",
"=",
"vstate",
")",
"if",
"potential_prev",
":",
"self",
".",
"_prev",
"=",
"potential_prev",
"break",
"current_leaf",
"=",
"prev_leaf",
"else",
":",
"self",
".",
"_prev",
"=",
"None",
"break",
"return",
"self",
".",
"_prev"
] | https://github.com/Sefaria/Sefaria-Project/blob/506752f49394fadebae283d525af8276eb2e241e/sefaria/model/text.py#L3305-L3339 |
|
IonicChina/ioniclub | 208d5298939672ef44076bb8a7e8e6df5278e286 | node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/msvs.py | python | _MapFileToMsBuildSourceType | (source, extension_to_rule_name) | return (group, element) | Returns the group and element type of the source file.
Arguments:
source: The source file name.
extension_to_rule_name: A dictionary mapping file extensions to rules.
Returns:
A pair of (group this file should be part of, the label of element) | Returns the group and element type of the source file. | [
"Returns",
"the",
"group",
"and",
"element",
"type",
"of",
"the",
"source",
"file",
"."
] | def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
"""Returns the group and element type of the source file.
Arguments:
source: The source file name.
extension_to_rule_name: A dictionary mapping file extensions to rules.
Returns:
A pair of (group this file should be part of, the label of element)
"""
_, ext = os.path.splitext(source)
if ext in extension_to_rule_name:
group = 'rule'
element = extension_to_rule_name[ext]
elif ext in ['.cc', '.cpp', '.c', '.cxx']:
group = 'compile'
element = 'ClCompile'
elif ext in ['.h', '.hxx']:
group = 'include'
element = 'ClInclude'
elif ext == '.rc':
group = 'resource'
element = 'ResourceCompile'
elif ext == '.idl':
group = 'midl'
element = 'Midl'
else:
group = 'none'
element = 'None'
return (group, element) | [
"def",
"_MapFileToMsBuildSourceType",
"(",
"source",
",",
"extension_to_rule_name",
")",
":",
"_",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"source",
")",
"if",
"ext",
"in",
"extension_to_rule_name",
":",
"group",
"=",
"'rule'",
"element",
"=",
"extension_to_rule_name",
"[",
"ext",
"]",
"elif",
"ext",
"in",
"[",
"'.cc'",
",",
"'.cpp'",
",",
"'.c'",
",",
"'.cxx'",
"]",
":",
"group",
"=",
"'compile'",
"element",
"=",
"'ClCompile'",
"elif",
"ext",
"in",
"[",
"'.h'",
",",
"'.hxx'",
"]",
":",
"group",
"=",
"'include'",
"element",
"=",
"'ClInclude'",
"elif",
"ext",
"==",
"'.rc'",
":",
"group",
"=",
"'resource'",
"element",
"=",
"'ResourceCompile'",
"elif",
"ext",
"==",
"'.idl'",
":",
"group",
"=",
"'midl'",
"element",
"=",
"'Midl'",
"else",
":",
"group",
"=",
"'none'",
"element",
"=",
"'None'",
"return",
"(",
"group",
",",
"element",
")"
] | https://github.com/IonicChina/ioniclub/blob/208d5298939672ef44076bb8a7e8e6df5278e286/node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/msvs.py#L2056-L2085 |
|
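Example inputs and outputs for _MapFileToMsBuildSourceType, assuming the function above is in scope; the '.proto' entry is a hypothetical extension_to_rule_name mapping used only for illustration:

print(_MapFileToMsBuildSourceType('widget.cc', {}))                          # ('compile', 'ClCompile')
print(_MapFileToMsBuildSourceType('app.rc', {}))                             # ('resource', 'ResourceCompile')
print(_MapFileToMsBuildSourceType('resource.h', {}))                         # ('include', 'ClInclude')
print(_MapFileToMsBuildSourceType('api.proto', {'.proto': 'ProtoCompile'}))  # ('rule', 'ProtoCompile')
print(_MapFileToMsBuildSourceType('README.md', {}))                          # ('none', 'None')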
xixiaoyao/CS224n-winter-together | f1fbcd4db284a804cb9dfc24b65481ba66e7d32c | Assignments/assignment3/geekhch/parser_transitions.py | python | test_parse | () | Simple tests for the PartialParse.parse function
Warning: these are not exhaustive | Simple tests for the PartialParse.parse function
Warning: these are not exhaustive | [
"Simple",
"tests",
"for",
"the",
"PartialParse",
".",
"parse",
"function",
"Warning",
":",
"these",
"are",
"not",
"exhaustive"
] | def test_parse():
"""Simple tests for the PartialParse.parse function
Warning: these are not exhaustive
"""
sentence = ["parse", "this", "sentence"]
dependencies = PartialParse(sentence).parse(["S", "S", "S", "LA", "RA", "RA"])
dependencies = tuple(sorted(dependencies))
expected = (('ROOT', 'parse'), ('parse', 'sentence'), ('sentence', 'this'))
assert dependencies == expected, \
"parse test resulted in dependencies {:}, expected {:}".format(dependencies, expected)
assert tuple(sentence) == ("parse", "this", "sentence"), \
"parse test failed: the input sentence should not be modified"
print("parse test passed!") | [
"def",
"test_parse",
"(",
")",
":",
"sentence",
"=",
"[",
"\"parse\"",
",",
"\"this\"",
",",
"\"sentence\"",
"]",
"dependencies",
"=",
"PartialParse",
"(",
"sentence",
")",
".",
"parse",
"(",
"[",
"\"S\"",
",",
"\"S\"",
",",
"\"S\"",
",",
"\"LA\"",
",",
"\"RA\"",
",",
"\"RA\"",
"]",
")",
"dependencies",
"=",
"tuple",
"(",
"sorted",
"(",
"dependencies",
")",
")",
"expected",
"=",
"(",
"(",
"'ROOT'",
",",
"'parse'",
")",
",",
"(",
"'parse'",
",",
"'sentence'",
")",
",",
"(",
"'sentence'",
",",
"'this'",
")",
")",
"assert",
"dependencies",
"==",
"expected",
",",
"\"parse test resulted in dependencies {:}, expected {:}\"",
".",
"format",
"(",
"dependencies",
",",
"expected",
")",
"assert",
"tuple",
"(",
"sentence",
")",
"==",
"(",
"\"parse\"",
",",
"\"this\"",
",",
"\"sentence\"",
")",
",",
"\"parse test failed: the input sentence should not be modified\"",
"print",
"(",
"\"parse test passed!\"",
")"
] | https://github.com/xixiaoyao/CS224n-winter-together/blob/f1fbcd4db284a804cb9dfc24b65481ba66e7d32c/Assignments/assignment3/geekhch/parser_transitions.py#L162-L174 |
||
carlosperate/ardublockly | 04fa48273b5651386d0ef1ce6dd446795ffc2594 | start.py | python | open_browser | (ip, port, file_path='') | Start a browser in a separate thread after waiting for half a second.
:param ip: IP address or host name to build URL.
:param port: Server port to build the URL.
:param file_path: Path within domain for the browser to open.
:return: None. | Start a browser in a separate thread after waiting for half a second. | [
"Start",
"a",
"browser",
"in",
"a",
"separate",
"thread",
"after",
"waiting",
"for",
"half",
"a",
"second",
"."
] | def open_browser(ip, port, file_path=''):
"""Start a browser in a separate thread after waiting for half a second.
:param ip: IP address or host name to build URL.
:param port: Server port to build the URL.
:param file_path: Path within domain for the browser to open.
:return: None.
"""
def _open_browser():
webbrowser.get().open('http://%s:%s/%s' % (ip, port, file_path))
thread = threading.Timer(0.5, _open_browser)
thread.start() | [
"def",
"open_browser",
"(",
"ip",
",",
"port",
",",
"file_path",
"=",
"''",
")",
":",
"def",
"_open_browser",
"(",
")",
":",
"webbrowser",
".",
"get",
"(",
")",
".",
"open",
"(",
"'http://%s:%s/%s'",
"%",
"(",
"ip",
",",
"port",
",",
"file_path",
")",
")",
"thread",
"=",
"threading",
".",
"Timer",
"(",
"0.5",
",",
"_open_browser",
")",
"thread",
".",
"start",
"(",
")"
] | https://github.com/carlosperate/ardublockly/blob/04fa48273b5651386d0ef1ce6dd446795ffc2594/start.py#L27-L39 |
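A one-line usage sketch (illustrative only; the host, port, and path are assumptions, and open_browser must be in scope):
# Opens http://localhost:8000/index.html in the default browser after roughly half a second.
open_browser('localhost', 8000, file_path='index.html')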
||
mandatoryprogrammer/tarnish | 0a5ac30bf0e88f3caa433ba6edbf88b0c35078c0 | tarnish-worker/tasks.py | python | RetireJS.check_file | ( self, filename, file_data ) | return vulnerability_match | Check a given file
@filename: Name of the file
@file_data: Contents of the JavaScript | Check a given file | [
"Check",
"a",
"given",
"file"
] | def check_file( self, filename, file_data ):
"""
Check a given file
@filename: Name of the file
@file_data: Contents of the JavaScript
"""
matching_definitions = self.get_libraries(
filename,
file_data
)
vulnerability_match_hash = {}
vulnerability_match = []
for matching_definition in matching_definitions:
vulnerabilities = self.definitions[ matching_definition[ "definition_name" ] ][ "vulnerabilities" ]
for vulnerability in vulnerabilities:
match = False
if matching_definition[ "version" ].strip() == "":
match = False
elif "atOrAbove" in vulnerability and "below" in vulnerability:
if LooseVersion( matching_definition[ "version" ] ) >= LooseVersion( vulnerability[ "atOrAbove" ] ) and LooseVersion( matching_definition[ "version" ] ) < LooseVersion( vulnerability[ "below" ] ):
match = True
elif "above" in vulnerability and "below" in vulnerability:
if LooseVersion( matching_definition[ "version" ] ) > LooseVersion( vulnerability[ "above" ] ) and LooseVersion( matching_definition[ "version" ] ) < LooseVersion( vulnerability[ "below" ] ):
match = True
elif "below" in vulnerability:
if LooseVersion( matching_definition[ "version" ] ) < LooseVersion( vulnerability[ "below" ] ):
match = True
elif "above" in vulnerability:
if LooseVersion( matching_definition[ "version" ] ) > LooseVersion( vulnerability[ "above" ] ):
match = True
elif "atOrAbove" in vulnerability:
if LooseVersion( matching_definition[ "version" ] ) >= LooseVersion( vulnerability[ "atOrAbove" ] ):
match = True
elif "atOrBelow" in vulnerability:
if LooseVersion( matching_definition[ "version" ] ) <= LooseVersion( vulnerability[ "atOrBelow" ] ):
match = True
if match:
vulnerability_match_hash[ matching_definition[ "definition_name" ] + matching_definition[ "version" ] ] = {
"version": matching_definition[ "version" ],
"definition_name": matching_definition[ "definition_name" ],
"vulnerability": vulnerability
}
# De-duplicate
for key, value in vulnerability_match_hash.iteritems():
vulnerability_match.append(
value
)
return vulnerability_match | [
"def",
"check_file",
"(",
"self",
",",
"filename",
",",
"file_data",
")",
":",
"matching_definitions",
"=",
"self",
".",
"get_libraries",
"(",
"filename",
",",
"file_data",
")",
"vulnerability_match_hash",
"=",
"{",
"}",
"vulnerability_match",
"=",
"[",
"]",
"for",
"matching_definition",
"in",
"matching_definitions",
":",
"vulnerabilities",
"=",
"self",
".",
"definitions",
"[",
"matching_definition",
"[",
"\"definition_name\"",
"]",
"]",
"[",
"\"vulnerabilities\"",
"]",
"for",
"vulnerability",
"in",
"vulnerabilities",
":",
"match",
"=",
"False",
"if",
"matching_definition",
"[",
"\"version\"",
"]",
".",
"strip",
"(",
")",
"==",
"\"\"",
":",
"match",
"=",
"False",
"elif",
"\"atOrAbove\"",
"in",
"vulnerability",
"and",
"\"below\"",
"in",
"vulnerability",
":",
"if",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
">=",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"atOrAbove\"",
"]",
")",
"and",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
"<",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"below\"",
"]",
")",
":",
"match",
"=",
"True",
"elif",
"\"above\"",
"in",
"vulnerability",
"and",
"\"below\"",
"in",
"vulnerability",
":",
"if",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
">",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"above\"",
"]",
")",
"and",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
"<",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"below\"",
"]",
")",
":",
"match",
"=",
"True",
"elif",
"\"below\"",
"in",
"vulnerability",
":",
"if",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
"<",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"below\"",
"]",
")",
":",
"match",
"=",
"True",
"elif",
"\"above\"",
"in",
"vulnerability",
":",
"if",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
">",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"above\"",
"]",
")",
":",
"match",
"=",
"True",
"elif",
"\"atOrAbove\"",
"in",
"vulnerability",
":",
"if",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
">=",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"atOrAbove\"",
"]",
")",
":",
"match",
"=",
"True",
"elif",
"\"atOrBelow\"",
"in",
"vulnerability",
":",
"if",
"LooseVersion",
"(",
"matching_definition",
"[",
"\"version\"",
"]",
")",
"<=",
"LooseVersion",
"(",
"vulnerability",
"[",
"\"atOrBelow\"",
"]",
")",
":",
"match",
"=",
"True",
"if",
"match",
":",
"vulnerability_match_hash",
"[",
"matching_definition",
"[",
"\"definition_name\"",
"]",
"+",
"matching_definition",
"[",
"\"version\"",
"]",
"]",
"=",
"{",
"\"version\"",
":",
"matching_definition",
"[",
"\"version\"",
"]",
",",
"\"definition_name\"",
":",
"matching_definition",
"[",
"\"definition_name\"",
"]",
",",
"\"vulnerability\"",
":",
"vulnerability",
"}",
"# De-duplicate",
"for",
"key",
",",
"value",
"in",
"vulnerability_match_hash",
".",
"iteritems",
"(",
")",
":",
"vulnerability_match",
".",
"append",
"(",
"value",
")",
"return",
"vulnerability_match"
] | https://github.com/mandatoryprogrammer/tarnish/blob/0a5ac30bf0e88f3caa433ba6edbf88b0c35078c0/tarnish-worker/tasks.py#L392-L445 |
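The version-window checks in the entry above all reduce to distutils LooseVersion comparisons; a small standalone sketch of that comparison with illustrative values (not data from the repository; note distutils is deprecated on newer Pythons):
from distutils.version import LooseVersion

# A library version is flagged when it falls inside the advisory's [atOrAbove, below) window.
at_or_above = LooseVersion("1.4.0")
below = LooseVersion("1.12.0")
found = LooseVersion("1.10.2")
print(at_or_above <= found < below)  # True: 1.10.2 sorts above 1.4.0 and below 1.12.0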
|
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | bt5/erp5_forge/ModuleComponentTemplateItem/portal_components/module.erp5.Subversion.py | python | Subversion.getPreferredUsername | (self) | return (username or
# not set in preferences, then we get the current user id in zope
portal.portal_membership.getAuthenticatedMember().getId()) | return username in preferences if set of the current username | return username in preferences if set of the current username | [
"return",
"username",
"in",
"preferences",
"if",
"set",
"of",
"the",
"current",
"username"
] | def getPreferredUsername(self):
"""return username in preferences if set of the current username
"""
portal = self.getPortalObject()
username = portal.portal_preferences.getPreferredSubversionUserName()
if username:
username = username.strip()
return (username or
# not set in preferences, then we get the current user id in zope
portal.portal_membership.getAuthenticatedMember().getId()) | [
"def",
"getPreferredUsername",
"(",
"self",
")",
":",
"portal",
"=",
"self",
".",
"getPortalObject",
"(",
")",
"username",
"=",
"portal",
".",
"portal_preferences",
".",
"getPreferredSubversionUserName",
"(",
")",
"if",
"username",
":",
"username",
"=",
"username",
".",
"strip",
"(",
")",
"return",
"(",
"username",
"or",
"# not set in preferences, then we get the current user id in zope",
"portal",
".",
"portal_membership",
".",
"getAuthenticatedMember",
"(",
")",
".",
"getId",
"(",
")",
")"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/bt5/erp5_forge/ModuleComponentTemplateItem/portal_components/module.erp5.Subversion.py#L104-L113 |
|
PublicMapping/districtbuilder-classic | 6e4b9d644043082eb0499f5aa77e777fff73a67c | django/publicmapping/redistricting/management/commands/numusers.py | python | Command.handle | (self, *args, **options) | Print the number of active users | Print the number of active users | [
"Print",
"the",
"number",
"of",
"active",
"users"
] | def handle(self, *args, **options):
"""
Print the number of active users
"""
minutes = int(options.get('minutes'))
users = 0
for session in Session.objects.all():
decoded = session.get_decoded()
if 'activity_time' in decoded and (
decoded['activity_time'] -
timedelta(0, 0, 0, 0, settings.SESSION_TIMEOUT)) > (
datetime.now() - timedelta(0, 0, 0, 0, minutes)):
users += 1
self.stdout.write(
'Number of active users over the last %d minute(s): %d\n' %
(minutes, users)) | [
"def",
"handle",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"minutes",
"=",
"int",
"(",
"options",
".",
"get",
"(",
"'minutes'",
")",
")",
"users",
"=",
"0",
"for",
"session",
"in",
"Session",
".",
"objects",
".",
"all",
"(",
")",
":",
"decoded",
"=",
"session",
".",
"get_decoded",
"(",
")",
"if",
"'activity_time'",
"in",
"decoded",
"and",
"(",
"decoded",
"[",
"'activity_time'",
"]",
"-",
"timedelta",
"(",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"settings",
".",
"SESSION_TIMEOUT",
")",
")",
">",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"timedelta",
"(",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"minutes",
")",
")",
":",
"users",
"+=",
"1",
"self",
".",
"stdout",
".",
"write",
"(",
"'Number of active users over the last %d minute(s): %d\\n'",
"%",
"(",
"minutes",
",",
"users",
")",
")"
] | https://github.com/PublicMapping/districtbuilder-classic/blob/6e4b9d644043082eb0499f5aa77e777fff73a67c/django/publicmapping/redistricting/management/commands/numusers.py#L48-L64 |
||
jxcore/jxcore | b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410 | tools/gyp/pylib/gyp/MSVSVersion.py | python | VisualStudioVersion.SetupScript | (self, target_arch) | Returns a command (with arguments) to be used to set up the
environment. | Returns a command (with arguments) to be used to set up the
environment. | [
"Returns",
"a",
"command",
"(",
"with",
"arguments",
")",
"to",
"be",
"used",
"to",
"set",
"up",
"the",
"environment",
"."
] | def SetupScript(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
# Check if we are running in the SDK command line environment and use
# the setup script from the SDK if so. |target_arch| should be either
# 'x86' or 'x64'.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
if self.sdk_based and sdk_dir:
return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
'/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
if self.short_name == '2013' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
# VS2013 non-Express has a x64-x86 cross that we want to prefer.
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
# Otherwise, the standard x86 compiler.
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
assert target_arch == 'x64'
arg = 'x86_amd64'
# Use the 64-on-64 compiler if we're not using an express
# edition and we're running on a 64bit OS.
if self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
arg = 'amd64'
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg] | [
"def",
"SetupScript",
"(",
"self",
",",
"target_arch",
")",
":",
"# Check if we are running in the SDK command line environment and use",
"# the setup script from the SDK if so. |target_arch| should be either",
"# 'x86' or 'x64'.",
"assert",
"target_arch",
"in",
"(",
"'x86'",
",",
"'x64'",
")",
"sdk_dir",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'WindowsSDKDir'",
")",
"if",
"self",
".",
"sdk_based",
"and",
"sdk_dir",
":",
"return",
"[",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"sdk_dir",
",",
"'Bin/SetEnv.Cmd'",
")",
")",
",",
"'/'",
"+",
"target_arch",
"]",
"else",
":",
"# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls",
"# vcvars32, which it can only find if VS??COMNTOOLS is set, which it",
"# isn't always.",
"if",
"target_arch",
"==",
"'x86'",
":",
"if",
"self",
".",
"short_name",
"==",
"'2013'",
"and",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'PROCESSOR_ARCHITECTURE'",
")",
"==",
"'AMD64'",
"or",
"os",
".",
"environ",
".",
"get",
"(",
"'PROCESSOR_ARCHITEW6432'",
")",
"==",
"'AMD64'",
")",
":",
"# VS2013 non-Express has a x64-x86 cross that we want to prefer.",
"return",
"[",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
",",
"'VC/vcvarsall.bat'",
")",
")",
",",
"'amd64_x86'",
"]",
"# Otherwise, the standard x86 compiler.",
"return",
"[",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
",",
"'Common7/Tools/vsvars32.bat'",
")",
")",
"]",
"else",
":",
"assert",
"target_arch",
"==",
"'x64'",
"arg",
"=",
"'x86_amd64'",
"# Use the 64-on-64 compiler if we're not using an express",
"# edition and we're running on a 64bit OS.",
"if",
"self",
".",
"short_name",
"[",
"-",
"1",
"]",
"!=",
"'e'",
"and",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'PROCESSOR_ARCHITECTURE'",
")",
"==",
"'AMD64'",
"or",
"os",
".",
"environ",
".",
"get",
"(",
"'PROCESSOR_ARCHITEW6432'",
")",
"==",
"'AMD64'",
")",
":",
"arg",
"=",
"'amd64'",
"return",
"[",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
",",
"'VC/vcvarsall.bat'",
")",
")",
",",
"arg",
"]"
] | https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/tools/gyp/pylib/gyp/MSVSVersion.py#L71-L106 |
||
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | odoo/api.py | python | Cache.get | (self, record, field, default=NOTHING) | Return the value of ``field`` for ``record``. | Return the value of ``field`` for ``record``. | [
"Return",
"the",
"value",
"of",
"field",
"for",
"record",
"."
] | def get(self, record, field, default=NOTHING):
""" Return the value of ``field`` for ``record``. """
try:
field_cache = self._get_field_cache(record, field)
return field_cache[record._ids[0]]
except KeyError:
if default is NOTHING:
raise CacheMiss(record, field)
return default | [
"def",
"get",
"(",
"self",
",",
"record",
",",
"field",
",",
"default",
"=",
"NOTHING",
")",
":",
"try",
":",
"field_cache",
"=",
"self",
".",
"_get_field_cache",
"(",
"record",
",",
"field",
")",
"return",
"field_cache",
"[",
"record",
".",
"_ids",
"[",
"0",
"]",
"]",
"except",
"KeyError",
":",
"if",
"default",
"is",
"NOTHING",
":",
"raise",
"CacheMiss",
"(",
"record",
",",
"field",
")",
"return",
"default"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/odoo/api.py#L882-L890 |
||
ayojs/ayo | 45a1c8cf6384f5bcc81d834343c3ed9d78b97df3 | deps/v8/tools/gyp_flag_compare.py | python | NormalizeSymbolArguments | (command_line) | Normalize -g arguments.
If there's no -g args, it's equivalent to -g0. -g2 is equivalent to -g.
Modifies |command_line| in place. | Normalize -g arguments. | [
"Normalize",
"-",
"g",
"arguments",
"."
] | def NormalizeSymbolArguments(command_line):
"""Normalize -g arguments.
If there's no -g args, it's equivalent to -g0. -g2 is equivalent to -g.
Modifies |command_line| in place.
"""
# Strip -g0 if there's no symbols.
have_some_symbols = False
for x in command_line:
if x.startswith('-g') and x != '-g0':
have_some_symbols = True
if not have_some_symbols and '-g0' in command_line:
command_line.remove('-g0')
# Rename -g2 to -g.
if '-g2' in command_line:
command_line[command_line.index('-g2')] = '-g' | [
"def",
"NormalizeSymbolArguments",
"(",
"command_line",
")",
":",
"# Strip -g0 if there's no symbols.",
"have_some_symbols",
"=",
"False",
"for",
"x",
"in",
"command_line",
":",
"if",
"x",
".",
"startswith",
"(",
"'-g'",
")",
"and",
"x",
"!=",
"'-g0'",
":",
"have_some_symbols",
"=",
"True",
"if",
"not",
"have_some_symbols",
"and",
"'-g0'",
"in",
"command_line",
":",
"command_line",
".",
"remove",
"(",
"'-g0'",
")",
"# Rename -g2 to -g.",
"if",
"'-g2'",
"in",
"command_line",
":",
"command_line",
"[",
"command_line",
".",
"index",
"(",
"'-g2'",
")",
"]",
"=",
"'-g'"
] | https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/deps/v8/tools/gyp_flag_compare.py#L55-L71 |
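A usage sketch for the entry above (illustrative; it assumes the function is in scope and shows the in-place normalization of -g flags):
cmd = ['-g2', '-O2', 'foo.cc']
NormalizeSymbolArguments(cmd)
print(cmd)  # ['-g', '-O2', 'foo.cc'] because -g2 is renamed to -g

cmd = ['-g0', '-O2', 'foo.cc']
NormalizeSymbolArguments(cmd)
print(cmd)  # ['-O2', 'foo.cc'] because -g0 is dropped when no real symbol flags are present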
||
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5/bootstrap/erp5_core/DocumentTemplateItem/portal_components/document.erp5.Movement.py | python | Movement.getRootCausalityValueList | (self) | return self.getExplanationValue().getRootCausalityValueList() | Returns the initial causality value for this movement.
This method will look at the causality and check if the
causality has already a causality | Returns the initial causality value for this movement.
This method will look at the causality and check if the
causality has already a causality | [
"Returns",
"the",
"initial",
"causality",
"value",
"for",
"this",
"movement",
".",
"This",
"method",
"will",
"look",
"at",
"the",
"causality",
"and",
"check",
"if",
"the",
"causality",
"has",
"already",
"a",
"causality"
] | def getRootCausalityValueList(self):
"""
Returns the initial causality value for this movement.
This method will look at the causality and check if the
causality has already a causality
"""
return self.getExplanationValue().getRootCausalityValueList() | [
"def",
"getRootCausalityValueList",
"(",
"self",
")",
":",
"return",
"self",
".",
"getExplanationValue",
"(",
")",
".",
"getRootCausalityValueList",
"(",
")"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5/bootstrap/erp5_core/DocumentTemplateItem/portal_components/document.erp5.Movement.py#L620-L626 |
|
prometheus-ar/vot.ar | 72d8fa1ea08fe417b64340b98dff68df8364afdf | msa/core/clases.py | python | Apertura.a_qr_str | (self) | return ";".join(datos) | Devuelve la informacion del recuento para almacenar en qr. | Devuelve la informacion del recuento para almacenar en qr. | [
"Devuelve",
"la",
"informacion",
"del",
"recuento",
"para",
"almacenar",
"en",
"qr",
"."
] | def a_qr_str(self):
"""Devuelve la informacion del recuento para almacenar en qr."""
# tipo de qr
# cod_mesa
#encoded_data = string_to_array(self.a_tag())
#datos = [int(TOKEN, 16), len(encoded_data) * 2]
#datos.extend(encoded_data)
#todo = "A" + array_to_printable_string(datos)
datos = [self.mesa.numero,
"%s:%s" % (self.hora["horas"], self.hora["minutos"])]
for autoridad in self.autoridades:
dato = ",".join((autoridad.apellido, autoridad.nombre,
str(autoridad.nro_documento)))
datos.append(dato)
return ";".join(datos) | [
"def",
"a_qr_str",
"(",
"self",
")",
":",
"# tipo de qr",
"# cod_mesa",
"#encoded_data = string_to_array(self.a_tag())",
"#datos = [int(TOKEN, 16), len(encoded_data) * 2]",
"#datos.extend(encoded_data)",
"#todo = \"A\" + array_to_printable_string(datos)",
"datos",
"=",
"[",
"self",
".",
"mesa",
".",
"numero",
",",
"\"%s:%s\"",
"%",
"(",
"self",
".",
"hora",
"[",
"\"horas\"",
"]",
",",
"self",
".",
"hora",
"[",
"\"minutos\"",
"]",
")",
"]",
"for",
"autoridad",
"in",
"self",
".",
"autoridades",
":",
"dato",
"=",
"\",\"",
".",
"join",
"(",
"(",
"autoridad",
".",
"apellido",
",",
"autoridad",
".",
"nombre",
",",
"str",
"(",
"autoridad",
".",
"nro_documento",
")",
")",
")",
"datos",
".",
"append",
"(",
"dato",
")",
"return",
"\";\"",
".",
"join",
"(",
"datos",
")"
] | https://github.com/prometheus-ar/vot.ar/blob/72d8fa1ea08fe417b64340b98dff68df8364afdf/msa/core/clases.py#L189-L203 |
|
redapple0204/my-boring-python | 1ab378e9d4f39ad920ff542ef3b2db68f0575a98 | pythonenv3.8/lib/python3.8/site-packages/pip/_internal/wheel.py | python | rehash | (path, blocksize=1 << 20) | return (digest, str(length)) | Return (encoded_digest, length) for path using hashlib.sha256() | Return (encoded_digest, length) for path using hashlib.sha256() | [
"Return",
"(",
"encoded_digest",
"length",
")",
"for",
"path",
"using",
"hashlib",
".",
"sha256",
"()"
] | def rehash(path, blocksize=1 << 20):
# type: (str, int) -> Tuple[str, str]
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
h, length = hash_file(path, blocksize)
digest = 'sha256=' + urlsafe_b64encode(
h.digest()
).decode('latin1').rstrip('=')
# unicode/str python2 issues
return (digest, str(length)) | [
"def",
"rehash",
"(",
"path",
",",
"blocksize",
"=",
"1",
"<<",
"20",
")",
":",
"# type: (str, int) -> Tuple[str, str]",
"h",
",",
"length",
"=",
"hash_file",
"(",
"path",
",",
"blocksize",
")",
"digest",
"=",
"'sha256='",
"+",
"urlsafe_b64encode",
"(",
"h",
".",
"digest",
"(",
")",
")",
".",
"decode",
"(",
"'latin1'",
")",
".",
"rstrip",
"(",
"'='",
")",
"# unicode/str python2 issues",
"return",
"(",
"digest",
",",
"str",
"(",
"length",
")",
")"
] | https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_internal/wheel.py#L82-L90 |
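A self-contained sketch of the same digest scheme as the entry above (an approximation for illustration; it re-implements the streaming read that pip's hash_file helper performs, so names here are not pip's own):
import hashlib
from base64 import urlsafe_b64encode

def record_digest(path, blocksize=1 << 20):
    # Stream the file through sha256 and emit the RECORD-style 'sha256=<urlsafe-b64>' form plus the byte length.
    h = hashlib.sha256()
    length = 0
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            length += len(block)
            h.update(block)
    digest = 'sha256=' + urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=')
    return digest, str(length)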
|
nodejs/node-chakracore | 770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43 | deps/v8/third_party/jinja2/debug.py | python | ProcessedTraceback.render_as_text | (self, limit=None) | return ''.join(lines).rstrip() | Return a string with the traceback. | Return a string with the traceback. | [
"Return",
"a",
"string",
"with",
"the",
"traceback",
"."
] | def render_as_text(self, limit=None):
"""Return a string with the traceback."""
lines = traceback.format_exception(self.exc_type, self.exc_value,
self.frames[0], limit=limit)
return ''.join(lines).rstrip() | [
"def",
"render_as_text",
"(",
"self",
",",
"limit",
"=",
"None",
")",
":",
"lines",
"=",
"traceback",
".",
"format_exception",
"(",
"self",
".",
"exc_type",
",",
"self",
".",
"exc_value",
",",
"self",
".",
"frames",
"[",
"0",
"]",
",",
"limit",
"=",
"limit",
")",
"return",
"''",
".",
"join",
"(",
"lines",
")",
".",
"rstrip",
"(",
")"
] | https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/deps/v8/third_party/jinja2/debug.py#L97-L101 |
|
stdlib-js/stdlib | e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df | lib/node_modules/@stdlib/math/base/special/gammainc/benchmark/python/scipy/benchmark.py | python | benchmark | () | Run the benchmark and print benchmark results. | Run the benchmark and print benchmark results. | [
"Run",
"the",
"benchmark",
"and",
"print",
"benchmark",
"results",
"."
] | def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from scipy.special import gammainc; from random import random;"
stmt = "y = gammainc(1000.0*random(), 1000.0*random() + 0.1)"
t = timeit.Timer(stmt, setup=setup)
print_version()
for i in range(REPEATS):
print("# python::scipy::" + NAME)
elapsed = t.timeit(number=ITERATIONS)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(REPEATS, REPEATS) | [
"def",
"benchmark",
"(",
")",
":",
"setup",
"=",
"\"from scipy.special import gammainc; from random import random;\"",
"stmt",
"=",
"\"y = gammainc(1000.0*random(), 1000.0*random() + 0.1)\"",
"t",
"=",
"timeit",
".",
"Timer",
"(",
"stmt",
",",
"setup",
"=",
"setup",
")",
"print_version",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"REPEATS",
")",
":",
"print",
"(",
"\"# python::scipy::\"",
"+",
"NAME",
")",
"elapsed",
"=",
"t",
".",
"timeit",
"(",
"number",
"=",
"ITERATIONS",
")",
"print_results",
"(",
"elapsed",
")",
"print",
"(",
"\"ok \"",
"+",
"str",
"(",
"i",
"+",
"1",
")",
"+",
"\" benchmark finished\"",
")",
"print_summary",
"(",
"REPEATS",
",",
"REPEATS",
")"
] | https://github.com/stdlib-js/stdlib/blob/e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df/lib/node_modules/@stdlib/math/base/special/gammainc/benchmark/python/scipy/benchmark.py#L73-L88 |
||
OWASP/SecureTea-Project | ae55082d4a342f10099db4dead23267a517e1a66 | securetea/lib/notifs/secureTeaTwitter.py | python | SecureTeaTwitter.getuserid | (self) | return response['id'] | Return the user id.
Args:
None
Raises:
None
Returns:
response_id (int): User id | Return the user id. | [
"Return",
"the",
"user",
"id",
"."
] | def getuserid(self):
"""
Return the user id.
Args:
None
Raises:
None
Returns:
response_id (int): User id
"""
endpoint = "/account/verify_credentials.json"
response = requests.get(self.baseUrl + endpoint, auth=self.auth)
response = response.json()
return response['id'] | [
"def",
"getuserid",
"(",
"self",
")",
":",
"endpoint",
"=",
"\"/account/verify_credentials.json\"",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"baseUrl",
"+",
"endpoint",
",",
"auth",
"=",
"self",
".",
"auth",
")",
"response",
"=",
"response",
".",
"json",
"(",
")",
"return",
"response",
"[",
"'id'",
"]"
] | https://github.com/OWASP/SecureTea-Project/blob/ae55082d4a342f10099db4dead23267a517e1a66/securetea/lib/notifs/secureTeaTwitter.py#L56-L72 |
|
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/lib2to3/lib2to3/pytree.py | python | WildcardPattern._bare_name_matches | (self, nodes) | return count, r | Special optimized matcher for bare_name. | Special optimized matcher for bare_name. | [
"Special",
"optimized",
"matcher",
"for",
"bare_name",
"."
] | def _bare_name_matches(self, nodes):
"""Special optimized matcher for bare_name."""
count = 0
r = {}
done = False
max = len(nodes)
while not done and count < max:
done = True
for leaf in self.content:
if leaf[0].match(nodes[count], r):
count += 1
done = False
break
r[self.name] = nodes[:count]
return count, r | [
"def",
"_bare_name_matches",
"(",
"self",
",",
"nodes",
")",
":",
"count",
"=",
"0",
"r",
"=",
"{",
"}",
"done",
"=",
"False",
"max",
"=",
"len",
"(",
"nodes",
")",
"while",
"not",
"done",
"and",
"count",
"<",
"max",
":",
"done",
"=",
"True",
"for",
"leaf",
"in",
"self",
".",
"content",
":",
"if",
"leaf",
"[",
"0",
"]",
".",
"match",
"(",
"nodes",
"[",
"count",
"]",
",",
"r",
")",
":",
"count",
"+=",
"1",
"done",
"=",
"False",
"break",
"r",
"[",
"self",
".",
"name",
"]",
"=",
"nodes",
"[",
":",
"count",
"]",
"return",
"count",
",",
"r"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/lib2to3/lib2to3/pytree.py#L796-L810 |
|
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | deps/v8/third_party/jinja2/filters.py | python | do_last | (environment, seq) | Return the last item of a sequence. | Return the last item of a sequence. | [
"Return",
"the",
"last",
"item",
"of",
"a",
"sequence",
"."
] | def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return next(iter(reversed(seq)))
except StopIteration:
return environment.undefined('No last item, sequence was empty.') | [
"def",
"do_last",
"(",
"environment",
",",
"seq",
")",
":",
"try",
":",
"return",
"next",
"(",
"iter",
"(",
"reversed",
"(",
"seq",
")",
")",
")",
"except",
"StopIteration",
":",
"return",
"environment",
".",
"undefined",
"(",
"'No last item, sequence was empty.'",
")"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/v8/third_party/jinja2/filters.py#L355-L360 |
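The core of the entry above is a copy-free way to get the last item of a reversible sequence; a tiny standalone sketch (outside any Jinja2 environment):
seq = ['a', 'b', 'c']
print(next(iter(reversed(seq))))  # 'c'; an empty sequence raises StopIteration, which the filter turns into Undefined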
||
wotermelon/toJump | 3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f | lib/mac/systrace/catapult/third_party/pyserial/serial/serialcli.py | python | IronSerial.close | (self) | Close port | Close port | [
"Close",
"port"
] | def close(self):
"""Close port"""
if self._isOpen:
if self._port_handle:
try:
self._port_handle.Close()
except System.IO.Ports.InvalidOperationException:
# ignore errors. can happen for unplugged USB serial devices
pass
self._port_handle = None
self._isOpen = False | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"_isOpen",
":",
"if",
"self",
".",
"_port_handle",
":",
"try",
":",
"self",
".",
"_port_handle",
".",
"Close",
"(",
")",
"except",
"System",
".",
"IO",
".",
"Ports",
".",
"InvalidOperationException",
":",
"# ignore errors. can happen for unplugged USB serial devices",
"pass",
"self",
".",
"_port_handle",
"=",
"None",
"self",
".",
"_isOpen",
"=",
"False"
] | https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/mac/systrace/catapult/third_party/pyserial/serial/serialcli.py#L127-L137 |
||
lukevink/hass-config-lajv | cc435372da788fdbeb28c370fe10d6b4090d5244 | custom_components/hacs/helpers/classes/repository.py | python | HacsRepository.download_zip_files | (self, validate) | return validate | Download ZIP archive from repository release. | Download ZIP archive from repository release. | [
"Download",
"ZIP",
"archive",
"from",
"repository",
"release",
"."
] | async def download_zip_files(self, validate):
"""Download ZIP archive from repository release."""
download_queue = QueueManager()
try:
contents = False
for release in self.releases.objects:
self.logger.info(
"%s ref: %s --- tag: %s.", self, self.ref, release.tag_name
)
if release.tag_name == self.ref.split("/")[1]:
contents = release.assets
if not contents:
return validate
for content in contents or []:
download_queue.add(self.async_download_zip_file(content, validate))
await download_queue.execute()
except (Exception, BaseException):
validate.errors.append("Download was not completed")
return validate | [
"async",
"def",
"download_zip_files",
"(",
"self",
",",
"validate",
")",
":",
"download_queue",
"=",
"QueueManager",
"(",
")",
"try",
":",
"contents",
"=",
"False",
"for",
"release",
"in",
"self",
".",
"releases",
".",
"objects",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"%s ref: %s --- tag: %s.\"",
",",
"self",
",",
"self",
".",
"ref",
",",
"release",
".",
"tag_name",
")",
"if",
"release",
".",
"tag_name",
"==",
"self",
".",
"ref",
".",
"split",
"(",
"\"/\"",
")",
"[",
"1",
"]",
":",
"contents",
"=",
"release",
".",
"assets",
"if",
"not",
"contents",
":",
"return",
"validate",
"for",
"content",
"in",
"contents",
"or",
"[",
"]",
":",
"download_queue",
".",
"add",
"(",
"self",
".",
"async_download_zip_file",
"(",
"content",
",",
"validate",
")",
")",
"await",
"download_queue",
".",
"execute",
"(",
")",
"except",
"(",
"Exception",
",",
"BaseException",
")",
":",
"validate",
".",
"errors",
".",
"append",
"(",
"\"Download was not completed\"",
")",
"return",
"validate"
] | https://github.com/lukevink/hass-config-lajv/blob/cc435372da788fdbeb28c370fe10d6b4090d5244/custom_components/hacs/helpers/classes/repository.py#L287-L310 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/closured/lib/python2.7/inspect.py | python | getmoduleinfo | (path) | Get the module name, suffix, mode, and module type for a given file. | Get the module name, suffix, mode, and module type for a given file. | [
"Get",
"the",
"module",
"name",
"suffix",
"mode",
"and",
"module",
"type",
"for",
"a",
"given",
"file",
"."
] | def getmoduleinfo(path):
"""Get the module name, suffix, mode, and module type for a given file."""
filename = os.path.basename(path)
suffixes = map(lambda info:
(-len(info[0]), info[0], info[1], info[2]),
imp.get_suffixes())
suffixes.sort() # try longest suffixes first, in case they overlap
for neglen, suffix, mode, mtype in suffixes:
if filename[neglen:] == suffix:
return ModuleInfo(filename[:neglen], suffix, mode, mtype) | [
"def",
"getmoduleinfo",
"(",
"path",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"path",
")",
"suffixes",
"=",
"map",
"(",
"lambda",
"info",
":",
"(",
"-",
"len",
"(",
"info",
"[",
"0",
"]",
")",
",",
"info",
"[",
"0",
"]",
",",
"info",
"[",
"1",
"]",
",",
"info",
"[",
"2",
"]",
")",
",",
"imp",
".",
"get_suffixes",
"(",
")",
")",
"suffixes",
".",
"sort",
"(",
")",
"# try longest suffixes first, in case they overlap",
"for",
"neglen",
",",
"suffix",
",",
"mode",
",",
"mtype",
"in",
"suffixes",
":",
"if",
"filename",
"[",
"neglen",
":",
"]",
"==",
"suffix",
":",
"return",
"ModuleInfo",
"(",
"filename",
"[",
":",
"neglen",
"]",
",",
"suffix",
",",
"mode",
",",
"mtype",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/inspect.py#L426-L435 |
||
KhronosGroup/Vulkan-Docs | ee155139142a2a71b56238419bf0a6859f7b0a93 | scripts/spec_tools/macro_checker.py | python | MacroChecker.processFile | (self, filename) | Parse an .adoc file belonging to the spec and check it for errors. | Parse an .adoc file belonging to the spec and check it for errors. | [
"Parse",
"an",
".",
"adoc",
"file",
"belonging",
"to",
"the",
"spec",
"and",
"check",
"it",
"for",
"errors",
"."
] | def processFile(self, filename):
"""Parse an .adoc file belonging to the spec and check it for errors."""
class FileStreamMaker(object):
def __init__(self, filename):
self.filename = filename
def make_stream(self):
return open(self.filename, 'r', encoding='utf-8')
f = self.macro_checker_file_type(self, filename, self.enabled_messages,
FileStreamMaker(filename))
f.process()
self.files.append(f) | [
"def",
"processFile",
"(",
"self",
",",
"filename",
")",
":",
"class",
"FileStreamMaker",
"(",
"object",
")",
":",
"def",
"__init__",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"filename",
"=",
"filename",
"def",
"make_stream",
"(",
"self",
")",
":",
"return",
"open",
"(",
"self",
".",
"filename",
",",
"'r'",
",",
"encoding",
"=",
"'utf-8'",
")",
"f",
"=",
"self",
".",
"macro_checker_file_type",
"(",
"self",
",",
"filename",
",",
"self",
".",
"enabled_messages",
",",
"FileStreamMaker",
"(",
"filename",
")",
")",
"f",
".",
"process",
"(",
")",
"self",
".",
"files",
".",
"append",
"(",
"f",
")"
] | https://github.com/KhronosGroup/Vulkan-Docs/blob/ee155139142a2a71b56238419bf0a6859f7b0a93/scripts/spec_tools/macro_checker.py#L141-L153 |
||
CaliOpen/Caliopen | 5361ebc5d4d2c525a87f737468b8a8e2aefbe3e8 | src/backend/components/py.pi/caliopen_pi/qualifiers/twitter.py | python | UserTwitterQualifier.lookup_discussion_sequence | (self, message, *args, **kwargs) | return seq, seq[0][1] | Return list of lookup type, value from a tweet. | Return list of lookup type, value from a tweet. | [
"Return",
"list",
"of",
"lookup",
"type",
"value",
"from",
"a",
"tweet",
"."
] | def lookup_discussion_sequence(self, message, *args, **kwargs):
"""Return list of lookup type, value from a tweet."""
seq = list()
participants = message.hash_participants
seq.append(('hash', participants))
return seq, seq[0][1] | [
"def",
"lookup_discussion_sequence",
"(",
"self",
",",
"message",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"seq",
"=",
"list",
"(",
")",
"participants",
"=",
"message",
".",
"hash_participants",
"seq",
".",
"append",
"(",
"(",
"'hash'",
",",
"participants",
")",
")",
"return",
"seq",
",",
"seq",
"[",
"0",
"]",
"[",
"1",
"]"
] | https://github.com/CaliOpen/Caliopen/blob/5361ebc5d4d2c525a87f737468b8a8e2aefbe3e8/src/backend/components/py.pi/caliopen_pi/qualifiers/twitter.py#L25-L32 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/reloop-closured/lib/python2.7/mhlib.py | python | Folder.getsequencesfilename | (self) | return os.path.join(self.getfullname(), MH_SEQUENCES) | Return the full pathname of the folder's sequences file. | Return the full pathname of the folder's sequences file. | [
"Return",
"the",
"full",
"pathname",
"of",
"the",
"folder",
"s",
"sequences",
"file",
"."
] | def getsequencesfilename(self):
"""Return the full pathname of the folder's sequences file."""
return os.path.join(self.getfullname(), MH_SEQUENCES) | [
"def",
"getsequencesfilename",
"(",
"self",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"getfullname",
"(",
")",
",",
"MH_SEQUENCES",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/reloop-closured/lib/python2.7/mhlib.py#L264-L266 |
|
pinterest/pinball | c54a206cf6e3dbadb056c189f741d75828c02f98 | pinball/master/thrift_lib/TokenMasterService.py | python | Iface.modify | (self, request) | Parameters:
- request | Parameters:
- request | [
"Parameters",
":",
"-",
"request"
] | def modify(self, request):
"""
Parameters:
- request
"""
pass | [
"def",
"modify",
"(",
"self",
",",
"request",
")",
":",
"pass"
] | https://github.com/pinterest/pinball/blob/c54a206cf6e3dbadb056c189f741d75828c02f98/pinball/master/thrift_lib/TokenMasterService.py#L35-L40 |
||
crits/crits | 6b357daa5c3060cf622d3a3b0c7b41a9ca69c049 | crits/core/views.py | python | source_access | (request) | Modify a user's profile. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse` | Modify a user's profile. Should be an AJAX POST. | [
"Modify",
"a",
"user",
"s",
"profile",
".",
"Should",
"be",
"an",
"AJAX",
"POST",
"."
] | def source_access(request):
"""
Modify a user's profile. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
form = SourceAccessForm(request.POST)
if form.is_valid():
data = form.cleaned_data
result = modify_source_access(request.user.username,
data)
if result['success']:
message = '<div>User modified successfully!</div>'
result['message'] = message
return HttpResponse(json.dumps(result),
content_type="application/json")
else:
return HttpResponse(json.dumps({'form':form.as_table()}),
content_type="application/json")
else:
error = "Expected AJAX POST!"
return render(request, "error.html", {"error" : error }) | [
"def",
"source_access",
"(",
"request",
")",
":",
"if",
"request",
".",
"method",
"==",
"'POST'",
"and",
"request",
".",
"is_ajax",
"(",
")",
":",
"form",
"=",
"SourceAccessForm",
"(",
"request",
".",
"POST",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"data",
"=",
"form",
".",
"cleaned_data",
"result",
"=",
"modify_source_access",
"(",
"request",
".",
"user",
".",
"username",
",",
"data",
")",
"if",
"result",
"[",
"'success'",
"]",
":",
"message",
"=",
"'<div>User modified successfully!</div>'",
"result",
"[",
"'message'",
"]",
"=",
"message",
"return",
"HttpResponse",
"(",
"json",
".",
"dumps",
"(",
"result",
")",
",",
"content_type",
"=",
"\"application/json\"",
")",
"else",
":",
"return",
"HttpResponse",
"(",
"json",
".",
"dumps",
"(",
"{",
"'form'",
":",
"form",
".",
"as_table",
"(",
")",
"}",
")",
",",
"content_type",
"=",
"\"application/json\"",
")",
"else",
":",
"error",
"=",
"\"Expected AJAX POST!\"",
"return",
"render",
"(",
"request",
",",
"\"error.html\"",
",",
"{",
"\"error\"",
":",
"error",
"}",
")"
] | https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/core/views.py#L718-L743 |
||
HaliteChallenge/Halite-II | 5cf95b4aef38621a44a503f90399af598fb51214 | apiserver/apiserver/web/util.py | python | parse_filter | (filter_string) | return field, operation, value | Parse a filter string into a field name, comparator, and value.
:param filter_string: Of the format field,operator,value.
:return: (field_name, operator_func, value) | Parse a filter string into a field name, comparator, and value.
:param filter_string: Of the format field,operator,value.
:return: (field_name, operator_func, value) | [
"Parse",
"a",
"filter",
"string",
"into",
"a",
"field",
"name",
"comparator",
"and",
"value",
".",
":",
"param",
"filter_string",
":",
"Of",
"the",
"format",
"field",
"operator",
"value",
".",
":",
"return",
":",
"(",
"field_name",
"operator_func",
"value",
")"
] | def parse_filter(filter_string):
"""
Parse a filter string into a field name, comparator, and value.
:param filter_string: Of the format field,operator,value.
:return: (field_name, operator_func, value)
"""
try:
field, cmp, value = filter_string.split(",", 2)
except Exception as e:
raise util.APIError(
400,
message="Filter '{}' is ill-formed.".format(filter_string))
operation = {
"=": operator.eq,
"<": operator.lt,
"<=": operator.le,
">": operator.gt,
">=": operator.ge,
"!=": operator.ne,
"contains": operator_like,
}.get(cmp, None)
if operation is None:
raise util.APIError(
400, message="Cannot compare '{}' by '{}'".format(field, cmp))
return field, operation, value | [
"def",
"parse_filter",
"(",
"filter_string",
")",
":",
"try",
":",
"field",
",",
"cmp",
",",
"value",
"=",
"filter_string",
".",
"split",
"(",
"\",\"",
",",
"2",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"util",
".",
"APIError",
"(",
"400",
",",
"message",
"=",
"\"Filter '{}' is ill-formed.\"",
".",
"format",
"(",
"filter_string",
")",
")",
"operation",
"=",
"{",
"\"=\"",
":",
"operator",
".",
"eq",
",",
"\"<\"",
":",
"operator",
".",
"lt",
",",
"\"<=\"",
":",
"operator",
".",
"le",
",",
"\">\"",
":",
"operator",
".",
"gt",
",",
"\">=\"",
":",
"operator",
".",
"ge",
",",
"\"!=\"",
":",
"operator",
".",
"ne",
",",
"\"contains\"",
":",
"operator_like",
",",
"}",
".",
"get",
"(",
"cmp",
",",
"None",
")",
"if",
"operation",
"is",
"None",
":",
"raise",
"util",
".",
"APIError",
"(",
"400",
",",
"message",
"=",
"\"Cannot compare '{}' by '{}'\"",
".",
"format",
"(",
"field",
",",
"cmp",
")",
")",
"return",
"field",
",",
"operation",
",",
"value"
] | https://github.com/HaliteChallenge/Halite-II/blob/5cf95b4aef38621a44a503f90399af598fb51214/apiserver/apiserver/web/util.py#L166-L192 |
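A usage sketch for the entry above (illustrative; it assumes parse_filter is importable from the API server's web util module and that the filter string is well formed):
field, op, value = parse_filter("rank,<=,100")
print(field)               # 'rank'
print(op(50, int(value)))  # True, since '<=' maps to operator.le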
|
xl7dev/BurpSuite | d1d4bd4981a87f2f4c0c9744ad7c476336c813da | Extender/burp-git-bridge/burp_git_bridge.py | python | GitLog.remove | (self, entry) | Removes the given LogEntry from the underlying git repo. | Removes the given LogEntry from the underlying git repo. | [
"Removes",
"the",
"given",
"LogEntry",
"from",
"the",
"underlying",
"git",
"repo",
"."
] | def remove(self, entry):
'''
Removes the given LogEntry from the underlying git repo.
'''
entry_path = os.path.join(self.repo_path, entry.md5)
subprocess.check_output(["git", "rm", "-rf", entry_path],
cwd=self.repo_path)
subprocess.check_call(["git", "commit", "-m", "Removed entry at %s" %
entry_path], cwd=self.repo_path) | [
"def",
"remove",
"(",
"self",
",",
"entry",
")",
":",
"entry_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"repo_path",
",",
"entry",
".",
"md5",
")",
"subprocess",
".",
"check_output",
"(",
"[",
"\"git\"",
",",
"\"rm\"",
",",
"\"-rf\"",
",",
"entry_path",
"]",
",",
"cwd",
"=",
"self",
".",
"repo_path",
")",
"subprocess",
".",
"check_call",
"(",
"[",
"\"git\"",
",",
"\"commit\"",
",",
"\"-m\"",
",",
"\"Removed entry at %s\"",
"%",
"entry_path",
"]",
",",
"cwd",
"=",
"self",
".",
"repo_path",
")"
] | https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/burp-git-bridge/burp_git_bridge.py#L480-L488 |
||
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/lib2to3/lib2to3/pytree.py | python | Leaf.__unicode__ | (self) | return self.prefix + unicode(self.value) | Return a pretty string representation.
This reproduces the input source exactly. | Return a pretty string representation. | [
"Return",
"a",
"pretty",
"string",
"representation",
"."
] | def __unicode__(self):
"""
Return a pretty string representation.
This reproduces the input source exactly.
"""
return self.prefix + unicode(self.value) | [
"def",
"__unicode__",
"(",
"self",
")",
":",
"return",
"self",
".",
"prefix",
"+",
"unicode",
"(",
"self",
".",
"value",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/lib2to3/lib2to3/pytree.py#L385-L391 |
|
googleglass/mirror-quickstart-python | e34077bae91657170c305702471f5c249eb1b686 | main_handler.py | python | MainHandler.get | (self) | Render the main page. | Render the main page. | [
"Render",
"the",
"main",
"page",
"."
] | def get(self):
"""Render the main page."""
# Get the flash message and delete it.
message = memcache.get(key=self.userid)
memcache.delete(key=self.userid)
self._render_template(message) | [
"def",
"get",
"(",
"self",
")",
":",
"# Get the flash message and delete it.",
"message",
"=",
"memcache",
".",
"get",
"(",
"key",
"=",
"self",
".",
"userid",
")",
"memcache",
".",
"delete",
"(",
"key",
"=",
"self",
".",
"userid",
")",
"self",
".",
"_render_template",
"(",
"message",
")"
] | https://github.com/googleglass/mirror-quickstart-python/blob/e34077bae91657170c305702471f5c249eb1b686/main_handler.py#L110-L115 |
||
redapple0204/my-boring-python | 1ab378e9d4f39ad920ff542ef3b2db68f0575a98 | pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py | python | TarInfo._proc_gnusparse_10 | (self, next, pax_headers, tarfile) | Process a GNU tar extended sparse header, version 1.0. | Process a GNU tar extended sparse header, version 1.0. | [
"Process",
"a",
"GNU",
"tar",
"extended",
"sparse",
"header",
"version",
"1",
".",
"0",
"."
] | def _proc_gnusparse_10(self, next, pax_headers, tarfile):
"""Process a GNU tar extended sparse header, version 1.0.
"""
fields = None
sparse = []
buf = tarfile.fileobj.read(BLOCKSIZE)
fields, buf = buf.split(b"\n", 1)
fields = int(fields)
while len(sparse) < fields * 2:
if b"\n" not in buf:
buf += tarfile.fileobj.read(BLOCKSIZE)
number, buf = buf.split(b"\n", 1)
sparse.append(int(number))
next.offset_data = tarfile.fileobj.tell()
next.sparse = list(zip(sparse[::2], sparse[1::2])) | [
"def",
"_proc_gnusparse_10",
"(",
"self",
",",
"next",
",",
"pax_headers",
",",
"tarfile",
")",
":",
"fields",
"=",
"None",
"sparse",
"=",
"[",
"]",
"buf",
"=",
"tarfile",
".",
"fileobj",
".",
"read",
"(",
"BLOCKSIZE",
")",
"fields",
",",
"buf",
"=",
"buf",
".",
"split",
"(",
"b\"\\n\"",
",",
"1",
")",
"fields",
"=",
"int",
"(",
"fields",
")",
"while",
"len",
"(",
"sparse",
")",
"<",
"fields",
"*",
"2",
":",
"if",
"b\"\\n\"",
"not",
"in",
"buf",
":",
"buf",
"+=",
"tarfile",
".",
"fileobj",
".",
"read",
"(",
"BLOCKSIZE",
")",
"number",
",",
"buf",
"=",
"buf",
".",
"split",
"(",
"b\"\\n\"",
",",
"1",
")",
"sparse",
".",
"append",
"(",
"int",
"(",
"number",
")",
")",
"next",
".",
"offset_data",
"=",
"tarfile",
".",
"fileobj",
".",
"tell",
"(",
")",
"next",
".",
"sparse",
"=",
"list",
"(",
"zip",
"(",
"sparse",
"[",
":",
":",
"2",
"]",
",",
"sparse",
"[",
"1",
":",
":",
"2",
"]",
")",
")"
] | https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L1502-L1516 |
||
sveith/jinfinote | 296dca4e6f765b2098cc61be50acaaa8df0a9c5a | demo/websocket.py | python | WebSocketRequest.renderWebSocket | (self) | Render a WebSocket request.
If the request is not identified with a proper WebSocket handshake, the
connection will be closed. Otherwise, the response to the handshake is
sent and a C{WebSocketHandler} is created to handle the request. | Render a WebSocket request. | [
"Render",
"a",
"WebSocket",
"request",
"."
] | def renderWebSocket(self):
"""
Render a WebSocket request.
If the request is not identified with a proper WebSocket handshake, the
connection will be closed. Otherwise, the response to the handshake is
sent and a C{WebSocketHandler} is created to handle the request.
"""
# check for post-75 handshake requests
isSecHandshake = self.requestHeaders.getRawHeaders("Sec-WebSocket-Key1", [])
if isSecHandshake:
self._clientHandshake76()
else:
check = self._checkClientHandshake()
if check is None:
return
originHeader, hostHeader, protocolHeader, handler = check
self.startedWriting = True
handshake = [
"HTTP/1.1 101 Web Socket Protocol Handshake",
"Upgrade: WebSocket",
"Connection: Upgrade"]
handshake.append("WebSocket-Origin: %s" % (originHeader))
if self.isSecure():
scheme = "wss"
else:
scheme = "ws"
handshake.append(
"WebSocket-Location: %s://%s%s" % (
scheme, hostHeader, self.uri))
if protocolHeader is not None:
handshake.append("WebSocket-Protocol: %s" % protocolHeader)
for header in handshake:
self.write("%s\r\n" % header)
self.write("\r\n")
self.channel.setRawMode()
# XXX we probably don't want to set _transferDecoder
self.channel._transferDecoder = WebSocketFrameDecoder(
self, handler)
handler.transport._connectionMade()
return | [
"def",
"renderWebSocket",
"(",
"self",
")",
":",
"# check for post-75 handshake requests",
"isSecHandshake",
"=",
"self",
".",
"requestHeaders",
".",
"getRawHeaders",
"(",
"\"Sec-WebSocket-Key1\"",
",",
"[",
"]",
")",
"if",
"isSecHandshake",
":",
"self",
".",
"_clientHandshake76",
"(",
")",
"else",
":",
"check",
"=",
"self",
".",
"_checkClientHandshake",
"(",
")",
"if",
"check",
"is",
"None",
":",
"return",
"originHeader",
",",
"hostHeader",
",",
"protocolHeader",
",",
"handler",
"=",
"check",
"self",
".",
"startedWriting",
"=",
"True",
"handshake",
"=",
"[",
"\"HTTP/1.1 101 Web Socket Protocol Handshake\"",
",",
"\"Upgrade: WebSocket\"",
",",
"\"Connection: Upgrade\"",
"]",
"handshake",
".",
"append",
"(",
"\"WebSocket-Origin: %s\"",
"%",
"(",
"originHeader",
")",
")",
"if",
"self",
".",
"isSecure",
"(",
")",
":",
"scheme",
"=",
"\"wss\"",
"else",
":",
"scheme",
"=",
"\"ws\"",
"handshake",
".",
"append",
"(",
"\"WebSocket-Location: %s://%s%s\"",
"%",
"(",
"scheme",
",",
"hostHeader",
",",
"self",
".",
"uri",
")",
")",
"if",
"protocolHeader",
"is",
"not",
"None",
":",
"handshake",
".",
"append",
"(",
"\"WebSocket-Protocol: %s\"",
"%",
"protocolHeader",
")",
"for",
"header",
"in",
"handshake",
":",
"self",
".",
"write",
"(",
"\"%s\\r\\n\"",
"%",
"header",
")",
"self",
".",
"write",
"(",
"\"\\r\\n\"",
")",
"self",
".",
"channel",
".",
"setRawMode",
"(",
")",
"# XXX we probably don't want to set _transferDecoder",
"self",
".",
"channel",
".",
"_transferDecoder",
"=",
"WebSocketFrameDecoder",
"(",
"self",
",",
"handler",
")",
"handler",
".",
"transport",
".",
"_connectionMade",
"(",
")",
"return"
] | https://github.com/sveith/jinfinote/blob/296dca4e6f765b2098cc61be50acaaa8df0a9c5a/demo/websocket.py#L219-L262 |
||
facebookarchive/nuclide | 2a2a0a642d136768b7d2a6d35a652dc5fb77d70a | modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/pycodestyle.py | python | whitespace_around_named_parameter_equals | (logical_line, tokens) | r"""Don't use spaces around the '=' sign in function arguments.
Don't use spaces around the '=' sign when used to indicate a
keyword argument or a default parameter value.
Okay: def complex(real, imag=0.0):
Okay: return magic(r=real, i=imag)
Okay: boolean(a == b)
Okay: boolean(a != b)
Okay: boolean(a <= b)
Okay: boolean(a >= b)
Okay: def foo(arg: int = 42):
Okay: async def foo(arg: int = 42):
E251: def complex(real, imag = 0.0):
E251: return magic(r = real, i = imag) | r"""Don't use spaces around the '=' sign in function arguments. | [
"r",
"Don",
"t",
"use",
"spaces",
"around",
"the",
"=",
"sign",
"in",
"function",
"arguments",
"."
] | def whitespace_around_named_parameter_equals(logical_line, tokens):
r"""Don't use spaces around the '=' sign in function arguments.
Don't use spaces around the '=' sign when used to indicate a
keyword argument or a default parameter value.
Okay: def complex(real, imag=0.0):
Okay: return magic(r=real, i=imag)
Okay: boolean(a == b)
Okay: boolean(a != b)
Okay: boolean(a <= b)
Okay: boolean(a >= b)
Okay: def foo(arg: int = 42):
Okay: async def foo(arg: int = 42):
E251: def complex(real, imag = 0.0):
E251: return magic(r = real, i = imag)
"""
parens = 0
no_space = False
prev_end = None
annotated_func_arg = False
in_def = bool(STARTSWITH_DEF_REGEX.match(logical_line))
message = "E251 unexpected spaces around keyword / parameter equals"
for token_type, text, start, end, line in tokens:
if token_type == tokenize.NL:
continue
if no_space:
no_space = False
if start != prev_end:
yield (prev_end, message)
if token_type == tokenize.OP:
if text in '([':
parens += 1
elif text in ')]':
parens -= 1
elif in_def and text == ':' and parens == 1:
annotated_func_arg = True
elif parens and text == ',' and parens == 1:
annotated_func_arg = False
elif parens and text == '=' and not annotated_func_arg:
no_space = True
if start != prev_end:
yield (prev_end, message)
if not parens:
annotated_func_arg = False
prev_end = end | [
"def",
"whitespace_around_named_parameter_equals",
"(",
"logical_line",
",",
"tokens",
")",
":",
"parens",
"=",
"0",
"no_space",
"=",
"False",
"prev_end",
"=",
"None",
"annotated_func_arg",
"=",
"False",
"in_def",
"=",
"bool",
"(",
"STARTSWITH_DEF_REGEX",
".",
"match",
"(",
"logical_line",
")",
")",
"message",
"=",
"\"E251 unexpected spaces around keyword / parameter equals\"",
"for",
"token_type",
",",
"text",
",",
"start",
",",
"end",
",",
"line",
"in",
"tokens",
":",
"if",
"token_type",
"==",
"tokenize",
".",
"NL",
":",
"continue",
"if",
"no_space",
":",
"no_space",
"=",
"False",
"if",
"start",
"!=",
"prev_end",
":",
"yield",
"(",
"prev_end",
",",
"message",
")",
"if",
"token_type",
"==",
"tokenize",
".",
"OP",
":",
"if",
"text",
"in",
"'(['",
":",
"parens",
"+=",
"1",
"elif",
"text",
"in",
"')]'",
":",
"parens",
"-=",
"1",
"elif",
"in_def",
"and",
"text",
"==",
"':'",
"and",
"parens",
"==",
"1",
":",
"annotated_func_arg",
"=",
"True",
"elif",
"parens",
"and",
"text",
"==",
"','",
"and",
"parens",
"==",
"1",
":",
"annotated_func_arg",
"=",
"False",
"elif",
"parens",
"and",
"text",
"==",
"'='",
"and",
"not",
"annotated_func_arg",
":",
"no_space",
"=",
"True",
"if",
"start",
"!=",
"prev_end",
":",
"yield",
"(",
"prev_end",
",",
"message",
")",
"if",
"not",
"parens",
":",
"annotated_func_arg",
"=",
"False",
"prev_end",
"=",
"end"
] | https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/pycodestyle.py#L809-L856 |
||
harvard-lil/perma | c54ff21b3eee931f5094a7654fdddc9ad90fc29c | services/docker/webrecorder/collection.py | python | Collection.get_warc_path | (self, name) | return self.redis.hget(self.get_warc_key(), name) | Returns the full path or URL to the WARC for the supplied recording name
:param str name: The recordings name
:return: The full path or URL to the WARC
:rtype: str | Returns the full path or URL to the WARC for the supplied recording name | [
"Returns",
"the",
"full",
"path",
"or",
"URL",
"to",
"the",
"WARC",
"for",
"the",
"supplied",
"recording",
"name"
] | def get_warc_path(self, name):
"""Returns the full path or URL to the WARC for the supplied recording name
:param str name: The recordings name
:return: The full path or URL to the WARC
:rtype: str
"""
return self.redis.hget(self.get_warc_key(), name) | [
"def",
"get_warc_path",
"(",
"self",
",",
"name",
")",
":",
"return",
"self",
".",
"redis",
".",
"hget",
"(",
"self",
".",
"get_warc_key",
"(",
")",
",",
"name",
")"
] | https://github.com/harvard-lil/perma/blob/c54ff21b3eee931f5094a7654fdddc9ad90fc29c/services/docker/webrecorder/collection.py#L414-L421 |
|
wotermelon/toJump | 3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f | lib/win/systrace/catapult/devil/devil/utils/cmd_helper.py | python | IterCmdOutputLines | (args, iter_timeout=None, timeout=None, cwd=None,
shell=False, check_status=True) | return _IterCmdOutputLines(
process, cmd, iter_timeout=iter_timeout, timeout=timeout,
check_status=check_status) | Executes a subprocess and continuously yields lines from its output.
Args:
args: List of arguments to the program, the program to execute is the first
element.
iter_timeout: Timeout for each iteration, in seconds.
timeout: Timeout for the entire command, in seconds.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command. Must be True if args
is a string and False if args is a sequence.
check_status: A boolean indicating whether to check the exit status of the
process after all output has been read.
Yields:
The output of the subprocess, line by line.
Raises:
CalledProcessError if check_status is True and the process exited with a
non-zero exit status. | Executes a subprocess and continuously yields lines from its output. | [
"Executes",
"a",
"subprocess",
"and",
"continuously",
"yields",
"lines",
"from",
"its",
"output",
"."
] | def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None,
shell=False, check_status=True):
"""Executes a subprocess and continuously yields lines from its output.
Args:
args: List of arguments to the program, the program to execute is the first
element.
iter_timeout: Timeout for each iteration, in seconds.
timeout: Timeout for the entire command, in seconds.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command. Must be True if args
is a string and False if args is a sequence.
check_status: A boolean indicating whether to check the exit status of the
process after all output has been read.
Yields:
The output of the subprocess, line by line.
Raises:
CalledProcessError if check_status is True and the process exited with a
non-zero exit status.
"""
cmd = _ValidateAndLogCommand(args, cwd, shell)
process = Popen(args, cwd=cwd, shell=shell, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
return _IterCmdOutputLines(
process, cmd, iter_timeout=iter_timeout, timeout=timeout,
check_status=check_status) | [
"def",
"IterCmdOutputLines",
"(",
"args",
",",
"iter_timeout",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"cwd",
"=",
"None",
",",
"shell",
"=",
"False",
",",
"check_status",
"=",
"True",
")",
":",
"cmd",
"=",
"_ValidateAndLogCommand",
"(",
"args",
",",
"cwd",
",",
"shell",
")",
"process",
"=",
"Popen",
"(",
"args",
",",
"cwd",
"=",
"cwd",
",",
"shell",
"=",
"shell",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
")",
"return",
"_IterCmdOutputLines",
"(",
"process",
",",
"cmd",
",",
"iter_timeout",
"=",
"iter_timeout",
",",
"timeout",
"=",
"timeout",
",",
"check_status",
"=",
"check_status",
")"
] | https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/win/systrace/catapult/devil/devil/utils/cmd_helper.py#L385-L412 |
|
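The IterCmdOutputLines record above describes a line-by-line streaming wrapper around a subprocess. A hedged usage sketch follows; the import path is inferred from the record's file path (catapult's devil package), and the command and timeout values are illustrative assumptions only:

```python
# Hypothetical usage of IterCmdOutputLines; assumes catapult's `devil`
# package is importable as shown (inferred from the record's path).
from devil.utils import cmd_helper

# Stream output lines as the command produces them, with a per-iteration
# timeout of 5 s and an overall timeout of 60 s (per the docstring above).
for line in cmd_helper.IterCmdOutputLines(
        ['ping', '-c', '3', 'localhost'], iter_timeout=5, timeout=60):
    print(line.rstrip())
```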
npm/cli | 892b66eba9f21dbfbc250572d437141e39a6de24 | node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py | python | MsvsSettings.GetCflagsC | (self, config) | return self._GetPchFlags(config, ".c") | Returns the flags that need to be added to .c compilations. | Returns the flags that need to be added to .c compilations. | [
"Returns",
"the",
"flags",
"that",
"need",
"to",
"be",
"added",
"to",
".",
"c",
"compilations",
"."
] | def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, ".c") | [
"def",
"GetCflagsC",
"(",
"self",
",",
"config",
")",
":",
"config",
"=",
"self",
".",
"_TargetConfig",
"(",
"config",
")",
"return",
"self",
".",
"_GetPchFlags",
"(",
"config",
",",
"\".c\"",
")"
] | https://github.com/npm/cli/blob/892b66eba9f21dbfbc250572d437141e39a6de24/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py#L571-L574 |
|
nodejs/node-chakracore | 770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43 | deps/chakrashim/third_party/jinja2/ext/django2jinja/django2jinja.py | python | Writer.literal | (self, value) | Writes a value as literal. | Writes a value as literal. | [
"Writes",
"a",
"value",
"as",
"literal",
"."
] | def literal(self, value):
"""Writes a value as literal."""
value = repr(value)
if value[:2] in ('u"', "u'"):
value = value[1:]
self.write(value) | [
"def",
"literal",
"(",
"self",
",",
"value",
")",
":",
"value",
"=",
"repr",
"(",
"value",
")",
"if",
"value",
"[",
":",
"2",
"]",
"in",
"(",
"'u\"'",
",",
"\"u'\"",
")",
":",
"value",
"=",
"value",
"[",
"1",
":",
"]",
"self",
".",
"write",
"(",
"value",
")"
] | https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/deps/chakrashim/third_party/jinja2/ext/django2jinja/django2jinja.py#L257-L262 |
||
jam-py/jam-py | 0821492cdff8665928e0f093a4435aa64285a45c | jam/third_party/sqlalchemy/util/langhelpers.py | python | coerce_kw_type | (kw, key, type_, flexi_bool=True, dest=None) | r"""If 'key' is present in dict 'kw', coerce its value to type 'type\_' if
necessary. If 'flexi_bool' is True, the string '0' is considered false
when coercing to boolean. | r"""If 'key' is present in dict 'kw', coerce its value to type 'type\_' if
necessary. If 'flexi_bool' is True, the string '0' is considered false
when coercing to boolean. | [
"r",
"If",
"key",
"is",
"present",
"in",
"dict",
"kw",
"coerce",
"its",
"value",
"to",
"type",
"type",
"\\",
"_",
"if",
"necessary",
".",
"If",
"flexi_bool",
"is",
"True",
"the",
"string",
"0",
"is",
"considered",
"false",
"when",
"coercing",
"to",
"boolean",
"."
] | def coerce_kw_type(kw, key, type_, flexi_bool=True, dest=None):
r"""If 'key' is present in dict 'kw', coerce its value to type 'type\_' if
necessary. If 'flexi_bool' is True, the string '0' is considered false
when coercing to boolean.
"""
if dest is None:
dest = kw
if (
key in kw
and (not isinstance(type_, type) or not isinstance(kw[key], type_))
and kw[key] is not None
):
if type_ is bool and flexi_bool:
dest[key] = asbool(kw[key])
else:
dest[key] = type_(kw[key]) | [
"def",
"coerce_kw_type",
"(",
"kw",
",",
"key",
",",
"type_",
",",
"flexi_bool",
"=",
"True",
",",
"dest",
"=",
"None",
")",
":",
"if",
"dest",
"is",
"None",
":",
"dest",
"=",
"kw",
"if",
"(",
"key",
"in",
"kw",
"and",
"(",
"not",
"isinstance",
"(",
"type_",
",",
"type",
")",
"or",
"not",
"isinstance",
"(",
"kw",
"[",
"key",
"]",
",",
"type_",
")",
")",
"and",
"kw",
"[",
"key",
"]",
"is",
"not",
"None",
")",
":",
"if",
"type_",
"is",
"bool",
"and",
"flexi_bool",
":",
"dest",
"[",
"key",
"]",
"=",
"asbool",
"(",
"kw",
"[",
"key",
"]",
")",
"else",
":",
"dest",
"[",
"key",
"]",
"=",
"type_",
"(",
"kw",
"[",
"key",
"]",
")"
] | https://github.com/jam-py/jam-py/blob/0821492cdff8665928e0f093a4435aa64285a45c/jam/third_party/sqlalchemy/util/langhelpers.py#L1157-L1174 |
||
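The coerce_kw_type record above converts a dict entry in place, treating the string '0' as false when flexi_bool is set. A small behavioural sketch, assuming the helper is importable as sqlalchemy.util.coerce_kw_type (the dict values are invented):

```python
# Illustration only; assumes coerce_kw_type is exposed via sqlalchemy.util.
from sqlalchemy.util import coerce_kw_type

kw = {"port": "5432", "echo": "0"}
coerce_kw_type(kw, "port", int)    # "5432" -> 5432
coerce_kw_type(kw, "echo", bool)   # flexi_bool=True, so "0" -> False via asbool()
assert kw == {"port": 5432, "echo": False}
```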
googlearchive/drive-dredit | b916358d8e5ca14687667c5fff2463165253a53f | python/main.py | python | BaseDriveHandler.CreateUserInfo | (self) | return self.CreateAuthorizedService('oauth2', 'v2') | Create a user info client instance. | Create a user info client instance. | [
"Create",
"a",
"user",
"info",
"client",
"instance",
"."
] | def CreateUserInfo(self):
"""Create a user info client instance."""
return self.CreateAuthorizedService('oauth2', 'v2') | [
"def",
"CreateUserInfo",
"(",
"self",
")",
":",
"return",
"self",
".",
"CreateAuthorizedService",
"(",
"'oauth2'",
",",
"'v2'",
")"
] | https://github.com/googlearchive/drive-dredit/blob/b916358d8e5ca14687667c5fff2463165253a53f/python/main.py#L289-L291 |
|
sbrshk/whatever | f7ba72effd6f836ca701ed889c747db804d5ea8f | node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py | python | Filter.__init__ | (self, name, contents=None) | Initializes the folder.
Args:
name: Filter (folder) name.
contents: List of filenames and/or Filter objects contained. | Initializes the folder. | [
"Initializes",
"the",
"folder",
"."
] | def __init__(self, name, contents=None):
"""Initializes the folder.
Args:
name: Filter (folder) name.
contents: List of filenames and/or Filter objects contained.
"""
self.name = name
self.contents = list(contents or []) | [
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"contents",
"=",
"None",
")",
":",
"self",
".",
"name",
"=",
"name",
"self",
".",
"contents",
"=",
"list",
"(",
"contents",
"or",
"[",
"]",
")"
] | https://github.com/sbrshk/whatever/blob/f7ba72effd6f836ca701ed889c747db804d5ea8f/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py#L37-L45 |
||
nodejs/quic | 5baab3f3a05548d3b51bea98868412b08766e34d | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | python | XcodeSettings.GetWrapperExtension | (self) | Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles. | Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles. | [
"Returns",
"the",
"bundle",
"extension",
"(",
".",
"app",
".",
"framework",
".",
"plugin",
"etc",
")",
".",
"Only",
"valid",
"for",
"bundles",
"."
] | def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('loadable_module', 'shared_library'):
default_wrapper_extension = {
'loadable_module': 'bundle',
'shared_library': 'framework',
}[self.spec['type']]
wrapper_extension = self.GetPerTargetSetting(
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
return '.' + self.spec.get('product_extension', 'appex')
else:
return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name']) | [
"def",
"GetWrapperExtension",
"(",
"self",
")",
":",
"assert",
"self",
".",
"_IsBundle",
"(",
")",
"if",
"self",
".",
"spec",
"[",
"'type'",
"]",
"in",
"(",
"'loadable_module'",
",",
"'shared_library'",
")",
":",
"default_wrapper_extension",
"=",
"{",
"'loadable_module'",
":",
"'bundle'",
",",
"'shared_library'",
":",
"'framework'",
",",
"}",
"[",
"self",
".",
"spec",
"[",
"'type'",
"]",
"]",
"wrapper_extension",
"=",
"self",
".",
"GetPerTargetSetting",
"(",
"'WRAPPER_EXTENSION'",
",",
"default",
"=",
"default_wrapper_extension",
")",
"return",
"'.'",
"+",
"self",
".",
"spec",
".",
"get",
"(",
"'product_extension'",
",",
"wrapper_extension",
")",
"elif",
"self",
".",
"spec",
"[",
"'type'",
"]",
"==",
"'executable'",
":",
"if",
"self",
".",
"_IsIosAppExtension",
"(",
")",
"or",
"self",
".",
"_IsIosWatchKitExtension",
"(",
")",
":",
"return",
"'.'",
"+",
"self",
".",
"spec",
".",
"get",
"(",
"'product_extension'",
",",
"'appex'",
")",
"else",
":",
"return",
"'.'",
"+",
"self",
".",
"spec",
".",
"get",
"(",
"'product_extension'",
",",
"'app'",
")",
"else",
":",
"assert",
"False",
",",
"\"Don't know extension for '%s', target '%s'\"",
"%",
"(",
"self",
".",
"spec",
"[",
"'type'",
"]",
",",
"self",
".",
"spec",
"[",
"'target_name'",
"]",
")"
] | https://github.com/nodejs/quic/blob/5baab3f3a05548d3b51bea98868412b08766e34d/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L249-L268 |
||
cloudkick/cast | 8ea819b082201fb9a8c7c158dfe0945be4ff03f8 | lib/extern/closure-linter/closure_linter/common/position.py | python | Position.All | (string) | return Position(0, len(string)) | Create a Position representing the entire string.
Args:
string: The string to represent the entirety of.
Returns:
The created Position object. | Create a Position representing the entire string. | [
"Create",
"a",
"Position",
"representing",
"the",
"entire",
"string",
"."
] | def All(string):
"""Create a Position representing the entire string.
Args:
string: The string to represent the entirety of.
Returns:
The created Position object.
"""
return Position(0, len(string)) | [
"def",
"All",
"(",
"string",
")",
":",
"return",
"Position",
"(",
"0",
",",
"len",
"(",
"string",
")",
")"
] | https://github.com/cloudkick/cast/blob/8ea819b082201fb9a8c7c158dfe0945be4ff03f8/lib/extern/closure-linter/closure_linter/common/position.py#L104-L113 |
|
GeoNode/geonode | 326d70153ad79e1ed831d46a0e3b239d422757a8 | geonode/monitoring/aggregation.py | python | calculate_rate | (metric_name, metric_label,
current_value, valid_to) | return rate | Find previous network metric value and calculate rate between them | Find previous network metric value and calculate rate between them | [
"Find",
"previous",
"network",
"metric",
"value",
"and",
"caclulate",
"rate",
"between",
"them"
] | def calculate_rate(metric_name, metric_label,
current_value, valid_to):
"""
Find previous network metric value and calculate rate between them
"""
prev = MetricValue.objects.filter(service_metric__metric__name=metric_name,
label__name=metric_label,
valid_to__lt=valid_to)\
.order_by('-valid_to').first()
if not prev:
return
prev_val = prev.value_num
valid_to = valid_to.replace(tzinfo=pytz.utc)
prev.valid_to = prev.valid_to.replace(tzinfo=pytz.utc)
interval = valid_to - prev.valid_to
if not isinstance(current_value, Decimal):
current_value = Decimal(current_value)
# this means counter was reset, don't want rates below 0
if current_value < prev_val:
return
rate = float(current_value - prev_val) / interval.total_seconds()
return rate | [
"def",
"calculate_rate",
"(",
"metric_name",
",",
"metric_label",
",",
"current_value",
",",
"valid_to",
")",
":",
"prev",
"=",
"MetricValue",
".",
"objects",
".",
"filter",
"(",
"service_metric__metric__name",
"=",
"metric_name",
",",
"label__name",
"=",
"metric_label",
",",
"valid_to__lt",
"=",
"valid_to",
")",
".",
"order_by",
"(",
"'-valid_to'",
")",
".",
"first",
"(",
")",
"if",
"not",
"prev",
":",
"return",
"prev_val",
"=",
"prev",
".",
"value_num",
"valid_to",
"=",
"valid_to",
".",
"replace",
"(",
"tzinfo",
"=",
"pytz",
".",
"utc",
")",
"prev",
".",
"valid_to",
"=",
"prev",
".",
"valid_to",
".",
"replace",
"(",
"tzinfo",
"=",
"pytz",
".",
"utc",
")",
"interval",
"=",
"valid_to",
"-",
"prev",
".",
"valid_to",
"if",
"not",
"isinstance",
"(",
"current_value",
",",
"Decimal",
")",
":",
"current_value",
"=",
"Decimal",
"(",
"current_value",
")",
"# this means counter was reset, don't want rates below 0",
"if",
"current_value",
"<",
"prev_val",
":",
"return",
"rate",
"=",
"float",
"(",
"current_value",
"-",
"prev_val",
")",
"/",
"interval",
".",
"total_seconds",
"(",
")",
"return",
"rate"
] | https://github.com/GeoNode/geonode/blob/326d70153ad79e1ed831d46a0e3b239d422757a8/geonode/monitoring/aggregation.py#L138-L160 |
|
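The calculate_rate record above reduces two consecutive counter samples to a per-second rate. Stripped of the MetricValue lookup, the arithmetic is just the following (all numbers invented):

```python
# Worked example of the rate computation in calculate_rate (values invented).
prev_val = 1200.0          # earlier sample of the counter
current_value = 1500.0     # latest sample of the same counter
interval_seconds = 60.0    # seconds between the two samples' valid_to stamps

rate = (current_value - prev_val) / interval_seconds
assert rate == 5.0         # 5 counter units per second

# When the counter resets (current_value < prev_val) the original returns
# early instead of reporting a negative rate.
```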
nodejs/node-chakracore | 770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43 | deps/chakrashim/third_party/jinja2/jinja2/sandbox.py | python | SandboxedEnvironment.unsafe_undefined | (self, obj, attribute) | return self.undefined('access to attribute %r of %r '
'object is unsafe.' % (
attribute,
obj.__class__.__name__
), name=attribute, obj=obj, exc=SecurityError) | Return an undefined object for unsafe attributes. | Return an undefined object for unsafe attributes. | [
"Return",
"an",
"undefined",
"object",
"for",
"unsafe",
"attributes",
"."
] | def unsafe_undefined(self, obj, attribute):
"""Return an undefined object for unsafe attributes."""
return self.undefined('access to attribute %r of %r '
'object is unsafe.' % (
attribute,
obj.__class__.__name__
), name=attribute, obj=obj, exc=SecurityError) | [
"def",
"unsafe_undefined",
"(",
"self",
",",
"obj",
",",
"attribute",
")",
":",
"return",
"self",
".",
"undefined",
"(",
"'access to attribute %r of %r '",
"'object is unsafe.'",
"%",
"(",
"attribute",
",",
"obj",
".",
"__class__",
".",
"__name__",
")",
",",
"name",
"=",
"attribute",
",",
"obj",
"=",
"obj",
",",
"exc",
"=",
"SecurityError",
")"
] | https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/deps/chakrashim/third_party/jinja2/jinja2/sandbox.py#L341-L347 |
|
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | addons/website_slides/controllers/main.py | python | WebsiteSlides.slide_archive | (self, slide_id) | return False | This route allows channel publishers to archive slides.
It has to be done in sudo mode since only website_publishers can write on slides in ACLs | This route allows channel publishers to archive slides.
It has to be done in sudo mode since only website_publishers can write on slides in ACLs | [
"This",
"route",
"allows",
"channel",
"publishers",
"to",
"archive",
"slides",
".",
"It",
"has",
"to",
"be",
"done",
"in",
"sudo",
"mode",
"since",
"only",
"website_publishers",
"can",
"write",
"on",
"slides",
"in",
"ACLs"
] | def slide_archive(self, slide_id):
""" This route allows channel publishers to archive slides.
It has to be done in sudo mode since only website_publishers can write on slides in ACLs """
slide = request.env['slide.slide'].browse(int(slide_id))
if slide.channel_id.can_publish:
slide.sudo().active = False
return True
return False | [
"def",
"slide_archive",
"(",
"self",
",",
"slide_id",
")",
":",
"slide",
"=",
"request",
".",
"env",
"[",
"'slide.slide'",
"]",
".",
"browse",
"(",
"int",
"(",
"slide_id",
")",
")",
"if",
"slide",
".",
"channel_id",
".",
"can_publish",
":",
"slide",
".",
"sudo",
"(",
")",
".",
"active",
"=",
"False",
"return",
"True",
"return",
"False"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/website_slides/controllers/main.py#L825-L833 |
|
cuckoosandbox/cuckoo | 50452a39ff7c3e0c4c94d114bc6317101633b958 | cuckoo/core/resultserver.py | python | ResultServer.del_task | (self, task, machine) | Delete running task and cancel existing handlers. | Delete running task and cancel existing handlers. | [
"Delete",
"running",
"task",
"and",
"cancel",
"existing",
"handlers",
"."
] | def del_task(self, task, machine):
"""Delete running task and cancel existing handlers."""
self.instance.del_task(task.id, machine.ip) | [
"def",
"del_task",
"(",
"self",
",",
"task",
",",
"machine",
")",
":",
"self",
".",
"instance",
".",
"del_task",
"(",
"task",
".",
"id",
",",
"machine",
".",
"ip",
")"
] | https://github.com/cuckoosandbox/cuckoo/blob/50452a39ff7c3e0c4c94d114bc6317101633b958/cuckoo/core/resultserver.py#L415-L417 |
||
GeoNode/geonode | 326d70153ad79e1ed831d46a0e3b239d422757a8 | geonode/resource/api/views.py | python | resource_service_exists | (request, uuid: str) | return Response({'success': _exists}, status=status.HTTP_200_OK) | Returns a JSON boolean success field valorized with the 'exists' operation outcome.
- GET http://localhost:8000/api/v2/resource-service/exists/13decd74-df04-11eb-a0c1-00155dc3de71
```
200,
{
'success': true
}
``` | Returns a JSON boolean success field valorized with the 'exists' operation outcome. | [
"Returns",
"a",
"JSON",
"boolean",
"success",
"field",
"valorized",
"with",
"the",
"exists",
"operation",
"outcome",
"."
] | def resource_service_exists(request, uuid: str):
"""
Returns a JSON boolean success field valorized with the 'exists' operation outcome.
- GET http://localhost:8000/api/v2/resource-service/exists/13decd74-df04-11eb-a0c1-00155dc3de71
```
200,
{
'success': true
}
```
"""
_exists = False
if resource_manager.exists(uuid):
_exists = get_resources_with_perms(request.user).filter(uuid=uuid).exists()
return Response({'success': _exists}, status=status.HTTP_200_OK) | [
"def",
"resource_service_exists",
"(",
"request",
",",
"uuid",
":",
"str",
")",
":",
"_exists",
"=",
"False",
"if",
"resource_manager",
".",
"exists",
"(",
"uuid",
")",
":",
"_exists",
"=",
"get_resources_with_perms",
"(",
"request",
".",
"user",
")",
".",
"filter",
"(",
"uuid",
"=",
"uuid",
")",
".",
"exists",
"(",
")",
"return",
"Response",
"(",
"{",
"'success'",
":",
"_exists",
"}",
",",
"status",
"=",
"status",
".",
"HTTP_200_OK",
")"
] | https://github.com/GeoNode/geonode/blob/326d70153ad79e1ed831d46a0e3b239d422757a8/geonode/resource/api/views.py#L66-L81 |
|
AudiusProject/audius-protocol | 4d1fbe27b5b283befbda7b74dc956e7dd3a52b19 | discovery-provider/src/queries/health_check.py | python | sol_play_check | () | return success_response(response, 500 if error else 200, sign_response=False) | limit: number of latest plays to return
max_drift: maximum duration in seconds between `now` and the
latest recorded play record to be considered healthy | limit: number of latest plays to return
max_drift: maximum duration in seconds between `now` and the
latest recorded play record to be considered healthy | [
"limit",
":",
"number",
"of",
"latest",
"plays",
"to",
"return",
"max_drift",
":",
"maximum",
"duration",
"in",
"seconds",
"between",
"now",
"and",
"the",
"latest",
"recorded",
"play",
"record",
"to",
"be",
"considered",
"healthy"
] | def sol_play_check():
"""
limit: number of latest plays to return
max_drift: maximum duration in seconds between `now` and the
latest recorded play record to be considered healthy
"""
limit = request.args.get("limit", type=int, default=20)
max_drift = request.args.get("max_drift", type=int)
error = None
redis = redis_connection.get_redis()
response = {}
response = get_latest_sol_play_check_info(redis, limit)
latest_db_sol_plays = response["latest_db_sol_plays"]
if latest_db_sol_plays:
latest_db_play = latest_db_sol_plays[0]
latest_created_at = latest_db_play["created_at"]
drift = (datetime.now() - latest_created_at).total_seconds()
# Error if max drift was provided and the drift is greater than max_drift
error = max_drift and drift > max_drift
return success_response(response, 500 if error else 200, sign_response=False) | [
"def",
"sol_play_check",
"(",
")",
":",
"limit",
"=",
"request",
".",
"args",
".",
"get",
"(",
"\"limit\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"20",
")",
"max_drift",
"=",
"request",
".",
"args",
".",
"get",
"(",
"\"max_drift\"",
",",
"type",
"=",
"int",
")",
"error",
"=",
"None",
"redis",
"=",
"redis_connection",
".",
"get_redis",
"(",
")",
"response",
"=",
"{",
"}",
"response",
"=",
"get_latest_sol_play_check_info",
"(",
"redis",
",",
"limit",
")",
"latest_db_sol_plays",
"=",
"response",
"[",
"\"latest_db_sol_plays\"",
"]",
"if",
"latest_db_sol_plays",
":",
"latest_db_play",
"=",
"latest_db_sol_plays",
"[",
"0",
"]",
"latest_created_at",
"=",
"latest_db_play",
"[",
"\"created_at\"",
"]",
"drift",
"=",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"latest_created_at",
")",
".",
"total_seconds",
"(",
")",
"# Error if max drift was provided and the drift is greater than max_drift",
"error",
"=",
"max_drift",
"and",
"drift",
">",
"max_drift",
"return",
"success_response",
"(",
"response",
",",
"500",
"if",
"error",
"else",
"200",
",",
"sign_response",
"=",
"False",
")"
] | https://github.com/AudiusProject/audius-protocol/blob/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19/discovery-provider/src/queries/health_check.py#L86-L109 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/unclosured/lib/python2.7/pydoc.py | python | plain | (text) | return re.sub('.\b', '', text) | Remove boldface formatting from text. | Remove boldface formatting from text. | [
"Remove",
"boldface",
"formatting",
"from",
"text",
"."
] | def plain(text):
"""Remove boldface formatting from text."""
return re.sub('.\b', '', text) | [
"def",
"plain",
"(",
"text",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'.\\b'",
",",
"''",
",",
"text",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/unclosured/lib/python2.7/pydoc.py#L1351-L1353 |
|
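The plain record above comes from a vendored copy of CPython's pydoc module; it strips terminal-style boldface, where each bold character is emitted as "character, backspace, character". A tiny demonstration using the stdlib copy (the sample string is made up):

```python
from pydoc import plain  # stdlib counterpart of the vendored helper above

# Overstruck boldface encodes "bold" as b\bb o\bo l\bl d\bd ('\b' is backspace),
# and the '.\b' pattern deletes each "char + backspace" pair.
assert plain('b\bbo\bol\bld\bd') == 'bold'
```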
tum-pbs/PhiFlow | 31d9944f4f26e56358dd73fa797dde567b6334b0 | phi/math/backend/_backend.py | python | Backend.precision | (self) | return get_precision() | Short for math.backend.get_precision() | Short for math.backend.get_precision() | [
"Short",
"for",
"math",
".",
"backend",
".",
"get_precision",
"()"
] | def precision(self) -> int:
""" Short for math.backend.get_precision() """
return get_precision() | [
"def",
"precision",
"(",
"self",
")",
"->",
"int",
":",
"return",
"get_precision",
"(",
")"
] | https://github.com/tum-pbs/PhiFlow/blob/31d9944f4f26e56358dd73fa797dde567b6334b0/phi/math/backend/_backend.py#L101-L103 |