Dataset schema, one record per row (fields appear in this order, separated by `|` in the records below):

| field | type |
|---|---|
| commit | stringlengths 40–40 |
| old_file | stringlengths 4–118 |
| new_file | stringlengths 4–118 |
| old_contents | stringlengths 10–2.94k |
| new_contents | stringlengths 21–3.18k |
| subject | stringlengths 16–444 |
| message | stringlengths 17–2.63k |
| lang | stringclasses (1 value) |
| license | stringclasses (13 values) |
| repos | stringlengths 5–43k |
| ndiff | stringlengths 51–3.32k |
| instruction | stringlengths 16–444 |
| content | stringlengths 133–4.32k |
5b64a272d0830c3a85fe540a82d6ff8b62bd0ea8 | livinglots_organize/templatetags/organize_tags.py | livinglots_organize/templatetags/organize_tags.py | from django import template
from livinglots import get_organizer_model
from livinglots_generictags.tags import (GetGenericRelationList,
RenderGenericRelationList,
GetGenericRelationCount)
register = template.Library()
class RenderOrganizerList(RenderGenericRelationList):
model = get_organizer_model()
template_dir_prefix = 'livinglots'
register.tag(RenderOrganizerList)
class GetOrganizerList(GetGenericRelationList):
model = get_organizer_model()
register.tag(GetOrganizerList)
class GetOrganizerCount(GetGenericRelationCount):
model = get_organizer_model()
register.tag(GetOrganizerCount)
| from django import template
from django.contrib.contenttypes.models import ContentType
from classytags.arguments import Argument, KeywordArgument
from classytags.core import Options
from livinglots import get_organizer_model
from livinglots_generictags.tags import (GetGenericRelationList,
RenderGenericRelationList,
GetGenericRelationCount)
register = template.Library()
class RenderOrganizerList(RenderGenericRelationList):
model = get_organizer_model()
template_dir_prefix = 'livinglots'
options = Options(
'for',
Argument('target', required=True, resolve=True),
KeywordArgument('public', default=False, required=False),
)
def get_context(self, context, target, public=False):
context.update({
self.get_model_plural_name(): self.get_objects(target, public=public),
})
return context
def get_objects(self, target, public=False):
return self.model.objects.filter(
content_type=ContentType.objects.get_for_model(target),
object_id=target.pk,
post_publicly=public,
)
register.tag(RenderOrganizerList)
class GetOrganizerList(GetGenericRelationList):
model = get_organizer_model()
register.tag(GetOrganizerList)
class GetOrganizerCount(GetGenericRelationCount):
model = get_organizer_model()
register.tag(GetOrganizerCount)
| Add `public` keyword to render_organizer_list | Add `public` keyword to render_organizer_list
| Python | agpl-3.0 | 596acres/django-livinglots-organize,596acres/django-livinglots-organize | from django import template
+ from django.contrib.contenttypes.models import ContentType
+
+ from classytags.arguments import Argument, KeywordArgument
+ from classytags.core import Options
from livinglots import get_organizer_model
from livinglots_generictags.tags import (GetGenericRelationList,
RenderGenericRelationList,
GetGenericRelationCount)
register = template.Library()
class RenderOrganizerList(RenderGenericRelationList):
model = get_organizer_model()
template_dir_prefix = 'livinglots'
+ options = Options(
+ 'for',
+ Argument('target', required=True, resolve=True),
+ KeywordArgument('public', default=False, required=False),
+ )
+
+ def get_context(self, context, target, public=False):
+ context.update({
+ self.get_model_plural_name(): self.get_objects(target, public=public),
+ })
+ return context
+
+ def get_objects(self, target, public=False):
+ return self.model.objects.filter(
+ content_type=ContentType.objects.get_for_model(target),
+ object_id=target.pk,
+ post_publicly=public,
+ )
register.tag(RenderOrganizerList)
class GetOrganizerList(GetGenericRelationList):
model = get_organizer_model()
register.tag(GetOrganizerList)
class GetOrganizerCount(GetGenericRelationCount):
model = get_organizer_model()
register.tag(GetOrganizerCount)
| Add `public` keyword to render_organizer_list | ## Code Before:
from django import template
from livinglots import get_organizer_model
from livinglots_generictags.tags import (GetGenericRelationList,
RenderGenericRelationList,
GetGenericRelationCount)
register = template.Library()
class RenderOrganizerList(RenderGenericRelationList):
model = get_organizer_model()
template_dir_prefix = 'livinglots'
register.tag(RenderOrganizerList)
class GetOrganizerList(GetGenericRelationList):
model = get_organizer_model()
register.tag(GetOrganizerList)
class GetOrganizerCount(GetGenericRelationCount):
model = get_organizer_model()
register.tag(GetOrganizerCount)
## Instruction:
Add `public` keyword to render_organizer_list
## Code After:
from django import template
from django.contrib.contenttypes.models import ContentType
from classytags.arguments import Argument, KeywordArgument
from classytags.core import Options
from livinglots import get_organizer_model
from livinglots_generictags.tags import (GetGenericRelationList,
RenderGenericRelationList,
GetGenericRelationCount)
register = template.Library()
class RenderOrganizerList(RenderGenericRelationList):
model = get_organizer_model()
template_dir_prefix = 'livinglots'
options = Options(
'for',
Argument('target', required=True, resolve=True),
KeywordArgument('public', default=False, required=False),
)
def get_context(self, context, target, public=False):
context.update({
self.get_model_plural_name(): self.get_objects(target, public=public),
})
return context
def get_objects(self, target, public=False):
return self.model.objects.filter(
content_type=ContentType.objects.get_for_model(target),
object_id=target.pk,
post_publicly=public,
)
register.tag(RenderOrganizerList)
class GetOrganizerList(GetGenericRelationList):
model = get_organizer_model()
register.tag(GetOrganizerList)
class GetOrganizerCount(GetGenericRelationCount):
model = get_organizer_model()
register.tag(GetOrganizerCount)
|
1105dfb75bf373b38e2f12579843af54f7a78c6f | DataModelAdapter.py | DataModelAdapter.py |
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
if key in self._data :
return self._data[key]
return None
def addChild(self, child) :
child.setParent(self)
self._children.add(child)
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
|
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
if key in self._data :
return self._data[key]
return None
def addChild(self, child) :
child.setParent(self)
self._children.add(child)
def child(self, row) :
children_list = [c for c in self._children]
return children_list[row]
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
| Add child(); TODO: test this | Add child(); TODO: test this
| Python | apache-2.0 | mattdeckard/wherewithal |
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
if key in self._data :
return self._data[key]
return None
def addChild(self, child) :
child.setParent(self)
self._children.add(child)
+ def child(self, row) :
+ children_list = [c for c in self._children]
+ return children_list[row]
+
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
| Add child(); TODO: test this | ## Code Before:
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
if key in self._data :
return self._data[key]
return None
def addChild(self, child) :
child.setParent(self)
self._children.add(child)
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
## Instruction:
Add child(); TODO: test this
## Code After:
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
if key in self._data :
return self._data[key]
return None
def addChild(self, child) :
child.setParent(self)
self._children.add(child)
def child(self, row) :
children_list = [c for c in self._children]
return children_list[row]
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
|
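
The change above ships with a TODO ("test this"). A minimal sketch of the missing test, assuming `DataModelAdapter.py` is importable under that name; since `_children` is a `set`, `child(row)` has no guaranteed ordering once there are several children, so only the single-child case is pinned down here:

```python
from DataModelAdapter import DataModelAdapter

def test_child_returns_the_added_child():
    parent = DataModelAdapter({'key': 'value'})
    child = DataModelAdapter({'inner': 1})
    parent.addChild(child)
    # With exactly one child, row 0 must be it regardless of set order.
    assert parent.child(0) is child
    assert child.parent() is parent
```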
b53a6fb45934856fcf1aca419b4022241fc7fcbc | tests/t_all.py | tests/t_all.py |
import os
import re
import unittest
_TEST_MODULE_PATTERN = re.compile(r'^(t_.+)\.py$')
def _run_all_tests():
module_names = []
loader = unittest.TestLoader()
test_path = os.path.join(os.path.split(__file__)[0], '.')
for filename in os.listdir(test_path):
match = _TEST_MODULE_PATTERN.search(filename)
if match:
case = match.group(1)
if case != 't_expire' and case != 't_multi':
module_names.append(case)
return loader.loadTestsFromNames(module_names)
def ExpireTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_expire')
if __name__ == '__main__':
unittest.main(defaultTest='_run_all_tests')
|
import os
import re
import unittest
_TEST_MODULE_PATTERN = re.compile(r'^(t_.+)\.py$')
def _run_all_tests():
module_names = []
loader = unittest.TestLoader()
test_path = os.path.join(os.path.split(__file__)[0], '.')
for filename in os.listdir(test_path):
match = _TEST_MODULE_PATTERN.search(filename)
if match:
case = match.group(1)
if case not in ('t_expire', 't_multi', 't_script'):
module_names.append(case)
return loader.loadTestsFromNames(module_names)
def ExpireTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_expire')
def MultiTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_multi')
def ScriptTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_script')
if __name__ == '__main__':
unittest.main(defaultTest='_run_all_tests')
| Exclude the play script test case from the default test suite. | Exclude the play script test case from the default test suite.
| Python | bsd-3-clause | sapo/python-kyototycoon,sapo/python-kyototycoon-ng |
import os
import re
import unittest
_TEST_MODULE_PATTERN = re.compile(r'^(t_.+)\.py$')
def _run_all_tests():
module_names = []
loader = unittest.TestLoader()
test_path = os.path.join(os.path.split(__file__)[0], '.')
for filename in os.listdir(test_path):
match = _TEST_MODULE_PATTERN.search(filename)
if match:
case = match.group(1)
- if case != 't_expire' and case != 't_multi':
+ if case not in ('t_expire', 't_multi', 't_script'):
module_names.append(case)
return loader.loadTestsFromNames(module_names)
def ExpireTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_expire')
+ def MultiTestCase():
+ loader = unittest.TestLoader()
+ return loader.loadTestsFromName('t_multi')
+
+ def ScriptTestCase():
+ loader = unittest.TestLoader()
+ return loader.loadTestsFromName('t_script')
+
if __name__ == '__main__':
unittest.main(defaultTest='_run_all_tests')
| Exclude the play script test case from the default test suite. | ## Code Before:
import os
import re
import unittest
_TEST_MODULE_PATTERN = re.compile(r'^(t_.+)\.py$')
def _run_all_tests():
module_names = []
loader = unittest.TestLoader()
test_path = os.path.join(os.path.split(__file__)[0], '.')
for filename in os.listdir(test_path):
match = _TEST_MODULE_PATTERN.search(filename)
if match:
case = match.group(1)
if case != 't_expire' and case != 't_multi':
module_names.append(case)
return loader.loadTestsFromNames(module_names)
def ExpireTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_expire')
if __name__ == '__main__':
unittest.main(defaultTest='_run_all_tests')
## Instruction:
Exclude the play script test case from the default test suite.
## Code After:
import os
import re
import unittest
_TEST_MODULE_PATTERN = re.compile(r'^(t_.+)\.py$')
def _run_all_tests():
module_names = []
loader = unittest.TestLoader()
test_path = os.path.join(os.path.split(__file__)[0], '.')
for filename in os.listdir(test_path):
match = _TEST_MODULE_PATTERN.search(filename)
if match:
case = match.group(1)
if case not in ('t_expire', 't_multi', 't_script'):
module_names.append(case)
return loader.loadTestsFromNames(module_names)
def ExpireTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_expire')
def MultiTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_multi')
def ScriptTestCase():
loader = unittest.TestLoader()
return loader.loadTestsFromName('t_script')
if __name__ == '__main__':
unittest.main(defaultTest='_run_all_tests')
|
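
The rewrite above folds a chain of `!=` comparisons into one tuple-membership test, which also scales as more suites are excluded; the equivalence in miniature:

```python
excluded = ('t_expire', 't_multi', 't_script')

for case in ('t_script', 't_kv'):
    old_style = case != 't_expire' and case != 't_multi' and case != 't_script'
    new_style = case not in excluded
    assert old_style == new_style
```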
4e8c84bf36250d7e61b585fc5db545206cab9730 | perfkitbenchmarker/scripts/spark_table.py | perfkitbenchmarker/scripts/spark_table.py |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
def main():
parser = argparse.ArgumentParser()
parser.add_argument('root_dir')
parser.add_argument('tables', type=lambda csv: csv.split(','))
args = parser.parse_args()
spark = (SparkSession.builder
.appName('Setup Spark tables')
.enableHiveSupport()
.getOrCreate())
for table in args.tables:
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
spark.sql('drop table if exists ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
from pyspark.sql.utils import AnalysisException
def main():
parser = argparse.ArgumentParser()
parser.add_argument('root_dir')
parser.add_argument('tables', type=lambda csv: csv.split(','))
args = parser.parse_args()
spark = (SparkSession.builder
.appName('Setup Spark tables')
.enableHiveSupport()
.getOrCreate())
for table in args.tables:
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
spark.sql('DROP TABLE IF EXISTS ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
try:
# This loads the partitions under the table if table is partitioned.
spark.sql('MSCK REPAIR TABLE ' + table)
except AnalysisException:
# The table was not partitioned, which was presumably expected
pass
if __name__ == '__main__':
main()
| Support creating Hive tables with partitioned data. | Support creating Hive tables with partitioned data.
PiperOrigin-RevId: 335539022
| Python | apache-2.0 | GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
+ from pyspark.sql.utils import AnalysisException
def main():
parser = argparse.ArgumentParser()
parser.add_argument('root_dir')
parser.add_argument('tables', type=lambda csv: csv.split(','))
args = parser.parse_args()
spark = (SparkSession.builder
.appName('Setup Spark tables')
.enableHiveSupport()
.getOrCreate())
for table in args.tables:
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
- spark.sql('drop table if exists ' + table)
+ spark.sql('DROP TABLE IF EXISTS ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
-
+ try:
+ # This loads the partitions under the table if table is partitioned.
+ spark.sql('MSCK REPAIR TABLE ' + table)
+ except AnalysisException:
+ # The table was not partitioned, which was presumably expected
+ pass
if __name__ == '__main__':
main()
| Support creating Hive tables with partitioned data. | ## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
def main():
parser = argparse.ArgumentParser()
parser.add_argument('root_dir')
parser.add_argument('tables', type=lambda csv: csv.split(','))
args = parser.parse_args()
spark = (SparkSession.builder
.appName('Setup Spark tables')
.enableHiveSupport()
.getOrCreate())
for table in args.tables:
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
spark.sql('drop table if exists ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
if __name__ == '__main__':
main()
## Instruction:
Support creating Hive tables with partitioned data.
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
from pyspark.sql.utils import AnalysisException
def main():
parser = argparse.ArgumentParser()
parser.add_argument('root_dir')
parser.add_argument('tables', type=lambda csv: csv.split(','))
args = parser.parse_args()
spark = (SparkSession.builder
.appName('Setup Spark tables')
.enableHiveSupport()
.getOrCreate())
for table in args.tables:
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
spark.sql('DROP TABLE IF EXISTS ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
try:
# This loads the partitions under the table if table is partitioned.
spark.sql('MSCK REPAIR TABLE ' + table)
except AnalysisException:
# The table was not partitioned, which was presumably expected
pass
if __name__ == '__main__':
main()
|
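
Background on why the `MSCK REPAIR TABLE` call is needed: `createTable` registers the table location but records no partitions in the Hive metastore, so a partitioned dataset would look empty until its partitions are discovered. A hedged sketch with a hypothetical table name and directory layout (not taken from the benchmark itself):

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.enableHiveSupport().getOrCreate()

# Hypothetical Hive-style partitioned layout on disk:
#   /data/store_sales/ss_sold_date_sk=2450816/part-00000.parquet
#   /data/store_sales/ss_sold_date_sk=2450817/part-00000.parquet
spark.catalog.createTable('store_sales', '/data/store_sales', source='parquet')
spark.sql('MSCK REPAIR TABLE store_sales')  # scan directory, register partitions
print(spark.table('store_sales').count())   # non-zero only after the repair
```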
35f45d3fcee5a1fe9d6d5ce71b708d0bc68db3fc | python/matasano/set1/c7.py | python/matasano/set1/c7.py | from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64_to_bytes(chal_file.read()).encode('latin-1')
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
| from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64.b64decode(chal_file.read())
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
| Switch to using base64 builtin decoder for simplicity. | Switch to using base64 builtin decoder for simplicity.
| Python | mit | TheLunchtimeAttack/matasano-challenges,TheLunchtimeAttack/matasano-challenges | from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
- ct = base64_to_bytes(chal_file.read()).encode('latin-1')
+ ct = base64.b64decode(chal_file.read())
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
| Switch to using base64 builtin decoder for simplicity. | ## Code Before:
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64_to_bytes(chal_file.read()).encode('latin-1')
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
## Instruction:
Switch to using base64 builtin decoder for simplicity.
## Code After:
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64.b64decode(chal_file.read())
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
|
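
The simplification works because `base64.b64decode` accepts `str` input, returns `bytes` directly, and by default skips characters outside the base64 alphabet, so newlines in the multi-line challenge file need no special handling; a quick self-check:

```python
import base64

assert base64.b64decode('WUVMTE9XIFNVQk1BUklORQ==') == b'YELLOW SUBMARINE'
# Embedded newlines are ignored unless validate=True is passed:
assert base64.b64decode('WUVMTE9X\nIFNVQk1BUklORQ==') == b'YELLOW SUBMARINE'
```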
0da81b53b521c22368899211dc851d6147e1a30d | common_components/static_renderers.py | common_components/static_renderers.py | from os.path import join, splitext, basename
from bricks.staticfiles import StaticCss, StaticJs, StaticFile
class _BuiltStatic(StaticFile):
has_build_stage = True
def __init__(self, *args):
StaticFile.__init__(self, *args)
self.url = self.url.rsplit('.', 1)[0] + '.' + self.target_type
class Sass(_BuiltStatic):
relpath = 'scss'
target_type = 'css'
def __call__(self):
return '<link rel="stylesheet" href="{}" />'.format(self.url)
class Coffee(_BuiltStatic):
relpath = 'coffee'
target_type = 'js'
def __call__(self):
return '<script src="{}"></script>'.format(self.url)
class StaticLib(StaticFile):
"""A static asset or a directory with static assets that's needed
to build other static assets but is not directly used by the page."""
has_build_stage = True
def __call__(self):
return ''
class SassLib(StaticLib):
relpath = 'scss'
| from os.path import join, splitext, basename
from bricks.staticfiles import StaticCss, StaticJs, StaticFile
class _BuiltStatic(StaticFile):
has_build_stage = True
def __init__(self, *args):
StaticFile.__init__(self, *args)
self.url = self.url.rsplit('.', 1)[0] + '.' + self.target_type
class Sass(_BuiltStatic):
relpath = 'scss'
target_type = 'css'
class Coffee(_BuiltStatic):
relpath = 'coffee'
target_type = 'js'
class StaticLib(StaticFile):
"""A static asset or a directory with static assets that's needed
to build other static assets but is not directly used by the page."""
has_build_stage = True
def __call__(self):
return ''
class SassLib(StaticLib):
relpath = 'scss'
| Revert "fixed rendering of Sass and Coffee" | Revert "fixed rendering of Sass and Coffee"
This reverts commit b21834c9d439603f666d17aea338934bae063ef4.
| Python | mpl-2.0 | Zer0-/common_components | from os.path import join, splitext, basename
from bricks.staticfiles import StaticCss, StaticJs, StaticFile
class _BuiltStatic(StaticFile):
has_build_stage = True
def __init__(self, *args):
StaticFile.__init__(self, *args)
self.url = self.url.rsplit('.', 1)[0] + '.' + self.target_type
class Sass(_BuiltStatic):
relpath = 'scss'
target_type = 'css'
- def __call__(self):
- return '<link rel="stylesheet" href="{}" />'.format(self.url)
-
class Coffee(_BuiltStatic):
relpath = 'coffee'
target_type = 'js'
-
- def __call__(self):
- return '<script src="{}"></script>'.format(self.url)
class StaticLib(StaticFile):
"""A static asset or a directory with static assets that's needed
to build other static assets but is not directly used by the page."""
has_build_stage = True
def __call__(self):
return ''
class SassLib(StaticLib):
relpath = 'scss'
| Revert "fixed rendering of Sass and Coffee" | ## Code Before:
from os.path import join, splitext, basename
from bricks.staticfiles import StaticCss, StaticJs, StaticFile
class _BuiltStatic(StaticFile):
has_build_stage = True
def __init__(self, *args):
StaticFile.__init__(self, *args)
self.url = self.url.rsplit('.', 1)[0] + '.' + self.target_type
class Sass(_BuiltStatic):
relpath = 'scss'
target_type = 'css'
def __call__(self):
return '<link rel="stylesheet" href="{}" />'.format(self.url)
class Coffee(_BuiltStatic):
relpath = 'coffee'
target_type = 'js'
def __call__(self):
return '<script src="{}"></script>'.format(self.url)
class StaticLib(StaticFile):
"""A static asset or a directory with static assets that's needed
to build other static assets but is not directly used by the page."""
has_build_stage = True
def __call__(self):
return ''
class SassLib(StaticLib):
relpath = 'scss'
## Instruction:
Revert "fixed rendering of Sass and Coffee"
## Code After:
from os.path import join, splitext, basename
from bricks.staticfiles import StaticCss, StaticJs, StaticFile
class _BuiltStatic(StaticFile):
has_build_stage = True
def __init__(self, *args):
StaticFile.__init__(self, *args)
self.url = self.url.rsplit('.', 1)[0] + '.' + self.target_type
class Sass(_BuiltStatic):
relpath = 'scss'
target_type = 'css'
class Coffee(_BuiltStatic):
relpath = 'coffee'
target_type = 'js'
class StaticLib(StaticFile):
"""A static asset or a directory with static assets that's needed
to build other static assets but is not directly used by the page."""
has_build_stage = True
def __call__(self):
return ''
class SassLib(StaticLib):
relpath = 'scss'
|
61de7c1827867cea3385c5db3862e5e68caa98fd | Puli/src/octopus/dispatcher/rules/graphview.py | Puli/src/octopus/dispatcher/rules/graphview.py | from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup
from octopus.dispatcher import rules
import logging
logger = logging.getLogger("dispatcher")
class RuleError(rules.RuleError):
'''Base class for GraphViewBuilder related exceptions.'''
pass
class TaskNodeHasNoChildrenError(RuleError):
'''Raised when a GraphViewBuilder is requested to add a child node
to a FolderNode.
'''
class GraphViewBuilder(object):
def __init__(self, dispatchTree, root):
self.dispatchTree = dispatchTree
self.root = root
def apply(self, task):
id = None
name = task.name
parent = task.parent.nodes['graph_rule'] if task.parent else self.root
user = task.user
priority = task.priority
dispatchKey = task.dispatchKey
maxRN = task.maxRN
if isinstance(task, TaskGroup):
strategy = task.strategy
node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN,
strategy, taskGroup=task)
else:
node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task)
task.nodes['graph_rule'] = node
return [node]
def processDependencies(self, dependencies):
for task, taskdeps in dependencies.items():
node = task.nodes['graph_rule']
for deptask, statuslist in taskdeps.items():
depnode = deptask.nodes['graph_rule']
node.addDependency(depnode, statuslist)
| from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup
from octopus.dispatcher import rules
import logging
logger = logging.getLogger("dispatcher")
class RuleError(rules.RuleError):
'''Base class for GraphViewBuilder related exceptions.'''
pass
class TaskNodeHasNoChildrenError(RuleError):
'''Raised when a GraphViewBuilder is requested to add a child node
to a FolderNode.
'''
class GraphViewBuilder(object):
def __init__(self, dispatchTree, root):
self.dispatchTree = dispatchTree
self.root = root
def apply(self, task):
id = None
name = task.name
parent = task.parent.nodes['graph_rule'] if task.parent else self.root
user = task.user
priority = task.priority
dispatchKey = task.dispatchKey
maxRN = task.maxRN
if isinstance(task, TaskGroup):
strategy = task.strategy
node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN,
strategy, taskGroup=task)
else:
node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task)
task.nodes['graph_rule'] = node
return [node]
def processDependencies(self, dependencies):
for task, taskdeps in dependencies.items():
node = task.nodes['graph_rule']
for deptask, statuslist in taskdeps.items():
depnode = deptask.nodes['graph_rule']
node.addDependency(depnode, statuslist)
def __repr__(self):
return "GraphViewBuilder( root=%r, dispatchTree=%r )" % (self.root, self.dispatchTree ) | Add a representation of GraphView object | Add a representation of GraphView object
| Python | bsd-3-clause | mikrosimage/OpenRenderManagement,mikrosimage/OpenRenderManagement,smaragden/OpenRenderManagement,smaragden/OpenRenderManagement,smaragden/OpenRenderManagement,mikrosimage/OpenRenderManagement | from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup
from octopus.dispatcher import rules
import logging
logger = logging.getLogger("dispatcher")
class RuleError(rules.RuleError):
'''Base class for GraphViewBuilder related exceptions.'''
pass
class TaskNodeHasNoChildrenError(RuleError):
'''Raised when a GraphViewBuilder is requested to add a child node
to a FolderNode.
'''
class GraphViewBuilder(object):
def __init__(self, dispatchTree, root):
self.dispatchTree = dispatchTree
self.root = root
def apply(self, task):
id = None
name = task.name
parent = task.parent.nodes['graph_rule'] if task.parent else self.root
user = task.user
priority = task.priority
dispatchKey = task.dispatchKey
maxRN = task.maxRN
if isinstance(task, TaskGroup):
strategy = task.strategy
node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN,
strategy, taskGroup=task)
else:
node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task)
task.nodes['graph_rule'] = node
return [node]
def processDependencies(self, dependencies):
for task, taskdeps in dependencies.items():
node = task.nodes['graph_rule']
for deptask, statuslist in taskdeps.items():
depnode = deptask.nodes['graph_rule']
node.addDependency(depnode, statuslist)
+ def __repr__(self):
+ return "GraphViewBuilder( root=%r, dispatchTree=%r )" % (self.root, self.dispatchTree ) | Add a representation of GraphView object | ## Code Before:
from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup
from octopus.dispatcher import rules
import logging
logger = logging.getLogger("dispatcher")
class RuleError(rules.RuleError):
'''Base class for GraphViewBuilder related exceptions.'''
pass
class TaskNodeHasNoChildrenError(RuleError):
'''Raised when a GraphViewBuilder is requested to add a child node
to a FolderNode.
'''
class GraphViewBuilder(object):
def __init__(self, dispatchTree, root):
self.dispatchTree = dispatchTree
self.root = root
def apply(self, task):
id = None
name = task.name
parent = task.parent.nodes['graph_rule'] if task.parent else self.root
user = task.user
priority = task.priority
dispatchKey = task.dispatchKey
maxRN = task.maxRN
if isinstance(task, TaskGroup):
strategy = task.strategy
node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN,
strategy, taskGroup=task)
else:
node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task)
task.nodes['graph_rule'] = node
return [node]
def processDependencies(self, dependencies):
for task, taskdeps in dependencies.items():
node = task.nodes['graph_rule']
for deptask, statuslist in taskdeps.items():
depnode = deptask.nodes['graph_rule']
node.addDependency(depnode, statuslist)
## Instruction:
Add a representation of GraphView object
## Code After:
from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup
from octopus.dispatcher import rules
import logging
logger = logging.getLogger("dispatcher")
class RuleError(rules.RuleError):
'''Base class for GraphViewBuilder related exceptions.'''
pass
class TaskNodeHasNoChildrenError(RuleError):
'''Raised when a GraphViewBuilder is requested to add a child node
to a FolderNode.
'''
class GraphViewBuilder(object):
def __init__(self, dispatchTree, root):
self.dispatchTree = dispatchTree
self.root = root
def apply(self, task):
id = None
name = task.name
parent = task.parent.nodes['graph_rule'] if task.parent else self.root
user = task.user
priority = task.priority
dispatchKey = task.dispatchKey
maxRN = task.maxRN
if isinstance(task, TaskGroup):
strategy = task.strategy
node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN,
strategy, taskGroup=task)
else:
node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task)
task.nodes['graph_rule'] = node
return [node]
def processDependencies(self, dependencies):
for task, taskdeps in dependencies.items():
node = task.nodes['graph_rule']
for deptask, statuslist in taskdeps.items():
depnode = deptask.nodes['graph_rule']
node.addDependency(depnode, statuslist)
def __repr__(self):
return "GraphViewBuilder( root=%r, dispatchTree=%r )" % (self.root, self.dispatchTree ) |
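
What the new `__repr__` yields, shown with stand-in objects since building real `FolderNode`/`TaskNode` trees needs the whole dispatcher model:

```python
class _Stub(object):
    def __repr__(self):
        return '<stub>'

class Demo(object):  # same __repr__ body as GraphViewBuilder above
    def __init__(self, dispatchTree, root):
        self.dispatchTree, self.root = dispatchTree, root

    def __repr__(self):
        return "GraphViewBuilder( root=%r, dispatchTree=%r )" % (
            self.root, self.dispatchTree)

print(Demo(_Stub(), _Stub()))
# -> GraphViewBuilder( root=<stub>, dispatchTree=<stub> )
```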
f32ab8ebd509df7e815fb96189974e7db44af3e3 | plugins/owner.py | plugins/owner.py | import inspect
import traceback
from curious import commands
from curious.commands.context import Context
from curious.commands.plugin import Plugin
class Owner(Plugin):
"""
Owner-only commands.
"""
@commands.command(name="eval")
async def _eval(self, ctx: Context, *, eval_str: str):
msg = await ctx.channel.send("Evaluating...")
try:
result = eval(eval_str)
if inspect.isawaitable(result):
result = await result
result = str(result)
except Exception as e:
tb = ''.join(traceback.format_exc())
result = tb
fmtted = "```py\n{}\n```".format(result)
await msg.edit(fmtted)
| import inspect
import traceback
from curious import commands
from curious.commands.context import Context
from curious.commands.plugin import Plugin
def is_owner(self, ctx: Context):
return ctx.author.id == 141545699442425856 or ctx.message.author.id == ctx.bot.application_info.owner.id
class Owner(Plugin):
"""
Owner-only commands.
"""
plugin_check = is_owner
@commands.command(name="eval")
async def _eval(self, ctx: Context, *, eval_str: str):
msg = await ctx.channel.send("Evaluating...")
try:
result = eval(eval_str)
if inspect.isawaitable(result):
result = await result
result = str(result)
except Exception as e:
tb = ''.join(traceback.format_exc())
result = tb
fmtted = "```py\n{}\n```".format(result)
await msg.edit(fmtted)
@commands.command(name="load", invokation_checks=[is_owner])
async def _load(self, ctx: Context, *, import_name: str):
"""
Loads a plugin.
"""
await self.bot.load_plugins_from(import_name)
await ctx.message.channel.send(":heavy_check_mark: Loaded.")
@commands.command(name="unload", invokation_checks=[is_owner])
async def _unload(self, ctx: Context, *, import_name: str):
"""
Unloads a plugin.
"""
await self.bot.unload_plugins_from(import_name)
await ctx.message.channel.send(":heavy_check_mark: Unloaded.")
| Add load and unload commands. | Add load and unload commands.
| Python | mit | SunDwarf/curiosity | import inspect
import traceback
from curious import commands
from curious.commands.context import Context
from curious.commands.plugin import Plugin
+ def is_owner(self, ctx: Context):
+ return ctx.author.id == 141545699442425856 or ctx.message.author.id == ctx.bot.application_info.owner.id
+
+
class Owner(Plugin):
"""
Owner-only commands.
"""
+ plugin_check = is_owner
+
@commands.command(name="eval")
async def _eval(self, ctx: Context, *, eval_str: str):
msg = await ctx.channel.send("Evaluating...")
try:
result = eval(eval_str)
if inspect.isawaitable(result):
result = await result
result = str(result)
except Exception as e:
tb = ''.join(traceback.format_exc())
result = tb
fmtted = "```py\n{}\n```".format(result)
await msg.edit(fmtted)
+ @commands.command(name="load", invokation_checks=[is_owner])
+ async def _load(self, ctx: Context, *, import_name: str):
+ """
+ Loads a plugin.
+ """
+ await self.bot.load_plugins_from(import_name)
+ await ctx.message.channel.send(":heavy_check_mark: Loaded.")
+
+ @commands.command(name="unload", invokation_checks=[is_owner])
+ async def _unload(self, ctx: Context, *, import_name: str):
+ """
+ Unloads a plugin.
+ """
+ await self.bot.unload_plugins_from(import_name)
+ await ctx.message.channel.send(":heavy_check_mark: Unloaded.")
+ | Add load and unload commands. | ## Code Before:
import inspect
import traceback
from curious import commands
from curious.commands.context import Context
from curious.commands.plugin import Plugin
class Owner(Plugin):
"""
Owner-only commands.
"""
@commands.command(name="eval")
async def _eval(self, ctx: Context, *, eval_str: str):
msg = await ctx.channel.send("Evaluating...")
try:
result = eval(eval_str)
if inspect.isawaitable(result):
result = await result
result = str(result)
except Exception as e:
tb = ''.join(traceback.format_exc())
result = tb
fmtted = "```py\n{}\n```".format(result)
await msg.edit(fmtted)
## Instruction:
Add load and unload commands.
## Code After:
import inspect
import traceback
from curious import commands
from curious.commands.context import Context
from curious.commands.plugin import Plugin
def is_owner(self, ctx: Context):
return ctx.author.id == 141545699442425856 or ctx.message.author.id == ctx.bot.application_info.owner.id
class Owner(Plugin):
"""
Owner-only commands.
"""
plugin_check = is_owner
@commands.command(name="eval")
async def _eval(self, ctx: Context, *, eval_str: str):
msg = await ctx.channel.send("Evaluating...")
try:
result = eval(eval_str)
if inspect.isawaitable(result):
result = await result
result = str(result)
except Exception as e:
tb = ''.join(traceback.format_exc())
result = tb
fmtted = "```py\n{}\n```".format(result)
await msg.edit(fmtted)
@commands.command(name="load", invokation_checks=[is_owner])
async def _load(self, ctx: Context, *, import_name: str):
"""
Loads a plugin.
"""
await self.bot.load_plugins_from(import_name)
await ctx.message.channel.send(":heavy_check_mark: Loaded.")
@commands.command(name="unload", invokation_checks=[is_owner])
async def _unload(self, ctx: Context, *, import_name: str):
"""
Unloads a plugin.
"""
await self.bot.unload_plugins_from(import_name)
await ctx.message.channel.send(":heavy_check_mark: Unloaded.")
|
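
The authorization path of the new commands in isolation, using hypothetical stand-in objects (the real `Context` comes from curious): the command body runs only when the author matches the hard-coded ID or the application owner.

```python
class _Obj(object):
    def __init__(self, **kw):
        self.__dict__.update(kw)

def is_owner(self, ctx):
    return (ctx.author.id == 141545699442425856
            or ctx.message.author.id == ctx.bot.application_info.owner.id)

owner = _Obj(id=7)
ctx = _Obj(author=_Obj(id=7),
           message=_Obj(author=_Obj(id=7)),
           bot=_Obj(application_info=_Obj(owner=owner)))
assert is_owner(None, ctx)             # the app owner passes
ctx.author.id = ctx.message.author.id = 1
assert not is_owner(None, ctx)         # anyone else is rejected
```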
f5b1975aebf50af78d41b8f192dabc128ad78b2a | sc2reader/engine/plugins/__init__.py | sc2reader/engine/plugins/__init__.py | from __future__ import absolute_import, print_function, unicode_literals, division
from sc2reader.engine.plugins.apm import APMTracker
from sc2reader.engine.plugins.selection import SelectionTracker
from sc2reader.engine.plugins.context import ContextLoader
from sc2reader.engine.plugins.supply import SupplyTracker
from sc2reader.engine.plugins.creeptracker import CreepTracker
from sc2reader.engine.plugins.gameheart import GameHeartNormalizer
>>>>>>> GameHeart Plugin
| from __future__ import absolute_import, print_function, unicode_literals, division
from sc2reader.engine.plugins.apm import APMTracker
from sc2reader.engine.plugins.selection import SelectionTracker
from sc2reader.engine.plugins.context import ContextLoader
from sc2reader.engine.plugins.supply import SupplyTracker
from sc2reader.engine.plugins.creeptracker import CreepTracker
from sc2reader.engine.plugins.gameheart import GameHeartNormalizer
| Fix a small rebase error, my bad. | Fix a small rebase error, my bad.
| Python | mit | StoicLoofah/sc2reader,ggtracker/sc2reader,StoicLoofah/sc2reader,ggtracker/sc2reader | from __future__ import absolute_import, print_function, unicode_literals, division
from sc2reader.engine.plugins.apm import APMTracker
from sc2reader.engine.plugins.selection import SelectionTracker
from sc2reader.engine.plugins.context import ContextLoader
from sc2reader.engine.plugins.supply import SupplyTracker
from sc2reader.engine.plugins.creeptracker import CreepTracker
from sc2reader.engine.plugins.gameheart import GameHeartNormalizer
- >>>>>>> GameHeart Plugin
+ | Fix a small rebase error, my bad. | ## Code Before:
from __future__ import absolute_import, print_function, unicode_literals, division
from sc2reader.engine.plugins.apm import APMTracker
from sc2reader.engine.plugins.selection import SelectionTracker
from sc2reader.engine.plugins.context import ContextLoader
from sc2reader.engine.plugins.supply import SupplyTracker
from sc2reader.engine.plugins.creeptracker import CreepTracker
from sc2reader.engine.plugins.gameheart import GameHeartNormalizer
>>>>>>> GameHeart Plugin
## Instruction:
Fix a small rebase error, my bad.
## Code After:
from __future__ import absolute_import, print_function, unicode_literals, division
from sc2reader.engine.plugins.apm import APMTracker
from sc2reader.engine.plugins.selection import SelectionTracker
from sc2reader.engine.plugins.context import ContextLoader
from sc2reader.engine.plugins.supply import SupplyTracker
from sc2reader.engine.plugins.creeptracker import CreepTracker
from sc2reader.engine.plugins.gameheart import GameHeartNormalizer
|
fb39b3ffc6fcd3df0f89cd3978796a4377335075 | tests/primitives/utils.py | tests/primitives/utils.py | import binascii
import os
import pytest
from cryptography.bindings import _ALL_APIS
from cryptography.primitives.block import BlockCipher
def generate_encrypt_test(param_loader, path, file_names, cipher_factory,
mode_factory, only_if=lambda api: True,
skip_message=None):
def test_encryption(self):
for api in _ALL_APIS:
for file_name in file_names:
for params in param_loader(os.path.join(path, file_name)):
yield (
encrypt_test,
api,
cipher_factory,
mode_factory,
params,
only_if,
skip_message
)
return test_encryption
def encrypt_test(api, cipher_factory, mode_factory, params, only_if,
skip_message):
if not only_if(api):
pytest.skip(skip_message)
plaintext = params.pop("plaintext")
ciphertext = params.pop("ciphertext")
cipher = BlockCipher(
cipher_factory(**params),
mode_factory(**params),
api
)
actual_ciphertext = cipher.encrypt(binascii.unhexlify(plaintext))
actual_ciphertext += cipher.finalize()
assert binascii.hexlify(actual_ciphertext) == ciphertext
| import binascii
import os
import pytest
from cryptography.bindings import _ALL_APIS
from cryptography.primitives.block import BlockCipher
def generate_encrypt_test(param_loader, path, file_names, cipher_factory,
mode_factory, only_if=lambda api: True,
skip_message=None):
def test_encryption(self):
for api in _ALL_APIS:
for file_name in file_names:
for params in param_loader(os.path.join(path, file_name)):
yield (
encrypt_test,
api,
cipher_factory,
mode_factory,
params,
only_if,
skip_message
)
return test_encryption
def encrypt_test(api, cipher_factory, mode_factory, params, only_if,
skip_message):
if not only_if(api):
pytest.skip(skip_message)
plaintext = params.pop("plaintext")
ciphertext = params.pop("ciphertext")
cipher = BlockCipher(
cipher_factory(**params),
mode_factory(**params),
api
)
actual_ciphertext = cipher.encrypt(binascii.unhexlify(plaintext))
actual_ciphertext += cipher.finalize()
assert actual_ciphertext == binascii.unhexlify(ciphertext)
| Rewrite to avoid capitalization issues | Rewrite to avoid capitalization issues
| Python | bsd-3-clause | kimvais/cryptography,Ayrx/cryptography,dstufft/cryptography,sholsapp/cryptography,dstufft/cryptography,bwhmather/cryptography,sholsapp/cryptography,kimvais/cryptography,kimvais/cryptography,Lukasa/cryptography,skeuomorf/cryptography,sholsapp/cryptography,Hasimir/cryptography,skeuomorf/cryptography,Lukasa/cryptography,dstufft/cryptography,Hasimir/cryptography,glyph/cryptography,bwhmather/cryptography,kimvais/cryptography,skeuomorf/cryptography,sholsapp/cryptography,Hasimir/cryptography,Lukasa/cryptography,Ayrx/cryptography,skeuomorf/cryptography,Ayrx/cryptography,Ayrx/cryptography,dstufft/cryptography,glyph/cryptography,bwhmather/cryptography,Hasimir/cryptography,bwhmather/cryptography,dstufft/cryptography | import binascii
import os
import pytest
from cryptography.bindings import _ALL_APIS
from cryptography.primitives.block import BlockCipher
def generate_encrypt_test(param_loader, path, file_names, cipher_factory,
mode_factory, only_if=lambda api: True,
skip_message=None):
def test_encryption(self):
for api in _ALL_APIS:
for file_name in file_names:
for params in param_loader(os.path.join(path, file_name)):
yield (
encrypt_test,
api,
cipher_factory,
mode_factory,
params,
only_if,
skip_message
)
return test_encryption
def encrypt_test(api, cipher_factory, mode_factory, params, only_if,
skip_message):
if not only_if(api):
pytest.skip(skip_message)
plaintext = params.pop("plaintext")
ciphertext = params.pop("ciphertext")
cipher = BlockCipher(
cipher_factory(**params),
mode_factory(**params),
api
)
actual_ciphertext = cipher.encrypt(binascii.unhexlify(plaintext))
actual_ciphertext += cipher.finalize()
- assert binascii.hexlify(actual_ciphertext) == ciphertext
+ assert actual_ciphertext == binascii.unhexlify(ciphertext)
| Rewrite to avoid capitalization issues | ## Code Before:
import binascii
import os
import pytest
from cryptography.bindings import _ALL_APIS
from cryptography.primitives.block import BlockCipher
def generate_encrypt_test(param_loader, path, file_names, cipher_factory,
mode_factory, only_if=lambda api: True,
skip_message=None):
def test_encryption(self):
for api in _ALL_APIS:
for file_name in file_names:
for params in param_loader(os.path.join(path, file_name)):
yield (
encrypt_test,
api,
cipher_factory,
mode_factory,
params,
only_if,
skip_message
)
return test_encryption
def encrypt_test(api, cipher_factory, mode_factory, params, only_if,
skip_message):
if not only_if(api):
pytest.skip(skip_message)
plaintext = params.pop("plaintext")
ciphertext = params.pop("ciphertext")
cipher = BlockCipher(
cipher_factory(**params),
mode_factory(**params),
api
)
actual_ciphertext = cipher.encrypt(binascii.unhexlify(plaintext))
actual_ciphertext += cipher.finalize()
assert binascii.hexlify(actual_ciphertext) == ciphertext
## Instruction:
Rewrite to avoid capitalization issues
## Code After:
import binascii
import os
import pytest
from cryptography.bindings import _ALL_APIS
from cryptography.primitives.block import BlockCipher
def generate_encrypt_test(param_loader, path, file_names, cipher_factory,
mode_factory, only_if=lambda api: True,
skip_message=None):
def test_encryption(self):
for api in _ALL_APIS:
for file_name in file_names:
for params in param_loader(os.path.join(path, file_name)):
yield (
encrypt_test,
api,
cipher_factory,
mode_factory,
params,
only_if,
skip_message
)
return test_encryption
def encrypt_test(api, cipher_factory, mode_factory, params, only_if,
skip_message):
if not only_if(api):
pytest.skip(skip_message)
plaintext = params.pop("plaintext")
ciphertext = params.pop("ciphertext")
cipher = BlockCipher(
cipher_factory(**params),
mode_factory(**params),
api
)
actual_ciphertext = cipher.encrypt(binascii.unhexlify(plaintext))
actual_ciphertext += cipher.finalize()
assert actual_ciphertext == binascii.unhexlify(ciphertext)
|
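
The capitalization issue in miniature: `binascii.hexlify` always emits lowercase, so a hex-string comparison fails against uppercase test vectors even when the decrypted bytes are correct, while `unhexlify` accepts either case and the byte comparison is case-proof:

```python
import binascii

vector = b'DEADBEEF'                # uppercase hex, as in some vector files
raw = binascii.unhexlify(vector)    # unhexlify tolerates both cases
assert binascii.hexlify(raw) == b'deadbeef'  # lowercased, != the vector text
assert raw == binascii.unhexlify(vector)     # the rewritten, case-proof check
```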
010040a8f7cb6a7a60b88ae80c43198fc46594d9 | tests/test_integration.py | tests/test_integration.py | import os
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_get_all_zones(self):
zones = self.cloudflare.get_zones()
self.assertIsInstance(zones, list)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
| import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_get_all_zones(self):
zones = self.cloudflare.iter_zones()
self.assertIsInstance(zones, types.GeneratorType)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
| Test iter_zones instead of get_zones | Test iter_zones instead of get_zones
| Python | mit | yola/pycloudflare,gnowxilef/pycloudflare | import os
+ import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_get_all_zones(self):
- zones = self.cloudflare.get_zones()
+ zones = self.cloudflare.iter_zones()
- self.assertIsInstance(zones, list)
+ self.assertIsInstance(zones, types.GeneratorType)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
| Test iter_zones instead of get_zones | ## Code Before:
import os
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_get_all_zones(self):
zones = self.cloudflare.get_zones()
self.assertIsInstance(zones, list)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
## Instruction:
Test iter_zones instead of get_zones
## Code After:
import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_get_all_zones(self):
zones = self.cloudflare.iter_zones()
self.assertIsInstance(zones, types.GeneratorType)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
|
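
The shape asserted by the new test, in miniature: a generator function hands back a `types.GeneratorType` object that does no work until iterated, which is presumably the point of `iter_zones` paging through the API lazily:

```python
import types

def iter_zones():
    for zone in ['zone-a', 'zone-b']:  # stand-in for one API page
        yield zone

zones = iter_zones()
assert isinstance(zones, types.GeneratorType)
assert list(zones) == ['zone-a', 'zone-b']
```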
582da24725e03a159aa47cdf730915cddab52c5d | workflows/cp-leaveout/scripts/print-node-info.py | workflows/cp-leaveout/scripts/print-node-info.py |
import argparse, os, pickle, sys
from Node import Node
from utils import abort
parser = argparse.ArgumentParser(description='Parse all log files')
parser.add_argument('directory',
help='The experiment directory (EXPID)')
args = parser.parse_args()
node_pkl = args.directory + "/node-info.pkl"
try:
with open(node_pkl, 'rb') as fp:
data = pickle.load(fp)
except IOError as e:
abort(e, os.EX_IOERR, "Could not read: " + node_pkl)
# print(data)
for item in data.values():
print(item.str_table())
# print(len(data))
|
import argparse, os, pickle, sys
from Node import Node
from utils import fail
parser = argparse.ArgumentParser(description='Parse all log files')
parser.add_argument('directory',
help='The experiment directory (EXPID)')
args = parser.parse_args()
node_pkl = args.directory + "/node-info.pkl"
try:
with open(node_pkl, 'rb') as fp:
data = pickle.load(fp)
except IOError as e:
fail(e, os.EX_IOERR, "Could not read: " + node_pkl)
# print(data)
for item in data.values():
print(item.str_table())
# print(len(data))
| Replace abort() with fail() again | Replace abort() with fail() again
| Python | mit | ECP-CANDLE/Supervisor,ECP-CANDLE/Supervisor,ECP-CANDLE/Supervisor,ECP-CANDLE/Supervisor,ECP-CANDLE/Supervisor,ECP-CANDLE/Supervisor |
import argparse, os, pickle, sys
from Node import Node
- from utils import abort
+ from utils import fail
parser = argparse.ArgumentParser(description='Parse all log files')
parser.add_argument('directory',
help='The experiment directory (EXPID)')
args = parser.parse_args()
node_pkl = args.directory + "/node-info.pkl"
try:
with open(node_pkl, 'rb') as fp:
data = pickle.load(fp)
except IOError as e:
- abort(e, os.EX_IOERR, "Could not read: " + node_pkl)
+ fail(e, os.EX_IOERR, "Could not read: " + node_pkl)
# print(data)
for item in data.values():
print(item.str_table())
# print(len(data))
| Replace abort() with fail() again | ## Code Before:
import argparse, os, pickle, sys
from Node import Node
from utils import abort
parser = argparse.ArgumentParser(description='Parse all log files')
parser.add_argument('directory',
help='The experiment directory (EXPID)')
args = parser.parse_args()
node_pkl = args.directory + "/node-info.pkl"
try:
with open(node_pkl, 'rb') as fp:
data = pickle.load(fp)
except IOError as e:
abort(e, os.EX_IOERR, "Could not read: " + node_pkl)
# print(data)
for item in data.values():
print(item.str_table())
# print(len(data))
## Instruction:
Replace abort() with fail() again
## Code After:
import argparse, os, pickle, sys
from Node import Node
from utils import fail
parser = argparse.ArgumentParser(description='Parse all log files')
parser.add_argument('directory',
help='The experiment directory (EXPID)')
args = parser.parse_args()
node_pkl = args.directory + "/node-info.pkl"
try:
with open(node_pkl, 'rb') as fp:
data = pickle.load(fp)
except IOError as e:
fail(e, os.EX_IOERR, "Could not read: " + node_pkl)
# print(data)
for item in data.values():
print(item.str_table())
# print(len(data))
|
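
For reference, a plausible shape for `utils.fail`, inferred purely from the call site `fail(e, os.EX_IOERR, "Could not read: " + node_pkl)`; hypothetical, since `utils.py` is not shown in this record:

```python
import sys

def fail(exception, exit_code, message):
    print('%s (%s)' % (message, exception), file=sys.stderr)
    sys.exit(exit_code)
```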
c40d63852807645a39bb1e3316a10e5f2a3ad650 | syntacticframes_project/loadmapping/management/commands/save_correspondances.py | syntacticframes_project/loadmapping/management/commands/save_correspondances.py | import csv
from os import path
from distutils.version import LooseVersion
from django.core.management.base import BaseCommand
from django.conf import settings
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
with open(path.join(settings.SITE_ROOT, 'loadmapping/resources/Correspondances.csv'), 'w') as csvfile:
correswriter = csv.writer(csvfile)
correswriter.writerow(['VerbNet', 'LADL', 'LVF', 'Parangon', 'Commentaires'])
for vn_class in sorted(VerbNetClass.objects.all(), key = lambda v: LooseVersion(v.name.split('-')[1])):
root_fs = vn_class.verbnetframeset_set.get(parent=None)
correswriter.writerow(["{}: {}".format(vn_class.name.split('-')[1], vn_class.name),
root_fs.ladl_string, root_fs.lvf_string,
root_fs.paragon, root_fs.comment])
| import csv
from os import path
from distutils.version import LooseVersion
from django.core.management.base import BaseCommand
from django.conf import settings
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
with open(path.join(settings.SITE_ROOT, 'loadmapping/resources/Correspondances.csv'), 'w') as csvfile:
correswriter = csv.writer(csvfile)
correswriter.writerow(['VerbNet', 'LVF', 'LADL', 'Parangon', 'Commentaires'])
for vn_class in sorted(VerbNetClass.objects.all(), key = lambda v: LooseVersion(v.name.split('-')[1])):
root_fs = vn_class.verbnetframeset_set.get(parent=None)
correswriter.writerow(["{}: {}".format(vn_class.name.split('-')[1], vn_class.name),
root_fs.lvf_string, root_fs.ladl_string,
root_fs.paragon, root_fs.comment])
| Save LVF before LADL in CSV to be similar to website | Save LVF before LADL in CSV to be similar to website
| Python | mit | aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor | import csv
from os import path
from distutils.version import LooseVersion
from django.core.management.base import BaseCommand
from django.conf import settings
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
with open(path.join(settings.SITE_ROOT, 'loadmapping/resources/Correspondances.csv'), 'w') as csvfile:
correswriter = csv.writer(csvfile)
- correswriter.writerow(['VerbNet', 'LADL', 'LVF', 'Parangon', 'Commentaires'])
+ correswriter.writerow(['VerbNet', 'LVF', 'LADL', 'Parangon', 'Commentaires'])
for vn_class in sorted(VerbNetClass.objects.all(), key = lambda v: LooseVersion(v.name.split('-')[1])):
root_fs = vn_class.verbnetframeset_set.get(parent=None)
correswriter.writerow(["{}: {}".format(vn_class.name.split('-')[1], vn_class.name),
- root_fs.ladl_string, root_fs.lvf_string,
+ root_fs.lvf_string, root_fs.ladl_string,
root_fs.paragon, root_fs.comment])
| Save LVF before LADL in CSV to be similar to website | ## Code Before:
import csv
from os import path
from distutils.version import LooseVersion
from django.core.management.base import BaseCommand
from django.conf import settings
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
with open(path.join(settings.SITE_ROOT, 'loadmapping/resources/Correspondances.csv'), 'w') as csvfile:
correswriter = csv.writer(csvfile)
correswriter.writerow(['VerbNet', 'LADL', 'LVF', 'Parangon', 'Commentaires'])
for vn_class in sorted(VerbNetClass.objects.all(), key = lambda v: LooseVersion(v.name.split('-')[1])):
root_fs = vn_class.verbnetframeset_set.get(parent=None)
correswriter.writerow(["{}: {}".format(vn_class.name.split('-')[1], vn_class.name),
root_fs.ladl_string, root_fs.lvf_string,
root_fs.paragon, root_fs.comment])
## Instruction:
Save LVF before LADL in CSV to be similar to website
## Code After:
import csv
from os import path
from distutils.version import LooseVersion
from django.core.management.base import BaseCommand
from django.conf import settings
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
with open(path.join(settings.SITE_ROOT, 'loadmapping/resources/Correspondances.csv'), 'w') as csvfile:
correswriter = csv.writer(csvfile)
correswriter.writerow(['VerbNet', 'LVF', 'LADL', 'Parangon', 'Commentaires'])
for vn_class in sorted(VerbNetClass.objects.all(), key = lambda v: LooseVersion(v.name.split('-')[1])):
root_fs = vn_class.verbnetframeset_set.get(parent=None)
correswriter.writerow(["{}: {}".format(vn_class.name.split('-')[1], vn_class.name),
root_fs.lvf_string, root_fs.ladl_string,
root_fs.paragon, root_fs.comment])
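
A note on the sort key above: LooseVersion compares the numeric part of the class name numerically, which plain string ordering would get wrong. A small sketch with made-up VerbNet-style class names:

from distutils.version import LooseVersion

names = ['admire-31.2', 'accept-77', 'accompany-51.7', 'begin-55.1']
ordered = sorted(names, key=lambda n: LooseVersion(n.split('-')[1]))
# String sorting would put '102.1' before '31.2'; LooseVersion sorts numerically:
assert ordered == ['admire-31.2', 'accompany-51.7', 'begin-55.1', 'accept-77']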
|
84af44868ea742bb5f6d08991526a98c8c78a931 | tellurium/teconverters/__init__.py | tellurium/teconverters/__init__.py |
from __future__ import absolute_import
# converts Antimony to/from SBML
from .convert_antimony import antimonyConverter
from .convert_omex import inlineOmexImporter, OmexFormatDetector
try:
from .convert_phrasedml import phrasedmlImporter
except:
pass
from .antimony_sbo import SBOError
from .inline_omex import inlineOmex, saveInlineOMEX
|
from __future__ import absolute_import
# converts Antimony to/from SBML
from .convert_antimony import antimonyConverter
from .convert_omex import inlineOmexImporter, OmexFormatDetector
try:
from .convert_phrasedml import phrasedmlImporter
from .inline_omex import inlineOmex, saveInlineOMEX
except:
pass
from .antimony_sbo import SBOError
| Drop inline omex if it fails. | Drop inline omex if it fails.
| Python | apache-2.0 | sys-bio/tellurium,sys-bio/tellurium |
from __future__ import absolute_import
# converts Antimony to/from SBML
from .convert_antimony import antimonyConverter
from .convert_omex import inlineOmexImporter, OmexFormatDetector
try:
from .convert_phrasedml import phrasedmlImporter
+ from .inline_omex import inlineOmex, saveInlineOMEX
except:
pass
from .antimony_sbo import SBOError
- from .inline_omex import inlineOmex, saveInlineOMEX
| Drop inline omex if it fails. | ## Code Before:
from __future__ import absolute_import
# converts Antimony to/from SBML
from .convert_antimony import antimonyConverter
from .convert_omex import inlineOmexImporter, OmexFormatDetector
try:
from .convert_phrasedml import phrasedmlImporter
except:
pass
from .antimony_sbo import SBOError
from .inline_omex import inlineOmex, saveInlineOMEX
## Instruction:
Drop inline omex if it fails.
## Code After:
from __future__ import absolute_import
# converts Antimony to/from SBML
from .convert_antimony import antimonyConverter
from .convert_omex import inlineOmexImporter, OmexFormatDetector
try:
from .convert_phrasedml import phrasedmlImporter
from .inline_omex import inlineOmex, saveInlineOMEX
except:
pass
from .antimony_sbo import SBOError
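
The pattern at work, sketched with placeholder names (not tellurium's real modules): grouping dependent imports inside one try block means a missing optional dependency disables every feature that needs it, together, instead of raising ImportError at package import time.

try:
    import some_optional_dep              # e.g. the phrasedml extra
    import another_module_needing_it      # skipped together with the dep
except ImportError:
    some_optional_dep = None
    another_module_needing_it = None      # callers can test for None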
|
69b262f502bbc48204db70815476aa256bd7db6e | rmgpy/tools/canteraTest.py | rmgpy/tools/canteraTest.py | import unittest
import os
import numpy
from rmgpy.tools.canteraModel import *
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
| import unittest
import os
import numpy
from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
from rmgpy.quantity import Quantity
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
def testRepr(self):
"""
Test that the repr function for a CanteraCondition object can reconstitute
the same object
"""
reactorType='IdealGasReactor'
molFrac={'CC': 0.05, '[Ar]': 0.95}
P=(3,'atm')
T=(1500,'K')
terminationTime=(5e-5,'s')
condition = CanteraCondition(reactorType,
terminationTime,
molFrac,
T0=T,
P0=P)
reprCondition=eval(condition.__repr__())
self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
self.assertEqual(reprCondition.V0,None)
self.assertEqual(reprCondition.molFrac,molFrac) | Add unit test for CanteraCondition that tests that the repr() function works | Add unit test for CanteraCondition that tests that the repr() function works
| Python | mit | nyee/RMG-Py,nickvandewiele/RMG-Py,chatelak/RMG-Py,nickvandewiele/RMG-Py,chatelak/RMG-Py,nyee/RMG-Py,pierrelb/RMG-Py,pierrelb/RMG-Py | import unittest
import os
import numpy
- from rmgpy.tools.canteraModel import *
-
+ from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
+ from rmgpy.quantity import Quantity
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
+ def testRepr(self):
+ """
+ Test that the repr function for a CanteraCondition object can reconstitute
+ the same object
+ """
+ reactorType='IdealGasReactor'
+ molFrac={'CC': 0.05, '[Ar]': 0.95}
+ P=(3,'atm')
+ T=(1500,'K')
+ terminationTime=(5e-5,'s')
+ condition = CanteraCondition(reactorType,
+ terminationTime,
+ molFrac,
+ T0=T,
+ P0=P)
+ reprCondition=eval(condition.__repr__())
+ self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
+ self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
+ self.assertEqual(reprCondition.V0,None)
+ self.assertEqual(reprCondition.molFrac,molFrac) | Add unit test for CanteraCondition that tests that the repr() function works | ## Code Before:
import unittest
import os
import numpy
from rmgpy.tools.canteraModel import *
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
## Instruction:
Add unit test for CanteraCondition that tests that the repr() function works
## Code After:
import unittest
import os
import numpy
from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
from rmgpy.quantity import Quantity
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
def testRepr(self):
"""
Test that the repr function for a CanteraCondition object can reconstitute
the same object
"""
reactorType='IdealGasReactor'
molFrac={'CC': 0.05, '[Ar]': 0.95}
P=(3,'atm')
T=(1500,'K')
terminationTime=(5e-5,'s')
condition = CanteraCondition(reactorType,
terminationTime,
molFrac,
T0=T,
P0=P)
reprCondition=eval(condition.__repr__())
self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
self.assertEqual(reprCondition.V0,None)
self.assertEqual(reprCondition.molFrac,molFrac) |
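
The round-trip property the new test checks, shown on a toy class (not Cantera's): repr() should emit a string that eval() turns back into an equivalent object.

class Condition:
    def __init__(self, T0, P0):
        self.T0, self.P0 = T0, P0
    def __repr__(self):
        return 'Condition(T0=%r, P0=%r)' % (self.T0, self.P0)

c = Condition(1500, 3.0)
c2 = eval(repr(c))
assert (c2.T0, c2.P0) == (c.T0, c.P0)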
7e7f9da097563d8fbd407268093b56c2f10464a5 | radar/radar/tests/validation/test_reset_password_validation.py | radar/radar/tests/validation/test_reset_password_validation.py | import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == 'password'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
| import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': '2irPtfNUURf8G',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == '2irPtfNUURf8G'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def test_weak_password():
invalid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
| Use stronger password in reset password test | Use stronger password in reset password test
| Python | agpl-3.0 | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
- 'password': 'password',
+ 'password': '2irPtfNUURf8G',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
- assert obj['password'] == 'password'
+ assert obj['password'] == '2irPtfNUURf8G'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
+ def test_weak_password():
+ invalid({
+ 'token': '12345',
+ 'username': 'hello',
+ 'password': 'password',
+ })
+
+
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
| Use stronger password in reset password test | ## Code Before:
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == 'password'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
## Instruction:
Use stronger password in reset password test
## Code After:
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': '2irPtfNUURf8G',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == '2irPtfNUURf8G'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def test_weak_password():
invalid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
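
The intent behind test_weak_password, sketched with a stand-in checker (this is not RaDaR's validator): common or short passwords must now be rejected, which is why the valid-case fixture switched to a random-looking string.

COMMON = {'password', '123456', 'letmein'}

def password_is_strong(p):
    return len(p) >= 8 and p.lower() not in COMMON

assert password_is_strong('2irPtfNUURf8G')
assert not password_is_strong('password')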
|
fa1f148b33c61e91044c19a88737abd2ec86c6bf | yunity/api/public/auth.py | yunity/api/public/auth.py | from django.contrib.auth import logout
from django.middleware.csrf import get_token as generate_csrf_token_for_frontend
from rest_framework import status, viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from yunity.api.serializers import UserSerializer, AuthLoginSerializer
class AuthViewSet(viewsets.ViewSet):
@list_route(methods=['get'])
def status(self, request):
""" Get the login state (logged in user)
---
response_serializer: UserSerializer
"""
generate_csrf_token_for_frontend(request)
if request.user.is_anonymous():
serializer = UserSerializer()
else:
serializer = UserSerializer(request.user)
return Response(serializer.data)
def create(self, request, **kwargs):
""" Log in
---
request_serializer: AuthLoginSerializer
response_serializer: UserSerializer
"""
serializer = AuthLoginSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED)
else:
return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@list_route(methods=['POST'])
def logout(self, request, **kwargs):
logout(request)
return Response(status = status.HTTP_200_OK)
| from django.contrib.auth import logout
from django.middleware.csrf import get_token as generate_csrf_token_for_frontend
from rest_framework import status, viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from yunity.api.serializers import UserSerializer, AuthLoginSerializer
class AuthViewSet(viewsets.GenericViewSet):
serializer_class = AuthLoginSerializer
@list_route(methods=['get'])
def status(self, request):
""" Get the login state (logged in user)
---
response_serializer: UserSerializer
"""
generate_csrf_token_for_frontend(request)
if request.user.is_anonymous():
serializer = UserSerializer()
else:
serializer = UserSerializer(request.user)
return Response(serializer.data)
def create(self, request, **kwargs):
""" Log in
---
request_serializer: AuthLoginSerializer
response_serializer: UserSerializer
"""
serializer = AuthLoginSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED)
else:
return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@list_route(methods=['POST'])
def logout(self, request, **kwargs):
logout(request)
return Response(status = status.HTTP_200_OK)
| Enable easy login through browsable API (discovery through serializer_class) | Enable easy login through browsable API (discovery through serializer_class)
| Python | agpl-3.0 | yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core | from django.contrib.auth import logout
from django.middleware.csrf import get_token as generate_csrf_token_for_frontend
from rest_framework import status, viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from yunity.api.serializers import UserSerializer, AuthLoginSerializer
- class AuthViewSet(viewsets.ViewSet):
+ class AuthViewSet(viewsets.GenericViewSet):
+ serializer_class = AuthLoginSerializer
+
@list_route(methods=['get'])
def status(self, request):
""" Get the login state (logged in user)
---
response_serializer: UserSerializer
"""
generate_csrf_token_for_frontend(request)
if request.user.is_anonymous():
serializer = UserSerializer()
else:
serializer = UserSerializer(request.user)
return Response(serializer.data)
def create(self, request, **kwargs):
""" Log in
---
request_serializer: AuthLoginSerializer
response_serializer: UserSerializer
"""
serializer = AuthLoginSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED)
else:
return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@list_route(methods=['POST'])
def logout(self, request, **kwargs):
logout(request)
return Response(status = status.HTTP_200_OK)
| Enable easy login through browsable API (discovery through serializer_class) | ## Code Before:
from django.contrib.auth import logout
from django.middleware.csrf import get_token as generate_csrf_token_for_frontend
from rest_framework import status, viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from yunity.api.serializers import UserSerializer, AuthLoginSerializer
class AuthViewSet(viewsets.ViewSet):
@list_route(methods=['get'])
def status(self, request):
""" Get the login state (logged in user)
---
response_serializer: UserSerializer
"""
generate_csrf_token_for_frontend(request)
if request.user.is_anonymous():
serializer = UserSerializer()
else:
serializer = UserSerializer(request.user)
return Response(serializer.data)
def create(self, request, **kwargs):
""" Log in
---
request_serializer: AuthLoginSerializer
response_serializer: UserSerializer
"""
serializer = AuthLoginSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED)
else:
return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@list_route(methods=['POST'])
def logout(self, request, **kwargs):
logout(request)
return Response(status = status.HTTP_200_OK)
## Instruction:
Enable easy login through browsable API (discovery through serializer_class)
## Code After:
from django.contrib.auth import logout
from django.middleware.csrf import get_token as generate_csrf_token_for_frontend
from rest_framework import status, viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from yunity.api.serializers import UserSerializer, AuthLoginSerializer
class AuthViewSet(viewsets.GenericViewSet):
serializer_class = AuthLoginSerializer
@list_route(methods=['get'])
def status(self, request):
""" Get the login state (logged in user)
---
response_serializer: UserSerializer
"""
generate_csrf_token_for_frontend(request)
if request.user.is_anonymous():
serializer = UserSerializer()
else:
serializer = UserSerializer(request.user)
return Response(serializer.data)
def create(self, request, **kwargs):
""" Log in
---
request_serializer: AuthLoginSerializer
response_serializer: UserSerializer
"""
serializer = AuthLoginSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED)
else:
return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@list_route(methods=['POST'])
def logout(self, request, **kwargs):
logout(request)
return Response(status = status.HTTP_200_OK)
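
Why the switch helps: Django REST Framework's browsable API introspects serializer_class to render an HTML form for POST requests, and GenericViewSet provides the hooks it needs. A self-contained sketch with a stand-in serializer (field names are illustrative):

from rest_framework import serializers, viewsets

class LoginSerializer(serializers.Serializer):
    username = serializers.CharField()
    password = serializers.CharField(style={'input_type': 'password'})

class LoginViewSet(viewsets.GenericViewSet):
    serializer_class = LoginSerializer  # the browsable API builds its form from this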
|
e49e7484987e3b508802adbd9e05b2b156eb6bdd | manage.py | manage.py | import os
import coverage
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from config import basedir
from app import create_app, db
from app.models import User, Dictionary
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
def make_shell_context():
return dict(app=app, db=db, User=User, Dictionary=Dictionary)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
cov = coverage.coverage(branch=True, include="app/*")
@manager.command
def test(coverage=False):
""" Run the unit tests. """
if coverage:
cov.start()
import unittest
tests = unittest.TestLoader().discover("tests")
unittest.TextTestRunner(verbosity=2).run(tests)
if coverage:
cov.stop()
cov.save()
print("Coverage Summary:")
cov.report()
cov_dir = os.path.join(basedir, "tmp/coverage")
cov.html_report(directory=cov_dir)
print("HTML version: %s/index.html" % cov_dir)
cov.erase()
if __name__ == "__main__":
manager.run() | import os
import coverage
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from config import basedir
from app import create_app, db
from app.models import User, Dictionary, Word
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
def make_shell_context():
return dict(app=app, db=db, User=User, Dictionary=Dictionary, Word=Word)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
cov = coverage.coverage(branch=True, include="app/*")
@manager.command
def test(coverage=False):
""" Run the unit tests. """
if coverage:
cov.start()
import unittest
tests = unittest.TestLoader().discover("tests")
unittest.TextTestRunner(verbosity=2).run(tests)
if coverage:
cov.stop()
cov.save()
print("Coverage Summary:")
cov.report()
cov_dir = os.path.join(basedir, "tmp/coverage")
cov.html_report(directory=cov_dir)
print("HTML version: %s/index.html" % cov_dir)
cov.erase()
if __name__ == "__main__":
manager.run() | Add Word model to shell context | Add Word model to shell context
| Python | mit | Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary | import os
import coverage
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from config import basedir
from app import create_app, db
- from app.models import User, Dictionary
+ from app.models import User, Dictionary, Word
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
def make_shell_context():
- return dict(app=app, db=db, User=User, Dictionary=Dictionary)
+ return dict(app=app, db=db, User=User, Dictionary=Dictionary, Word=Word)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
cov = coverage.coverage(branch=True, include="app/*")
@manager.command
def test(coverage=False):
""" Run the unit tests. """
if coverage:
cov.start()
import unittest
tests = unittest.TestLoader().discover("tests")
unittest.TextTestRunner(verbosity=2).run(tests)
if coverage:
cov.stop()
cov.save()
print("Coverage Summary:")
cov.report()
cov_dir = os.path.join(basedir, "tmp/coverage")
cov.html_report(directory=cov_dir)
print("HTML version: %s/index.html" % cov_dir)
cov.erase()
if __name__ == "__main__":
manager.run() | Add Word model to shell context | ## Code Before:
import os
import coverage
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from config import basedir
from app import create_app, db
from app.models import User, Dictionary
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
def make_shell_context():
return dict(app=app, db=db, User=User, Dictionary=Dictionary)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
cov = coverage.coverage(branch=True, include="app/*")
@manager.command
def test(coverage=False):
""" Run the unit tests. """
if coverage:
cov.start()
import unittest
tests = unittest.TestLoader().discover("tests")
unittest.TextTestRunner(verbosity=2).run(tests)
if coverage:
cov.stop()
cov.save()
print("Coverage Summary:")
cov.report()
cov_dir = os.path.join(basedir, "tmp/coverage")
cov.html_report(directory=cov_dir)
print("HTML version: %s/index.html" % cov_dir)
cov.erase()
if __name__ == "__main__":
manager.run()
## Instruction:
Add Word model to shell context
## Code After:
import os
import coverage
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from config import basedir
from app import create_app, db
from app.models import User, Dictionary, Word
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
def make_shell_context():
return dict(app=app, db=db, User=User, Dictionary=Dictionary, Word=Word)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
cov = coverage.coverage(branch=True, include="app/*")
@manager.command
def test(coverage=False):
""" Run the unit tests. """
if coverage:
cov.start()
import unittest
tests = unittest.TestLoader().discover("tests")
unittest.TextTestRunner(verbosity=2).run(tests)
if coverage:
cov.stop()
cov.save()
print("Coverage Summary:")
cov.report()
cov_dir = os.path.join(basedir, "tmp/coverage")
cov.html_report(directory=cov_dir)
print("HTML version: %s/index.html" % cov_dir)
cov.erase()
if __name__ == "__main__":
manager.run() |
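
With Word registered in the shell context it is usable immediately in a `python manage.py shell` session, no import needed. A sketch of such a session (assuming Word is an ordinary Flask-SQLAlchemy model):

$ python manage.py shell
>>> Word.query.count()   # provided by make_shell_context, not an import
0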
d883a0fd09a42ff84ebb2ccf331692167370444b | ESLog/esloghandler.py | ESLog/esloghandler.py |
from datetime import datetime
import logging
import os
import json
import urllib.request
import urllib.parse
class ESLogHandler(logging.Handler):
def __init__(self, url, index=None, doc_type="log", level=logging.NOTSET):
logging.Handler.__init__(self, level=level)
self.url = urllib.parse.urlparse(url)
print(self.url)
# end __init__
def emit(self, record):
# Break the record down to a dictionary
message = dict()
message["timestamp"] = datetime.now().isoformat()
message["level"] = record.levelname
message["name"] = record.name
message["lineno"] = record.lineno
message["message"] = record.msg
json_message = json.dumps(message)
json_message_bytes = json_message.encode("utf8")
urllib.request.urlopen(self.url, data=json_message_bytes)
# end emit
# end ESLogHandler
|
from datetime import datetime
import logging
import os
import json
import urllib.request
import urllib.parse
class ESLogHandler(logging.Handler):
def __init__(self, url, index=None, doc_type="log", level=logging.NOTSET):
logging.Handler.__init__(self, level=level)
# Parse the url
self.url = urllib.parse.urlparse(url)
# If no scheme is given, set it to http
if not self.url.scheme:
self.url.scheme = "http"
# If a scheme is given but it is not http, raise an exception
elif self.url.scheme != "http":
raise ValueError("Only HTTP is supported.")
# If no port is given default to 9200
if not self.url.port:
self.url.port = "9200"
        # If no path is given or it is only a / use the index and doc_type to construct one.
if not self.url.path or self.url.path == "/":
# an index is mandatory for Elasticsearch, doc_type too but it defaults to log
if not index:
raise ValueError("Elasticsearch index cannot be ommitted.")
else:
self.url.path = os.path.join("/", index, doc_type)
# end __init__
def emit(self, record):
# Break the record down to a dictionary
message = dict()
message["timestamp"] = datetime.now().isoformat()
message["level"] = record.levelname
message["name"] = record.name
message["lineno"] = record.lineno
message["message"] = record.msg
json_message = json.dumps(message)
json_message_bytes = json_message.encode("utf8")
urllib.request.urlopen(self.url, data=json_message_bytes)
# end emit
# end ESLogHandler
| Revert "trying to simplefy __init__" | Revert "trying to simplefy __init__"
This reverts commit f2e3887bcd53b1c35af2d9dfe2363ea9e2a407f5.
| Python | mit | Rio/ESLog |
from datetime import datetime
import logging
import os
import json
import urllib.request
import urllib.parse
class ESLogHandler(logging.Handler):
def __init__(self, url, index=None, doc_type="log", level=logging.NOTSET):
logging.Handler.__init__(self, level=level)
-
+
+ # Parse the url
self.url = urllib.parse.urlparse(url)
-
- print(self.url)
+
+ # If no scheme is given, set it to http
+ if not self.url.scheme:
+ self.url.scheme = "http"
+
+ # If a scheme is given but it is not http, raise an exception
+ elif self.url.scheme != "http":
+ raise ValueError("Only HTTP is supported.")
+
+ # If no port is given default to 9200
+ if not self.url.port:
+ self.url.port = "9200"
+
+ # If no path is given or it is only a / use thi index and doc_type to construct one.
+ if not self.url.path or self.url.path == "/":
+ # an index is mandatory for Elasticsearch, doc_type too but it defaults to log
+ if not index:
+ raise ValueError("Elasticsearch index cannot be ommitted.")
+
+ else:
+ self.url.path = os.path.join("/", index, doc_type)
# end __init__
def emit(self, record):
# Break the record down to a dictionary
message = dict()
message["timestamp"] = datetime.now().isoformat()
message["level"] = record.levelname
message["name"] = record.name
message["lineno"] = record.lineno
message["message"] = record.msg
json_message = json.dumps(message)
json_message_bytes = json_message.encode("utf8")
urllib.request.urlopen(self.url, data=json_message_bytes)
# end emit
# end ESLogHandler
| Revert "trying to simplefy __init__" | ## Code Before:
from datetime import datetime
import logging
import os
import json
import urllib.request
import urllib.parse
class ESLogHandler(logging.Handler):
def __init__(self, url, index=None, doc_type="log", level=logging.NOTSET):
logging.Handler.__init__(self, level=level)
self.url = urllib.parse.urlparse(url)
print(self.url)
# end __init__
def emit(self, record):
# Break the record down to a dictionary
message = dict()
message["timestamp"] = datetime.now().isoformat()
message["level"] = record.levelname
message["name"] = record.name
message["lineno"] = record.lineno
message["message"] = record.msg
json_message = json.dumps(message)
json_message_bytes = json_message.encode("utf8")
urllib.request.urlopen(self.url, data=json_message_bytes)
# end emit
# end ESLogHandler
## Instruction:
Revert "trying to simplefy __init__"
## Code After:
from datetime import datetime
import logging
import os
import json
import urllib.request
import urllib.parse
class ESLogHandler(logging.Handler):
def __init__(self, url, index=None, doc_type="log", level=logging.NOTSET):
logging.Handler.__init__(self, level=level)
# Parse the url
self.url = urllib.parse.urlparse(url)
# If no scheme is given, set it to http
if not self.url.scheme:
self.url.scheme = "http"
# If a scheme is given but it is not http, raise an exception
elif self.url.scheme != "http":
raise ValueError("Only HTTP is supported.")
# If no port is given default to 9200
if not self.url.port:
self.url.port = "9200"
        # If no path is given or it is only a / use the index and doc_type to construct one.
if not self.url.path or self.url.path == "/":
# an index is mandatory for Elasticsearch, doc_type too but it defaults to log
if not index:
raise ValueError("Elasticsearch index cannot be ommitted.")
else:
self.url.path = os.path.join("/", index, doc_type)
# end __init__
def emit(self, record):
# Break the record down to a dictionary
message = dict()
message["timestamp"] = datetime.now().isoformat()
message["level"] = record.levelname
message["name"] = record.name
message["lineno"] = record.lineno
message["message"] = record.msg
json_message = json.dumps(message)
json_message_bytes = json_message.encode("utf8")
urllib.request.urlopen(self.url, data=json_message_bytes)
# end emit
# end ESLogHandler
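
One caveat worth noting about the restored __init__: urllib.parse.urlparse() returns an immutable ParseResult (a namedtuple subclass), so assignments such as self.url.scheme = "http" raise AttributeError at runtime, and port is a read-only derived property. A working variant would rebuild the result with _replace, for example:

import urllib.parse

url = urllib.parse.urlparse('//example.com:9200/myindex/log')
if not url.scheme:
    url = url._replace(scheme='http')
assert url.geturl() == 'http://example.com:9200/myindex/log'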
|
3e45f7d71fbd154a1039836228098efb62457f1b | tests/app/dvla_organisation/test_rest.py | tests/app/dvla_organisation/test_rest.py | from flask import json
from tests import create_authorization_header
def test_get_dvla_organisations(client):
auth_header = create_authorization_header()
response = client.get('/dvla_organisations', headers=[auth_header])
assert response.status_code == 200
dvla_organisations = json.loads(response.get_data(as_text=True))
assert dvla_organisations == {'001': 'HM Government', '500': 'Land Registry'}
| from flask import json
from tests import create_authorization_header
def test_get_dvla_organisations(client):
auth_header = create_authorization_header()
response = client.get('/dvla_organisations', headers=[auth_header])
assert response.status_code == 200
dvla_organisations = json.loads(response.get_data(as_text=True))
assert dvla_organisations['001'] == 'HM Government'
assert dvla_organisations['500'] == 'Land Registry'
| Refactor test so that it does not have to change every time we add a new organisation. | Refactor test so that it does not have to change every time we add a new organisation.
| Python | mit | alphagov/notifications-api,alphagov/notifications-api | from flask import json
from tests import create_authorization_header
def test_get_dvla_organisations(client):
auth_header = create_authorization_header()
response = client.get('/dvla_organisations', headers=[auth_header])
assert response.status_code == 200
dvla_organisations = json.loads(response.get_data(as_text=True))
- assert dvla_organisations == {'001': 'HM Government', '500': 'Land Registry'}
+ assert dvla_organisations['001'] == 'HM Government'
+ assert dvla_organisations['500'] == 'Land Registry'
| Refactor test so that it does not have to change every time we add a new organisation. | ## Code Before:
from flask import json
from tests import create_authorization_header
def test_get_dvla_organisations(client):
auth_header = create_authorization_header()
response = client.get('/dvla_organisations', headers=[auth_header])
assert response.status_code == 200
dvla_organisations = json.loads(response.get_data(as_text=True))
assert dvla_organisations == {'001': 'HM Government', '500': 'Land Registry'}
## Instruction:
Refactor test so that it does not have to change every time we add a new organisation.
## Code After:
from flask import json
from tests import create_authorization_header
def test_get_dvla_organisations(client):
auth_header = create_authorization_header()
response = client.get('/dvla_organisations', headers=[auth_header])
assert response.status_code == 200
dvla_organisations = json.loads(response.get_data(as_text=True))
assert dvla_organisations['001'] == 'HM Government'
assert dvla_organisations['500'] == 'Land Registry'
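
The same robustness can be written as a single subset check: in Python 3, dict.items() views are set-like, so the test can pin only the pairs it cares about.

orgs = {'001': 'HM Government', '500': 'Land Registry', '999': 'Newly Added'}
expected = {'001': 'HM Government', '500': 'Land Registry'}
assert expected.items() <= orgs.items()  # still passes as organisations grow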
|
e3c840567fae974b2a1f169b05b86de97b60c8d0 | gitcms/publications/urls.py | gitcms/publications/urls.py | from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings._BASE_DIR + '/../media/publications/files'}),
)
| from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
 | Remove stray mention of BASE_URL | Remove stray mention of BASE_URL
| Python | agpl-3.0 | luispedro/django-gitcms,luispedro/django-gitcms | from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
- (r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings._BASE_DIR + '/../media/publications/files'}),
+ (r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
| Remove stay mention to BASE_URL | ## Code Before:
from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings._BASE_DIR + '/../media/publications/files'}),
)
## Instruction:
Remove stray mention of BASE_URL
## Code After:
from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
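
On later Django versions the same media route is usually wired with the static() helper instead of a hand-written django.views.static.serve pattern; a rough equivalent (active only when DEBUG is on):

from django.conf import settings
from django.conf.urls.static import static

urlpatterns = []  # the app's existing patterns would go here
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)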
|
cc85fdf3b44b7a69b8d0406c170d409783687d2d | __TEMPLATE__.py | __TEMPLATE__.py | """Module docstring. This talks about the module."""
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
IS_MAIN = True if __name__ == '__main__' else False
if IS_MAIN:
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
class MyClass(object):
"""Class docstring."""
raise NotImplementedError
if IS_MAIN:
with Section('SOME MODULE TITLE'):
pass
| """Module docstring. This talks about the module."""
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
IS_MAIN = True if __name__ == '__main__' else False
if IS_MAIN:
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
class MyClass(object):
"""Class docstring."""
raise NotImplementedError
if IS_MAIN:
with Section(__doc__):
pass
| Use Docstring as default title value. | Use Docstring as default title value.
| Python | apache-2.0 | christabor/MoAL,christabor/MoAL,christabor/MoAL,christabor/MoAL,christabor/MoAL | """Module docstring. This talks about the module."""
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
IS_MAIN = True if __name__ == '__main__' else False
if IS_MAIN:
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
class MyClass(object):
"""Class docstring."""
raise NotImplementedError
if IS_MAIN:
- with Section('SOME MODULE TITLE'):
+ with Section(__doc__):
pass
| Use Docstring as default title value. | ## Code Before:
"""Module docstring. This talks about the module."""
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
IS_MAIN = True if __name__ == '__main__' else False
if IS_MAIN:
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
class MyClass(object):
"""Class docstring."""
raise NotImplementedError
if IS_MAIN:
with Section('SOME MODULE TITLE'):
pass
## Instruction:
Use Docstring as default title value.
## Code After:
"""Module docstring. This talks about the module."""
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
IS_MAIN = True if __name__ == '__main__' else False
if IS_MAIN:
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
class MyClass(object):
"""Class docstring."""
raise NotImplementedError
if IS_MAIN:
with Section(__doc__):
pass
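
This works because Python binds a module's leading docstring to __doc__; if a module omits its docstring, __doc__ is None, so the template relies on every module keeping one. In miniature:

"""Demo module. This first line becomes the section title."""

title = __doc__ or 'UNTITLED MODULE'
assert title.startswith('Demo module')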
|
58d11644b08a91ab1e71f697741197f1b697d817 | tests/request/test_request_header.py | tests/request/test_request_header.py | def test_multiple_same_headers():
pass
def test_header_case_insensitivity():
pass
def test_header_with_continuation_lines():
pass
def test_request_without_headers():
pass
def test_invalid_header_syntax():
pass
| from httoop import Headers, InvalidHeader
def test_multiple_same_headers():
pass
def test_header_case_insensitivity():
pass
def test_header_with_continuation_lines():
h = Headers()
h.parse('Foo: bar\r\n baz')
h.parse('Foo2: bar\r\n\tbaz')
h.parse('Foo3: bar\r\n baz')
h.parse('Foo4: bar\r\n\t baz')
assert h['Foo'] == 'barbaz'
assert h['Foo2'] == 'barbaz'
assert h['Foo3'] == 'bar baz'
assert h['Foo4'] == 'bar baz'
def test_request_without_headers():
pass
def test_invalid_header_syntax():
h = Headers()
invalid_headers = ['Foo']
for char in b"%s\x7F()<>@,;\\\\\"/\[\]?={} \t%s" % (b''.join(map(chr, range(0x00, 0x1F))), ''.join(map(chr, range(0x80, 0xFF)))):
invalid_headers.append(b'Fo%so: bar' % (char,))
for invalid in invalid_headers:
try:
h.parse(invalid)
except InvalidHeader:
pass
else:
assert False, 'Invalid header %r parsed successfully' % (invalid,)
| Add test case for invalid headers and continuation lines | Add test case for invalid headers and continuation lines
| Python | mit | spaceone/httoop,spaceone/httoop,spaceone/httoop | + from httoop import Headers, InvalidHeader
+
def test_multiple_same_headers():
pass
def test_header_case_insensitivity():
pass
def test_header_with_continuation_lines():
- pass
+ h = Headers()
+ h.parse('Foo: bar\r\n baz')
+ h.parse('Foo2: bar\r\n\tbaz')
+ h.parse('Foo3: bar\r\n baz')
+ h.parse('Foo4: bar\r\n\t baz')
+ assert h['Foo'] == 'barbaz'
+ assert h['Foo2'] == 'barbaz'
+ assert h['Foo3'] == 'bar baz'
+ assert h['Foo4'] == 'bar baz'
def test_request_without_headers():
pass
def test_invalid_header_syntax():
+ h = Headers()
+ invalid_headers = ['Foo']
+ for char in b"%s\x7F()<>@,;\\\\\"/\[\]?={} \t%s" % (b''.join(map(chr, range(0x00, 0x1F))), ''.join(map(chr, range(0x80, 0xFF)))):
+ invalid_headers.append(b'Fo%so: bar' % (char,))
+ for invalid in invalid_headers:
+ try:
+ h.parse(invalid)
+ except InvalidHeader:
- pass
+ pass
+ else:
+ assert False, 'Invalid header %r parsed successfully' % (invalid,)
| Add test case for invalid headers and continuation lines | ## Code Before:
def test_multiple_same_headers():
pass
def test_header_case_insensitivity():
pass
def test_header_with_continuation_lines():
pass
def test_request_without_headers():
pass
def test_invalid_header_syntax():
pass
## Instruction:
Add test case for invalid headers and continuation lines
## Code After:
from httoop import Headers, InvalidHeader
def test_multiple_same_headers():
pass
def test_header_case_insensitivity():
pass
def test_header_with_continuation_lines():
h = Headers()
h.parse('Foo: bar\r\n baz')
h.parse('Foo2: bar\r\n\tbaz')
h.parse('Foo3: bar\r\n baz')
h.parse('Foo4: bar\r\n\t baz')
assert h['Foo'] == 'barbaz'
assert h['Foo2'] == 'barbaz'
assert h['Foo3'] == 'bar baz'
assert h['Foo4'] == 'bar baz'
def test_request_without_headers():
pass
def test_invalid_header_syntax():
h = Headers()
invalid_headers = ['Foo']
for char in b"%s\x7F()<>@,;\\\\\"/\[\]?={} \t%s" % (b''.join(map(chr, range(0x00, 0x1F))), ''.join(map(chr, range(0x80, 0xFF)))):
invalid_headers.append(b'Fo%so: bar' % (char,))
for invalid in invalid_headers:
try:
h.parse(invalid)
except InvalidHeader:
pass
else:
assert False, 'Invalid header %r parsed successfully' % (invalid,)
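
A compact reading of the folding semantics the new assertions encode (this is a sketch, not httoop's parser): exactly one leading SP or HTAB is dropped from each continuation line before the pieces are joined.

def unfold(lines):
    return ''.join(l[1:] if l[:1] in (' ', '\t') else l for l in lines)

assert unfold(['bar', ' baz']) == 'barbaz'    # the 'Foo' case above
assert unfold(['bar', '  baz']) == 'bar baz'  # the 'Foo3' case above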
|
f6429a3c4b413231ad480f2768d47b78ec0c690b | great_expectations/cli/cli_logging.py | great_expectations/cli/cli_logging.py | import logging
import warnings
warnings.filterwarnings("ignore")
###
# REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND.
# PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR
###
logger = logging.getLogger("great_expectations.cli")
def _set_up_logger():
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
handler.setLevel(level=logging.WARNING)
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
return module_logger | import logging
import warnings
warnings.filterwarnings("ignore")
###
# REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND.
# PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR
###
logger = logging.getLogger("great_expectations.cli")
def _set_up_logger():
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
module_logger.setLevel(level=logging.WARNING)
return module_logger | Set level on module logger instead | Set level on module logger instead
| Python | apache-2.0 | great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations | import logging
import warnings
warnings.filterwarnings("ignore")
###
# REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND.
# PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR
###
logger = logging.getLogger("great_expectations.cli")
def _set_up_logger():
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
- handler.setLevel(level=logging.WARNING)
+
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
+ module_logger.setLevel(level=logging.WARNING)
return module_logger | Set level on module logger instead | ## Code Before:
import logging
import warnings
warnings.filterwarnings("ignore")
###
# REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND.
# PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR
###
logger = logging.getLogger("great_expectations.cli")
def _set_up_logger():
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
handler.setLevel(level=logging.WARNING)
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
return module_logger
## Instruction:
Set level on module logger instead
## Code After:
import logging
import warnings
warnings.filterwarnings("ignore")
###
# REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND.
# PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR
###
logger = logging.getLogger("great_expectations.cli")
def _set_up_logger():
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
module_logger.setLevel(level=logging.WARNING)
return module_logger |
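
The behavioural difference being relied on: a level set on a handler filters only that handler, while a level set on the logger gates records before they reach any handler, including handlers attached later. A quick demonstration:

import logging

log = logging.getLogger('demo')
log.addHandler(logging.StreamHandler())
log.setLevel(logging.WARNING)

log.info('suppressed at the logger, never reaches the handler')
log.warning('emitted')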
323a92afd125bd97c960ab71c64f78601ec4b000 | aioinotify/watch.py | aioinotify/watch.py | import asyncio
class Watch:
"""Represents an inotify watch as added by InotifyProtocol.watch()"""
def __init__(self, watch_descriptor, callback, protocol):
"""
:param int watch_descriptor: The watch descriptor as returned by inotify_add_watch
:param callback: A function with one positional argument (the event object) called when
an inotify event happens.
"""
self.watch_descriptor = watch_descriptor
self._callback = callback
self._closed = False
self._protocol = protocol
@asyncio.coroutine
def dispatch_event(self, event):
if not self._closed:
yield from self._callback(event)
def close(self):
if not self._closed:
self._protocol._remove_watch(self.watch_descriptor)
self._closed = True
| import asyncio
class Watch:
"""Represents an inotify watch as added by InotifyProtocol.watch()"""
def __init__(self, watch_descriptor, callback, protocol):
"""
:param int watch_descriptor: The watch descriptor as returned by inotify_add_watch
:param callback: A function with one positional argument (the event object) called when
an inotify event happens.
"""
self.watch_descriptor = watch_descriptor
self._callback = callback
self._closed = False
self._protocol = protocol
def __enter__(self):
return self
def __exit__(self, *exc):
self.close()
@asyncio.coroutine
def dispatch_event(self, event):
if not self._closed:
yield from self._callback(event)
def close(self):
if not self._closed:
self._protocol._remove_watch(self.watch_descriptor)
self._closed = True
| Make Watch also a context manager | Make Watch also a context manager
| Python | apache-2.0 | mwfrojdman/aioinotify | import asyncio
class Watch:
"""Represents an inotify watch as added by InotifyProtocol.watch()"""
def __init__(self, watch_descriptor, callback, protocol):
"""
:param int watch_descriptor: The watch descriptor as returned by inotify_add_watch
:param callback: A function with one positional argument (the event object) called when
an inotify event happens.
"""
self.watch_descriptor = watch_descriptor
self._callback = callback
self._closed = False
self._protocol = protocol
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc):
+ self.close()
+
@asyncio.coroutine
def dispatch_event(self, event):
if not self._closed:
yield from self._callback(event)
def close(self):
if not self._closed:
self._protocol._remove_watch(self.watch_descriptor)
self._closed = True
| Make Watch also a context manager | ## Code Before:
import asyncio
class Watch:
"""Represents an inotify watch as added by InotifyProtocol.watch()"""
def __init__(self, watch_descriptor, callback, protocol):
"""
:param int watch_descriptor: The watch descriptor as returned by inotify_add_watch
:param callback: A function with one positional argument (the event object) called when
an inotify event happens.
"""
self.watch_descriptor = watch_descriptor
self._callback = callback
self._closed = False
self._protocol = protocol
@asyncio.coroutine
def dispatch_event(self, event):
if not self._closed:
yield from self._callback(event)
def close(self):
if not self._closed:
self._protocol._remove_watch(self.watch_descriptor)
self._closed = True
## Instruction:
Make Watch also a context manager
## Code After:
import asyncio
class Watch:
"""Represents an inotify watch as added by InotifyProtocol.watch()"""
def __init__(self, watch_descriptor, callback, protocol):
"""
:param int watch_descriptor: The watch descriptor as returned by inotify_add_watch
:param callback: A function with one positional argument (the event object) called when
an inotify event happens.
"""
self.watch_descriptor = watch_descriptor
self._callback = callback
self._closed = False
self._protocol = protocol
def __enter__(self):
return self
def __exit__(self, *exc):
self.close()
@asyncio.coroutine
def dispatch_event(self, event):
if not self._closed:
yield from self._callback(event)
def close(self):
if not self._closed:
self._protocol._remove_watch(self.watch_descriptor)
self._closed = True
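
Intended usage after this change (the watch() call below assumes the protocol's coroutine API and a /tmp path purely for illustration): the watch is removed automatically when the block exits, even on an exception.

import asyncio

@asyncio.coroutine
def on_event(event):
    print(event)

@asyncio.coroutine
def watch_tmp(protocol):
    watch = yield from protocol.watch('/tmp', on_event)  # assumed API
    with watch:
        yield from asyncio.sleep(10)
    # __exit__ has called watch.close() here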
|
aa77e74c02ec7276c233454806d55fdb32899a13 | __init__.py | __init__.py | from . import advection
from . import cascade
from . import io
from . import noise
from . import nowcasts
from . import optflow
from . import postprocessing
from . import timeseries
from . import utils
from . import verification
from . import visualization
| from . import advection
from . import cascade
from . import io
from . import noise
from . import nowcasts
from . import optflow
from . import postprocessing
from . import timeseries
from . import utils
from . import verification as vf
from . import visualization as plt
| Use namespaces plt and vf for visualization and verification modules | Use namespaces plt and vf for visualization and verification modules
| Python | bsd-3-clause | pySTEPS/pysteps | from . import advection
from . import cascade
from . import io
from . import noise
from . import nowcasts
from . import optflow
from . import postprocessing
from . import timeseries
from . import utils
- from . import verification
+ from . import verification as vf
- from . import visualization
+ from . import visualization as plt
| Use namespaces plt and vf for visualization and verification modules | ## Code Before:
from . import advection
from . import cascade
from . import io
from . import noise
from . import nowcasts
from . import optflow
from . import postprocessing
from . import timeseries
from . import utils
from . import verification
from . import visualization
## Instruction:
Use namespaces plt and vf for visualization and verification modules
## Code After:
from . import advection
from . import cascade
from . import io
from . import noise
from . import nowcasts
from . import optflow
from . import postprocessing
from . import timeseries
from . import utils
from . import verification as vf
from . import visualization as plt
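
Both spellings remain available: importing a submodule also binds it as an attribute on the parent package, so after this __init__ runs the short and long names point at the same module objects.

import pysteps

assert pysteps.plt is pysteps.visualization
assert pysteps.vf is pysteps.verification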
|
b1153bc6e8b8b132c146076aeeb6b86ec4f54365 | __init__.py | __init__.py | if 'loaded' in locals():
import imp
imp.reload(blendergltf)
from .blendergltf import *
else:
loaded = True
from .blendergltf import * | bl_info = {
"name": "glTF format",
"author": "Daniel Stokes",
"version": (0, 1, 0),
"blender": (2, 76, 0),
"location": "File > Import-Export",
"description": "Export glTF",
"warning": "",
"wiki_url": ""
"",
"support": 'TESTING',
"category": "Import-Export"}
# Treat as module
if '.' in __name__:
if 'loaded' in locals():
import imp
imp.reload(blendergltf)
from .blendergltf import *
else:
loaded = True
from .blendergltf import *
# Treat as addon
else:
if "bpy" in locals():
import importlib
importlib.reload(blendergltf)
import json
import bpy
from bpy.props import (
StringProperty,
)
from bpy_extras.io_utils import (
ExportHelper,
)
from . import blendergltf
class ExportGLTF(bpy.types.Operator, ExportHelper):
"""Save a Khronos glTF File"""
bl_idname = "export_scene.gltf"
bl_label = 'Export glTF'
filename_ext = ".gltf"
filter_glob = StringProperty(
default="*.gltf",
options={'HIDDEN'},
)
check_extension = True
def execute(self, context):
scene = {
'camera': bpy.data.cameras,
'lamps': bpy.data.lamps,
'images': bpy.data.images,
'materials': bpy.data.materials,
'meshes': bpy.data.meshes,
'objects': bpy.data.objects,
'scenes': bpy.data.scenes,
'textures': bpy.data.textures,
}
gltf = blendergltf.export_gltf(scene)
with open(self.filepath, 'w') as fout:
json.dump(gltf, fout, indent=4)
return {'FINISHED'}
def menu_func_export(self, context):
self.layout.operator(ExportGLTF.bl_idname, text="glTF (.gltf)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func_export) | Add experimental support to run module as Blender addon | Add experimental support to run module as Blender addon
| Python | apache-2.0 | Kupoman/blendergltf,lukesanantonio/blendergltf | + bl_info = {
+ "name": "glTF format",
+ "author": "Daniel Stokes",
+ "version": (0, 1, 0),
+ "blender": (2, 76, 0),
+ "location": "File > Import-Export",
+ "description": "Export glTF",
+ "warning": "",
+ "wiki_url": ""
+ "",
+ "support": 'TESTING',
+ "category": "Import-Export"}
+
+
+ # Treat as module
+ if '.' in __name__:
- if 'loaded' in locals():
+ if 'loaded' in locals():
- import imp
+ import imp
- imp.reload(blendergltf)
+ imp.reload(blendergltf)
- from .blendergltf import *
+ from .blendergltf import *
+ else:
+ loaded = True
+ from .blendergltf import *
+
+ # Treat as addon
else:
- loaded = True
- from .blendergltf import *
+ if "bpy" in locals():
+ import importlib
+ importlib.reload(blendergltf)
+
+
+ import json
+
+ import bpy
+ from bpy.props import (
+ StringProperty,
+ )
+ from bpy_extras.io_utils import (
+ ExportHelper,
+ )
+
+ from . import blendergltf
+
+
+ class ExportGLTF(bpy.types.Operator, ExportHelper):
+ """Save a Khronos glTF File"""
+
+ bl_idname = "export_scene.gltf"
+ bl_label = 'Export glTF'
+
+ filename_ext = ".gltf"
+ filter_glob = StringProperty(
+ default="*.gltf",
+ options={'HIDDEN'},
+ )
+
+ check_extension = True
+
+ def execute(self, context):
+ scene = {
+ 'camera': bpy.data.cameras,
+ 'lamps': bpy.data.lamps,
+ 'images': bpy.data.images,
+ 'materials': bpy.data.materials,
+ 'meshes': bpy.data.meshes,
+ 'objects': bpy.data.objects,
+ 'scenes': bpy.data.scenes,
+ 'textures': bpy.data.textures,
+ }
+ gltf = blendergltf.export_gltf(scene)
+ with open(self.filepath, 'w') as fout:
+ json.dump(gltf, fout, indent=4)
+ return {'FINISHED'}
+
+
+ def menu_func_export(self, context):
+ self.layout.operator(ExportGLTF.bl_idname, text="glTF (.gltf)")
+
+
+ def register():
+ bpy.utils.register_module(__name__)
+
+ bpy.types.INFO_MT_file_export.append(menu_func_export)
+
+
+ def unregister():
+ bpy.utils.unregister_module(__name__)
+
+ bpy.types.INFO_MT_file_export.remove(menu_func_export) | Add experimental support to run module as Blender addon | ## Code Before:
if 'loaded' in locals():
import imp
imp.reload(blendergltf)
from .blendergltf import *
else:
loaded = True
from .blendergltf import *
## Instruction:
Add experimental support to run module as Blender addon
## Code After:
bl_info = {
"name": "glTF format",
"author": "Daniel Stokes",
"version": (0, 1, 0),
"blender": (2, 76, 0),
"location": "File > Import-Export",
"description": "Export glTF",
"warning": "",
"wiki_url": ""
"",
"support": 'TESTING',
"category": "Import-Export"}
# Treat as module
if '.' in __name__:
if 'loaded' in locals():
import imp
imp.reload(blendergltf)
from .blendergltf import *
else:
loaded = True
from .blendergltf import *
# Treat as addon
else:
if "bpy" in locals():
import importlib
importlib.reload(blendergltf)
import json
import bpy
from bpy.props import (
StringProperty,
)
from bpy_extras.io_utils import (
ExportHelper,
)
from . import blendergltf
class ExportGLTF(bpy.types.Operator, ExportHelper):
"""Save a Khronos glTF File"""
bl_idname = "export_scene.gltf"
bl_label = 'Export glTF'
filename_ext = ".gltf"
filter_glob = StringProperty(
default="*.gltf",
options={'HIDDEN'},
)
check_extension = True
def execute(self, context):
scene = {
'camera': bpy.data.cameras,
'lamps': bpy.data.lamps,
'images': bpy.data.images,
'materials': bpy.data.materials,
'meshes': bpy.data.meshes,
'objects': bpy.data.objects,
'scenes': bpy.data.scenes,
'textures': bpy.data.textures,
}
gltf = blendergltf.export_gltf(scene)
with open(self.filepath, 'w') as fout:
json.dump(gltf, fout, indent=4)
return {'FINISHED'}
def menu_func_export(self, context):
self.layout.operator(ExportGLTF.bl_idname, text="glTF (.gltf)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func_export) |
8c81f606499ebadddaf2a362bc8845eb69a21e8d | lds-gen.py | lds-gen.py |
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('};')
|
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('local:\n *;')
print('};')
| Stop exporting internal symbols from the shared libraries. | Stop exporting internal symbols from the shared libraries.
| Python | bsd-2-clause | orthrus/librdkafka,klonikar/librdkafka,klonikar/librdkafka,senior7515/librdkafka,janmejay/librdkafka,senior7515/librdkafka,orthrus/librdkafka,klonikar/librdkafka,janmejay/librdkafka,orthrus/librdkafka,janmejay/librdkafka,senior7515/librdkafka,senior7515/librdkafka,klonikar/librdkafka,orthrus/librdkafka,janmejay/librdkafka |
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
+ print('local:\n *;')
+
print('};')
| Stop exporting internal symbols from the shared libraries. | ## Code Before:
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('};')
## Instruction:
Stop exporting internal symbols from the shared libraries.
## Code After:
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('local:\n *;')
print('};')
|
b07d74f99338165f8bb83ac0599452b021b96a8f | django_boolean_sum.py | django_boolean_sum.py | from django.conf import settings
from django.db.models.aggregates import Sum
from django.db.models.sql.aggregates import Sum as BaseSQLSum
class SQLSum(BaseSQLSum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
function = None
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
| from django.conf import settings
from django.db.models.aggregates import Sum
class SQLSum(Sum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
| Add support for Django 1.10+ | Add support for Django 1.10+
| Python | bsd-2-clause | Mibou/django-boolean-sum | from django.conf import settings
from django.db.models.aggregates import Sum
- from django.db.models.sql.aggregates import Sum as BaseSQLSum
- class SQLSum(BaseSQLSum):
+ class SQLSum(Sum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
- function = None
-
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
| Add support for Django 1.10+ | ## Code Before:
from django.conf import settings
from django.db.models.aggregates import Sum
from django.db.models.sql.aggregates import Sum as BaseSQLSum
class SQLSum(BaseSQLSum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
function = None
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
## Instruction:
Add support for Django 1.10+
## Code After:
from django.conf import settings
from django.db.models.aggregates import Sum
class SQLSum(Sum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
|
fdf559007b9596e8d075d3de7f6e9f27e8a24ed6 | rippl/legislature/api.py | rippl/legislature/api.py | from django.http import JsonResponse, HttpResponseBadRequest
from legislature.sunlight.district import DistrictMatcher
def find_district(request):
try:
latitude = request.GET['lat']
longitude = request.GET['lng']
except KeyError:
return HttpResponseBadRequest('Need both "lat" and "lng" query params')
matcher = DistrictMatcher()
district = matcher.find_district(latitude, longitude)
return JsonResponse({
'state': district.state.abbr,
'state_name': district.state.name,
'district': district.number,
'str': str(district)
})
| from django.http import JsonResponse, HttpResponseBadRequest
from legislature.sunlight.district import DistrictMatcher
def find_district(request):
try:
latitude = request.GET['lat']
longitude = request.GET['lng']
except KeyError:
return HttpResponseBadRequest('Need both "lat" and "lng" query params')
matcher = DistrictMatcher()
district = matcher.find_district(latitude, longitude)
return JsonResponse({
'state': district.state.abbr,
'state_name': district.state.name,
'district': district.number,
'district_id': district.id,
'str': str(district)
})
| Add district id to find_district response | Add district id to find_district response
| Python | mit | gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl | from django.http import JsonResponse, HttpResponseBadRequest
from legislature.sunlight.district import DistrictMatcher
def find_district(request):
try:
latitude = request.GET['lat']
longitude = request.GET['lng']
except KeyError:
return HttpResponseBadRequest('Need both "lat" and "lng" query params')
matcher = DistrictMatcher()
district = matcher.find_district(latitude, longitude)
return JsonResponse({
'state': district.state.abbr,
'state_name': district.state.name,
'district': district.number,
+ 'district_id': district.id,
'str': str(district)
})
| Add district id to find_district response | ## Code Before:
from django.http import JsonResponse, HttpResponseBadRequest
from legislature.sunlight.district import DistrictMatcher
def find_district(request):
try:
latitude = request.GET['lat']
longitude = request.GET['lng']
except KeyError:
return HttpResponseBadRequest('Need both "lat" and "lng" query params')
matcher = DistrictMatcher()
district = matcher.find_district(latitude, longitude)
return JsonResponse({
'state': district.state.abbr,
'state_name': district.state.name,
'district': district.number,
'str': str(district)
})
## Instruction:
Add district id to find_district response
## Code After:
from django.http import JsonResponse, HttpResponseBadRequest
from legislature.sunlight.district import DistrictMatcher
def find_district(request):
try:
latitude = request.GET['lat']
longitude = request.GET['lng']
except KeyError:
return HttpResponseBadRequest('Need both "lat" and "lng" query params')
matcher = DistrictMatcher()
district = matcher.find_district(latitude, longitude)
return JsonResponse({
'state': district.state.abbr,
'state_name': district.state.name,
'district': district.number,
'district_id': district.id,
'str': str(district)
})
|
eaff795bddb0e07f4ad4e4c9277c5c0f6f199380 | salt/beacons/__init__.py | salt/beacons/__init__.py | '''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
ret.append({'tag': tag, 'data': data})
return ret
| '''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
if not 'id' in data:
data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
| Add id to the beacon event dataset | Add id to the beacon event dataset
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | '''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
+ if not 'id' in data:
+ data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
| Add id to the beacon event dataset
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
ret.append({'tag': tag, 'data': data})
return ret
## Instruction:
Add id to the beacon event dataset
## Code After:
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
if not 'id' in data:
data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
|
309439f65bb668aba85a31a46b2633a46ee55777 | apps/careeropportunity/migrations/0001_initial.py | apps/careeropportunity/migrations/0001_initial.py | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('companyprofile', '0001_squashed_0003_company_image'),
]
operations = [
migrations.CreateModel(
name='CareerOpportunity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=100, verbose_name='tittel')),
('ingress', models.CharField(max_length=250, verbose_name='ingress')),
('description', models.TextField(verbose_name='beskrivelse')),
('start', models.DateTimeField(verbose_name='aktiv fra')),
('end', models.DateTimeField(verbose_name='aktiv til')),
('featured', models.BooleanField(default=False, verbose_name='fremhevet')),
('company', models.ForeignKey(related_name='company', to='companyprofile.Company')),
],
options={
'verbose_name': 'karrieremulighet',
'verbose_name_plural': 'karrieremuligheter',
'permissions': (('view_careeropportunity', 'View CareerOpportunity'),),
},
bases=(models.Model,),
),
]
| from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('companyprofile', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CareerOpportunity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=100, verbose_name='tittel')),
('ingress', models.CharField(max_length=250, verbose_name='ingress')),
('description', models.TextField(verbose_name='beskrivelse')),
('start', models.DateTimeField(verbose_name='aktiv fra')),
('end', models.DateTimeField(verbose_name='aktiv til')),
('featured', models.BooleanField(default=False, verbose_name='fremhevet')),
('company', models.ForeignKey(related_name='company', to='companyprofile.Company')),
],
options={
'verbose_name': 'karrieremulighet',
'verbose_name_plural': 'karrieremuligheter',
'permissions': (('view_careeropportunity', 'View CareerOpportunity'),),
},
bases=(models.Model,),
),
]
| Revert "Change careeropportunity migration dep" | Revert "Change careeropportunity migration dep"
This reverts commit 60fdfab7e3b557e46276c225ff159f5773930525.
| Python | mit | dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4 | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
- ('companyprofile', '0001_squashed_0003_company_image'),
+ ('companyprofile', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CareerOpportunity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=100, verbose_name='tittel')),
('ingress', models.CharField(max_length=250, verbose_name='ingress')),
('description', models.TextField(verbose_name='beskrivelse')),
('start', models.DateTimeField(verbose_name='aktiv fra')),
('end', models.DateTimeField(verbose_name='aktiv til')),
('featured', models.BooleanField(default=False, verbose_name='fremhevet')),
('company', models.ForeignKey(related_name='company', to='companyprofile.Company')),
],
options={
'verbose_name': 'karrieremulighet',
'verbose_name_plural': 'karrieremuligheter',
'permissions': (('view_careeropportunity', 'View CareerOpportunity'),),
},
bases=(models.Model,),
),
]
| Revert "Change careeropportunity migration dep" | ## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('companyprofile', '0001_squashed_0003_company_image'),
]
operations = [
migrations.CreateModel(
name='CareerOpportunity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=100, verbose_name='tittel')),
('ingress', models.CharField(max_length=250, verbose_name='ingress')),
('description', models.TextField(verbose_name='beskrivelse')),
('start', models.DateTimeField(verbose_name='aktiv fra')),
('end', models.DateTimeField(verbose_name='aktiv til')),
('featured', models.BooleanField(default=False, verbose_name='fremhevet')),
('company', models.ForeignKey(related_name='company', to='companyprofile.Company')),
],
options={
'verbose_name': 'karrieremulighet',
'verbose_name_plural': 'karrieremuligheter',
'permissions': (('view_careeropportunity', 'View CareerOpportunity'),),
},
bases=(models.Model,),
),
]
## Instruction:
Revert "Change careeropportunity migration dep"
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('companyprofile', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CareerOpportunity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=100, verbose_name='tittel')),
('ingress', models.CharField(max_length=250, verbose_name='ingress')),
('description', models.TextField(verbose_name='beskrivelse')),
('start', models.DateTimeField(verbose_name='aktiv fra')),
('end', models.DateTimeField(verbose_name='aktiv til')),
('featured', models.BooleanField(default=False, verbose_name='fremhevet')),
('company', models.ForeignKey(related_name='company', to='companyprofile.Company')),
],
options={
'verbose_name': 'karrieremulighet',
'verbose_name_plural': 'karrieremuligheter',
'permissions': (('view_careeropportunity', 'View CareerOpportunity'),),
},
bases=(models.Model,),
),
]
|
437623aee55fd68683126bd6852df52379837eaa | bash_command.py | bash_command.py | import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, _ = run_bash_for_output(final_command)
print(final_command, " ", output)
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": output})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
| import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, err = run_bash_for_output(final_command)
new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
| Print both output + error for bash command | Print both output + error for bash command
| Python | mit | ktuan89/sublimeplugins | import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
- output, _ = run_bash_for_output(final_command)
+ output, err = run_bash_for_output(final_command)
- print(final_command, " ", output)
+ new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
- results_view.run_command('replace_content', {"new_content": output})
+ results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
| Print both output + error for bash command | ## Code Before:
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, _ = run_bash_for_output(final_command)
print(final_command, " ", output)
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": output})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
## Instruction:
Print both output + error for bash command
## Code After:
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, err = run_bash_for_output(final_command)
new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
53681ae30bdaccce2321601f1ebab09b4c572cc9 | sqlalchemy_mptt/__init__.py | sqlalchemy_mptt/__init__.py | from .mixins import BaseNestedSets
__version__ = "0.0.8"
__mixins__ = [BaseNestedSets]
|
from sqlalchemy.orm import mapper
from .mixins import BaseNestedSets
from .events import TreesManager
__version__ = "0.0.8"
__mixins__ = [BaseNestedSets]
__all__ = ['BaseNestedSets', 'mptt_sessionmaker']
tree_manager = TreesManager(BaseNestedSets)
tree_manager.register_mapper(mapper)
mptt_sessionmaker = tree_manager.register_factory
| Make a default tree manager importable from the package. | Make a default tree manager importable from the package.
| Python | mit | uralbash/sqlalchemy_mptt,ITCase/sqlalchemy_mptt,ITCase/sqlalchemy_mptt,uralbash/sqlalchemy_mptt | +
+ from sqlalchemy.orm import mapper
from .mixins import BaseNestedSets
+ from .events import TreesManager
__version__ = "0.0.8"
__mixins__ = [BaseNestedSets]
+ __all__ = ['BaseNestedSets', 'mptt_sessionmaker']
+ tree_manager = TreesManager(BaseNestedSets)
+ tree_manager.register_mapper(mapper)
+ mptt_sessionmaker = tree_manager.register_factory
+ | Make a default tree manager importable from the package. | ## Code Before:
from .mixins import BaseNestedSets
__version__ = "0.0.8"
__mixins__ = [BaseNestedSets]
## Instruction:
Make a default tree manager importable from the package.
## Code After:
from sqlalchemy.orm import mapper
from .mixins import BaseNestedSets
from .events import TreesManager
__version__ = "0.0.8"
__mixins__ = [BaseNestedSets]
__all__ = ['BaseNestedSets', 'mptt_sessionmaker']
tree_manager = TreesManager(BaseNestedSets)
tree_manager.register_mapper(mapper)
mptt_sessionmaker = tree_manager.register_factory
|
f032501126e7bb6d86441e38112c6bdf5035c62e | icekit/search_indexes.py | icekit/search_indexes.py | from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
| from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
from django.conf import settings
# Optional search indexes which can be used with the default FluentPage and FlatPage models.
if getattr(settings, 'ICEKIT_USE_SEARCH_INDEXES', True):
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
| Add setting to turn off search indexes. | Add setting to turn off search indexes.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
+ from django.conf import settings
+ # Optional search indexes which can be used with the default FluentPage and FlatPage models.
+ if getattr(settings, 'ICEKIT_USE_SEARCH_INDEXES', True):
- class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
+ class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
- """
+ """
- Search index for a fluent page.
+ Search index for a fluent page.
- """
+ """
- text = indexes.CharField(document=True, use_template=True)
+ text = indexes.CharField(document=True, use_template=True)
- author = indexes.CharField(model_attr='author')
+ author = indexes.CharField(model_attr='author')
- publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
+ publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
- @staticmethod
+ @staticmethod
- def get_model():
+ def get_model():
- """
+ """
- Get the model for the search index.
+ Get the model for the search index.
- """
+ """
- return FluentPage
+ return FluentPage
- def index_queryset(self, using=None):
+ def index_queryset(self, using=None):
- """
+ """
- Queryset appropriate for this object to allow search for.
+ Queryset appropriate for this object to allow search for.
- """
+ """
- return self.get_model().objects.published()
+ return self.get_model().objects.published()
- class FlatPageIndex(FluentPageIndex):
+ class FlatPageIndex(FluentPageIndex):
- """
+ """
- Search index for a flat page.
+ Search index for a flat page.
- As everything except the model is the same as for a FluentPageIndex
+ As everything except the model is the same as for a FluentPageIndex
- we shall subclass it and overwrite the one part we need.
+ we shall subclass it and overwrite the one part we need.
- """
- @staticmethod
- def get_model():
"""
+ @staticmethod
+ def get_model():
+ """
- Get the model for the search index.
+ Get the model for the search index.
- """
+ """
- return FlatPage
+ return FlatPage
| Add setting to turn of search indexes. | ## Code Before:
from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
## Instruction:
Add setting to turn off search indexes.
## Code After:
from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
from django.conf import settings
# Optional search indexes which can be used with the default FluentPage and FlatPage models.
if getattr(settings, 'ICEKIT_USE_SEARCH_INDEXES', True):
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
|
08dc0ce7c44d0149b443261ff6d3708e28a928e7 | src/meshparser/__init__.py | src/meshparser/__init__.py | from pkg_resources import resource_string
version = resource_string(__name__, 'version.txt').strip()
__version__ = version
| from pkg_resources import resource_string
version = resource_string(__name__, 'version.txt').strip()
__version__ = version.decode('utf-8')
| Add decode to version read from pkg_resources. | Add decode to version read from pkg_resources.
| Python | apache-2.0 | ABI-Software/MeshParser | from pkg_resources import resource_string
version = resource_string(__name__, 'version.txt').strip()
- __version__ = version
+ __version__ = version.decode('utf-8')
| Add decode to version read from pkg_resources. | ## Code Before:
from pkg_resources import resource_string
version = resource_string(__name__, 'version.txt').strip()
__version__ = version
## Instruction:
Add decode to version read from pkg_resources.
## Code After:
from pkg_resources import resource_string
version = resource_string(__name__, 'version.txt').strip()
__version__ = version.decode('utf-8')
|
670bbf8758e63cfeafc1de6f9330403dec2517c2 | astrobin_apps_platesolving/utils.py | astrobin_apps_platesolving/utils.py | import urllib2
# Django
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
def getFromStorage(image, alias):
def encoded(path):
return urllib2.quote(path.encode('utf-8'))
url = image.thumbnail(alias)
if "://" in url:
# We are getting the full path and must only encode the part after the protocol
# (we assume that the hostname is ASCII)
protocol, path = url.split("://")
url = protocol + encoded(path)
else:
url = settings.BASE_URL + encoded(url)
headers = { 'User-Agent' : 'Mozilla/5.0' }
req = urllib2.Request(url, None, headers)
img = NamedTemporaryFile(delete = True)
img.write(urllib2.urlopen(req).read())
img.flush()
img.seek(0)
return File(img)
| import urllib2
# Django
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
def getFromStorage(image, alias):
url = image.thumbnail(alias)
if "://" in url:
url = url.split('://')[1]
else:
url = settings.BASE_URL + url
url = 'http://' + urllib2.quote(url.encode('utf-8'))
headers = { 'User-Agent' : 'Mozilla/5.0' }
req = urllib2.Request(url, None, headers)
img = NamedTemporaryFile(delete = True)
img.write(urllib2.urlopen(req).read())
img.flush()
img.seek(0)
return File(img)
| Revert "Fix plate-solving on local development mode" | Revert "Fix plate-solving on local development mode"
This reverts commit 40897be402bd05ed5fb53e116f03d2d954720245.
| Python | agpl-3.0 | astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin | import urllib2
# Django
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
def getFromStorage(image, alias):
- def encoded(path):
- return urllib2.quote(path.encode('utf-8'))
+ url = image.thumbnail(alias)
+ if "://" in url:
+ url = url.split('://')[1]
+ else:
+ url = settings.BASE_URL + url
+ url = 'http://' + urllib2.quote(url.encode('utf-8'))
- url = image.thumbnail(alias)
-
- if "://" in url:
- # We are getting the full path and must only encode the part after the protocol
- # (we assume that the hostname is ASCII)
- protocol, path = url.split("://")
- url = protocol + encoded(path)
- else:
- url = settings.BASE_URL + encoded(url)
-
headers = { 'User-Agent' : 'Mozilla/5.0' }
req = urllib2.Request(url, None, headers)
img = NamedTemporaryFile(delete = True)
img.write(urllib2.urlopen(req).read())
img.flush()
img.seek(0)
return File(img)
| Revert "Fix plate-solving on local development mode" | ## Code Before:
import urllib2
# Django
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
def getFromStorage(image, alias):
def encoded(path):
return urllib2.quote(path.encode('utf-8'))
url = image.thumbnail(alias)
if "://" in url:
# We are getting the full path and must only encode the part after the protocol
# (we assume that the hostname is ASCII)
protocol, path = url.split("://")
url = protocol + encoded(path)
else:
url = settings.BASE_URL + encoded(url)
headers = { 'User-Agent' : 'Mozilla/5.0' }
req = urllib2.Request(url, None, headers)
img = NamedTemporaryFile(delete = True)
img.write(urllib2.urlopen(req).read())
img.flush()
img.seek(0)
return File(img)
## Instruction:
Revert "Fix plate-solving on local development mode"
## Code After:
import urllib2
# Django
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
def getFromStorage(image, alias):
url = image.thumbnail(alias)
if "://" in url:
url = url.split('://')[1]
else:
url = settings.BASE_URL + url
url = 'http://' + urllib2.quote(url.encode('utf-8'))
headers = { 'User-Agent' : 'Mozilla/5.0' }
req = urllib2.Request(url, None, headers)
img = NamedTemporaryFile(delete = True)
img.write(urllib2.urlopen(req).read())
img.flush()
img.seek(0)
return File(img)
|
aaa6b6683e4ce46ec672899802c035c592d50b0e | app/initial_tables.py | app/initial_tables.py | from tables import engine
def create_tables():
"""
Create tables the lazy way... with raw SQL.
"""
conn = engine.raw_connection()
cur = conn.cursor()
cur.execute(
"""
CREATE TABLE file_upload(
document_name TEXT
, time_uploaded TEXT DEFAULT now()
, filename TEXT NOT NULL
, word_counts JSON NOT NULL
, PRIMARY KEY(document_name, time_uploaded)
);
"""
)
conn.commit()
if __name__ == '__main__':
create_tables()
| from tables import engine
def create_tables():
"""
Create tables the lazy way... with raw SQL.
"""
conn = engine.raw_connection()
cur = conn.cursor()
cur.execute(
"""
CREATE TABLE file_upload_meta(
document_name TEXT NOT NULL
, document_slug TEXT NOT NULL
, time_uploaded TEXT NOT NULL DEFAULT now()
, filename TEXT NOT NULL
, word_counts JSON NOT NULL
, PRIMARY KEY(document_slug, time_uploaded)
);
"""
)
conn.commit()
if __name__ == '__main__':
create_tables()
| Add slug field to file upload meta table, rename table | Add slug field to file upload meta table, rename table
| Python | mit | sprin/heroku-tut | from tables import engine
def create_tables():
"""
Create tables the lazy way... with raw SQL.
"""
conn = engine.raw_connection()
cur = conn.cursor()
cur.execute(
"""
- CREATE TABLE file_upload(
+ CREATE TABLE file_upload_meta(
- document_name TEXT
+ document_name TEXT NOT NULL
+ , document_slug TEXT NOT NULL
- , time_uploaded TEXT DEFAULT now()
+ , time_uploaded TEXT NOT NULL DEFAULT now()
, filename TEXT NOT NULL
, word_counts JSON NOT NULL
- , PRIMARY KEY(document_name, time_uploaded)
+ , PRIMARY KEY(document_slug, time_uploaded)
);
"""
)
conn.commit()
if __name__ == '__main__':
create_tables()
| Add slug field to file upload meta table, rename table | ## Code Before:
from tables import engine
def create_tables():
"""
Create tables the lazy way... with raw SQL.
"""
conn = engine.raw_connection()
cur = conn.cursor()
cur.execute(
"""
CREATE TABLE file_upload(
document_name TEXT
, time_uploaded TEXT DEFAULT now()
, filename TEXT NOT NULL
, word_counts JSON NOT NULL
, PRIMARY KEY(document_name, time_uploaded)
);
"""
)
conn.commit()
if __name__ == '__main__':
create_tables()
## Instruction:
Add slug field to file upload meta table, rename table
## Code After:
from tables import engine
def create_tables():
"""
Create tables the lazy way... with raw SQL.
"""
conn = engine.raw_connection()
cur = conn.cursor()
cur.execute(
"""
CREATE TABLE file_upload_meta(
document_name TEXT NOT NULL
, document_slug TEXT NOT NULL
, time_uploaded TEXT NOT NULL DEFAULT now()
, filename TEXT NOT NULL
, word_counts JSON NOT NULL
, PRIMARY KEY(document_slug, time_uploaded)
);
"""
)
conn.commit()
if __name__ == '__main__':
create_tables()
|
5559e9f429e9019959f1c79fbc2a7f82c12f91c4 | src/hpp/utils.py | src/hpp/utils.py |
import os
import subprocess
import time
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
self.process.kill()
|
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
self.process.communicate()
| Fix how hppcorbaserver is killed in ServerManager | Fix how hppcorbaserver is killed in ServerManager
| Python | bsd-2-clause | humanoid-path-planner/hpp-corbaserver,humanoid-path-planner/hpp-corbaserver |
import os
import subprocess
import time
+ import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
+ tool = hpp.corbaserver.tools.Tools()
+ tool.shutdown()
- self.process.kill()
+ self.process.communicate()
| Fix how hppcorbaserver is killed in ServerManager | ## Code Before:
import os
import subprocess
import time
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
self.process.kill()
## Instruction:
Fix how hppcorbaserver is killed in ServerManager
## Code After:
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
self.process.communicate()
|
c0a74ce4110d295b3662066e4d08c4ab65fb0905 | bills/views.py | bills/views.py |
from django.shortcuts import render, redirect
from bills.utils import get_all_subjects, get_all_locations
from opencivicdata.models import Bill
def bill_list(request):
subjects = get_all_subjects()
if request.POST.getlist('bill_subjects'):
filter_subjects = request.POST.getlist('bill_subjects')
all_bills = Bill.objects.filter(subject__in=filter_subjects)
else:
all_bills = Bill.objects.all()
details = []
for bill in all_bills:
bill_detail = {}
bill_detail['title'] = bill.title
bill_detail['from_organization'] = bill.from_organization.name
bill_detail['actions'] = []
bill_detail['sponsorships'] = []
for action in bill.actions.all():
bill_detail['actions'].append({'description': action.description, 'date': action.date})
for sponsorship in bill.sponsorships.all():
bill_detail['sponsorships'].append({
'sponsor': sponsorship.name,
'id': sponsorship.id,
'primary': sponsorship.primary
})
details.append(bill_detail)
if request.method == 'POST':
with transaction.atomic():
filter_subjects = request.POST.getlist('bill_subjects')
return redirect('.')
return render(
request,
'bills/all.html',
{'bills': details, 'subjects': subjects}
)
|
from django.db import transaction
from django.shortcuts import render, redirect
from preferences.views import _mark_selected
from bills.utils import get_all_subjects, get_all_locations
from opencivicdata.models import Bill
def bill_list(request):
subjects = get_all_subjects()
if request.POST.getlist('bill_subjects'):
filter_subjects = request.POST.getlist('bill_subjects')
all_bills = Bill.objects.filter(subject__contains=filter_subjects)
else:
filter_subjects = []
all_bills = Bill.objects.all()
subjects = _mark_selected(subjects, filter_subjects)
details = []
for bill in all_bills:
bill_detail = {}
bill_detail['title'] = bill.title
bill_detail['from_organization'] = bill.from_organization.name
bill_detail['actions'] = []
bill_detail['sponsorships'] = []
for action in bill.actions.all():
bill_detail['actions'].append({'description': action.description, 'date': action.date})
for sponsorship in bill.sponsorships.all():
bill_detail['sponsorships'].append({
'sponsor': sponsorship.name,
'id': sponsorship.id,
'primary': sponsorship.primary
})
details.append(bill_detail)
return render(
request,
'bills/all.html',
{'bills': details, 'subjects': subjects}
)
| Mark pre-selected topics on form | Mark pre-selected topics on form
| Python | mit | jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot |
+ from django.db import transaction
from django.shortcuts import render, redirect
+ from preferences.views import _mark_selected
from bills.utils import get_all_subjects, get_all_locations
from opencivicdata.models import Bill
def bill_list(request):
subjects = get_all_subjects()
if request.POST.getlist('bill_subjects'):
filter_subjects = request.POST.getlist('bill_subjects')
- all_bills = Bill.objects.filter(subject__in=filter_subjects)
+ all_bills = Bill.objects.filter(subject__contains=filter_subjects)
else:
+ filter_subjects = []
all_bills = Bill.objects.all()
+ subjects = _mark_selected(subjects, filter_subjects)
details = []
for bill in all_bills:
bill_detail = {}
bill_detail['title'] = bill.title
bill_detail['from_organization'] = bill.from_organization.name
bill_detail['actions'] = []
bill_detail['sponsorships'] = []
for action in bill.actions.all():
bill_detail['actions'].append({'description': action.description, 'date': action.date})
for sponsorship in bill.sponsorships.all():
bill_detail['sponsorships'].append({
'sponsor': sponsorship.name,
'id': sponsorship.id,
'primary': sponsorship.primary
})
details.append(bill_detail)
- if request.method == 'POST':
- with transaction.atomic():
- filter_subjects = request.POST.getlist('bill_subjects')
- return redirect('.')
-
return render(
request,
'bills/all.html',
{'bills': details, 'subjects': subjects}
)
| Mark pre-selected topics on form | ## Code Before:
from django.shortcuts import render, redirect
from bills.utils import get_all_subjects, get_all_locations
from opencivicdata.models import Bill
def bill_list(request):
subjects = get_all_subjects()
if request.POST.getlist('bill_subjects'):
filter_subjects = request.POST.getlist('bill_subjects')
all_bills = Bill.objects.filter(subject__in=filter_subjects)
else:
all_bills = Bill.objects.all()
details = []
for bill in all_bills:
bill_detail = {}
bill_detail['title'] = bill.title
bill_detail['from_organization'] = bill.from_organization.name
bill_detail['actions'] = []
bill_detail['sponsorships'] = []
for action in bill.actions.all():
bill_detail['actions'].append({'description': action.description, 'date': action.date})
for sponsorship in bill.sponsorships.all():
bill_detail['sponsorships'].append({
'sponsor': sponsorship.name,
'id': sponsorship.id,
'primary': sponsorship.primary
})
details.append(bill_detail)
if request.method == 'POST':
with transaction.atomic():
filter_subjects = request.POST.getlist('bill_subjects')
return redirect('.')
return render(
request,
'bills/all.html',
{'bills': details, 'subjects': subjects}
)
## Instruction:
Mark pre-selected topics on form
## Code After:
from django.db import transaction
from django.shortcuts import render, redirect
from preferences.views import _mark_selected
from bills.utils import get_all_subjects, get_all_locations
from opencivicdata.models import Bill
def bill_list(request):
subjects = get_all_subjects()
if request.POST.getlist('bill_subjects'):
filter_subjects = request.POST.getlist('bill_subjects')
all_bills = Bill.objects.filter(subject__contains=filter_subjects)
else:
filter_subjects = []
all_bills = Bill.objects.all()
subjects = _mark_selected(subjects, filter_subjects)
details = []
for bill in all_bills:
bill_detail = {}
bill_detail['title'] = bill.title
bill_detail['from_organization'] = bill.from_organization.name
bill_detail['actions'] = []
bill_detail['sponsorships'] = []
for action in bill.actions.all():
bill_detail['actions'].append({'description': action.description, 'date': action.date})
for sponsorship in bill.sponsorships.all():
bill_detail['sponsorships'].append({
'sponsor': sponsorship.name,
'id': sponsorship.id,
'primary': sponsorship.primary
})
details.append(bill_detail)
return render(
request,
'bills/all.html',
{'bills': details, 'subjects': subjects}
)
|
c5496fddccffd2f16c0b4a140506b9d577d50b61 | eventlog/models.py | eventlog/models.py | from django.conf import settings
from django.db import models
from django.utils import timezone
import jsonfield
from .signals import event_logged
class Log(models.Model):
user = models.ForeignKey(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True,
on_delete=models.SET_NULL
)
timestamp = models.DateTimeField(default=timezone.now, db_index=True)
action = models.CharField(max_length=50, db_index=True)
extra = jsonfield.JSONField()
class Meta:
ordering = ["-timestamp"]
def log(user, action, extra=None):
if (user is not None and not user.is_authenticated()):
user = None
if extra is None:
extra = {}
event = Log.objects.create(user=user, action=action, extra=extra)
event_logged.send(sender=Log, event=event)
return event
| from django.conf import settings
from django.db import models
from django.utils import timezone
import jsonfield
from .signals import event_logged
class Log(models.Model):
user = models.ForeignKey(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True,
on_delete=models.SET_NULL
)
timestamp = models.DateTimeField(default=timezone.now, db_index=True)
action = models.CharField(max_length=50, db_index=True)
extra = jsonfield.JSONField()
@property
def template_fragment_name(self):
return "eventlog/{}.html".format(self.action.lower())
class Meta:
ordering = ["-timestamp"]
def log(user, action, extra=None):
if (user is not None and not user.is_authenticated()):
user = None
if extra is None:
extra = {}
event = Log.objects.create(user=user, action=action, extra=extra)
event_logged.send(sender=Log, event=event)
return event
| Add property to provide template fragment name | Add property to provide template fragment name
| Python | mit | jawed123/pinax-eventlog,pinax/pinax-eventlog,KleeTaurus/pinax-eventlog,rosscdh/pinax-eventlog | from django.conf import settings
from django.db import models
from django.utils import timezone
import jsonfield
from .signals import event_logged
class Log(models.Model):
user = models.ForeignKey(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True,
on_delete=models.SET_NULL
)
timestamp = models.DateTimeField(default=timezone.now, db_index=True)
action = models.CharField(max_length=50, db_index=True)
extra = jsonfield.JSONField()
+ @property
+ def template_fragment_name(self):
+ return "eventlog/{}.html".format(self.action.lower())
+
class Meta:
ordering = ["-timestamp"]
def log(user, action, extra=None):
if (user is not None and not user.is_authenticated()):
user = None
if extra is None:
extra = {}
event = Log.objects.create(user=user, action=action, extra=extra)
event_logged.send(sender=Log, event=event)
return event
| Add property to provide template fragment name | ## Code Before:
from django.conf import settings
from django.db import models
from django.utils import timezone
import jsonfield
from .signals import event_logged
class Log(models.Model):
user = models.ForeignKey(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True,
on_delete=models.SET_NULL
)
timestamp = models.DateTimeField(default=timezone.now, db_index=True)
action = models.CharField(max_length=50, db_index=True)
extra = jsonfield.JSONField()
class Meta:
ordering = ["-timestamp"]
def log(user, action, extra=None):
if (user is not None and not user.is_authenticated()):
user = None
if extra is None:
extra = {}
event = Log.objects.create(user=user, action=action, extra=extra)
event_logged.send(sender=Log, event=event)
return event
## Instruction:
Add property to provide template fragment name
## Code After:
from django.conf import settings
from django.db import models
from django.utils import timezone
import jsonfield
from .signals import event_logged
class Log(models.Model):
user = models.ForeignKey(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True,
on_delete=models.SET_NULL
)
timestamp = models.DateTimeField(default=timezone.now, db_index=True)
action = models.CharField(max_length=50, db_index=True)
extra = jsonfield.JSONField()
@property
def template_fragment_name(self):
return "eventlog/{}.html".format(self.action.lower())
class Meta:
ordering = ["-timestamp"]
def log(user, action, extra=None):
if (user is not None and not user.is_authenticated()):
user = None
if extra is None:
extra = {}
event = Log.objects.create(user=user, action=action, extra=extra)
event_logged.send(sender=Log, event=event)
return event
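
The new property simply lowercases the action to build a per-action template path, letting a consumer render each event type with its own fragment. For example, with a hypothetical action name:
entry = Log(action="SENT_INVITE")
assert entry.template_fragment_name == "eventlog/sent_invite.html"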
|
5a45840e81d612e1f743ad063fd32da4d19354d4 | cacheops/signals.py | cacheops/signals.py | import django.dispatch
cache_read = django.dispatch.Signal(providing_args=["func", "hit"])
cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"])
| import django.dispatch
cache_read = django.dispatch.Signal() # args: func, hit
cache_invalidated = django.dispatch.Signal() # args: obj_dict
 | Stop using Signal(providing_args) deprecated in Django 4.0 | Stop using Signal(providing_args) deprecated in Django 4.0
Closes #393
| Python | bsd-3-clause | Suor/django-cacheops | import django.dispatch
- cache_read = django.dispatch.Signal(providing_args=["func", "hit"])
+ cache_read = django.dispatch.Signal() # args: func, hit
- cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"])
+ cache_invalidated = django.dispatch.Signal() # args: obj_dict
 | Stop using Signal(providing_args) deprecated in Django 4.0 | ## Code Before:
import django.dispatch
cache_read = django.dispatch.Signal(providing_args=["func", "hit"])
cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"])
## Instruction:
Stop using Signal(providing_args) deprecated in Django 4.0
## Code After:
import django.dispatch
cache_read = django.dispatch.Signal() # args: func, hit
cache_invalidated = django.dispatch.Signal() # args: obj_dict
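
For context: Django 4.0 removed Signal(providing_args) because the argument was purely documentary. Receivers are unaffected, since arguments are still passed as keywords at send() time; an illustrative receiver against the module above:
def on_cache_read(sender, func=None, hit=None, **kwargs):
    # The same receiver works before and after the change.
    print(func, hit)
cache_read.connect(on_cache_read)
cache_read.send(sender=None, func=len, hit=True)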
|
b3b67fe0e68423fc2f85bccf1f20acdb779a38ba | pylxd/deprecated/tests/utils.py | pylxd/deprecated/tests/utils.py |
from pylxd import api
from pylxd import exceptions as lxd_exceptions
def upload_image(image):
alias = "{}/{}/{}/{}".format(
image["os"], image["release"], image["arch"], image["variant"]
)
lxd = api.API()
imgs = api.API(host="images.linuxcontainers.org")
d = imgs.alias_show(alias)
meta = d[1]["metadata"]
tgt = meta["target"]
try:
lxd.alias_update(meta)
except lxd_exceptions.APIError as ex:
if ex.status_code == 404:
lxd.alias_create(meta)
return tgt
def delete_image(image):
lxd = api.API()
lxd.image_delete(image)
|
from pylxd import api
def delete_image(image):
lxd = api.API()
lxd.image_delete(image)
| Remove unused testing utility function | Remove unused testing utility function
Signed-off-by: Dougal Matthews <8f24f2c0fd825cfb6716a36822888c4a01678c88@dougalmatthews.com>
| Python | apache-2.0 | lxc/pylxd,lxc/pylxd |
from pylxd import api
- from pylxd import exceptions as lxd_exceptions
-
-
- def upload_image(image):
- alias = "{}/{}/{}/{}".format(
- image["os"], image["release"], image["arch"], image["variant"]
- )
- lxd = api.API()
- imgs = api.API(host="images.linuxcontainers.org")
- d = imgs.alias_show(alias)
-
- meta = d[1]["metadata"]
- tgt = meta["target"]
-
- try:
- lxd.alias_update(meta)
- except lxd_exceptions.APIError as ex:
- if ex.status_code == 404:
- lxd.alias_create(meta)
-
- return tgt
def delete_image(image):
lxd = api.API()
lxd.image_delete(image)
| Remove unused testing utility function | ## Code Before:
from pylxd import api
from pylxd import exceptions as lxd_exceptions
def upload_image(image):
alias = "{}/{}/{}/{}".format(
image["os"], image["release"], image["arch"], image["variant"]
)
lxd = api.API()
imgs = api.API(host="images.linuxcontainers.org")
d = imgs.alias_show(alias)
meta = d[1]["metadata"]
tgt = meta["target"]
try:
lxd.alias_update(meta)
except lxd_exceptions.APIError as ex:
if ex.status_code == 404:
lxd.alias_create(meta)
return tgt
def delete_image(image):
lxd = api.API()
lxd.image_delete(image)
## Instruction:
Remove unused testing utility function
## Code After:
from pylxd import api
def delete_image(image):
lxd = api.API()
lxd.image_delete(image)
|
57bb37d7579620005a49613ff90f0a2eec55a77e | backend/offers_web.py | backend/offers_web.py | import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count()
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
| import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count().run(self._db)
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
| Fix max elements in header | Fix max elements in header
| Python | agpl-3.0 | jilljenn/voyageavecmoi,jilljenn/voyageavecmoi,jilljenn/voyageavecmoi | import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
- count = r.db('voyageavecmoi').table('offers').count()
+ count = r.db('voyageavecmoi').table('offers').count().run(self._db)
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
| Fix max elements in header | ## Code Before:
import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count()
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
## Instruction:
Fix max elements in header
## Code After:
import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count().run(self._db)
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
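
The underlying bug: in the RethinkDB Python driver, count() only builds a lazy ReQL query; nothing executes until .run(connection) is called, so the header previously carried a query object rather than a number. Roughly, assuming an open connection conn:
count_query = r.db('voyageavecmoi').table('offers').count()
# count_query is a lazy query object at this point, not an int
count = count_query.run(conn)  # executing it yields the actual count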
|
fac7e7d8759aab7e2bea666e55d71e35da45c334 | groundstation/gref.py | groundstation/gref.py | import os
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
raise
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
| import os
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
def tips(self):
return os.listdir(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
raise
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
 | Implement Gref.tips() to fetch its tips. | Implement Gref.tips() to fetch its tips.
| Python | mit | richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation | import os
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
+
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
+
+ def tips(self):
+ return os.listdir(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
raise
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
 | Implement Gref.tips() to fetch its tips. | ## Code Before:
import os
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
raise
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
## Instruction:
Implement Gref.tips() to fetch its tips.
## Code After:
import os
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
def tips(self):
return os.listdir(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
raise
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
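
Each tip is stored as a file named after the tip inside the gref's directory, so tips() amounts to a directory listing. Note it reads self._node_path directly rather than node_path(), so the directory must already exist. Illustrative usage, with a hypothetical store object:
gref = Gref(store, "some/channel", "deadbeef")
gref.write_tip("tip-a", "signature-a")  # write_tip creates the directory
print(gref.tips())                      # ['tip-a']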
|
a419f6dcb7968d6af1e3ef8eae29b723d96b5fd2 | stayput/jinja2/__init__.py | stayput/jinja2/__init__.py | from jinja2 import Environment, FileSystemLoader
from stayput import Templater
class Jinja2Templater(Templater):
def __init__(self, site, *args, **kwargs):
self.site = site
self.env = Environment(loader=FileSystemLoader(site.templates_path))
def template(self, item):
return self.env.from_string(item.contents).render(site=self.site, item=item)
| from jinja2 import Environment, FileSystemLoader
from stayput import Templater
class Jinja2Templater(Templater):
def __init__(self, site, *args, **kwargs):
self.site = site
self.env = Environment(loader=FileSystemLoader(site.templates_path))
def template(self, item, site, *args, **kwargs):
return self.env.from_string(item.contents).render(site=self.site, item=item)
| Update for stayput master and ensure forward compatibility | Update for stayput master and ensure forward compatibility
| Python | mit | veeti/stayput_jinja2 | from jinja2 import Environment, FileSystemLoader
from stayput import Templater
class Jinja2Templater(Templater):
def __init__(self, site, *args, **kwargs):
self.site = site
self.env = Environment(loader=FileSystemLoader(site.templates_path))
- def template(self, item):
+ def template(self, item, site, *args, **kwargs):
return self.env.from_string(item.contents).render(site=self.site, item=item)
| Update for stayput master and ensure forward compatibility | ## Code Before:
from jinja2 import Environment, FileSystemLoader
from stayput import Templater
class Jinja2Templater(Templater):
def __init__(self, site, *args, **kwargs):
self.site = site
self.env = Environment(loader=FileSystemLoader(site.templates_path))
def template(self, item):
return self.env.from_string(item.contents).render(site=self.site, item=item)
## Instruction:
Update for stayput master and ensure forward compatibility
## Code After:
from jinja2 import Environment, FileSystemLoader
from stayput import Templater
class Jinja2Templater(Templater):
def __init__(self, site, *args, **kwargs):
self.site = site
self.env = Environment(loader=FileSystemLoader(site.templates_path))
def template(self, item, site, *args, **kwargs):
return self.env.from_string(item.contents).render(site=self.site, item=item)
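
Accepting site plus *args/**kwargs is the usual forward-compatibility trick: newer stayput versions can pass extra parameters without breaking this templater. Illustrative calls, with templater, item and site as above:
templater.template(item, site)               # new two-argument call
templater.template(item, site, extra="ok")   # future keywords are absorbed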
|
df690e4c2f19e30c619db90b8b2dfd77dab54159 | sympy/printing/__init__.py | sympy/printing/__init__.py | """Printing subsystem"""
from pretty import *
from latex import latex, print_latex
from mathml import mathml, print_mathml
from python import python, print_python
from ccode import ccode, print_ccode
from fcode import fcode, print_fcode
from jscode import jscode, print_jscode
from gtk import *
from preview import preview
from repr import srepr
from tree import print_tree
from str import StrPrinter, sstr, sstrrepr
from tableform import TableForm
| """Printing subsystem"""
from pretty import pager_print, pretty, pretty_print, pprint, \
pprint_use_unicode, pprint_try_use_unicode
from latex import latex, print_latex
from mathml import mathml, print_mathml
from python import python, print_python
from ccode import ccode, print_ccode
from fcode import fcode, print_fcode
from jscode import jscode, print_jscode
from gtk import print_gtk
from preview import preview
from repr import srepr
from tree import print_tree
from str import StrPrinter, sstr, sstrrepr
from tableform import TableForm
| Remove glob imports from sympy.printing. | Remove glob imports from sympy.printing.
| Python | bsd-3-clause | Designist/sympy,emon10005/sympy,farhaanbukhsh/sympy,mafiya69/sympy,kaushik94/sympy,atreyv/sympy,kmacinnis/sympy,Mitchkoens/sympy,aktech/sympy,sunny94/temp,grevutiu-gabriel/sympy,wanglongqi/sympy,AunShiLord/sympy,jamesblunt/sympy,emon10005/sympy,shikil/sympy,rahuldan/sympy,diofant/diofant,yashsharan/sympy,kmacinnis/sympy,kaushik94/sympy,atsao72/sympy,AkademieOlympia/sympy,hrashk/sympy,Arafatk/sympy,souravsingh/sympy,drufat/sympy,lindsayad/sympy,skidzo/sympy,grevutiu-gabriel/sympy,kevalds51/sympy,MridulS/sympy,Curious72/sympy,Shaswat27/sympy,abloomston/sympy,lidavidm/sympy,Sumith1896/sympy,postvakje/sympy,madan96/sympy,shipci/sympy,Davidjohnwilson/sympy,maniteja123/sympy,jaimahajan1997/sympy,cswiercz/sympy,wyom/sympy,mafiya69/sympy,MechCoder/sympy,amitjamadagni/sympy,abloomston/sympy,iamutkarshtiwari/sympy,rahuldan/sympy,Arafatk/sympy,pbrady/sympy,souravsingh/sympy,cccfran/sympy,Shaswat27/sympy,beni55/sympy,cccfran/sympy,madan96/sympy,Titan-C/sympy,grevutiu-gabriel/sympy,Vishluck/sympy,aktech/sympy,dqnykamp/sympy,chaffra/sympy,ga7g08/sympy,sahmed95/sympy,kevalds51/sympy,lidavidm/sympy,skidzo/sympy,amitjamadagni/sympy,bukzor/sympy,ahhda/sympy,atsao72/sympy,yashsharan/sympy,ChristinaZografou/sympy,Designist/sympy,Designist/sympy,jerli/sympy,kumarkrishna/sympy,rahuldan/sympy,MridulS/sympy,hargup/sympy,meghana1995/sympy,moble/sympy,madan96/sympy,yashsharan/sympy,jerli/sympy,shipci/sympy,flacjacket/sympy,kmacinnis/sympy,AkademieOlympia/sympy,atsao72/sympy,Arafatk/sympy,hargup/sympy,abhiii5459/sympy,hargup/sympy,saurabhjn76/sympy,garvitr/sympy,asm666/sympy,abhiii5459/sympy,farhaanbukhsh/sympy,yukoba/sympy,vipulroxx/sympy,wyom/sympy,kumarkrishna/sympy,toolforger/sympy,oliverlee/sympy,AkademieOlympia/sympy,postvakje/sympy,bukzor/sympy,sunny94/temp,hrashk/sympy,ga7g08/sympy,ahhda/sympy,ChristinaZografou/sympy,Titan-C/sympy,ahhda/sympy,Gadal/sympy,atreyv/sympy,abhiii5459/sympy,MridulS/sympy,iamutkarshtiwari/sympy,Vishluck/sympy,MechCoder/sympy,saurabhjn76/sympy,abloomston/sympy,toolforger/sympy,mcdaniel67/sympy,Davidjohnwilson/sympy,atreyv/sympy,jerli/sympy,Titan-C/sympy,cswiercz/sympy,souravsingh/sympy,wanglongqi/sympy,mafiya69/sympy,sampadsaha5/sympy,drufat/sympy,skidzo/sympy,sahmed95/sympy,jaimahajan1997/sympy,jbbskinny/sympy,chaffra/sympy,kevalds51/sympy,Vishluck/sympy,sahilshekhawat/sympy,sahilshekhawat/sympy,Mitchkoens/sympy,Curious72/sympy,jamesblunt/sympy,sunny94/temp,AunShiLord/sympy,beni55/sympy,wyom/sympy,bukzor/sympy,debugger22/sympy,pandeyadarsh/sympy,sahilshekhawat/sympy,postvakje/sympy,wanglongqi/sympy,Sumith1896/sympy,jbbskinny/sympy,liangjiaxing/sympy,moble/sympy,AunShiLord/sympy,shikil/sympy,jaimahajan1997/sympy,pbrady/sympy,shikil/sympy,VaibhavAgarwalVA/sympy,iamutkarshtiwari/sympy,drufat/sympy,vipulroxx/sympy,farhaanbukhsh/sympy,sampadsaha5/sympy,saurabhjn76/sympy,chaffra/sympy,skirpichev/omg,kaichogami/sympy,Sumith1896/sympy,vipulroxx/sympy,mcdaniel67/sympy,liangjiaxing/sympy,asm666/sympy,jbbskinny/sympy,pandeyadarsh/sympy,VaibhavAgarwalVA/sympy,dqnykamp/sympy,lindsayad/sympy,Mitchkoens/sympy,aktech/sympy,pandeyadarsh/sympy,yukoba/sympy,maniteja123/sympy,VaibhavAgarwalVA/sympy,Shaswat27/sympy,hrashk/sympy,jamesblunt/sympy,maniteja123/sympy,dqnykamp/sympy,oliverlee/sympy,lidavidm/sympy,MechCoder/sympy,beni55/sympy,cswiercz/sympy,yukoba/sympy,debugger22/sympy,debugger22/sympy,sampadsaha5/sympy,pbrady/sympy,Gadal/sympy,kaichogami/sympy,kaichogami/sympy,ga7g08/sympy,garvitr/sympy,meghana1995/sympy,cccfran/sympy,garvitr/sympy,moble/sympy,kaushik94/sympy,Davidjo
hnwilson/sympy,toolforger/sympy,asm666/sympy,lindsayad/sympy,oliverlee/sympy,shipci/sympy,Gadal/sympy,ChristinaZografou/sympy,mcdaniel67/sympy,sahmed95/sympy,kumarkrishna/sympy,Curious72/sympy,emon10005/sympy,liangjiaxing/sympy,meghana1995/sympy | """Printing subsystem"""
- from pretty import *
+ from pretty import pager_print, pretty, pretty_print, pprint, \
+ pprint_use_unicode, pprint_try_use_unicode
from latex import latex, print_latex
from mathml import mathml, print_mathml
from python import python, print_python
from ccode import ccode, print_ccode
from fcode import fcode, print_fcode
from jscode import jscode, print_jscode
- from gtk import *
+ from gtk import print_gtk
from preview import preview
from repr import srepr
from tree import print_tree
from str import StrPrinter, sstr, sstrrepr
from tableform import TableForm
| Remove glob imports from sympy.printing. | ## Code Before:
"""Printing subsystem"""
from pretty import *
from latex import latex, print_latex
from mathml import mathml, print_mathml
from python import python, print_python
from ccode import ccode, print_ccode
from fcode import fcode, print_fcode
from jscode import jscode, print_jscode
from gtk import *
from preview import preview
from repr import srepr
from tree import print_tree
from str import StrPrinter, sstr, sstrrepr
from tableform import TableForm
## Instruction:
Remove glob imports from sympy.printing.
## Code After:
"""Printing subsystem"""
from pretty import pager_print, pretty, pretty_print, pprint, \
pprint_use_unicode, pprint_try_use_unicode
from latex import latex, print_latex
from mathml import mathml, print_mathml
from python import python, print_python
from ccode import ccode, print_ccode
from fcode import fcode, print_fcode
from jscode import jscode, print_jscode
from gtk import print_gtk
from preview import preview
from repr import srepr
from tree import print_tree
from str import StrPrinter, sstr, sstrrepr
from tableform import TableForm
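
The point of the change: a glob import re-exports whatever the submodule happens to define, while an explicit list pins the package's public names and keeps linters and IDEs useful. Schematically:
# Glob import: every public name leaks through, intended or not.
from pretty import *
# Explicit import: the re-exported API is visible at a glance.
from pretty import pager_print, pretty, pretty_print, pprint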
|
42389e796acba99fe12e30e6ca08672b889bd5f2 | infrastructure/serializers.py | infrastructure/serializers.py | from rest_framework import serializers
from . import models
from scorecard.serializers import GeographySerializer
class FinancialYearSerializer(serializers.ModelSerializer):
class Meta:
model = models.FinancialYear
fields = ["budget_year"]
class BudgetPhaseSerializer(serializers.ModelSerializer):
class Meta:
model = models.BudgetPhase
fields = ["code", "name"]
class ExpenditureSerializer(serializers.ModelSerializer):
financial_year = FinancialYearSerializer(read_only=True)
budget_phase = BudgetPhaseSerializer(read_only=True)
class Meta:
model = models.Expenditure
fields = ["amount", "budget_phase", "financial_year"]
class ProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
geography = GeographySerializer(read_only=True)
class Meta:
model = models.Project
fields = "__all__"
class GeoProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
class Meta:
model = models.Project
fields = ("project_description", "latitude", "longitude", "expenditure")
| from rest_framework import serializers
from . import models
from scorecard.serializers import GeographySerializer
class FinancialYearSerializer(serializers.ModelSerializer):
class Meta:
model = models.FinancialYear
fields = ["budget_year"]
read_only_fields = ["budget_year"]
class BudgetPhaseSerializer(serializers.ModelSerializer):
class Meta:
model = models.BudgetPhase
fields = ["code", "name"]
read_only_fields = ["code", "name"]
class ExpenditureSerializer(serializers.ModelSerializer):
financial_year = FinancialYearSerializer(read_only=True)
budget_phase = BudgetPhaseSerializer(read_only=True)
class Meta:
model = models.Expenditure
fields = ["amount", "budget_phase", "financial_year"]
read_only_fields = ["amount", "budget_phase", "financial_year"]
class ProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
geography = GeographySerializer(read_only=True)
class Meta:
model = models.Project
fields = "__all__"
read_only_fields = [
"function",
"project_description",
"project_number",
"project_type",
"mtsf_service_outcome",
"iudf",
"own_strategic_objectives",
"asset_class",
"asset_subclass",
"ward_location",
"longitude",
"latitude",
]
class GeoProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
class Meta:
model = models.Project
fields = ("project_description", "latitude", "longitude", "expenditure")
read_only_fields = ["project_description", "latitude", "longitude"]
 | Make fields readonly, skips rest_framework validation, speeds up queries | Make fields readonly, skips rest_framework validation, speeds up queries
| Python | mit | Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data | from rest_framework import serializers
from . import models
from scorecard.serializers import GeographySerializer
class FinancialYearSerializer(serializers.ModelSerializer):
class Meta:
model = models.FinancialYear
fields = ["budget_year"]
+ read_only_fields = ["budget_year"]
class BudgetPhaseSerializer(serializers.ModelSerializer):
class Meta:
model = models.BudgetPhase
fields = ["code", "name"]
+ read_only_fields = ["code", "name"]
class ExpenditureSerializer(serializers.ModelSerializer):
financial_year = FinancialYearSerializer(read_only=True)
budget_phase = BudgetPhaseSerializer(read_only=True)
class Meta:
model = models.Expenditure
fields = ["amount", "budget_phase", "financial_year"]
+ read_only_fields = ["amount", "budget_phase", "financial_year"]
+
class ProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
geography = GeographySerializer(read_only=True)
class Meta:
model = models.Project
fields = "__all__"
+ read_only_fields = [
+ "function",
+ "project_description",
+ "project_number",
+ "project_type",
+ "mtsf_service_outcome",
+ "iudf",
+ "own_strategic_objectives",
+ "asset_class",
+ "asset_subclass",
+ "ward_location",
+ "longitude",
+ "latitude",
+ ]
class GeoProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
class Meta:
model = models.Project
fields = ("project_description", "latitude", "longitude", "expenditure")
+ read_only_fields = ["project_description", "latitude", "longitude"]
 | Make fields readonly, skips rest_framework validation, speeds up queries | ## Code Before:
from rest_framework import serializers
from . import models
from scorecard.serializers import GeographySerializer
class FinancialYearSerializer(serializers.ModelSerializer):
class Meta:
model = models.FinancialYear
fields = ["budget_year"]
class BudgetPhaseSerializer(serializers.ModelSerializer):
class Meta:
model = models.BudgetPhase
fields = ["code", "name"]
class ExpenditureSerializer(serializers.ModelSerializer):
financial_year = FinancialYearSerializer(read_only=True)
budget_phase = BudgetPhaseSerializer(read_only=True)
class Meta:
model = models.Expenditure
fields = ["amount", "budget_phase", "financial_year"]
class ProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
geography = GeographySerializer(read_only=True)
class Meta:
model = models.Project
fields = "__all__"
class GeoProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
class Meta:
model = models.Project
fields = ("project_description", "latitude", "longitude", "expenditure")
## Instruction:
Make fields readonly, skips rest_framework validation, speeds up queries
## Code After:
from rest_framework import serializers
from . import models
from scorecard.serializers import GeographySerializer
class FinancialYearSerializer(serializers.ModelSerializer):
class Meta:
model = models.FinancialYear
fields = ["budget_year"]
read_only_fields = ["budget_year"]
class BudgetPhaseSerializer(serializers.ModelSerializer):
class Meta:
model = models.BudgetPhase
fields = ["code", "name"]
read_only_fields = ["code", "name"]
class ExpenditureSerializer(serializers.ModelSerializer):
financial_year = FinancialYearSerializer(read_only=True)
budget_phase = BudgetPhaseSerializer(read_only=True)
class Meta:
model = models.Expenditure
fields = ["amount", "budget_phase", "financial_year"]
read_only_fields = ["amount", "budget_phase", "financial_year"]
class ProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
geography = GeographySerializer(read_only=True)
class Meta:
model = models.Project
fields = "__all__"
read_only_fields = [
"function",
"project_description",
"project_number",
"project_type",
"mtsf_service_outcome",
"iudf",
"own_strategic_objectives",
"asset_class",
"asset_subclass",
"ward_location",
"longitude",
"latitude",
]
class GeoProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
class Meta:
model = models.Project
fields = ("project_description", "latitude", "longitude", "expenditure")
read_only_fields = ["project_description", "latitude", "longitude"]
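
In Django REST Framework, names listed in read_only_fields are excluded from deserialization, so no validators run for them on input; that is where the speed-up comes from on these read-only endpoints. A sketch of the effect:
serializer = ExpenditureSerializer(data={"amount": "not-a-number"})
serializer.is_valid()  # True: read-only fields are ignored on input,
                       # so the bad "amount" never reaches a validator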
|
0f12f4a2e8b68cf48b9768a6b18a1a560068eac2 | app/timetables/models.py | app/timetables/models.py | from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.TextField()
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
| from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.CharField(max_length=60)
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
| Change meal name to charfield | Change meal name to charfield
| Python | mit | teamtaverna/core | from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
- name = models.TextField()
+ name = models.CharField(max_length=60)
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
| Change meal name to charfield | ## Code Before:
from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.TextField()
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
## Instruction:
Change meal name to charfield
## Code After:
from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.CharField(max_length=60)
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
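
CharField(max_length=60) maps to a bounded VARCHAR and, unlike TextField, gives model and form validation a length limit to enforce; on an existing database the change also needs a migration. A sketch of the new behaviour:
from datetime import time
meal = Meal(name="x" * 61, start_time=time(8), end_time=time(9))
meal.full_clean()  # now raises ValidationError: name exceeds 60 characters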
|
8883f1a45595219ae843b3400df1f56ab07aa4fe | corehq/apps/userreports/document_stores.py | corehq/apps/userreports/document_stores.py | from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| Add document store for locations | Add document store for locations
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | + from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
+ from pillowtop.dao.exceptions import DocumentNotFoundError
+ from pillowtop.dao.interface import ReadOnlyDocumentStore
+
+
+ class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
+
+ def __init__(self, domain):
+ self.domain = domain
+ self.queryset = SQLLocation.objects.filter(domain=domain)
+
+ def get_document(self, doc_id):
+ try:
+ return self.queryset.get(location_id=doc_id).to_json()
+ except SQLLocation.DoesNotExist as e:
+ raise DocumentNotFoundError(e)
+
+ def iter_document_ids(self, last_id=None):
+ return iter(self.queryset.location_ids())
+
+ def iter_documents(self, ids):
+ for location in self.queryset.filter(location_id__in=ids):
+ yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
+ elif doc_type == 'Location':
+ return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| Add document store for locations | ## Code Before:
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
## Instruction:
Add document store for locations
## Code After:
from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
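
The factory keeps callers storage-agnostic: they ask for a store by domain and doc_type and use the same interface whether the documents live in SQL or CouchDB. Illustrative use, with hypothetical identifiers:
store = get_document_store("example-domain", "Location")
document = store.get_document("some-location-id")  # DocumentNotFoundError if absent
for doc in store.iter_documents(["some-location-id"]):
    print(doc)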
|
7fd3e82c449ebf46e369d2a8c2bf534cb6b17607 | notebook/lib/pos_tags.py | notebook/lib/pos_tags.py | import nltk
class PosTags:
def tag(self, t):
'''
With a list of tokens, mark their part of speech and return
a list dicts (no native tuple type in dataframes it seems).
'''
pos = nltk.pos_tag(t)
retval = []
for p in pos:
retval.append({"word": p[0], "tag": p[1]})
return retval
if __name__ == "__main__":
from tokens import Tokens
import sys
fn = sys.argv[1]
t = Tokens()
pos_tags = PosTags()
with open(fn) as f:
for l in f:
tokens = t.tokenize(l)
pos = pos_tags.tag(tokens)
s = ""
for p in pos:
s = s + p["word"] + " (" + p["tag"] + ") | "
print(s + "\n")
| import nltk
class PosTags:
def tag(self, t, as_dicts=True):
'''
With a list of tokens, mark their part of speech and return
a list dicts (no native tuple type in dataframes it seems).
'''
pos = nltk.pos_tag(t)
if as_dicts:
return self.to_dicts(pos)
else:
return pos
def to_dicts(self, pos):
'''
With a list of POS tag tuples, convert the tuples to dicts
because Spark can't store tuples.
'''
retval = []
for p in pos:
retval.append({"word": p[0], "tag": p[1]})
return retval
if __name__ == "__main__":
from tokens import Tokens
import sys
fn = sys.argv[1]
t = Tokens()
pos_tags = PosTags()
with open(fn) as f:
for l in f:
tokens = t.tokenize(l)
pos = pos_tags.tag(tokens)
s = ""
for p in pos:
s = s + p["word"] + " (" + p["tag"] + ") | "
print(s + "\n")
 | Change return to allow the original tuples to come out, since we'll need them for chunking | Change return to allow the original tuples to come out, since we'll need them for chunking
| Python | mit | mjcollin/2016spr,mjcollin/2016spr,mjcollin/2016spr | import nltk
class PosTags:
- def tag(self, t):
+ def tag(self, t, as_dicts=True):
'''
With a list of tokens, mark their part of speech and return
a list dicts (no native tuple type in dataframes it seems).
'''
pos = nltk.pos_tag(t)
+ if as_dicts:
+ return self.to_dicts(pos)
+ else:
+ return pos
+
+
+ def to_dicts(self, pos):
+ '''
+ With a list of POS tag tuples, convert the tuples to dicts
+ because Spark can't store tuples.
+ '''
retval = []
for p in pos:
retval.append({"word": p[0], "tag": p[1]})
return retval
if __name__ == "__main__":
from tokens import Tokens
import sys
fn = sys.argv[1]
t = Tokens()
pos_tags = PosTags()
with open(fn) as f:
for l in f:
tokens = t.tokenize(l)
pos = pos_tags.tag(tokens)
s = ""
for p in pos:
s = s + p["word"] + " (" + p["tag"] + ") | "
print(s + "\n")
 | Change return to allow the original tuples to come out, since we'll need them for chunking | ## Code Before:
import nltk
class PosTags:
def tag(self, t):
'''
With a list of tokens, mark their part of speech and return
a list dicts (no native tuple type in dataframes it seems).
'''
pos = nltk.pos_tag(t)
retval = []
for p in pos:
retval.append({"word": p[0], "tag": p[1]})
return retval
if __name__ == "__main__":
from tokens import Tokens
import sys
fn = sys.argv[1]
t = Tokens()
pos_tags = PosTags()
with open(fn) as f:
for l in f:
tokens = t.tokenize(l)
pos = pos_tags.tag(tokens)
s = ""
for p in pos:
s = s + p["word"] + " (" + p["tag"] + ") | "
print(s + "\n")
## Instruction:
Change return to allow the original tuples to come out, since we'll need them for chunking
## Code After:
import nltk
class PosTags:
def tag(self, t, as_dicts=True):
'''
With a list of tokens, mark their part of speech and return
a list dicts (no native tuple type in dataframes it seems).
'''
pos = nltk.pos_tag(t)
if as_dicts:
return self.to_dicts(pos)
else:
return pos
def to_dicts(self, pos):
'''
With a list of POS tag tuples, convert the tuples to dicts
because Spark can't store tuples.
'''
retval = []
for p in pos:
retval.append({"word": p[0], "tag": p[1]})
return retval
if __name__ == "__main__":
from tokens import Tokens
import sys
fn = sys.argv[1]
t = Tokens()
pos_tags = PosTags()
with open(fn) as f:
for l in f:
tokens = t.tokenize(l)
pos = pos_tags.tag(tokens)
s = ""
for p in pos:
s = s + p["word"] + " (" + p["tag"] + ") | "
print(s + "\n")
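
With as_dicts=False the caller gets NLTK's native (word, tag) tuples back, which the planned chunking step expects, while the dict default stays dataframe-friendly. For example (tags illustrative):
tagger = PosTags()
tagger.tag(["the", "cat"])                   # [{'word': 'the', 'tag': 'DT'}, ...]
tagger.tag(["the", "cat"], as_dicts=False)   # [('the', 'DT'), ('cat', 'NN')]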
|
138aa351b3dbe95f3cdebf01dbd3c75f1ce3fac2 | src/ggrc/fulltext/sql.py | src/ggrc/fulltext/sql.py |
from ggrc import db
from . import Indexer
class SqlIndexer(Indexer):
def create_record(self, record, commit=True):
for k,v in record.properties.items():
db.session.add(self.record_type(
key=record.key,
type=record.type,
context_id=record.context_id,
tags=record.tags,
property=k,
content=v,
))
if commit:
db.session.commit()
def update_record(self, record, commit=True):
self.delete_record(record.key, commit=False)
self.create_record(record, commit=commit)
def delete_record(self, key, type, commit=True):
db.session.query(self.record_type).filter(\
self.record_type.key == key,
self.record_type.type == type).delete()
if commit:
db.session.commit()
def delete_all_records(self, commit=True):
db.session.query(self.record_type).delete()
if commit:
db.session.commit()
|
from ggrc import db
from . import Indexer
class SqlIndexer(Indexer):
def create_record(self, record, commit=True):
for k,v in record.properties.items():
db.session.add(self.record_type(
key=record.key,
type=record.type,
context_id=record.context_id,
tags=record.tags,
property=k,
content=v,
))
if commit:
db.session.commit()
def update_record(self, record, commit=True):
self.delete_record(record.key, record.type, commit=False)
self.create_record(record, commit=commit)
def delete_record(self, key, type, commit=True):
db.session.query(self.record_type).filter(\
self.record_type.key == key,
self.record_type.type == type).delete()
if commit:
db.session.commit()
def delete_all_records(self, commit=True):
db.session.query(self.record_type).delete()
if commit:
db.session.commit()
| Fix test broken due to delete_record change | Fix test broken due to delete_record change
| Python | apache-2.0 | kr41/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core |
from ggrc import db
from . import Indexer
class SqlIndexer(Indexer):
def create_record(self, record, commit=True):
for k,v in record.properties.items():
db.session.add(self.record_type(
key=record.key,
type=record.type,
context_id=record.context_id,
tags=record.tags,
property=k,
content=v,
))
if commit:
db.session.commit()
def update_record(self, record, commit=True):
- self.delete_record(record.key, commit=False)
+ self.delete_record(record.key, record.type, commit=False)
self.create_record(record, commit=commit)
def delete_record(self, key, type, commit=True):
db.session.query(self.record_type).filter(\
self.record_type.key == key,
self.record_type.type == type).delete()
if commit:
db.session.commit()
def delete_all_records(self, commit=True):
db.session.query(self.record_type).delete()
if commit:
db.session.commit()
| Fix test broken due to delete_record change | ## Code Before:
from ggrc import db
from . import Indexer
class SqlIndexer(Indexer):
def create_record(self, record, commit=True):
for k,v in record.properties.items():
db.session.add(self.record_type(
key=record.key,
type=record.type,
context_id=record.context_id,
tags=record.tags,
property=k,
content=v,
))
if commit:
db.session.commit()
def update_record(self, record, commit=True):
self.delete_record(record.key, commit=False)
self.create_record(record, commit=commit)
def delete_record(self, key, type, commit=True):
db.session.query(self.record_type).filter(\
self.record_type.key == key,
self.record_type.type == type).delete()
if commit:
db.session.commit()
def delete_all_records(self, commit=True):
db.session.query(self.record_type).delete()
if commit:
db.session.commit()
## Instruction:
Fix test broken due to delete_record change
## Code After:
from ggrc import db
from . import Indexer
class SqlIndexer(Indexer):
def create_record(self, record, commit=True):
for k,v in record.properties.items():
db.session.add(self.record_type(
key=record.key,
type=record.type,
context_id=record.context_id,
tags=record.tags,
property=k,
content=v,
))
if commit:
db.session.commit()
def update_record(self, record, commit=True):
self.delete_record(record.key, record.type, commit=False)
self.create_record(record, commit=commit)
def delete_record(self, key, type, commit=True):
db.session.query(self.record_type).filter(\
self.record_type.key == key,
self.record_type.type == type).delete()
if commit:
db.session.commit()
def delete_all_records(self, commit=True):
db.session.query(self.record_type).delete()
if commit:
db.session.commit()
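
The breakage was a plain signature mismatch: delete_record() takes (key, type), so the old call that passed only the key raised a TypeError. Schematically, with indexer a SqlIndexer instance:
# indexer.delete_record(record.key)             # TypeError: 'type' is missing
indexer.delete_record(record.key, record.type)  # matches the signature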
|
756c9ae9487ac5c35f069b79e792043bca0af27e | panoptes_client/utils.py | panoptes_client/utils.py | import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
for _batch in [
to_batch[i:i+_batch_size]
for i in xrange(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
| import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
if isinstance(to_batch, set):
to_batch = list(to_batch)
for _batch in [
to_batch[i:i+_batch_size]
for i in xrange(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
| Fix passing sets to batchable methods | Fix passing sets to batchable methods
Sets don't support indexing, so convert them to lists.
| Python | apache-2.0 | zooniverse/panoptes-python-client | import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
+ if isinstance(to_batch, set):
+ to_batch = list(to_batch)
+
for _batch in [
to_batch[i:i+_batch_size]
for i in xrange(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
| Fix passing sets to batchable methods | ## Code Before:
import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
for _batch in [
to_batch[i:i+_batch_size]
for i in xrange(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
## Instruction:
Fix passing sets to batchable methods
## Code After:
import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
if isinstance(to_batch, set):
to_batch = list(to_batch)
for _batch in [
to_batch[i:i+_batch_size]
for i in xrange(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
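
A quick usage sketch of the fix above (hypothetical call, assuming the `batchable` decorator from this record is in scope, under Python 2 as the `xrange` call implies): without the `isinstance(to_batch, set)` branch, slicing a set raises a `TypeError`, since sets do not support indexing.

```python
calls = []

@batchable(batch_size=2)
def save(items):
    # record each batch so the split can be checked
    calls.append(list(items))

save({1, 2, 3, 4, 5})  # previously failed; the set is now copied into a list
assert sum(len(batch) for batch in calls) == 5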
|
e170666cbbc1f2a61c0ffa077c66da4556a6c5bb | app/packages/views.py | app/packages/views.py | import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = (len(featured_list) / 3) * 3
featured_list = featured_list[:length]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
return jsonify(results=json_data)
| import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = ((len(featured_list) + 2) / 3) * 3
featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
for item in ["docblockr", "git-log"]:
obj = Package.get_package(item)
json_data.append(obj.get_json())
return jsonify(results=json_data)
| Add my packages to featured list | Add my packages to featured list
| Python | bsd-2-clause | NikhilKalige/atom-website,NikhilKalige/atom-website,NikhilKalige/atom-website | import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
- length = (len(featured_list) / 3) * 3
+ length = ((len(featured_list) + 2) / 3) * 3
- featured_list = featured_list[:length]
+ featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
+ for item in ["docblockr", "git-log"]:
+ obj = Package.get_package(item)
+ json_data.append(obj.get_json())
+
return jsonify(results=json_data)
| Add my packages to featured list | ## Code Before:
import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = (len(featured_list) / 3) * 3
featured_list = featured_list[:length]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
return jsonify(results=json_data)
## Instruction:
Add my packages to featured list
## Code After:
import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = ((len(featured_list) + 2) / 3) * 3
featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
for item in ["docblockr", "git-log"]:
obj = Package.get_package(item)
json_data.append(obj.get_json())
return jsonify(results=json_data)
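
The new arithmetic reserves room for the two hand-picked packages appended at the end, so the rendered grid stays a multiple of three, provided every lookup returns a package. A quick check (the original runs under Python 2, where `/` on ints already floors; `//` is used here so the sketch also works on Python 3):

```python
def grid_total(n):
    length = ((n + 2) // 3) * 3      # largest multiple of 3 <= n + 2
    return (length - 2) + 2          # sliced featured items + the two appended

assert all(grid_total(n) % 3 == 0 for n in range(1, 30))
```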
|
33f2636e1de536a633cec9332362252b0b614817 | serpent/templates/SerpentGamePlugin/files/serpent_game.py | serpent/templates/SerpentGamePlugin/files/serpent_game.py | from serpent.game import Game
from .api.api import MyGameAPI
from serpent.utilities import Singleton
from serpent.input_controller import InputControllers
from serpent.game_launchers.web_browser_game_launcher import WebBrowser
class SerpentGame(Game, metaclass=Singleton):
def __init__(self, **kwargs):
kwargs["platform"] = "PLATFORM"
kwargs["input_controller"] = InputControllers.PYAUTOGUI
kwargs["window_name"] = "WINDOW_NAME"
kwargs["app_id"] = "APP_ID"
kwargs["app_args"] = None
kwargs["executable_path"] = "EXECUTABLE_PATH"
kwargs["url"] = "URL"
kwargs["browser"] = WebBrowser.DEFAULT
super().__init__(**kwargs)
self.api_class = MyGameAPI
self.api_instance = None
@property
def screen_regions(self):
regions = {
"SAMPLE_REGION": (0, 0, 0, 0)
}
return regions
@property
def ocr_presets(self):
presets = {
"SAMPLE_PRESET": {
"extract": {
"gradient_size": 1,
"closing_size": 1
},
"perform": {
"scale": 10,
"order": 1,
"horizontal_closing": 1,
"vertical_closing": 1
}
}
}
return presets
| from serpent.game import Game
from .api.api import MyGameAPI
from serpent.utilities import Singleton
from serpent.game_launchers.web_browser_game_launcher import WebBrowser
class SerpentGame(Game, metaclass=Singleton):
def __init__(self, **kwargs):
kwargs["platform"] = "PLATFORM"
kwargs["window_name"] = "WINDOW_NAME"
kwargs["app_id"] = "APP_ID"
kwargs["app_args"] = None
kwargs["executable_path"] = "EXECUTABLE_PATH"
kwargs["url"] = "URL"
kwargs["browser"] = WebBrowser.DEFAULT
super().__init__(**kwargs)
self.api_class = MyGameAPI
self.api_instance = None
@property
def screen_regions(self):
regions = {
"SAMPLE_REGION": (0, 0, 0, 0)
}
return regions
@property
def ocr_presets(self):
presets = {
"SAMPLE_PRESET": {
"extract": {
"gradient_size": 1,
"closing_size": 1
},
"perform": {
"scale": 10,
"order": 1,
"horizontal_closing": 1,
"vertical_closing": 1
}
}
}
return presets
| Remove kwargs["input_controller"] from the Game plugin template | Remove kwargs["input_controller"] from the Game plugin template
| Python | mit | SerpentAI/SerpentAI | from serpent.game import Game
from .api.api import MyGameAPI
from serpent.utilities import Singleton
- from serpent.input_controller import InputControllers
from serpent.game_launchers.web_browser_game_launcher import WebBrowser
class SerpentGame(Game, metaclass=Singleton):
def __init__(self, **kwargs):
kwargs["platform"] = "PLATFORM"
-
- kwargs["input_controller"] = InputControllers.PYAUTOGUI
kwargs["window_name"] = "WINDOW_NAME"
kwargs["app_id"] = "APP_ID"
kwargs["app_args"] = None
kwargs["executable_path"] = "EXECUTABLE_PATH"
kwargs["url"] = "URL"
kwargs["browser"] = WebBrowser.DEFAULT
super().__init__(**kwargs)
self.api_class = MyGameAPI
self.api_instance = None
@property
def screen_regions(self):
regions = {
"SAMPLE_REGION": (0, 0, 0, 0)
}
return regions
@property
def ocr_presets(self):
presets = {
"SAMPLE_PRESET": {
"extract": {
"gradient_size": 1,
"closing_size": 1
},
"perform": {
"scale": 10,
"order": 1,
"horizontal_closing": 1,
"vertical_closing": 1
}
}
}
return presets
| Remove kwargs["input_controller"] from the Game plugin template | ## Code Before:
from serpent.game import Game
from .api.api import MyGameAPI
from serpent.utilities import Singleton
from serpent.input_controller import InputControllers
from serpent.game_launchers.web_browser_game_launcher import WebBrowser
class SerpentGame(Game, metaclass=Singleton):
def __init__(self, **kwargs):
kwargs["platform"] = "PLATFORM"
kwargs["input_controller"] = InputControllers.PYAUTOGUI
kwargs["window_name"] = "WINDOW_NAME"
kwargs["app_id"] = "APP_ID"
kwargs["app_args"] = None
kwargs["executable_path"] = "EXECUTABLE_PATH"
kwargs["url"] = "URL"
kwargs["browser"] = WebBrowser.DEFAULT
super().__init__(**kwargs)
self.api_class = MyGameAPI
self.api_instance = None
@property
def screen_regions(self):
regions = {
"SAMPLE_REGION": (0, 0, 0, 0)
}
return regions
@property
def ocr_presets(self):
presets = {
"SAMPLE_PRESET": {
"extract": {
"gradient_size": 1,
"closing_size": 1
},
"perform": {
"scale": 10,
"order": 1,
"horizontal_closing": 1,
"vertical_closing": 1
}
}
}
return presets
## Instruction:
Remove kwargs["input_controller"] from the Game plugin template
## Code After:
from serpent.game import Game
from .api.api import MyGameAPI
from serpent.utilities import Singleton
from serpent.game_launchers.web_browser_game_launcher import WebBrowser
class SerpentGame(Game, metaclass=Singleton):
def __init__(self, **kwargs):
kwargs["platform"] = "PLATFORM"
kwargs["window_name"] = "WINDOW_NAME"
kwargs["app_id"] = "APP_ID"
kwargs["app_args"] = None
kwargs["executable_path"] = "EXECUTABLE_PATH"
kwargs["url"] = "URL"
kwargs["browser"] = WebBrowser.DEFAULT
super().__init__(**kwargs)
self.api_class = MyGameAPI
self.api_instance = None
@property
def screen_regions(self):
regions = {
"SAMPLE_REGION": (0, 0, 0, 0)
}
return regions
@property
def ocr_presets(self):
presets = {
"SAMPLE_PRESET": {
"extract": {
"gradient_size": 1,
"closing_size": 1
},
"perform": {
"scale": 10,
"order": 1,
"horizontal_closing": 1,
"vertical_closing": 1
}
}
}
return presets
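
Dropping the kwarg only makes sense if the framework supplies a default. A rough illustration of the assumed pattern (not SerpentAI's actual base class):

```python
class Game(object):
    def __init__(self, **kwargs):
        self.platform = kwargs.pop("platform", None)
        self.window_name = kwargs.pop("window_name", None)
        # the template no longer passes this, so a framework-side default
        # (assumed here) has to cover it
        self.input_controller = kwargs.pop("input_controller", "PYAUTOGUI")
```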
|
d6433001f3660c9c4506fe5e1f62c0a52edd02f7 | project/djenerator/tests.py | project/djenerator/tests.py | from django.test import TestCase
| from django.test import TestCase
from model_reader import is_instance_of_model
from models import ExtendingModel
from models import NotExtendingModel
from models import TestModel0
from models import TestModel1
from models import TestModelA
from models import TestModelB
from models import TestModelC
from models import TestModelD
from models import TestModelE
from models import TestModelX
from models import TestModelY
class TestInstanceOfModel(TestCase):
def test(self):
models = [TestModel0, TestModel1, TestModelA, TestModelB, TestModelC,
TestModelD, TestModelE, TestModelX, TestModelY, ExtendingModel]
for model in models:
self.assertTrue(is_instance_of_model(model))
self.assertFalse(is_instance_of_model(NotExtendingModel))
def not_extending_model_function():
pass
self.assertFalse(is_instance_of_model(not_extending_model_function))
| Test Cases for is instance of Model function | Test Cases for is instance of Model function
| Python | mit | mostafa-mahmoud/djenerator,aelguindy/djenerator,mostafa-mahmoud/djenerator | from django.test import TestCase
+ from model_reader import is_instance_of_model
+ from models import ExtendingModel
+ from models import NotExtendingModel
+ from models import TestModel0
+ from models import TestModel1
+ from models import TestModelA
+ from models import TestModelB
+ from models import TestModelC
+ from models import TestModelD
+ from models import TestModelE
+ from models import TestModelX
+ from models import TestModelY
+ class TestInstanceOfModel(TestCase):
+ def test(self):
+ models = [TestModel0, TestModel1, TestModelA, TestModelB, TestModelC,
+ TestModelD, TestModelE, TestModelX, TestModelY, ExtendingModel]
+ for model in models:
+ self.assertTrue(is_instance_of_model(model))
+ self.assertFalse(is_instance_of_model(NotExtendingModel))
+ def not_extending_model_function():
+ pass
+
+ self.assertFalse(is_instance_of_model(not_extending_model_function))
+
+
+
+ | Test Cases for is instance of Model function | ## Code Before:
from django.test import TestCase
## Instruction:
Test Cases for is instance of Model function
## Code After:
from django.test import TestCase
from model_reader import is_instance_of_model
from models import ExtendingModel
from models import NotExtendingModel
from models import TestModel0
from models import TestModel1
from models import TestModelA
from models import TestModelB
from models import TestModelC
from models import TestModelD
from models import TestModelE
from models import TestModelX
from models import TestModelY
class TestInstanceOfModel(TestCase):
def test(self):
models = [TestModel0, TestModel1, TestModelA, TestModelB, TestModelC,
TestModelD, TestModelE, TestModelX, TestModelY, ExtendingModel]
for model in models:
self.assertTrue(is_instance_of_model(model))
self.assertFalse(is_instance_of_model(NotExtendingModel))
def not_extending_model_function():
pass
self.assertFalse(is_instance_of_model(not_extending_model_function))
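
The function under test is imported from `model_reader` and not shown in this record. A minimal implementation consistent with the assertions above might look like this (hypothetical; the real one may differ):

```python
import inspect
from django.db import models

def is_instance_of_model(candidate):
    # a "model" here means a class object extending django's Model
    return inspect.isclass(candidate) and issubclass(candidate, models.Model)
```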
|
6631906fc126eadc114a7ee673194da4880dc960 | flask_admin/contrib/geoa/typefmt.py | flask_admin/contrib/geoa/typefmt.py | from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
import json
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
from flask import current_app
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
if value.srid is -1:
geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value))
else:
geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value.ST_Transform( 4326)))
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
| from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
if value.srid is -1:
value.srid = 4326
geojson = view.model.query.with_entities(func.ST_AsGeoJSON(value)).scalar()
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
 | Remove Flask-SQLAlchemy dependency. It should be noted that the declarative base still has to be configured like this: | Remove Flask-SQLAlchemy dependency
It should be noted that the declarative base still has to be configured
like this:
```python
class MyBase:
query = session.query_property()
```
Also decreased code duplication and removed unused imports.
| Python | bsd-3-clause | torotil/flask-admin,likaiguo/flask-admin,iurisilvio/flask-admin,toddetzel/flask-admin,mikelambert/flask-admin,lifei/flask-admin,likaiguo/flask-admin,rochacbruno/flask-admin,ArtemSerga/flask-admin,closeio/flask-admin,betterlife/flask-admin,jschneier/flask-admin,torotil/flask-admin,toddetzel/flask-admin,closeio/flask-admin,jmagnusson/flask-admin,toddetzel/flask-admin,iurisilvio/flask-admin,quokkaproject/flask-admin,torotil/flask-admin,flask-admin/flask-admin,quokkaproject/flask-admin,betterlife/flask-admin,jschneier/flask-admin,quokkaproject/flask-admin,mikelambert/flask-admin,jmagnusson/flask-admin,flask-admin/flask-admin,mikelambert/flask-admin,iurisilvio/flask-admin,likaiguo/flask-admin,jschneier/flask-admin,flask-admin/flask-admin,rochacbruno/flask-admin,mikelambert/flask-admin,lifei/flask-admin,jschneier/flask-admin,betterlife/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,toddetzel/flask-admin,ArtemSerga/flask-admin,flask-admin/flask-admin,iurisilvio/flask-admin,betterlife/flask-admin,ArtemSerga/flask-admin,quokkaproject/flask-admin,closeio/flask-admin,likaiguo/flask-admin,closeio/flask-admin,lifei/flask-admin,lifei/flask-admin,torotil/flask-admin,ArtemSerga/flask-admin | from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
- import json
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
- from flask import current_app
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
if value.srid is -1:
+ value.srid = 4326
+ geojson = view.model.query.with_entities(func.ST_AsGeoJSON(value)).scalar()
- geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value))
- else:
- geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value.ST_Transform( 4326)))
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
| Remove Flask-SQLAlchemy dependency It should be noted that the declarative base still has to be configured like this: | ## Code Before:
from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
import json
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
from flask import current_app
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
if value.srid is -1:
geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value))
else:
geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value.ST_Transform( 4326)))
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
## Instruction:
Remove Flask-SQLAlchemy dependency. It should be noted that the declarative base still has to be configured like this:
## Code After:
from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
if value.srid is -1:
value.srid = 4326
geojson = view.model.query.with_entities(func.ST_AsGeoJSON(value)).scalar()
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
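
The setup the commit message alludes to, spelled out as a sketch (names assumed): without Flask-SQLAlchemy, `view.model.query` inside `geom_formatter` only works if the declarative base carries a query property bound to a session.

```python
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

session = scoped_session(sessionmaker())
Base = declarative_base()
Base.query = session.query_property()  # makes Model.query available
```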
|
e35ff2f0e45289c40a57c9488156829c60f9d3a0 | vumi_http_proxy/clickme.py | vumi_http_proxy/clickme.py |
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
cli.interface = str(interface)
cli.port = port
"""This script runs vumi-http-proxy on <interface>:<port>"""
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
|
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
"""This script runs vumi-http-proxy on <interface>:<port>"""
interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
| Change unicode ip to string | Change unicode ip to string
| Python | bsd-3-clause | praekelt/vumi-http-proxy,praekelt/vumi-http-proxy |
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
- cli.interface = str(interface)
- cli.port = port
"""This script runs vumi-http-proxy on <interface>:<port>"""
+ interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
| Change unicode ip to string | ## Code Before:
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
cli.interface = str(interface)
cli.port = port
"""This script runs vumi-http-proxy on <interface>:<port>"""
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
## Instruction:
Change unicode ip to string
## Code After:
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
"""This script runs vumi-http-proxy on <interface>:<port>"""
interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
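
Why the cast matters under Python 2 (standalone illustration; the premise of the commit is that Click hands option values through as unicode while the Twisted socket layer downstream expects byte strings):

```python
interface = u"0.0.0.0"        # what the --interface option delivers
print(type(interface))        # <type 'unicode'> on Python 2
interface = str(interface)
print(type(interface))        # <type 'str'>
```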
|
435b989d75b9e57cf2fe5fec6892c481a278a102 | examples/capabilities/selenoid_cap_file.py | examples/capabilities/selenoid_cap_file.py |
capabilities = {
"screenResolution": "1280x1024x24",
"selenoid:options": {
"enableVNC": True,
"enableVideo": False,
},
}
|
capabilities = {
"acceptSslCerts": True,
"acceptInsecureCerts": True,
"screenResolution": "1920x1080x24",
"selenoid:options": {
"enableVNC": True,
"enableVideo": False,
},
}
| Update an example capabilities file | Update an example capabilities file
| Python | mit | mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase |
capabilities = {
+ "acceptSslCerts": True,
+ "acceptInsecureCerts": True,
- "screenResolution": "1280x1024x24",
+ "screenResolution": "1920x1080x24",
"selenoid:options": {
"enableVNC": True,
"enableVideo": False,
},
}
| Update an example capabilities file | ## Code Before:
capabilities = {
"screenResolution": "1280x1024x24",
"selenoid:options": {
"enableVNC": True,
"enableVideo": False,
},
}
## Instruction:
Update an example capabilities file
## Code After:
capabilities = {
"acceptSslCerts": True,
"acceptInsecureCerts": True,
"screenResolution": "1920x1080x24",
"selenoid:options": {
"enableVNC": True,
"enableVideo": False,
},
}
|
a26f3ee3df1f70302bc524e3a8decb1a1266aadd | devito/data/meta.py | devito/data/meta.py | from devito.tools import Tag
__all__ = ['DOMAIN', 'OWNED', 'HALO', 'NOPAD', 'FULL',
'LEFT', 'RIGHT', 'CENTER']
class DataRegion(Tag):
pass
DOMAIN = DataRegion('domain')
OWNED = DataRegion('owned') # within DOMAIN
HALO = DataRegion('halo')
NOPAD = DataRegion('nopad') # == DOMAIN+HALO
FULL = DataRegion('full') # == DOMAIN+HALO+PADDING
class DataSide(Tag):
pass
LEFT = DataSide('left')
RIGHT = DataSide('right')
CENTER = DataSide('center')
| from devito.tools import Tag
__all__ = ['DOMAIN', 'OWNED', 'HALO', 'NOPAD', 'FULL',
'LEFT', 'RIGHT', 'CENTER']
class DataRegion(Tag):
pass
DOMAIN = DataRegion('domain')
OWNED = DataRegion('owned') # within DOMAIN
HALO = DataRegion('halo')
NOPAD = DataRegion('nopad') # == DOMAIN+HALO
FULL = DataRegion('full') # == DOMAIN+HALO+PADDING
class DataSide(Tag):
pass
LEFT = DataSide('left', -1)
CENTER = DataSide('center', 0)
RIGHT = DataSide('right', 1)
| Add static value to LEFT, CENTER, RIGHT | data: Add static value to LEFT, CENTER, RIGHT
| Python | mit | opesci/devito,opesci/devito | from devito.tools import Tag
__all__ = ['DOMAIN', 'OWNED', 'HALO', 'NOPAD', 'FULL',
'LEFT', 'RIGHT', 'CENTER']
class DataRegion(Tag):
pass
DOMAIN = DataRegion('domain')
OWNED = DataRegion('owned') # within DOMAIN
HALO = DataRegion('halo')
NOPAD = DataRegion('nopad') # == DOMAIN+HALO
FULL = DataRegion('full') # == DOMAIN+HALO+PADDING
class DataSide(Tag):
pass
- LEFT = DataSide('left')
+ LEFT = DataSide('left', -1)
- RIGHT = DataSide('right')
- CENTER = DataSide('center')
+ CENTER = DataSide('center', 0)
+ RIGHT = DataSide('right', 1)
| Add static value to LEFT, CENTER, RIGHT | ## Code Before:
from devito.tools import Tag
__all__ = ['DOMAIN', 'OWNED', 'HALO', 'NOPAD', 'FULL',
'LEFT', 'RIGHT', 'CENTER']
class DataRegion(Tag):
pass
DOMAIN = DataRegion('domain')
OWNED = DataRegion('owned') # within DOMAIN
HALO = DataRegion('halo')
NOPAD = DataRegion('nopad') # == DOMAIN+HALO
FULL = DataRegion('full') # == DOMAIN+HALO+PADDING
class DataSide(Tag):
pass
LEFT = DataSide('left')
RIGHT = DataSide('right')
CENTER = DataSide('center')
## Instruction:
Add static value to LEFT, CENTER, RIGHT
## Code After:
from devito.tools import Tag
__all__ = ['DOMAIN', 'OWNED', 'HALO', 'NOPAD', 'FULL',
'LEFT', 'RIGHT', 'CENTER']
class DataRegion(Tag):
pass
DOMAIN = DataRegion('domain')
OWNED = DataRegion('owned') # within DOMAIN
HALO = DataRegion('halo')
NOPAD = DataRegion('nopad') # == DOMAIN+HALO
FULL = DataRegion('full') # == DOMAIN+HALO+PADDING
class DataSide(Tag):
pass
LEFT = DataSide('left', -1)
CENTER = DataSide('center', 0)
RIGHT = DataSide('right', 1)
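
A minimal stand-in showing what the static values buy (`Tag` internals are assumed here, not copied from devito): the -1/0/+1 convention lets index and neighbour arithmetic consume a side directly.

```python
class Tag(object):
    def __init__(self, name, val=None):
        self.name, self.val = name, val

LEFT, CENTER, RIGHT = Tag('left', -1), Tag('center', 0), Tag('right', 1)

rank = 4
neighbours = [rank + side.val for side in (LEFT, RIGHT)]
assert neighbours == [3, 5]
```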
|
220748a5cc481b8df76af6a1301af94def603ee2 | paci/helpers/display_helper.py | paci/helpers/display_helper.py | """Helper to output stuff"""
from tabulate import tabulate
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(entries, header, tablefmt="grid"))
def print_table(entries):
"""Prints out a table"""
print(tabulate(entries, tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
| """Helper to output stuff"""
from tabulate import tabulate
import os
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(fix_descriptions(entries), header, tablefmt="presto"))
def print_table(entries):
"""Prints out a table"""
print(tabulate(cleanup_entries(entries), tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
def fix_descriptions(entries):
"""Fixes the description to fit into the terminal"""
clean_entries = []
ml = get_max_desc_width(get_longest_list(entries))
for entry in entries:
clean_entry = entry
max_value = max(entry, key=len)
for idx, val in enumerate(entry):
if val is max_value:
clean_entry[idx] = entry[idx][:ml] + (entry[idx][ml:] and ' [..]')
clean_entries.append(clean_entry)
return clean_entries
def get_longest_list(entries):
max_list = ['']*len(entries[0])
for entry in entries:
for idx, val in enumerate(entry):
if len(val) > len(max_list[idx]):
max_list[idx] = val
return max_list
def get_max_desc_width(lst):
_, columns = os.popen('stty size', 'r').read().split()
length = int(columns)
max_value = max(lst, key=len)
for val in lst:
if val is not max_value:
length -= len(val)
return length - 15
| Fix how tables are printed on smaller screens | Fix how tables are printed on smaller screens
| Python | mit | tradebyte/paci,tradebyte/paci | """Helper to output stuff"""
from tabulate import tabulate
+ import os
def print_list(header, entries):
"""Prints out a list"""
- print(tabulate(entries, header, tablefmt="grid"))
+ print(tabulate(fix_descriptions(entries), header, tablefmt="presto"))
def print_table(entries):
"""Prints out a table"""
- print(tabulate(entries, tablefmt="plain"))
+ print(tabulate(cleanup_entries(entries), tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
+
+ def fix_descriptions(entries):
+ """Fixes the description to fit into the terminal"""
+
+ clean_entries = []
+ ml = get_max_desc_width(get_longest_list(entries))
+
+ for entry in entries:
+ clean_entry = entry
+ max_value = max(entry, key=len)
+ for idx, val in enumerate(entry):
+ if val is max_value:
+ clean_entry[idx] = entry[idx][:ml] + (entry[idx][ml:] and ' [..]')
+ clean_entries.append(clean_entry)
+
+ return clean_entries
+
+
+ def get_longest_list(entries):
+ max_list = ['']*len(entries[0])
+ for entry in entries:
+ for idx, val in enumerate(entry):
+ if len(val) > len(max_list[idx]):
+ max_list[idx] = val
+ return max_list
+
+
+ def get_max_desc_width(lst):
+ _, columns = os.popen('stty size', 'r').read().split()
+ length = int(columns)
+ max_value = max(lst, key=len)
+ for val in lst:
+ if val is not max_value:
+ length -= len(val)
+
+ return length - 15
+ | Fix how tables are printed on smaller screens | ## Code Before:
"""Helper to output stuff"""
from tabulate import tabulate
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(entries, header, tablefmt="grid"))
def print_table(entries):
"""Prints out a table"""
print(tabulate(entries, tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
## Instruction:
Fix how tables are printed on smaller screens
## Code After:
"""Helper to output stuff"""
from tabulate import tabulate
import os
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(fix_descriptions(entries), header, tablefmt="presto"))
def print_table(entries):
"""Prints out a table"""
print(tabulate(cleanup_entries(entries), tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
def fix_descriptions(entries):
"""Fixes the description to fit into the terminal"""
clean_entries = []
ml = get_max_desc_width(get_longest_list(entries))
for entry in entries:
clean_entry = entry
max_value = max(entry, key=len)
for idx, val in enumerate(entry):
if val is max_value:
clean_entry[idx] = entry[idx][:ml] + (entry[idx][ml:] and ' [..]')
clean_entries.append(clean_entry)
return clean_entries
def get_longest_list(entries):
max_list = ['']*len(entries[0])
for entry in entries:
for idx, val in enumerate(entry):
if len(val) > len(max_list[idx]):
max_list[idx] = val
return max_list
def get_max_desc_width(lst):
_, columns = os.popen('stty size', 'r').read().split()
length = int(columns)
max_value = max(lst, key=len)
for val in lst:
if val is not max_value:
length -= len(val)
return length - 15
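
The truncation idiom from `fix_descriptions`, in isolation (pure illustration): `s[ml:] and ' [..]'` appends the marker only when something was actually cut off.

```python
ml = 8
for s in ("short", "a rather long description"):
    print(s[:ml] + (s[ml:] and ' [..]'))
# short
# a rather [..]
```

Note that, as committed, `print_table` calls `cleanup_entries`, which this file never defines, so only the `print_list` path exercises the new truncation.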
|
e42f77d374bab66fb1a90322c3b36c8f75f2499c | pft/errors.py | pft/errors.py | """Module that contains error handlers."""
from flask import render_template, Blueprint
error = Blueprint('error', __name__)
@error.app_errorhandler(404)
def page_not_found(e):
"""Return page not found HTML page."""
return render_template('404.html'), 404
@error.app_errorhandler(500)
def internal_server_error(e):
"""Return internal server error HTML page."""
return render_template('500.html'), 500
| """Module that contains error handlers."""
from flask import render_template, Blueprint
from .database import db
error = Blueprint('error', __name__)
@error.app_errorhandler(404)
def page_not_found(e):
"""Return page not found HTML page."""
return render_template('404.html'), 404
@error.app_errorhandler(500)
def internal_server_error(e):
"""Return internal server error HTML page."""
db.session.rollback()
return render_template('500.html'), 500
| Add database rollback to error handler | Add database rollback to error handler
| Python | unknown | gregcowell/PFT,gregcowell/BAM,gregcowell/BAM,gregcowell/PFT | """Module that contains error handlers."""
from flask import render_template, Blueprint
+ from .database import db
error = Blueprint('error', __name__)
@error.app_errorhandler(404)
def page_not_found(e):
"""Return page not found HTML page."""
return render_template('404.html'), 404
@error.app_errorhandler(500)
def internal_server_error(e):
"""Return internal server error HTML page."""
+ db.session.rollback()
return render_template('500.html'), 500
| Add database rollback to error handler | ## Code Before:
"""Module that contains error handlers."""
from flask import render_template, Blueprint
error = Blueprint('error', __name__)
@error.app_errorhandler(404)
def page_not_found(e):
"""Return page not found HTML page."""
return render_template('404.html'), 404
@error.app_errorhandler(500)
def internal_server_error(e):
"""Return internal server error HTML page."""
return render_template('500.html'), 500
## Instruction:
Add database rollback to error handler
## Code After:
"""Module that contains error handlers."""
from flask import render_template, Blueprint
from .database import db
error = Blueprint('error', __name__)
@error.app_errorhandler(404)
def page_not_found(e):
"""Return page not found HTML page."""
return render_template('404.html'), 404
@error.app_errorhandler(500)
def internal_server_error(e):
"""Return internal server error HTML page."""
db.session.rollback()
return render_template('500.html'), 500
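
A sketch of the wiring these handlers assume (standard Flask registration, importing the `error` blueprint defined above): after an unhandled database error the session is left in a failed state, and the rollback lets the 500 template render without tripping over it.

```python
from flask import Flask

app = Flask(__name__)
app.register_blueprint(error)  # app_errorhandler hooks apply app-wide
```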
|
b728253a668c7ff2fba12678d77344bfc645e40b | dusty/daemon.py | dusty/daemon.py | import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket():
try:
os.unlink(SOCKET_PATH)
except OSError:
if os.path.exists(SOCKET_PATH):
raise
def _listen_on_socket():
_clean_up_existing_socket()
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(SOCKET_PATH)
sock.listen(1)
logging.info('Listening on socket at {}'.format(SOCKET_PATH))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket()
if __name__ == '__main__':
main()
| import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket(socket_path):
try:
os.unlink(socket_path)
except OSError:
if os.path.exists(socket_path):
raise
def _listen_on_socket(socket_path):
_clean_up_existing_socket(socket_path)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(socket_path)
sock.listen(1)
logging.info('Listening on socket at {}'.format(socket_path))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket(SOCKET_PATH)
if __name__ == '__main__':
main()
| Make this easier to test, which we'll get to a bit later | Make this easier to test, which we'll get to a bit later
| Python | mit | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty | import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
- def _clean_up_existing_socket():
+ def _clean_up_existing_socket(socket_path):
try:
- os.unlink(SOCKET_PATH)
+ os.unlink(socket_path)
except OSError:
- if os.path.exists(SOCKET_PATH):
+ if os.path.exists(socket_path):
raise
- def _listen_on_socket():
+ def _listen_on_socket(socket_path):
- _clean_up_existing_socket()
+ _clean_up_existing_socket(socket_path)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.bind(SOCKET_PATH)
+ sock.bind(socket_path)
sock.listen(1)
- logging.info('Listening on socket at {}'.format(SOCKET_PATH))
+ logging.info('Listening on socket at {}'.format(socket_path))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
- _listen_on_socket()
+ _listen_on_socket(SOCKET_PATH)
if __name__ == '__main__':
main()
| Make this easier to test, which we'll get to a bit later | ## Code Before:
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket():
try:
os.unlink(SOCKET_PATH)
except OSError:
if os.path.exists(SOCKET_PATH):
raise
def _listen_on_socket():
_clean_up_existing_socket()
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(SOCKET_PATH)
sock.listen(1)
logging.info('Listening on socket at {}'.format(SOCKET_PATH))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket()
if __name__ == '__main__':
main()
## Instruction:
Make this easier to test, which we'll get to a bit later
## Code After:
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket(socket_path):
try:
os.unlink(socket_path)
except OSError:
if os.path.exists(socket_path):
raise
def _listen_on_socket(socket_path):
_clean_up_existing_socket(socket_path)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(socket_path)
sock.listen(1)
logging.info('Listening on socket at {}'.format(socket_path))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket(SOCKET_PATH)
if __name__ == '__main__':
main()
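
What the parameter buys, as a hypothetical test (not part of the repo): the socket path can now point at a throwaway location instead of the global `SOCKET_PATH`.

```python
import os
import tempfile

def test_clean_up_existing_socket():
    path = os.path.join(tempfile.mkdtemp(), 'dusty-test.sock')
    open(path, 'w').close()           # simulate a stale socket file
    _clean_up_existing_socket(path)
    assert not os.path.exists(path)
```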
|
fca363dec1ff73e34e25084322d5a31dd6fbc1ee | simplestatistics/statistics/coefficient_of_variation.py | simplestatistics/statistics/coefficient_of_variation.py | from .standard_deviation import standard_deviation
from .mean import mean
def coefficient_of_variation(data):
"""
The `coefficient_of_variation`_ is the ratio of the standard deviation to the mean
.. _`coefficient of variation`: https://en.wikipedia.org/wiki/Coefficient_of_variation
Args:
data: A list of numerical objects.
Returns:
A float object.
Examples:
>>> coefficient_of_variation([1, 2, 3])
0.5
>>> coefficient_of_variation([1, 2, 3, 4])
0.5163977794943222
>>> coefficient_of_variation([-1, 0, 1, 2, 3, 4])
1.247219128924647
"""
return standard_deviation(data) / mean(data)
| from .standard_deviation import standard_deviation
from .mean import mean
def coefficient_of_variation(data, sample = True):
"""
The `coefficient of variation`_ is the ratio of the standard deviation to the mean.
.. _`coefficient of variation`: https://en.wikipedia.org/wiki/Coefficient_of_variation
Args:
data: A list of numerical objects.
Returns:
A float object.
Examples:
>>> coefficient_of_variation([1, 2, 3])
0.5
        >>> coefficient_of_variation([1, 2, 3], False)
0.408248290463863
>>> coefficient_of_variation([1, 2, 3, 4])
0.5163977794943222
>>> coefficient_of_variation([-1, 0, 1, 2, 3, 4])
1.247219128924647
"""
return standard_deviation(data, sample) / mean(data)
| Add sample param to CV function | Add sample param to CV function
Boolean param to make possible to calculate coefficient of variation
for population (default is sample).
| Python | unknown | tmcw/simple-statistics-py,sheriferson/simplestatistics,sheriferson/simple-statistics-py | from .standard_deviation import standard_deviation
from .mean import mean
- def coefficient_of_variation(data):
+ def coefficient_of_variation(data, sample = True):
"""
- The `coefficient_of_variation`_ is the ratio of the standard deviation to the mean
+ The `coefficient of variation`_ is the ratio of the standard deviation to the mean.
+
.. _`coefficient of variation`: https://en.wikipedia.org/wiki/Coefficient_of_variation
Args:
data: A list of numerical objects.
Returns:
A float object.
Examples:
>>> coefficient_of_variation([1, 2, 3])
0.5
+         >>> coefficient_of_variation([1, 2, 3], False)
+ 0.408248290463863
>>> coefficient_of_variation([1, 2, 3, 4])
0.5163977794943222
>>> coefficient_of_variation([-1, 0, 1, 2, 3, 4])
1.247219128924647
"""
- return standard_deviation(data) / mean(data)
+ return standard_deviation(data, sample) / mean(data)
-
+ | Add sample param to CV function | ## Code Before:
from .standard_deviation import standard_deviation
from .mean import mean
def coefficient_of_variation(data):
"""
The `coefficient_of_variation`_ is the ratio of the standard deviation to the mean
.. _`coefficient of variation`: https://en.wikipedia.org/wiki/Coefficient_of_variation
Args:
data: A list of numerical objects.
Returns:
A float object.
Examples:
>>> coefficient_of_variation([1, 2, 3])
0.5
>>> coefficient_of_variation([1, 2, 3, 4])
0.5163977794943222
>>> coefficient_of_variation([-1, 0, 1, 2, 3, 4])
1.247219128924647
"""
return standard_deviation(data) / mean(data)
## Instruction:
Add sample param to CV function
## Code After:
from .standard_deviation import standard_deviation
from .mean import mean
def coefficient_of_variation(data, sample = True):
"""
The `coefficient of variation`_ is the ratio of the standard deviation to the mean.
.. _`coefficient of variation`: https://en.wikipedia.org/wiki/Coefficient_of_variation
Args:
data: A list of numerical objects.
Returns:
A float object.
Examples:
>>> coefficient_of_variation([1, 2, 3])
0.5
        >>> coefficient_of_variation([1, 2, 3], False)
0.408248290463863
>>> coefficient_of_variation([1, 2, 3, 4])
0.5163977794943222
>>> coefficient_of_variation([-1, 0, 1, 2, 3, 4])
1.247219128924647
"""
return standard_deviation(data, sample) / mean(data)
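
Working the two doctest values by hand for `[1, 2, 3]` (mean 2, squared deviations sum to 2):

```python
import math

mean = 2.0
print(math.sqrt(2 / 2.0) / mean)  # sample (n - 1 = 2):  0.5
print(math.sqrt(2 / 3.0) / mean)  # population (n = 3):  0.408248290463863
```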
|
b62415c19459d9e5819b82f464731b166157811d | gym/envs/tests/test_registration.py | gym/envs/tests/test_registration.py | from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in e.message, 'Unexpected message: {}'.format(e)
else:
assert False
| from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| Fix exception message formatting in Python3 | Fix exception message formatting in Python3
| Python | mit | d1hotpep/openai_gym,machinaut/gym,machinaut/gym,d1hotpep/openai_gym,dianchen96/gym,Farama-Foundation/Gymnasium,dianchen96/gym,Farama-Foundation/Gymnasium | from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
- assert 'malformed environment ID' in e.message, 'Unexpected message: {}'.format(e)
+ assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| Fix exception message formatting in Python3 | ## Code Before:
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in e.message, 'Unexpected message: {}'.format(e)
else:
assert False
## Instruction:
Fix exception message formatting in Python3
## Code After:
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
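
The portability point in one line (standalone illustration): `BaseException.message` was removed in Python 3, while `'{}'.format(e)` works on both sides.

```python
try:
    raise ValueError('malformed environment ID: x')
except ValueError as e:
    assert 'malformed environment ID' in '{}'.format(e)
```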
|
c0ed918e09bcb0c0eb1aec20e375c7da8c7466ef | tests/NongeneratingSymbolsRemove/RecursiveTest.py | tests/NongeneratingSymbolsRemove/RecursiveTest.py |
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import *
class RecursiveTest(TestCase):
pass
if __name__ == '__main__':
main()
|
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal):
pass
class B(Nonterminal):
pass
class C(Nonterminal):
pass
class D(Nonterminal):
pass
class E(Nonterminal):
pass
class RuleAto0B(Rule):
rule = ([A], [0, B])
class RuleBto1(Rule):
fromSymbol = B
toSymbol = 1
class RuleCto1D(Rule):
rule = ([C], [1, D])
class RuleDto0E(Rule):
rule = ([D], [0, E])
class RuleEto0C(Rule):
rule = ([E], [0, C])
class RecursiveTest(TestCase):
def __init__(self, methodName):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B],
rules=[RuleAto0B, RuleBto1, RuleCto1D, RuleDto0E, RuleEto0C])
if __name__ == '__main__':
main()
| Add grammar for test of recursive grammar | Add grammar for test of recursive grammar
| Python | mit | PatrikValkovic/grammpy |
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import *
+ class A(Nonterminal):
+ pass
+
+
+ class B(Nonterminal):
+ pass
+
+
+ class C(Nonterminal):
+ pass
+
+
+ class D(Nonterminal):
+ pass
+
+
+ class E(Nonterminal):
+ pass
+
+
+ class RuleAto0B(Rule):
+ rule = ([A], [0, B])
+
+
+ class RuleBto1(Rule):
+ fromSymbol = B
+ toSymbol = 1
+
+
+ class RuleCto1D(Rule):
+ rule = ([C], [1, D])
+
+
+ class RuleDto0E(Rule):
+ rule = ([D], [0, E])
+
+
+ class RuleEto0C(Rule):
+ rule = ([E], [0, C])
+
+
class RecursiveTest(TestCase):
- pass
+ def __init__(self, methodName):
+ super().__init__(methodName)
+ self.g = Grammar()
+
+ def setUp(self):
+ self.g = Grammar(terminals=[0, 1],
+ nonterminals=[A, B],
+ rules=[RuleAto0B, RuleBto1, RuleCto1D, RuleDto0E, RuleEto0C])
if __name__ == '__main__':
main()
| Add grammar for test of recursive grammar | ## Code Before:
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import *
class RecursiveTest(TestCase):
pass
if __name__ == '__main__':
main()
## Instruction:
Add grammar for test of recursive grammar
## Code After:
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal):
pass
class B(Nonterminal):
pass
class C(Nonterminal):
pass
class D(Nonterminal):
pass
class E(Nonterminal):
pass
class RuleAto0B(Rule):
rule = ([A], [0, B])
class RuleBto1(Rule):
fromSymbol = B
toSymbol = 1
class RuleCto1D(Rule):
rule = ([C], [1, D])
class RuleDto0E(Rule):
rule = ([D], [0, E])
class RuleEto0C(Rule):
rule = ([E], [0, C])
class RecursiveTest(TestCase):
def __init__(self, methodName):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B],
rules=[RuleAto0B, RuleBto1, RuleCto1D, RuleDto0E, RuleEto0C])
if __name__ == '__main__':
main()
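
Why these rules make a recursive, nongenerating cluster (reasoning only, no grammpy API assumed): C, D and E feed each other without ever reaching a terminal-only rule, while A and B bottom out through `RuleBto1`. Note the fixture registers only A and B in `nonterminals`, even though the rules mention C, D and E.

```python
# the cycle, written out:
#   C -> 1 D -> 1 0 E -> 1 0 0 C -> ...  (never terminates)
derivation = ['C', '1 D', '1 0 E', '1 0 0 C']
assert derivation[-1].endswith('C')  # the cycle closes back on C
```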
|
1e078b88b4eecaa5a9d0a2ada9a64237fe3c4f09 | users/management/commands/social_auth_migrate.py | users/management/commands/social_auth_migrate.py | from allauth.socialaccount.models import SocialAccount
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from social_django.models import UserSocialAuth
class Command(BaseCommand):
help = 'Migrate allauth social logins to social auth'
def handle(self, *args, **options):
self.stdout.write(self.style.SUCCESS('Going through all SocialAccount objects...'))
# Retrieve existing objects
providers = {}
for usa in UserSocialAuth.objects.all():
provider = providers.setdefault(usa.provider, {})
provider[usa.user_id] = usa
for sa in SocialAccount.objects.all():
provider = providers.setdefault(sa.provider, {})
if sa.user_id in provider:
continue
provider[sa.user_id] = UserSocialAuth.objects.create(
user=sa.user,
provider=sa.provider,
uid=sa.uid,
extra_data=sa.extra_data,
)
self.stdout.write(self.style.SUCCESS('Added. (provider: {}, uid: {})'.format(sa.provider, sa.uid)))
self.stdout.write(self.style.SUCCESS('Done.'))
| from allauth.socialaccount.models import SocialAccount, SocialApp
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from social_django.models import UserSocialAuth
class Command(BaseCommand):
help = 'Migrate allauth social logins to social auth'
def add_arguments(self, parser):
parser.add_argument('--apps', action='store_true', dest='apps',
help='Print social app keys and secrets')
parser.add_argument('--accounts', action='store_true', dest='accounts',
help='Migrate accounts')
def migrate_accounts(self):
self.stdout.write(self.style.SUCCESS('Going through all SocialAccount objects...'))
# Retrieve existing objects
providers = {}
for usa in UserSocialAuth.objects.all():
provider = providers.setdefault(usa.provider, {})
provider[usa.user_id] = usa
for sa in SocialAccount.objects.all():
provider = providers.setdefault(sa.provider, {})
if sa.user_id in provider:
continue
provider[sa.user_id] = UserSocialAuth.objects.create(
user=sa.user,
provider=sa.provider,
uid=sa.uid,
extra_data=sa.extra_data,
)
self.stdout.write(self.style.SUCCESS('Added. (provider: {}, uid: {})'.format(sa.provider, sa.uid)))
self.stdout.write(self.style.SUCCESS('Done.'))
def migrate_apps(self):
for app in SocialApp.objects.all():
app_id = app.provider.upper()
print("SOCIAL_AUTH_%s_KEY = '%s'" % (app_id, app.client_id))
print("SOCIAL_AUTH_%s_SECRET = '%s'" % (app_id, app.secret))
print()
def handle(self, *args, **options):
if options['apps']:
self.migrate_apps()
if options['accounts']:
self.migrate_accounts()
| Implement app secret printing to social_auth migration tool | Implement app secret printing to social_auth migration tool
| Python | mit | mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo | - from allauth.socialaccount.models import SocialAccount
+ from allauth.socialaccount.models import SocialAccount, SocialApp
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from social_django.models import UserSocialAuth
class Command(BaseCommand):
help = 'Migrate allauth social logins to social auth'
- def handle(self, *args, **options):
+ def add_arguments(self, parser):
+ parser.add_argument('--apps', action='store_true', dest='apps',
+ help='Print social app keys and secrets')
+ parser.add_argument('--accounts', action='store_true', dest='accounts',
+ help='Migrate accounts')
+
+ def migrate_accounts(self):
self.stdout.write(self.style.SUCCESS('Going through all SocialAccount objects...'))
-
# Retrieve existing objects
providers = {}
for usa in UserSocialAuth.objects.all():
provider = providers.setdefault(usa.provider, {})
provider[usa.user_id] = usa
for sa in SocialAccount.objects.all():
provider = providers.setdefault(sa.provider, {})
if sa.user_id in provider:
continue
provider[sa.user_id] = UserSocialAuth.objects.create(
user=sa.user,
provider=sa.provider,
uid=sa.uid,
extra_data=sa.extra_data,
)
self.stdout.write(self.style.SUCCESS('Added. (provider: {}, uid: {})'.format(sa.provider, sa.uid)))
self.stdout.write(self.style.SUCCESS('Done.'))
+ def migrate_apps(self):
+ for app in SocialApp.objects.all():
+ app_id = app.provider.upper()
+ print("SOCIAL_AUTH_%s_KEY = '%s'" % (app_id, app.client_id))
+ print("SOCIAL_AUTH_%s_SECRET = '%s'" % (app_id, app.secret))
+ print()
+
+ def handle(self, *args, **options):
+ if options['apps']:
+ self.migrate_apps()
+ if options['accounts']:
+ self.migrate_accounts()
+ | Implement app secret printing to social_auth migration tool | ## Code Before:
from allauth.socialaccount.models import SocialAccount
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from social_django.models import UserSocialAuth
class Command(BaseCommand):
help = 'Migrate allauth social logins to social auth'
def handle(self, *args, **options):
self.stdout.write(self.style.SUCCESS('Going through all SocialAccount objects...'))
# Retrieve existing objects
providers = {}
for usa in UserSocialAuth.objects.all():
provider = providers.setdefault(usa.provider, {})
provider[usa.user_id] = usa
for sa in SocialAccount.objects.all():
provider = providers.setdefault(sa.provider, {})
if sa.user_id in provider:
continue
provider[sa.user_id] = UserSocialAuth.objects.create(
user=sa.user,
provider=sa.provider,
uid=sa.uid,
extra_data=sa.extra_data,
)
self.stdout.write(self.style.SUCCESS('Added. (provider: {}, uid: {})'.format(sa.provider, sa.uid)))
self.stdout.write(self.style.SUCCESS('Done.'))
## Instruction:
Implement app secret printing to social_auth migration tool
## Code After:
from allauth.socialaccount.models import SocialAccount, SocialApp
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from social_django.models import UserSocialAuth
class Command(BaseCommand):
help = 'Migrate allauth social logins to social auth'
def add_arguments(self, parser):
parser.add_argument('--apps', action='store_true', dest='apps',
help='Print social app keys and secrets')
parser.add_argument('--accounts', action='store_true', dest='accounts',
help='Migrate accounts')
def migrate_accounts(self):
self.stdout.write(self.style.SUCCESS('Going through all SocialAccount objects...'))
# Retrieve existing objects
providers = {}
for usa in UserSocialAuth.objects.all():
provider = providers.setdefault(usa.provider, {})
provider[usa.user_id] = usa
for sa in SocialAccount.objects.all():
provider = providers.setdefault(sa.provider, {})
if sa.user_id in provider:
continue
provider[sa.user_id] = UserSocialAuth.objects.create(
user=sa.user,
provider=sa.provider,
uid=sa.uid,
extra_data=sa.extra_data,
)
self.stdout.write(self.style.SUCCESS('Added. (provider: {}, uid: {})'.format(sa.provider, sa.uid)))
self.stdout.write(self.style.SUCCESS('Done.'))
def migrate_apps(self):
for app in SocialApp.objects.all():
app_id = app.provider.upper()
print("SOCIAL_AUTH_%s_KEY = '%s'" % (app_id, app.client_id))
print("SOCIAL_AUTH_%s_SECRET = '%s'" % (app_id, app.secret))
print()
def handle(self, *args, **options):
if options['apps']:
self.migrate_apps()
if options['accounts']:
self.migrate_accounts()
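A hedged usage sketch for the new flags, driving the command through Django's standard call_command helper (the invocation is illustrative, not part of the commit):

from django.core.management import call_command

call_command('social_auth_migrate', apps=True)      # print SOCIAL_AUTH_*_KEY / _SECRET lines
call_command('social_auth_migrate', accounts=True)  # copy SocialAccount rows to UserSocialAuth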
|
2d067d0dbf4f04203c9bda2d8fb48d58fae3913d | datapoints/sql_queries.py | datapoints/sql_queries.py |
show_region_aggregation = '''
SELECT
i.name
, SUM(d.value) as value
, r.full_name
FROM region_relationship rr
INNER JOIN datapoint d
ON rr.region_1_id = d.region_id
INNER JOIN indicator i
ON d.indicator_id = i.id
INNER JOIN region r
ON rr.region_0_id = r.id
GROUP BY r.full_name, i.name,i.id ,d.campaign_id
'''
|
show_region_aggregation = '''
SELECT
i.name
, SUM(d.value) as value
, r.name
FROM region_relationship rr
INNER JOIN datapoint d
ON rr.region_1_id = d.region_id
INNER JOIN indicator i
ON d.indicator_id = i.id
INNER JOIN region r
ON rr.region_0_id = r.id
GROUP BY r.name, i.name,i.id ,d.campaign_id
'''
| Fix a bug in the region aggregation query. | Fix a bug in the region aggregation query.
There is no full_name column for regions; it is just name.
| Python | agpl-3.0 | SeedScientific/polio,unicef/rhizome,unicef/polio,unicef/polio,unicef/rhizome,unicef/rhizome,SeedScientific/polio,unicef/rhizome,SeedScientific/polio,SeedScientific/polio,unicef/polio,unicef/polio,SeedScientific/polio |
show_region_aggregation = '''
SELECT
i.name
, SUM(d.value) as value
- , r.full_name
+ , r.name
FROM region_relationship rr
INNER JOIN datapoint d
ON rr.region_1_id = d.region_id
INNER JOIN indicator i
ON d.indicator_id = i.id
INNER JOIN region r
ON rr.region_0_id = r.id
- GROUP BY r.full_name, i.name,i.id ,d.campaign_id
+ GROUP BY r.name, i.name,i.id ,d.campaign_id
'''
| Fix a bug in the region aggregation query. | ## Code Before:
show_region_aggregation = '''
SELECT
i.name
, SUM(d.value) as value
, r.full_name
FROM region_relationship rr
INNER JOIN datapoint d
ON rr.region_1_id = d.region_id
INNER JOIN indicator i
ON d.indicator_id = i.id
INNER JOIN region r
ON rr.region_0_id = r.id
GROUP BY r.full_name, i.name,i.id ,d.campaign_id
'''
## Instruction:
Fix a bug in the region aggregation query.
## Code After:
show_region_aggregation = '''
SELECT
i.name
, SUM(d.value) as value
, r.name
FROM region_relationship rr
INNER JOIN datapoint d
ON rr.region_1_id = d.region_id
INNER JOIN indicator i
ON d.indicator_id = i.id
INNER JOIN region r
ON rr.region_0_id = r.id
GROUP BY r.name, i.name,i.id ,d.campaign_id
'''
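For illustration only (not from the commit), the corrected query would typically be run through Django's raw cursor:

from django.db import connection
from datapoints.sql_queries import show_region_aggregation

with connection.cursor() as cursor:
    cursor.execute(show_region_aggregation)
    rows = cursor.fetchall()  # tuples of (indicator name, summed value, region name)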
|
9a9ecde6f88a6c969f23dbcfc5bbc7e611f7f138 | version_info/get_version.py | version_info/get_version.py | import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield (name,) + version_func(path)
| import collections
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
VersionSpec = collections.namedtuple('VersionSpec', ('name', 'tag', 'commit'))
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield VersionSpec(name, *version_func(path))
| Make find_versions return a namedtuple as documented | Make find_versions return a namedtuple as documented
| Python | mit | TyMaszWeb/python-version-info | + import collections
+
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
+
+
+ VersionSpec = collections.namedtuple('VersionSpec', ('name', 'tag', 'commit'))
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
- yield (name,) + version_func(path)
+ yield VersionSpec(name, *version_func(path))
| Make find_versions return a namedtuple as documented | ## Code Before:
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield (name,) + version_func(path)
## Instruction:
Make find_versions return a namedtuple as documented
## Code After:
import collections
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
VersionSpec = collections.namedtuple('VersionSpec', ('name', 'tag', 'commit'))
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield VersionSpec(name, *version_func(path))
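A small sketch of the documented call pattern; attribute access by field name is exactly what the namedtuple change enables (the paths below are made-up examples):

repos = [('app', 'git', '/srv/app'), ('lib', 'git', '/srv/lib')]
for spec in find_versions(repos):
    # spec.tag is None whenever HEAD does not sit on a tagged commit
    print(spec.name, spec.tag, spec.commit)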
|
01036133ed749d96a74bafb6b3f8670c06c63a84 | 1selfOpenDashboardCommand.py | 1selfOpenDashboardCommand.py | import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.1self.co"
class GoTo1selfDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "1self.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
VERSION = SETTINGS.get("VERSION")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s&source=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
print(url)
webbrowser.open_new_tab(url) | import sublime, sublime_plugin, webbrowser
QD_URL = "http://www.1self.co"
class GoTo1selfDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "1self.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
VERSION = SETTINGS.get("VERSION")
qd_url = QD_URL
url = "%(qd_url)s/?streamid=%(stream_id)s&readToken=%(read_token)s&appid=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
print(url)
webbrowser.open_new_tab(url) | Change landing URLs to website | Change landing URLs to website
| Python | apache-2.0 | 1self/sublime-text-plugin,1self/sublime-text-plugin,1self/sublime-text-plugin | import sublime, sublime_plugin, webbrowser
- QD_URL = "https://app.1self.co"
+ QD_URL = "http://www.1self.co"
class GoTo1selfDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "1self.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
VERSION = SETTINGS.get("VERSION")
qd_url = QD_URL
- url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s&source=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
+ url = "%(qd_url)s/?streamid=%(stream_id)s&readToken=%(read_token)s&appid=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
print(url)
webbrowser.open_new_tab(url) | Change landing URLs to website | ## Code Before:
import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.1self.co"
class GoTo1selfDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "1self.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
VERSION = SETTINGS.get("VERSION")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s&source=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
print(url)
webbrowser.open_new_tab(url)
## Instruction:
Change landing URLs to website
## Code After:
import sublime, sublime_plugin, webbrowser
QD_URL = "http://www.1self.co"
class GoTo1selfDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "1self.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
VERSION = SETTINGS.get("VERSION")
qd_url = QD_URL
url = "%(qd_url)s/?streamid=%(stream_id)s&readToken=%(read_token)s&appid=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
print(url)
webbrowser.open_new_tab(url) |
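To see what the rewritten template produces, a throwaway evaluation with placeholder values (illustrative only):

qd_url, stream_id, read_token, VERSION = 'http://www.1self.co', 's1', 't1', '0.1'
url = "%(qd_url)s/?streamid=%(stream_id)s&readToken=%(read_token)s&appid=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
# -> http://www.1self.co/?streamid=s1&readToken=t1&appid=app-id-598358b6aacda229634d443c9539662b&version=0.1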
c5609fe1b48cdd5740215c1d0783eaafdfe2e76b | listen/__init__.py | listen/__init__.py |
from __future__ import print_function # This API requires Python 2.7 or more recent
import sys
if sys.version < "2.7.0":
print("listen requires Python 2.7 or more recent")
sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.0"
|
# Prepare for deprecation of versions < 2.7
#from __future__ import print_function # This API requires Python 2.7 or more recent
#import sys
#if sys.version < "2.7.0":
# print("listen requires Python 2.7 or more recent")
# sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.1"
| Remove requirement on python > 2.7 | Remove requirement on python > 2.7
| Python | mit | antevens/listen,antevens/listen |
+ # Prepare for deprecation of versions < 2.7
- from __future__ import print_function # This API requires Python 2.7 or more recent
+ #from __future__ import print_function # This API requires Python 2.7 or more recent
- import sys
+ #import sys
- if sys.version < "2.7.0":
+ #if sys.version < "2.7.0":
- print("listen requires Python 2.7 or more recent")
+ # print("listen requires Python 2.7 or more recent")
- sys.exit(1)
+ # sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
- __version__ = "0.1.0"
+ __version__ = "0.1.1"
| Remove requirement on python > 2.7 | ## Code Before:
from __future__ import print_function # This API requires Python 2.7 or more recent
import sys
if sys.version < "2.7.0":
print("listen requires Python 2.7 or more recent")
sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.0"
## Instruction:
Remove requirement on python > 2.7
## Code After:
# Prepare for deprication of versions < 2.7
#from __future__ import print_function # This API requires Python 2.7 or more recent
#import sys
#if sys.version < "2.7.0":
# print("listen requires Python 2.7 or more recent")
# sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.1"
|
1b179405245bc7d7d6157528bd64e2b399491090 | quantecon/optimize/__init__.py | quantecon/optimize/__init__.py |
from .scalar_maximization import brent_max
from .root_finding import *
|
from .scalar_maximization import brent_max
from .root_finding import newton, newton_halley, newton_secant, bisect, brentq
| Fix import to list items | Fix import to list items
| Python | bsd-3-clause | oyamad/QuantEcon.py,QuantEcon/QuantEcon.py,oyamad/QuantEcon.py,QuantEcon/QuantEcon.py |
from .scalar_maximization import brent_max
- from .root_finding import *
+ from .root_finding import newton, newton_halley, newton_secant, bisect, brentq
| Fix import to list items | ## Code Before:
from .scalar_maximization import brent_max
from .root_finding import *
## Instruction:
Fix import to list items
## Code After:
from .scalar_maximization import brent_max
from .root_finding import newton, newton_halley, newton_secant, bisect, brentq
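After the change, only the five listed root-finding names (plus brent_max) are re-exported, so downstream imports stay explicit; a quick check, as a sketch:

from quantecon.optimize import newton, newton_halley, newton_secant, bisect, brentq
from quantecon.optimize import brent_max
# anything else that previously leaked in via `from .root_finding import *` must now
# be imported from quantecon.optimize.root_finding directly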
|
fa7172a5e3231e738d85df3baba130fdec7497d1 | derrida/outwork/views.py | derrida/outwork/views.py | from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=True)
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
| from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean, Raw
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=Raw(True))
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
| Fix outwork list view to properly filter on published=true in Solr | Fix outwork list view to properly filter on published=true in Solr
| Python | apache-2.0 | Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django | from django.views.generic import ListView
from haystack.query import SearchQuerySet
- from haystack.inputs import Clean
+ from haystack.inputs import Clean, Raw
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
- sqs = SearchQuerySet().models(self.model).filter(published=True)
+ sqs = SearchQuerySet().models(self.model).filter(published=Raw(True))
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
| Fix outwork list view to properly filter on published=true in Solr | ## Code Before:
from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=True)
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
## Instruction:
Fix outwork list view to properly filter on published=true in Solr
## Code After:
from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean, Raw
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=Raw(True))
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
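Roughly why Raw is needed here, per the commit subject: haystack's default input processing apparently did not turn a bare Python True into the published:true clause the Solr index stores, while Raw hands the value to the backend untouched (sketch, not from the commit):

from haystack.query import SearchQuerySet
from haystack.inputs import Raw

sqs = SearchQuerySet().filter(published=Raw(True))   # reaches the Solr backend as-is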
|
0c8ab03600fa806a109861f0e560e3b3a6850a66 | nbgrader/apps/formgradeapp.py | nbgrader/apps/formgradeapp.py | from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
from nbgrader.templates import get_template_path
aliases = {}
aliases.update(base_aliases)
aliases.update({
'regexp': 'FindStudentID.regexp'
})
flags = {}
flags.update(base_flags)
flags.update({
'serve': (
{'FormgradeApp': {'postprocessor_class': 'nbgrader.postprocessors.ServeFormGrader'}},
"Run the form grading server"
)
})
class FormgradeApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-formgrade')
description = Unicode(u'Grade a notebook using an HTML form')
aliases = aliases
flags = flags
student_id = Unicode(u'', config=True)
def _export_format_default(self):
return 'html'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.FindStudentID'
]
self.extra_config.Exporter.template_file = 'formgrade'
self.extra_config.Exporter.template_path = ['.', get_template_path()]
self.config.merge(self.extra_config)
| from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
from nbgrader.templates import get_template_path
aliases = {}
aliases.update(base_aliases)
aliases.update({
'regexp': 'FindStudentID.regexp'
})
flags = {}
flags.update(base_flags)
flags.update({
'serve': (
{'FormgradeApp': {'postprocessor_class': 'nbgrader.postprocessors.ServeFormGrader'}},
"Run the form grading server"
)
})
class FormgradeApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-formgrade')
description = Unicode(u'Grade a notebook using an HTML form')
aliases = aliases
flags = flags
ipython_dir = "/tmp"
student_id = Unicode(u'', config=True)
def _export_format_default(self):
return 'html'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.FindStudentID'
]
self.extra_config.Exporter.template_file = 'formgrade'
self.extra_config.Exporter.template_path = ['.', get_template_path()]
self.config.merge(self.extra_config)
| Use default IPython profile when converting to HTML | Use default IPython profile when converting to HTML
| Python | bsd-3-clause | ellisonbg/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,jdfreder/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,EdwardJKim/nbgrader,MatKallada/nbgrader,alope107/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,dementrock/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,alope107/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader | from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
from nbgrader.templates import get_template_path
aliases = {}
aliases.update(base_aliases)
aliases.update({
'regexp': 'FindStudentID.regexp'
})
flags = {}
flags.update(base_flags)
flags.update({
'serve': (
{'FormgradeApp': {'postprocessor_class': 'nbgrader.postprocessors.ServeFormGrader'}},
"Run the form grading server"
)
})
class FormgradeApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-formgrade')
description = Unicode(u'Grade a notebook using an HTML form')
aliases = aliases
flags = flags
+ ipython_dir = "/tmp"
student_id = Unicode(u'', config=True)
def _export_format_default(self):
return 'html'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.FindStudentID'
]
self.extra_config.Exporter.template_file = 'formgrade'
self.extra_config.Exporter.template_path = ['.', get_template_path()]
self.config.merge(self.extra_config)
| Use default IPython profile when converting to HTML | ## Code Before:
from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
from nbgrader.templates import get_template_path
aliases = {}
aliases.update(base_aliases)
aliases.update({
'regexp': 'FindStudentID.regexp'
})
flags = {}
flags.update(base_flags)
flags.update({
'serve': (
{'FormgradeApp': {'postprocessor_class': 'nbgrader.postprocessors.ServeFormGrader'}},
"Run the form grading server"
)
})
class FormgradeApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-formgrade')
description = Unicode(u'Grade a notebook using an HTML form')
aliases = aliases
flags = flags
student_id = Unicode(u'', config=True)
def _export_format_default(self):
return 'html'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.FindStudentID'
]
self.extra_config.Exporter.template_file = 'formgrade'
self.extra_config.Exporter.template_path = ['.', get_template_path()]
self.config.merge(self.extra_config)
## Instruction:
Use default IPython profile when converting to HTML
## Code After:
from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
from nbgrader.templates import get_template_path
aliases = {}
aliases.update(base_aliases)
aliases.update({
'regexp': 'FindStudentID.regexp'
})
flags = {}
flags.update(base_flags)
flags.update({
'serve': (
{'FormgradeApp': {'postprocessor_class': 'nbgrader.postprocessors.ServeFormGrader'}},
"Run the form grading server"
)
})
class FormgradeApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-formgrade')
description = Unicode(u'Grade a notebook using an HTML form')
aliases = aliases
flags = flags
ipython_dir = "/tmp"
student_id = Unicode(u'', config=True)
def _export_format_default(self):
return 'html'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.FindStudentID'
]
self.extra_config.Exporter.template_file = 'formgrade'
self.extra_config.Exporter.template_path = ['.', get_template_path()]
self.config.merge(self.extra_config)
|
037e15f383c326f1f4e7de59bc3ec3520ac6ce40 | pystachio/__init__.py | pystachio/__init__.py | __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| Add check for minimum Python version | Add check for minimum Python version
| Python | mit | wickman/pystachio | __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
+
+ import sys
+ if sys.version_info < (2, 6, 5):
+ raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| Add check for minimum Python version | ## Code Before:
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
## Instruction:
Add check for minimum Python version
## Code After:
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
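The guard leans on ordinary tuple comparison, which is elementwise and avoids the lexicographic trap of comparing version strings (where "2.10.0" < "2.7.0"); a tiny stdlib-only illustration:

import sys

print((2, 6, 4) < (2, 6, 5))   # True  -> would raise the ImportError above
print((2, 7, 0) < (2, 6, 5))   # False -> import proceeds
print(sys.version_info[:3])    # e.g. (2, 7, 3) on a supported interpreter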
|
d5b8018d1d722f3b1e980425af79934265b0f3eb | tests/test_navigation.py | tests/test_navigation.py | def test_right_arrows(page):
page.goto("index.html")
while(True):
# Keeps going to the next page until there is no right arrow
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
else:
break
    # TODO make a similar test but going from the last page
# to the previous one until it gets to the first one
| def get_menu_titles(page) -> list:
page.goto("index.html")
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
menu_titles = []
for i in menu_list:
menu_item = i.as_element().inner_text()
menu_titles.append(menu_item)
return menu_titles
def test_check_titles(page):
menu_list = get_menu_titles(page)
page.goto("index.html")
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title()
page_title = page_title.split(" — ")[0]
assert page_title == menu_item
else:
break
| Implement assertions and a for instead of a while loop | Implement assertions and a for instead of a while loop
| Python | agpl-3.0 | PyAr/PyZombis,PyAr/PyZombis,PyAr/PyZombis | - def test_right_arrows(page):
+ def get_menu_titles(page) -> list:
page.goto("index.html")
- while(True):
- # Keeps going to the next page until there is no right arrow
+ page.wait_for_load_state()
+ menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
+
+ menu_titles = []
+ for i in menu_list:
+ menu_item = i.as_element().inner_text()
+ menu_titles.append(menu_item)
+
+ return menu_titles
+
+
+ def test_check_titles(page):
+ menu_list = get_menu_titles(page)
+ page.goto("index.html")
+ page.wait_for_load_state()
+
+ for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
+ page_title = page.title()
+ page_title = page_title.split(" — ")[0]
+ assert page_title == menu_item
else:
break
-     # TODO make a similar test but going from the last page
- # to the previous one until it gets to the first one
- | Implement assertions and a for instead of a while loop | ## Code Before:
def test_right_arrows(page):
page.goto("index.html")
while(True):
# Keeps going to the next page until there is no right arrow
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
else:
break
    # TODO make a similar test but going from the last page
# to the previous one until it gets to the first one
## Instruction:
Implement assertions and a for instead of a while loop
## Code After:
def get_menu_titles(page) -> list:
page.goto("index.html")
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
menu_titles = []
for i in menu_list:
menu_item = i.as_element().inner_text()
menu_titles.append(menu_item)
return menu_titles
def test_check_titles(page):
menu_list = get_menu_titles(page)
page.goto("index.html")
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title()
page_title = page_title.split(" — ")[0]
assert page_title == menu_item
else:
break
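The new assertion hinges on the page <title> following the usual Sphinx "page title, separator, project name" shape; a one-line sanity check of the split used above (hypothetical title string):

assert "Tutorial — PyZombis".split(" — ")[0] == "Tutorial"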
|
c709c58fc128076af5f58d33dcd0983436573d79 | tests/test_parsingapi.py | tests/test_parsingapi.py | from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import get_plugin_by_name, get_plugins
from flexget.plugins.parsers import plugin_parsing
class TestParsingAPI(object):
def test_all_types_handled(self):
declared_types = set(plugin_parsing.PARSER_TYPES)
method_handlers = set(m[6:] for m in dir(get_plugin_by_name('parsing').instance) if m.startswith('parse_'))
assert set(declared_types) == set(method_handlers), \
'declared parser types: %s, handled types: %s' % (declared_types, method_handlers)
def test_parsing_plugins_have_parse_methods(self):
for parser_type in plugin_parsing.PARSER_TYPES:
for plugin in get_plugins(group='%s_parser' % parser_type):
assert hasattr(plugin.instance, 'parse_%s' % parser_type), \
'{type} parsing plugin {name} has no parse_{type} method'.format(type=parser_type, name=plugin.name)
| from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import get_plugin_by_name, get_plugins
from flexget.plugins.parsers import plugin_parsing
class TestParsingAPI(object):
def test_all_types_handled(self):
declared_types = set(plugin_parsing.PARSER_TYPES)
method_handlers = set(m[6:] for m in dir(get_plugin_by_name('parsing').instance) if m.startswith('parse_'))
assert set(declared_types) == set(method_handlers), \
'declared parser types: %s, handled types: %s' % (declared_types, method_handlers)
def test_parsing_plugins_have_parse_methods(self):
for parser_type in plugin_parsing.PARSER_TYPES:
for plugin in get_plugins(group='%s_parser' % parser_type):
assert hasattr(plugin.instance, 'parse_%s' % parser_type), \
'{type} parsing plugin {name} has no parse_{type} method'.format(type=parser_type, name=plugin.name)
class TestTaskParsing(object):
config = """
tasks:
explicit_parser:
parsing:
movie: guessit
series: guessit
"""
def test_selected_parser_cleared(self, manager, execute_task):
# make sure when a non-default parser is installed on a task, it doesn't affect other tasks
execute_task('explicit_parser')
assert not plugin_parsing.selected_parsers
| Add a test to verify plugin_parsing clears selected parsers after task | Add a test to verify plugin_parsing clears selected parsers after task
| Python | mit | tobinjt/Flexget,Flexget/Flexget,jawilson/Flexget,sean797/Flexget,OmgOhnoes/Flexget,poulpito/Flexget,antivirtel/Flexget,ianstalk/Flexget,JorisDeRieck/Flexget,tarzasai/Flexget,Pretagonist/Flexget,malkavi/Flexget,dsemi/Flexget,sean797/Flexget,tobinjt/Flexget,Pretagonist/Flexget,crawln45/Flexget,Danfocus/Flexget,tobinjt/Flexget,oxc/Flexget,gazpachoking/Flexget,qk4l/Flexget,jawilson/Flexget,ianstalk/Flexget,poulpito/Flexget,drwyrm/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,gazpachoking/Flexget,Danfocus/Flexget,sean797/Flexget,Flexget/Flexget,qk4l/Flexget,jacobmetrick/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,jawilson/Flexget,oxc/Flexget,Flexget/Flexget,qvazzler/Flexget,Danfocus/Flexget,jacobmetrick/Flexget,LynxyssCZ/Flexget,crawln45/Flexget,ianstalk/Flexget,tarzasai/Flexget,Danfocus/Flexget,tarzasai/Flexget,malkavi/Flexget,crawln45/Flexget,OmgOhnoes/Flexget,crawln45/Flexget,dsemi/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,dsemi/Flexget,jacobmetrick/Flexget,antivirtel/Flexget,poulpito/Flexget,qvazzler/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,Pretagonist/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,oxc/Flexget,tobinjt/Flexget,Flexget/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,drwyrm/Flexget,qvazzler/Flexget,drwyrm/Flexget | from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import get_plugin_by_name, get_plugins
from flexget.plugins.parsers import plugin_parsing
class TestParsingAPI(object):
def test_all_types_handled(self):
declared_types = set(plugin_parsing.PARSER_TYPES)
method_handlers = set(m[6:] for m in dir(get_plugin_by_name('parsing').instance) if m.startswith('parse_'))
assert set(declared_types) == set(method_handlers), \
'declared parser types: %s, handled types: %s' % (declared_types, method_handlers)
def test_parsing_plugins_have_parse_methods(self):
for parser_type in plugin_parsing.PARSER_TYPES:
for plugin in get_plugins(group='%s_parser' % parser_type):
assert hasattr(plugin.instance, 'parse_%s' % parser_type), \
'{type} parsing plugin {name} has no parse_{type} method'.format(type=parser_type, name=plugin.name)
+
+ class TestTaskParsing(object):
+ config = """
+ tasks:
+ explicit_parser:
+ parsing:
+ movie: guessit
+ series: guessit
+ """
+ def test_selected_parser_cleared(self, manager, execute_task):
+ # make sure when a non-default parser is installed on a task, it doesn't affect other tasks
+ execute_task('explicit_parser')
+ assert not plugin_parsing.selected_parsers
+ | Add a test to verify plugin_parsing clears selected parsers after task | ## Code Before:
from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import get_plugin_by_name, get_plugins
from flexget.plugins.parsers import plugin_parsing
class TestParsingAPI(object):
def test_all_types_handled(self):
declared_types = set(plugin_parsing.PARSER_TYPES)
method_handlers = set(m[6:] for m in dir(get_plugin_by_name('parsing').instance) if m.startswith('parse_'))
assert set(declared_types) == set(method_handlers), \
'declared parser types: %s, handled types: %s' % (declared_types, method_handlers)
def test_parsing_plugins_have_parse_methods(self):
for parser_type in plugin_parsing.PARSER_TYPES:
for plugin in get_plugins(group='%s_parser' % parser_type):
assert hasattr(plugin.instance, 'parse_%s' % parser_type), \
'{type} parsing plugin {name} has no parse_{type} method'.format(type=parser_type, name=plugin.name)
## Instruction:
Add a test to verify plugin_parsing clears selected parsers after task
## Code After:
from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import get_plugin_by_name, get_plugins
from flexget.plugins.parsers import plugin_parsing
class TestParsingAPI(object):
def test_all_types_handled(self):
declared_types = set(plugin_parsing.PARSER_TYPES)
method_handlers = set(m[6:] for m in dir(get_plugin_by_name('parsing').instance) if m.startswith('parse_'))
assert set(declared_types) == set(method_handlers), \
'declared parser types: %s, handled types: %s' % (declared_types, method_handlers)
def test_parsing_plugins_have_parse_methods(self):
for parser_type in plugin_parsing.PARSER_TYPES:
for plugin in get_plugins(group='%s_parser' % parser_type):
assert hasattr(plugin.instance, 'parse_%s' % parser_type), \
'{type} parsing plugin {name} has no parse_{type} method'.format(type=parser_type, name=plugin.name)
class TestTaskParsing(object):
config = """
tasks:
explicit_parser:
parsing:
movie: guessit
series: guessit
"""
def test_selected_parser_cleared(self, manager, execute_task):
# make sure when a non-default parser is installed on a task, it doesn't affect other tasks
execute_task('explicit_parser')
assert not plugin_parsing.selected_parsers
|
278b17859e4ad7464098a715777fcb755acf258c | doTranscode.py | doTranscode.py | import encoders
import decoders
import config
import tempfile
import os
def transcode(inF, outF, options, type=None):
"Transcodes a file"
if type == None:
type = os.path.splitext(outF)[1][1:].lower()
#Get the file's metadata
meta = decoders.getMetadata(inF)
#Decode the file
f = tempfile.NamedTemporaryFile()
inF_real = decoders.decode(inF, f.name)
if not inF_real:
return False
#Encode it
succ = encoders.encode(inF_real, outF, type, options, meta)
#Clean up
f.close()
return succ
| import encoders
import decoders
import config
import tempfile
import os
def transcode(inF, outF, options, type=None):
"Transcodes a file"
if type == None:
type = os.path.splitext(outF)[1][1:].lower()
#Get the file's metadata
meta = decoders.getMetadata(inF)
#Decode the file
f = tempfile.NamedTemporaryFile(suffix=".wav")
inF_real = decoders.decode(inF, f.name)
if not inF_real:
return False
#Encode it
succ = encoders.encode(inF_real, outF, type, options, meta)
#Clean up
f.close()
return succ
 | Make sure that the temporary file has a `wav` extension, because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension, so it adds `.wav` if it's not there on the input file | Make sure that the temporary file has a `wav` extension, because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension, so it adds `.wav` if it's not there on the input file | Python | isc | jeffayle/Transcode | import encoders
import decoders
import config
import tempfile
import os
def transcode(inF, outF, options, type=None):
"Transcodes a file"
if type == None:
type = os.path.splitext(outF)[1][1:].lower()
#Get the file's metadata
meta = decoders.getMetadata(inF)
#Decode the file
- f = tempfile.NamedTemporaryFile()
+ f = tempfile.NamedTemporaryFile(suffix=".wav")
inF_real = decoders.decode(inF, f.name)
if not inF_real:
return False
#Encode it
succ = encoders.encode(inF_real, outF, type, options, meta)
#Clean up
f.close()
return succ
 | Make sure that the temporary file has a `wav` extension, because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension, so it adds `.wav` if it's not there on the input file | ## Code Before:
import encoders
import decoders
import config
import tempfile
import os
def transcode(inF, outF, options, type=None):
"Transcodes a file"
if type == None:
type = os.path.splitext(outF)[1][1:].lower()
#Get the file's metadata
meta = decoders.getMetadata(inF)
#Decode the file
f = tempfile.NamedTemporaryFile()
inF_real = decoders.decode(inF, f.name)
if not inF_real:
return False
#Encode it
succ = encoders.encode(inF_real, outF, type, options, meta)
#Clean up
f.close()
return succ
## Instruction:
Make sure that the temporary file has a `wav` extension, because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension, so it adds `.wav` if it's not there on the input file
## Code After:
import encoders
import decoders
import config
import tempfile
import os
def transcode(inF, outF, options, type=None):
"Transcodes a file"
if type == None:
type = os.path.splitext(outF)[1][1:].lower()
#Get the file's metadata
meta = decoders.getMetadata(inF)
#Decode the file
f = tempfile.NamedTemporaryFile(suffix=".wav")
inF_real = decoders.decode(inF, f.name)
if not inF_real:
return False
#Encode it
succ = encoders.encode(inF_real, outF, type, options, meta)
#Clean up
f.close()
return succ
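What the suffix buys, in a stdlib-only sketch: the temp file's name now ends in .wav, so an encoder that appends .wav to extensionless inputs will leave the path alone (illustrative):

import tempfile

with tempfile.NamedTemporaryFile(suffix=".wav") as f:
    assert f.name.endswith(".wav")   # e.g. /tmp/tmpab12cd34.wav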
|
81069682d724c0a1e2cd292e286e4148cd9c3d9d | scraping/IEEE/main.py | scraping/IEEE/main.py |
from scraping.tools import *
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='IEEE Xplore API Request')
parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'],
arguments['-py'], arguments['-hc']]
standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?'
url = create_url_search(parameters=parameters, standard=standard)
root = fetch_xml(url)
parents = root.getchildren()
[parents.remove(parents[0]) for _ in range(2)]
for document in parents:
article = xml_to_dict(document)
post = iee_to_axelbib(article)
send = post_to_axelbib(post)
|
from scraping.tools import *
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='IEEE Xplore API Request')
parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'],
arguments['-py'], arguments['-hc']]
standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?'
url = create_url_search(parameters=parameters, standard=standard)
root = fetch_xml(url)
parents = root.getchildren()
for _ in range(2): parents.remove(parents[0])
for document in parents:
article = xml_to_dict(document)
post = iee_to_axelbib(article)
send = post_to_axelbib(post)
| Fix loop to delete branches from xml. | Fix loop to delete branches from xml.
| Python | mit | ArcasProject/Arcas |
from scraping.tools import *
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='IEEE Xplore API Request')
parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'],
arguments['-py'], arguments['-hc']]
standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?'
url = create_url_search(parameters=parameters, standard=standard)
root = fetch_xml(url)
parents = root.getchildren()
- [parents.remove(parents[0]) for _ in range(2)]
+ for _ in range(2): parents.remove(parents[0])
for document in parents:
article = xml_to_dict(document)
post = iee_to_axelbib(article)
send = post_to_axelbib(post)
| Fix loop to delete branches from xml. | ## Code Before:
from scraping.tools import *
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='IEEE Xplore API Request')
parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'],
arguments['-py'], arguments['-hc']]
standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?'
url = create_url_search(parameters=parameters, standard=standard)
root = fetch_xml(url)
parents = root.getchildren()
[parents.remove(parents[0]) for _ in range(2)]
for document in parents:
article = xml_to_dict(document)
post = iee_to_axelbib(article)
send = post_to_axelbib(post)
## Instruction:
Fix loop to delete branches from xml.
## Code After:
from scraping.tools import *
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='IEEE Xplore API Request')
parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'],
arguments['-py'], arguments['-hc']]
standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?'
url = create_url_search(parameters=parameters, standard=standard)
root = fetch_xml(url)
parents = root.getchildren()
for _ in range(2): parents.remove(parents[0])
for document in parents:
article = xml_to_dict(document)
post = iee_to_axelbib(article)
send = post_to_axelbib(post)
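The fix swaps a list comprehension used purely for its side effects for a plain loop; both drop the first two children, but the loop states the intent and builds no throwaway list of Nones (illustration with stand-in values):

parents = ['header', 'meta', 'doc1', 'doc2']
for _ in range(2):
    parents.remove(parents[0])
assert parents == ['doc1', 'doc2']
# an even terser equivalent would be: del parents[:2]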
|
ae897509ecc7f190b31cc34085aacf81e45bc36e | nflpool/data/secret-config.py | nflpool/data/secret-config.py | from nflpool.data.dbsession import DbSessionFactory
# You will need an account from MySportsFeed to access their API. They offer free access to developers
# Edit below with your credentials and then save as secret.py
msf_username = 'YOURUSERNAME'
msf_pw = 'YOURPASSWORD'
su_email = ''
slack_webhook_url = ''
| from nflpool.data.dbsession import DbSessionFactory
# You will need an account from MySportsFeed to access their API. They offer free access to developers
# Edit below with your credentials and then save as secret.py
msf_username = 'YOURUSERNAME'
msf_pw = 'YOURPASSWORD'
su_email = ''
slack_webhook_url = ''
msf_api = 'YOUR API KEY'
msf_v2pw = 'MYSPORTSFEEDS' | Add the MSF API key and password fields | Add the MSF API key and password fields
| Python | mit | prcutler/nflpool,prcutler/nflpool | from nflpool.data.dbsession import DbSessionFactory
# You will need an account from MySportsFeed to access their API. They offer free access to developers
# Edit below with your credentials and then save as secret.py
msf_username = 'YOURUSERNAME'
msf_pw = 'YOURPASSWORD'
su_email = ''
slack_webhook_url = ''
+ msf_api = 'YOUR API KEY'
+
+ msf_v2pw = 'MYSPORTSFEEDS' | Add the MSF API key and password fields | ## Code Before:
from nflpool.data.dbsession import DbSessionFactory
# You will need an account from MySportsFeed to access their API. They offer free access to developers
# Edit below with your credentials and then save as secret.py
msf_username = 'YOURUSERNAME'
msf_pw = 'YOURPASSWORD'
su_email = ''
slack_webhook_url = ''
## Instruction:
Add the MSF API key and password fields
## Code After:
from nflpool.data.dbsession import DbSessionFactory
# You will need an account from MySportsFeed to access their API. They offer free access to developers
# Edit below with your credentials and then save as secret.py
msf_username = 'YOURUSERNAME'
msf_pw = 'YOURPASSWORD'
su_email = ''
slack_webhook_url = ''
msf_api = 'YOUR API KEY'
msf_v2pw = 'MYSPORTSFEEDS' |
df4c12d9e2b07db9aa9a1406f61020eb78998bef | nickenbot/command/__init__.py | nickenbot/command/__init__.py | import os
import string
import importlib
import traceback
from .. import irc
def execute(**kwargs):
module_string = string.join([__name__, kwargs['command']], '.')
module = None
try:
module = importlib.import_module(module_string)
except ImportError as e:
traceback.print_exc()
irc.send_to_channel(kwargs['channel'], "No such command.")
if not module == None:
module.execute(**kwargs)
| import os
import fnmatch
import string
import importlib
import traceback
from .. import irc
def get_all():
files = os.listdir('./nickenbot/command')
files.remove('__init__.py')
commands = [os.path.splitext(f)[0] for f in files if fnmatch.fnmatch(f, '*.py')]
commands = [string.replace(c, '_', '-') for c in commands]
return commands
def execute(**kwargs):
print(kwargs['command'])
command = string.replace(kwargs['command'], '-', '_')
print(command)
module_string = string.join([__name__, command], '.')
module = None
try:
module = importlib.import_module(module_string)
except ImportError as e:
traceback.print_exc()
irc.send_to_channel(kwargs['channel'], "No such command.")
if not module == None:
module.execute(**kwargs)
| Add support for hyphens, and list of commands | Add support for hyphens, and list of commands
Adds a function to retrieve all commands, and converts incoming commands
from hyphenated to underscored form.
| Python | mit | brlafreniere/nickenbot,brlafreniere/nickenbot | import os
+ import fnmatch
import string
import importlib
import traceback
from .. import irc
+ def get_all():
+ files = os.listdir('./nickenbot/command')
+ files.remove('__init__.py')
+ commands = [os.path.splitext(f)[0] for f in files if fnmatch.fnmatch(f, '*.py')]
+ commands = [string.replace(c, '_', '-') for c in commands]
+ return commands
+
def execute(**kwargs):
+ print(kwargs['command'])
+ command = string.replace(kwargs['command'], '-', '_')
+ print(command)
- module_string = string.join([__name__, kwargs['command']], '.')
+ module_string = string.join([__name__, command], '.')
module = None
try:
module = importlib.import_module(module_string)
except ImportError as e:
traceback.print_exc()
irc.send_to_channel(kwargs['channel'], "No such command.")
if not module == None:
module.execute(**kwargs)
| Add support for hyphens, and list of commands | ## Code Before:
import os
import string
import importlib
import traceback
from .. import irc
def execute(**kwargs):
module_string = string.join([__name__, kwargs['command']], '.')
module = None
try:
module = importlib.import_module(module_string)
except ImportError as e:
traceback.print_exc()
irc.send_to_channel(kwargs['channel'], "No such command.")
if not module == None:
module.execute(**kwargs)
## Instruction:
Add support for hyphens, and list of commands
## Code After:
import os
import fnmatch
import string
import importlib
import traceback
from .. import irc
def get_all():
files = os.listdir('./nickenbot/command')
files.remove('__init__.py')
commands = [os.path.splitext(f)[0] for f in files if fnmatch.fnmatch(f, '*.py')]
commands = [string.replace(c, '_', '-') for c in commands]
return commands
def execute(**kwargs):
print(kwargs['command'])
command = string.replace(kwargs['command'], '-', '_')
print(command)
module_string = string.join([__name__, command], '.')
module = None
try:
module = importlib.import_module(module_string)
except ImportError as e:
traceback.print_exc()
irc.send_to_channel(kwargs['channel'], "No such command.")
if not module == None:
module.execute(**kwargs)
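The hyphen/underscore round-trip in a nutshell: command names are exposed with hyphens but resolve to underscore-named module files (Python 2's string module, as in the code above):

import string

print(string.replace('do-thing', '-', '_'))   # do_thing, the importable module name
print(string.replace('do_thing', '_', '-'))   # do-thing, the user-facing command name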
|
18059a0515ea5f6edf87e8485200f001503459cd | info-txt.py | info-txt.py | import xml.etree.ElementTree as ET
# HTML output
import dominate as dom
from dominate.tags import *
# Interact with user machine
import datetime
from sys import argv
import os
import time
import webbrowser
second = 1000
minute = 60000
hour = 3600000
class SMS:
'''base SMS class to store a single message'''
def __init__(self, date, party, message):
self.date = date
self.message = message
self.party = party
self.responseTime = 0
def transcribe(root, party1, party2):
'''simplify the extracted SMS XML tree'''
SMSlist = []
for sms in root.findall('sms'):
newSMS = SMS(sms.attrib['date'], sms.attrib['type'], sms.attrib['body'])
SMSlist.append(newSMS)
return SMSlist
def main(party1, party2):
'''main function that executes program function'''
messages = transcribe(ET.parse('sms.xml').getroot(), party1, party2)
if __name__ == '__main__':
if (len(argv) < 3):
raise Exception('Please enter your name and then your friend\'s name')
main(argv[1], argv[2])
| import xml.etree.ElementTree as ET
# HTML output
import dominate as dom
from dominate.tags import *
# Interact with user machine
import datetime
from sys import argv
import os
import time
import webbrowser
second = 1000
minute = 60000
hour = 3600000
class SMS:
'''base SMS class to store a single message'''
def __init__(self, date, party, message):
self.date = date
self.message = message
self.party = party
self.responseTime = 0
def transcribe(root, party1, party2):
'''simplify the extracted SMS XML tree'''
SMSlist = []
for sms in root.findall('sms'):
newSMS = SMS(sms.attrib['date'], sms.attrib['type'], sms.attrib['body'])
SMSlist.append(newSMS)
# Traverse the list backwards to find out when the sms was responded to
for prompt in reversed(SMSlist):
if prompt.party == sms.party:
break
else:
sms.responseTime = sms.date - prompt.date
return SMSlist
def main(party1, party2):
'''main function that executes program function'''
messages = transcribe(ET.parse('sms.xml').getroot(), party1, party2)
if __name__ == '__main__':
if (len(argv) < 3):
raise Exception('Please enter your name and then your friend\'s name')
main(argv[1], argv[2])
| Determine response time for messages | Determine response time for messages
| Python | mit | 2nd47/info-txt | import xml.etree.ElementTree as ET
# HTML output
import dominate as dom
from dominate.tags import *
# Interact with user machine
import datetime
from sys import argv
import os
import time
import webbrowser
second = 1000
minute = 60000
hour = 3600000
class SMS:
'''base SMS class to store a single message'''
def __init__(self, date, party, message):
self.date = date
self.message = message
self.party = party
self.responseTime = 0
def transcribe(root, party1, party2):
'''simplify the extracted SMS XML tree'''
SMSlist = []
for sms in root.findall('sms'):
newSMS = SMS(sms.attrib['date'], sms.attrib['type'], sms.attrib['body'])
SMSlist.append(newSMS)
+ # Traverse the list backwards to find out when the sms was responded to
+ for prompt in reversed(SMSlist):
+ if prompt.party == sms.party:
+ break
+ else:
+ sms.responseTime = sms.date - prompt.date
return SMSlist
def main(party1, party2):
'''main function that executes program function'''
messages = transcribe(ET.parse('sms.xml').getroot(), party1, party2)
if __name__ == '__main__':
if (len(argv) < 3):
raise Exception('Please enter your name and then your friend\'s name')
main(argv[1], argv[2])
| Determine response time for messages | ## Code Before:
import xml.etree.ElementTree as ET
# HTML output
import dominate as dom
from dominate.tags import *
# Interact with user machine
import datetime
from sys import argv
import os
import time
import webbrowser
second = 1000
minute = 60000
hour = 3600000
class SMS:
'''base SMS class to store a single message'''
def __init__(self, date, party, message):
self.date = date
self.message = message
self.party = party
self.responseTime = 0
def transcribe(root, party1, party2):
'''simplify the extracted SMS XML tree'''
SMSlist = []
for sms in root.findall('sms'):
newSMS = SMS(sms.attrib['date'], sms.attrib['type'], sms.attrib['body'])
SMSlist.append(newSMS)
return SMSlist
def main(party1, party2):
'''main function that executes program function'''
messages = transcribe(ET.parse('sms.xml').getroot(), party1, party2)
if __name__ == '__main__':
if (len(argv) < 3):
raise Exception('Please enter your name and then your friend\'s name')
main(argv[1], argv[2])
## Instruction:
Determine response time for messages
## Code After:
import xml.etree.ElementTree as ET
# HTML output
import dominate as dom
from dominate.tags import *
# Interact with user machine
import datetime
from sys import argv
import os
import time
import webbrowser
second = 1000
minute = 60000
hour = 3600000
class SMS:
'''base SMS class to store a single message'''
def __init__(self, date, party, message):
self.date = date
self.message = message
self.party = party
self.responseTime = 0
def transcribe(root, party1, party2):
'''simplify the extracted SMS XML tree'''
SMSlist = []
for sms in root.findall('sms'):
newSMS = SMS(sms.attrib['date'], sms.attrib['type'], sms.attrib['body'])
SMSlist.append(newSMS)
# Traverse the list backwards to find out when the sms was responded to
for prompt in reversed(SMSlist):
if prompt.party == sms.party:
break
else:
sms.responseTime = sms.date - prompt.date
return SMSlist
def main(party1, party2):
'''main function that executes program function'''
messages = transcribe(ET.parse('sms.xml').getroot(), party1, party2)
if __name__ == '__main__':
if (len(argv) < 3):
raise Exception('Please enter your name and then your friend\'s name')
main(argv[1], argv[2])
|
d9b06edb63d20550c4b3fa0fa6924d99724dc11a | examples/image_resize.py | examples/image_resize.py | from __future__ import print_function
from transloadit.client import Transloadit
tl = Transloadit('TRANSLOADIT_KEY', 'TRANSLOADIT_SECRET')
ass = tl.new_assembly()
ass.add_file(open('fixtures/lol_cat.jpg', 'rb'))
ass.add_step('resize', '/image/resize', {'width': 70, 'height': 70})
response = ass.create(wait=True)
result_url = response.data.get('results').get('resize')[0].get('ssl_url')
print('Your result:', result_url)
| from transloadit.client import Transloadit
tl = Transloadit("TRANSLOADIT_KEY", "TRANSLOADIT_SECRET")
ass = tl.new_assembly()
ass.add_file(open("fixtures/lol_cat.jpg", "rb"))
ass.add_step("resize", "/image/resize", {"width": 70, "height": 70})
response = ass.create(wait=True)
result_url = response.data.get("results").get("resize")[0].get("ssl_url")
print("Your result:", result_url)
| Update example syntax to python3 | Update example syntax to python3
| Python | mit | ifedapoolarewaju/transloadit-python-sdk | - from __future__ import print_function
-
from transloadit.client import Transloadit
- tl = Transloadit('TRANSLOADIT_KEY', 'TRANSLOADIT_SECRET')
+ tl = Transloadit("TRANSLOADIT_KEY", "TRANSLOADIT_SECRET")
ass = tl.new_assembly()
- ass.add_file(open('fixtures/lol_cat.jpg', 'rb'))
+ ass.add_file(open("fixtures/lol_cat.jpg", "rb"))
- ass.add_step('resize', '/image/resize', {'width': 70, 'height': 70})
+ ass.add_step("resize", "/image/resize", {"width": 70, "height": 70})
response = ass.create(wait=True)
- result_url = response.data.get('results').get('resize')[0].get('ssl_url')
+ result_url = response.data.get("results").get("resize")[0].get("ssl_url")
- print('Your result:', result_url)
+ print("Your result:", result_url)
| Update example syntax to python3 | ## Code Before:
from __future__ import print_function
from transloadit.client import Transloadit
tl = Transloadit('TRANSLOADIT_KEY', 'TRANSLOADIT_SECRET')
ass = tl.new_assembly()
ass.add_file(open('fixtures/lol_cat.jpg', 'rb'))
ass.add_step('resize', '/image/resize', {'width': 70, 'height': 70})
response = ass.create(wait=True)
result_url = response.data.get('results').get('resize')[0].get('ssl_url')
print('Your result:', result_url)
## Instruction:
Update example syntax to python3
## Code After:
from transloadit.client import Transloadit
tl = Transloadit("TRANSLOADIT_KEY", "TRANSLOADIT_SECRET")
ass = tl.new_assembly()
ass.add_file(open("fixtures/lol_cat.jpg", "rb"))
ass.add_step("resize", "/image/resize", {"width": 70, "height": 70})
response = ass.create(wait=True)
result_url = response.data.get("results").get("resize")[0].get("ssl_url")
print("Your result:", result_url)
|
3dcece1bb4e2490168b21d4298e297e61bdde901 | corehq/ex-submodules/casexml/apps/case/fixtures.py | corehq/ex-submodules/casexml/apps/case/fixtures.py | from casexml.apps.case.xml.generator import safe_element
from casexml.apps.phone.xml import get_casedb_element
class CaseDBFixture(object):
"""Used to provide a casedb-like structure as a fixture
Does not follow the standard FixtureGenerator pattern since it is currently
not used during a regular sync operation, and is user-agnostic
"""
id = "case"
def __init__(self, cases):
if not isinstance(cases, list):
self.cases = [cases]
else:
self.cases = cases
@property
def fixture(self):
"""For a list of cases, return a fixture with all case properties
<fixture id="case">
<case case_id="" case_type="" owner_id="" status="">
<case_name/>
<date_opened/>
<last_modified/>
<case_property />
<index>
<a12345 case_type="" relationship="" />
</index>
<attachment>
<a12345 />
</attachment>
</case>
<case>
...
</case>
</fixture>
"""
element = safe_element("fixture")
element.attrib = {'id': self.id}
for case in self.cases:
element.append(get_casedb_element(case))
return element
| from casexml.apps.case.xml.generator import safe_element
from casexml.apps.phone.xml import get_casedb_element
class CaseDBFixture(object):
"""Used to provide a casedb-like structure as a fixture
Does not follow the standard FixtureGenerator pattern since it is currently
not used during a regular sync operation, and is user-agnostic
"""
id = "case"
def __init__(self, cases):
if not isinstance(cases, list):
self.cases = [cases]
else:
self.cases = cases
@property
def fixture(self):
"""For a list of cases, return a fixture with all case properties
<fixture id="case">
<case case_id="" case_type="" owner_id="" status="">
<case_name/>
<date_opened/>
<last_modified/>
<case_property />
<index>
<a12345 case_type="" relationship="" />
</index>
<attachment>
<a12345 />
</attachment>
</case>
<case>
...
</case>
</fixture>
https://github.com/dimagi/commcare/wiki/casedb
https://github.com/dimagi/commcare/wiki/fixtures
"""
element = safe_element("fixture")
element.attrib = {'id': self.id}
for case in self.cases:
element.append(get_casedb_element(case))
return element
| Add links to fixture and casedb specs | Add links to fixture and casedb specs
| Python | bsd-3-clause | qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | from casexml.apps.case.xml.generator import safe_element
from casexml.apps.phone.xml import get_casedb_element
class CaseDBFixture(object):
"""Used to provide a casedb-like structure as a fixture
Does not follow the standard FixtureGenerator pattern since it is currently
not used during a regular sync operation, and is user-agnostic
"""
id = "case"
def __init__(self, cases):
if not isinstance(cases, list):
self.cases = [cases]
else:
self.cases = cases
@property
def fixture(self):
"""For a list of cases, return a fixture with all case properties
<fixture id="case">
<case case_id="" case_type="" owner_id="" status="">
<case_name/>
<date_opened/>
<last_modified/>
<case_property />
<index>
<a12345 case_type="" relationship="" />
</index>
<attachment>
<a12345 />
</attachment>
</case>
<case>
...
</case>
</fixture>
+
+ https://github.com/dimagi/commcare/wiki/casedb
+ https://github.com/dimagi/commcare/wiki/fixtures
"""
element = safe_element("fixture")
element.attrib = {'id': self.id}
for case in self.cases:
element.append(get_casedb_element(case))
return element
| Add links to fixture and casedb specs | ## Code Before:
from casexml.apps.case.xml.generator import safe_element
from casexml.apps.phone.xml import get_casedb_element
class CaseDBFixture(object):
"""Used to provide a casedb-like structure as a fixture
Does not follow the standard FixtureGenerator pattern since it is currently
not used during a regular sync operation, and is user-agnostic
"""
id = "case"
def __init__(self, cases):
if not isinstance(cases, list):
self.cases = [cases]
else:
self.cases = cases
@property
def fixture(self):
"""For a list of cases, return a fixture with all case properties
<fixture id="case">
<case case_id="" case_type="" owner_id="" status="">
<case_name/>
<date_opened/>
<last_modified/>
<case_property />
<index>
<a12345 case_type="" relationship="" />
</index>
<attachment>
<a12345 />
</attachment>
</case>
<case>
...
</case>
</fixture>
"""
element = safe_element("fixture")
element.attrib = {'id': self.id}
for case in self.cases:
element.append(get_casedb_element(case))
return element
## Instruction:
Add links to fixture and casedb specs
## Code After:
from casexml.apps.case.xml.generator import safe_element
from casexml.apps.phone.xml import get_casedb_element
class CaseDBFixture(object):
"""Used to provide a casedb-like structure as a fixture
Does not follow the standard FixtureGenerator pattern since it is currently
not used during a regular sync operation, and is user-agnostic
"""
id = "case"
def __init__(self, cases):
if not isinstance(cases, list):
self.cases = [cases]
else:
self.cases = cases
@property
def fixture(self):
"""For a list of cases, return a fixture with all case properties
<fixture id="case">
<case case_id="" case_type="" owner_id="" status="">
<case_name/>
<date_opened/>
<last_modified/>
<case_property />
<index>
<a12345 case_type="" relationship="" />
</index>
<attachment>
<a12345 />
</attachment>
</case>
<case>
...
</case>
</fixture>
https://github.com/dimagi/commcare/wiki/casedb
https://github.com/dimagi/commcare/wiki/fixtures
"""
element = safe_element("fixture")
element.attrib = {'id': self.id}
for case in self.cases:
element.append(get_casedb_element(case))
return element
|
28e67e04a88b0195184bf43f013c11ea7f320c4f | conveyor/processor.py | conveyor/processor.py | from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
class BulkProcessor(BaseProcessor):
def process(self):
pass
| from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
class BulkProcessor(BaseProcessor):
def process(self):
pass
| Add a method for getting a list of releases to fetch | Add a method for getting a list of releases to fetch
| Python | bsd-2-clause | crateio/carrier | from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
+ def get_releases(self, name, version=None):
+ if version is None:
+ return set(self.client.package_releases(name, True))
+ else:
+ return set([version])
+
class BulkProcessor(BaseProcessor):
def process(self):
pass
| Add a method for getting a list of releases to fetch | ## Code Before:
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
class BulkProcessor(BaseProcessor):
def process(self):
pass
## Instruction:
Add a method for getting a list of releases to fetch
## Code After:
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
c694ac630f36c53c130a63908c6c3576f220a6bd | django-openstack/django_openstack/auth/__init__.py | django-openstack/django_openstack/auth/__init__.py | import django_openstack.urls
class Roles:
USER = 'user'
PROJECT_ADMIN = 'projadmin'
SOFTWARE_ADMIN = 'softadmin'
HARDWARE_ADMIN = 'hardadmin'
ALL_ROLES = (HARDWARE_ADMIN, SOFTWARE_ADMIN,
PROJECT_ADMIN, USER)
@staticmethod
def get_max_role(roles):
if not roles:
return Roles.USER
for role in Roles.ALL_ROLES:
if role in roles:
if role in django_openstack.urls.topbars:
return role
else:
return Roles.USER
@staticmethod
def needs_tenant(roles):
return not (Roles.HARDWARE_ADMIN in roles) and not (Roles.SOFTWARE_ADMIN in roles)
| import django_openstack.urls
class Roles:
USER = 'user'
PROJECT_ADMIN = 'projadmin'
SOFTWARE_ADMIN = 'softadmin'
HARDWARE_ADMIN = 'hardadmin'
ALL_ROLES = (HARDWARE_ADMIN, SOFTWARE_ADMIN,
PROJECT_ADMIN, USER)
@staticmethod
def get_max_role(roles):
if not roles:
return Roles.USER
for role in Roles.ALL_ROLES:
if role in roles:
if role in django_openstack.urls.topbars:
return role
return Roles.USER
@staticmethod
def needs_tenant(roles):
return not (Roles.HARDWARE_ADMIN in roles) and not (Roles.SOFTWARE_ADMIN in roles)
| Return 'user' role as default value | Return 'user' role as default value
| Python | apache-2.0 | griddynamics/osc-robot-openstack-dashboard,griddynamics/osc-robot-openstack-dashboard,griddynamics/osc-robot-openstack-dashboard | import django_openstack.urls
class Roles:
USER = 'user'
PROJECT_ADMIN = 'projadmin'
SOFTWARE_ADMIN = 'softadmin'
HARDWARE_ADMIN = 'hardadmin'
ALL_ROLES = (HARDWARE_ADMIN, SOFTWARE_ADMIN,
PROJECT_ADMIN, USER)
@staticmethod
def get_max_role(roles):
if not roles:
return Roles.USER
for role in Roles.ALL_ROLES:
if role in roles:
if role in django_openstack.urls.topbars:
return role
- else:
- return Roles.USER
+ return Roles.USER
@staticmethod
def needs_tenant(roles):
return not (Roles.HARDWARE_ADMIN in roles) and not (Roles.SOFTWARE_ADMIN in roles)
| Return 'user' role as default value | ## Code Before:
import django_openstack.urls
class Roles:
USER = 'user'
PROJECT_ADMIN = 'projadmin'
SOFTWARE_ADMIN = 'softadmin'
HARDWARE_ADMIN = 'hardadmin'
ALL_ROLES = (HARDWARE_ADMIN, SOFTWARE_ADMIN,
PROJECT_ADMIN, USER)
@staticmethod
def get_max_role(roles):
if not roles:
return Roles.USER
for role in Roles.ALL_ROLES:
if role in roles:
if role in django_openstack.urls.topbars:
return role
else:
return Roles.USER
@staticmethod
def needs_tenant(roles):
return not (Roles.HARDWARE_ADMIN in roles) and not (Roles.SOFTWARE_ADMIN in roles)
## Instruction:
Return 'user' role as default value
## Code After:
import django_openstack.urls
class Roles:
USER = 'user'
PROJECT_ADMIN = 'projadmin'
SOFTWARE_ADMIN = 'softadmin'
HARDWARE_ADMIN = 'hardadmin'
ALL_ROLES = (HARDWARE_ADMIN, SOFTWARE_ADMIN,
PROJECT_ADMIN, USER)
@staticmethod
def get_max_role(roles):
if not roles:
return Roles.USER
for role in Roles.ALL_ROLES:
if role in roles:
if role in django_openstack.urls.topbars:
return role
return Roles.USER
@staticmethod
def needs_tenant(roles):
return not (Roles.HARDWARE_ADMIN in roles) and not (Roles.SOFTWARE_ADMIN in roles)
|
2a986d7c0bab1612e96cace5ce54a188e22af2aa | services/wordpress.py | services/wordpress.py | import json
import foauth
class Wordpress(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.wordpress.com/'
favicon_url = 'http://s2.wp.com/i/favicon.ico'
docs_url = 'http://developer.wordpress.com/docs/api/'
# URLs to interact with the API
authorize_url = 'https://public-api.wordpress.com/oauth2/authorize'
access_token_url = 'https://public-api.wordpress.com/oauth2/token'
api_domain = 'public-api.wordpress.com'
available_permissions = [
(None, 'read and post to your blog'),
]
def parse_token(self, content):
data = json.loads(content)
return data['access_token'], None
| import json
import foauth.providers
class Wordpress(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.wordpress.com/'
favicon_url = 'http://s2.wp.com/i/favicon.ico'
docs_url = 'http://developer.wordpress.com/docs/api/'
# URLs to interact with the API
authorize_url = 'https://public-api.wordpress.com/oauth2/authorize'
access_token_url = 'https://public-api.wordpress.com/oauth2/token'
api_domain = 'public-api.wordpress.com'
available_permissions = [
(None, 'read and post to your blog'),
]
def parse_token(self, content):
data = json.loads(content)
return data['access_token'], None
| Fix the import for Wordpress | Fix the import for Wordpress
| Python | bsd-3-clause | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | import json
- import foauth
+ import foauth.providers
class Wordpress(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.wordpress.com/'
favicon_url = 'http://s2.wp.com/i/favicon.ico'
docs_url = 'http://developer.wordpress.com/docs/api/'
# URLs to interact with the API
authorize_url = 'https://public-api.wordpress.com/oauth2/authorize'
access_token_url = 'https://public-api.wordpress.com/oauth2/token'
api_domain = 'public-api.wordpress.com'
available_permissions = [
(None, 'read and post to your blog'),
]
def parse_token(self, content):
data = json.loads(content)
return data['access_token'], None
| Fix the import for Wordpress | ## Code Before:
import json
import foauth
class Wordpress(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.wordpress.com/'
favicon_url = 'http://s2.wp.com/i/favicon.ico'
docs_url = 'http://developer.wordpress.com/docs/api/'
# URLs to interact with the API
authorize_url = 'https://public-api.wordpress.com/oauth2/authorize'
access_token_url = 'https://public-api.wordpress.com/oauth2/token'
api_domain = 'public-api.wordpress.com'
available_permissions = [
(None, 'read and post to your blog'),
]
def parse_token(self, content):
data = json.loads(content)
return data['access_token'], None
## Instruction:
Fix the import for Wordpress
## Code After:
import json
import foauth.providers
class Wordpress(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.wordpress.com/'
favicon_url = 'http://s2.wp.com/i/favicon.ico'
docs_url = 'http://developer.wordpress.com/docs/api/'
# URLs to interact with the API
authorize_url = 'https://public-api.wordpress.com/oauth2/authorize'
access_token_url = 'https://public-api.wordpress.com/oauth2/token'
api_domain = 'public-api.wordpress.com'
available_permissions = [
(None, 'read and post to your blog'),
]
def parse_token(self, content):
data = json.loads(content)
return data['access_token'], None
|
fbad3c0b80258b02cc2ba81ff1408d24cd69c69d | src/iconclassserver/util.py | src/iconclassserver/util.py | import redis
import json
from django.conf import settings
import iconclass
import requests
import time
def handle_githubpushes():
redis_c = redis.StrictRedis()
while True:
data = redis_c.lpop(settings.REDIS_PREFIX + '_gitpushes')
if not data: break
data = json.loads(data)
full_name = data['repository']['full_name']
for commit in data.get('commits', []):
committer = commit['committer']['email']
timestamp = commit['timestamp']
commit_id = commit['id']
for filename in commit['modified']:
if filename.startswith('data/'):
fn, language = iconclass.action(filename[5:])
if not fn: continue
r = requests.get('https://raw.githubusercontent.com/'+full_name+'/master/'+filename)
if r.status_code == 200:
fn(r.content, language)
buf = [time.strftime('%Y%m%d %H:%M:%S'), committer, filename, timestamp, commit_id]
redis_c.lpush(settings.REDIS_PREFIX + '_gitpushlog', ' '.join(buf)) | import redis
import json
from django.conf import settings
import iconclass
import requests
import time
import os
def handle_githubpushes():
redis_c = redis.StrictRedis()
while True:
data = redis_c.lpop(settings.REDIS_PREFIX + '_gitpushes')
if not data: break
data = json.loads(data)
full_name = data['repository']['full_name']
for commit in data.get('commits', []):
committer = commit['committer']['email']
timestamp = commit['timestamp']
commit_id = commit['id']
for filename in commit['modified']:
if filename.startswith('data/'):
filepath, filename = os.path.split(filename)
fn, language = iconclass.action(filename[5:])
if not fn: continue
r = requests.get('https://raw.githubusercontent.com/'+full_name+'/master/'+filename)
if r.status_code == 200:
fn(r.content, language)
buf = [time.strftime('%Y%m%d %H:%M:%S'), committer, filename, timestamp, commit_id]
redis_c.lpush(settings.REDIS_PREFIX + '_gitpushlog', ' '.join(buf)) | Handle filenames with path prefixes in git commit logs | Handle filenames with path prefixes in git commit logs
| Python | mit | epoz/iconclass-server,epoz/iconclass-server | import redis
import json
from django.conf import settings
import iconclass
import requests
import time
+ import os
def handle_githubpushes():
redis_c = redis.StrictRedis()
while True:
data = redis_c.lpop(settings.REDIS_PREFIX + '_gitpushes')
if not data: break
data = json.loads(data)
full_name = data['repository']['full_name']
for commit in data.get('commits', []):
committer = commit['committer']['email']
timestamp = commit['timestamp']
commit_id = commit['id']
for filename in commit['modified']:
if filename.startswith('data/'):
+ filepath, filename = os.path.split(filename)
fn, language = iconclass.action(filename[5:])
if not fn: continue
r = requests.get('https://raw.githubusercontent.com/'+full_name+'/master/'+filename)
if r.status_code == 200:
fn(r.content, language)
buf = [time.strftime('%Y%m%d %H:%M:%S'), committer, filename, timestamp, commit_id]
redis_c.lpush(settings.REDIS_PREFIX + '_gitpushlog', ' '.join(buf)) | Handle filenames with path prefixes in git commit logs | ## Code Before:
import redis
import json
from django.conf import settings
import iconclass
import requests
import time
def handle_githubpushes():
redis_c = redis.StrictRedis()
while True:
data = redis_c.lpop(settings.REDIS_PREFIX + '_gitpushes')
if not data: break
data = json.loads(data)
full_name = data['repository']['full_name']
for commit in data.get('commits', []):
committer = commit['committer']['email']
timestamp = commit['timestamp']
commit_id = commit['id']
for filename in commit['modified']:
if filename.startswith('data/'):
fn, language = iconclass.action(filename[5:])
if not fn: continue
r = requests.get('https://raw.githubusercontent.com/'+full_name+'/master/'+filename)
if r.status_code == 200:
fn(r.content, language)
buf = [time.strftime('%Y%m%d %H:%M:%S'), committer, filename, timestamp, commit_id]
redis_c.lpush(settings.REDIS_PREFIX + '_gitpushlog', ' '.join(buf))
## Instruction:
Handle filenames with path prefixes in git commit logs
## Code After:
import redis
import json
from django.conf import settings
import iconclass
import requests
import time
import os
def handle_githubpushes():
redis_c = redis.StrictRedis()
while True:
data = redis_c.lpop(settings.REDIS_PREFIX + '_gitpushes')
if not data: break
data = json.loads(data)
full_name = data['repository']['full_name']
for commit in data.get('commits', []):
committer = commit['committer']['email']
timestamp = commit['timestamp']
commit_id = commit['id']
for filename in commit['modified']:
if filename.startswith('data/'):
filepath, filename = os.path.split(filename)
fn, language = iconclass.action(filename[5:])
if not fn: continue
r = requests.get('https://raw.githubusercontent.com/'+full_name+'/master/'+filename)
if r.status_code == 200:
fn(r.content, language)
buf = [time.strftime('%Y%m%d %H:%M:%S'), committer, filename, timestamp, commit_id]
redis_c.lpush(settings.REDIS_PREFIX + '_gitpushlog', ' '.join(buf)) |
78ebec64e51c43005488bc1b9ce84fca65d069e4 | planet_alignment/app/app_factory.py | planet_alignment/app/app_factory.py | from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
return App(self._system_data, self._plugins, self._time)
| from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
| Document the AppFactory, add the doc headers. | Document the AppFactory, add the doc headers.
| Python | mit | paulfanelli/planet_alignment | from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
+ """This is the class factory for the App.
+
+ - **parameters** and **types**::
+
+ :param cmd_args: The command-line args.
+ :type cmd_args: argparse Namespace
+ """
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
+ """Returns the created App object.
+
+ :return: Returns the App object.
+ :rtype: App class.
+ """
return App(self._system_data, self._plugins, self._time)
| Document the AppFactory, add the doc headers. | ## Code Before:
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
return App(self._system_data, self._plugins, self._time)
## Instruction:
Document the AppFactory, add the doc headers.
## Code After:
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
|
edad01902f8c9d23da106c538d118e28da286821 | lesion/lifio.py | lesion/lifio.py | import javabridge as jv
import bioformats as bf
def start(max_heap_size='8G'):
"""Start the Java Virtual Machine, enabling bioformats IO.
Parameters
----------
max_heap_size : string, optional
The maximum memory usage by the virtual machine. Valid strings
include '256M', '64k', and '2G'. Expect to need a lot.
"""
jv.start_vm(class_path=bf.JARS, max_heap_size=max_heap_size)
def done():
"""Kill the JVM. Once killed, it cannot be restarted.
Notes
-----
See the python-javabridge documentation for more information.
"""
jv.kill_vm()
| import numpy as np
import javabridge as jv
import bioformats as bf
def start(max_heap_size='8G'):
"""Start the Java Virtual Machine, enabling bioformats IO.
Parameters
----------
max_heap_size : string, optional
The maximum memory usage by the virtual machine. Valid strings
include '256M', '64k', and '2G'. Expect to need a lot.
"""
jv.start_vm(class_path=bf.JARS, max_heap_size=max_heap_size)
def done():
"""Kill the JVM. Once killed, it cannot be restarted.
Notes
-----
See the python-javabridge documentation for more information.
"""
jv.kill_vm()
def lif_metadata_string_size(filename):
"""Get the length in bytes of the metadata string of a LIF file.
Parameters
----------
filename : string
Path to the LIF file.
Returns
-------
length : int
The length in bytes of the metadata string.
Notes
-----
This is based on code by Lee Kamentsky. [1]
References
----------
[1] https://github.com/CellProfiler/python-bioformats/issues/8
"""
with open(filename, 'rb') as fd:
fd.read(9)
length = np.frombuffer(fd.read(4), "<i4")[0]
return length
| Add function to determine metadata length | Add function to determine metadata length
| Python | bsd-3-clause | jni/lesion | + import numpy as np
import javabridge as jv
import bioformats as bf
def start(max_heap_size='8G'):
"""Start the Java Virtual Machine, enabling bioformats IO.
Parameters
----------
max_heap_size : string, optional
The maximum memory usage by the virtual machine. Valid strings
include '256M', '64k', and '2G'. Expect to need a lot.
"""
jv.start_vm(class_path=bf.JARS, max_heap_size=max_heap_size)
def done():
"""Kill the JVM. Once killed, it cannot be restarted.
Notes
-----
See the python-javabridge documentation for more information.
"""
jv.kill_vm()
+
+ def lif_metadata_string_size(filename):
+ """Get the length in bytes of the metadata string of a LIF file.
+
+ Parameters
+ ----------
+ filename : string
+ Path to the LIF file.
+
+ Returns
+ -------
+ length : int
+ The length in bytes of the metadata string.
+
+ Notes
+ -----
+ This is based on code by Lee Kamentsky. [1]
+
+ References
+ ----------
+ [1] https://github.com/CellProfiler/python-bioformats/issues/8
+ """
+ with open(filename, 'rb') as fd:
+ fd.read(9)
+ length = np.frombuffer(fd.read(4), "<i4")[0]
+ return length
+ | Add function to determine metadata length | ## Code Before:
import javabridge as jv
import bioformats as bf
def start(max_heap_size='8G'):
"""Start the Java Virtual Machine, enabling bioformats IO.
Parameters
----------
max_heap_size : string, optional
The maximum memory usage by the virtual machine. Valid strings
include '256M', '64k', and '2G'. Expect to need a lot.
"""
jv.start_vm(class_path=bf.JARS, max_heap_size=max_heap_size)
def done():
"""Kill the JVM. Once killed, it cannot be restarted.
Notes
-----
See the python-javabridge documentation for more information.
"""
jv.kill_vm()
## Instruction:
Add function to determine metadata length
## Code After:
import numpy as np
import javabridge as jv
import bioformats as bf
def start(max_heap_size='8G'):
"""Start the Java Virtual Machine, enabling bioformats IO.
Parameters
----------
max_heap_size : string, optional
The maximum memory usage by the virtual machine. Valid strings
include '256M', '64k', and '2G'. Expect to need a lot.
"""
jv.start_vm(class_path=bf.JARS, max_heap_size=max_heap_size)
def done():
"""Kill the JVM. Once killed, it cannot be restarted.
Notes
-----
See the python-javabridge documentation for more information.
"""
jv.kill_vm()
def lif_metadata_string_size(filename):
"""Get the length in bytes of the metadata string of a LIF file.
Parameters
----------
filename : string
Path to the LIF file.
Returns
-------
length : int
The length in bytes of the metadata string.
Notes
-----
This is based on code by Lee Kamentsky. [1]
References
----------
[1] https://github.com/CellProfiler/python-bioformats/issues/8
"""
with open(filename, 'rb') as fd:
fd.read(9)
length = np.frombuffer(fd.read(4), "<i4")[0]
return length
|
6a7a61d514ac738f8de29efe280ecfedfaf72685 | ttrss/auth.py | ttrss/auth.py | from requests.auth import AuthBase
import requests
import json
from exceptions import raise_on_error
class TTRAuth(AuthBase):
def __init__(self, user, password):
self.user = user
self.password = password
def response_hook(self, r, **kwargs):
j = json.loads(r.content)
if int(j['status']) == 0:
return r
sid = None
if r.headers['set-cookie']:
sid = r.headers['set-cookie'].split(';')[0].split('=')[1]
r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid)
else:
sid = r.request.headers['Cookie'].split('=')[1]
res = requests.post(r.request.url, json.dumps({
'sid': sid,
'op': 'login',
'user': self.user,
'password': self.password
}))
raise_on_error(res)
r.request.deregister_hook('response', self.response_hook)
_r = requests.Session().send(r.request)
_r.cookies = r.cookies
raise_on_error(_r)
return _r
def __call__(self, r):
r.register_hook('response', self.response_hook)
return r
| from requests.auth import AuthBase
import requests
import json
from exceptions import raise_on_error
class TTRAuth(AuthBase):
def __init__(self, user, password):
self.user = user
self.password = password
def response_hook(self, r, **kwargs):
j = json.loads(r.content)
if int(j['status']) == 0:
return r
sid = None
if 'ttrss_api_sid' in r.cookies:
sid = r.cookies['ttrss_api_sid']
r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid)
else:
sid = r.request.headers['Cookie'].split('=')[1]
res = requests.post(r.request.url, json.dumps({
'sid': sid,
'op': 'login',
'user': self.user,
'password': self.password
}))
raise_on_error(res)
r.request.deregister_hook('response', self.response_hook)
_r = requests.Session().send(r.request)
_r.cookies = r.cookies
raise_on_error(_r)
return _r
def __call__(self, r):
r.register_hook('response', self.response_hook)
return r
| Clean up cookie lookup in TTRAuth | Clean up cookie lookup in TTRAuth
| Python | mit | Vassius/ttrss-python | from requests.auth import AuthBase
import requests
import json
from exceptions import raise_on_error
class TTRAuth(AuthBase):
def __init__(self, user, password):
self.user = user
self.password = password
def response_hook(self, r, **kwargs):
j = json.loads(r.content)
if int(j['status']) == 0:
return r
sid = None
- if r.headers['set-cookie']:
- sid = r.headers['set-cookie'].split(';')[0].split('=')[1]
+ if 'ttrss_api_sid' in r.cookies:
+ sid = r.cookies['ttrss_api_sid']
r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid)
else:
sid = r.request.headers['Cookie'].split('=')[1]
res = requests.post(r.request.url, json.dumps({
'sid': sid,
'op': 'login',
'user': self.user,
'password': self.password
}))
raise_on_error(res)
r.request.deregister_hook('response', self.response_hook)
_r = requests.Session().send(r.request)
_r.cookies = r.cookies
raise_on_error(_r)
return _r
def __call__(self, r):
r.register_hook('response', self.response_hook)
return r
| Clean up cookie lookup in TTRAuth | ## Code Before:
from requests.auth import AuthBase
import requests
import json
from exceptions import raise_on_error
class TTRAuth(AuthBase):
def __init__(self, user, password):
self.user = user
self.password = password
def response_hook(self, r, **kwargs):
j = json.loads(r.content)
if int(j['status']) == 0:
return r
sid = None
if r.headers['set-cookie']:
sid = r.headers['set-cookie'].split(';')[0].split('=')[1]
r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid)
else:
sid = r.request.headers['Cookie'].split('=')[1]
res = requests.post(r.request.url, json.dumps({
'sid': sid,
'op': 'login',
'user': self.user,
'password': self.password
}))
raise_on_error(res)
r.request.deregister_hook('response', self.response_hook)
_r = requests.Session().send(r.request)
_r.cookies = r.cookies
raise_on_error(_r)
return _r
def __call__(self, r):
r.register_hook('response', self.response_hook)
return r
## Instruction:
Clean up cookie lookup in TTRAuth
## Code After:
from requests.auth import AuthBase
import requests
import json
from exceptions import raise_on_error
class TTRAuth(AuthBase):
def __init__(self, user, password):
self.user = user
self.password = password
def response_hook(self, r, **kwargs):
j = json.loads(r.content)
if int(j['status']) == 0:
return r
sid = None
if 'ttrss_api_sid' in r.cookies:
sid = r.cookies['ttrss_api_sid']
r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid)
else:
sid = r.request.headers['Cookie'].split('=')[1]
res = requests.post(r.request.url, json.dumps({
'sid': sid,
'op': 'login',
'user': self.user,
'password': self.password
}))
raise_on_error(res)
r.request.deregister_hook('response', self.response_hook)
_r = requests.Session().send(r.request)
_r.cookies = r.cookies
raise_on_error(_r)
return _r
def __call__(self, r):
r.register_hook('response', self.response_hook)
return r
|
a275068193c87c5a27758c17d7699e963a0bdfa8 | llvmpy/src/Support/FormattedStream.py | llvmpy/src/Support/FormattedStream.py | from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
new = Constructor(ref(raw_ostream), cast(bool, Bool))
| from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
_new = Constructor(ref(raw_ostream), cast(bool, Bool))
@CustomPythonStaticMethod
def new(stream, destroy=False):
inst = formatted_raw_ostream._new(stream, destroy)
inst.__underlying_stream = stream # to prevent it being freed first
return inst
| Fix formatted_raw_ostream ownership error with the underlying stream. | Fix formatted_raw_ostream ownership error with the underlying stream.
| Python | bsd-3-clause | llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy | from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
- new = Constructor(ref(raw_ostream), cast(bool, Bool))
+ _new = Constructor(ref(raw_ostream), cast(bool, Bool))
+
+ @CustomPythonStaticMethod
+ def new(stream, destroy=False):
+ inst = formatted_raw_ostream._new(stream, destroy)
+ inst.__underlying_stream = stream # to prevent it being freed first
+ return inst
| Fix formatted_raw_ostream ownership error with the underlying stream. | ## Code Before:
from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
new = Constructor(ref(raw_ostream), cast(bool, Bool))
## Instruction:
Fix formatted_raw_ostream ownership error with the underlying stream.
## Code After:
from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
_new = Constructor(ref(raw_ostream), cast(bool, Bool))
@CustomPythonStaticMethod
def new(stream, destroy=False):
inst = formatted_raw_ostream._new(stream, destroy)
inst.__underlying_stream = stream # to prevent it being freed first
return inst
|
4f8aed6ed3491e62911619eaa9aa4b86b30065e4 | leonardo/module/leonardo_auth/widget/userlogin/models.py | leonardo/module/leonardo_auth/widget/userlogin/models.py |
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
else:
context['next'] = request.path
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
| Fix missing next in context. | Fix missing next in context.
| Python | bsd-3-clause | django-leonardo/django-leonardo,django-leonardo/django-leonardo,django-leonardo/django-leonardo,django-leonardo/django-leonardo |
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
+ else:
+ context['next'] = request.path
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
| Fix missing next in context. | ## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
## Instruction:
Fix missing next in context.
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
else:
context['next'] = request.path
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
|
79bb94f51cd2dca65479cb39f6c365c4c372b0ca | forumuser/models.py | forumuser/models.py | from django.contrib.auth.models import AbstractUser, Group
from django.db import models
class ForumUser(AbstractUser):
def __unicode__(self):
return '%(username)s (%(email)s)' % {
'username': self.username,
'email': self.email
}
| from django.contrib.auth.models import AbstractUser, Group
from django.db import models
class ForumUser(AbstractUser):
items_per_page = models.PositiveSmallIntegerField(blank=True, null=True)
def __unicode__(self):
return '%(username)s (%(email)s)' % {
'username': self.username,
'email': self.email
}
| Add items per page as a preference to the forumm user model | Add items per page as a preference to the forumm user model
| Python | mit | hellsgate1001/thatforum_django,hellsgate1001/thatforum_django,hellsgate1001/thatforum_django | from django.contrib.auth.models import AbstractUser, Group
from django.db import models
class ForumUser(AbstractUser):
+ items_per_page = models.PositiveSmallIntegerField(blank=True, null=True)
+
def __unicode__(self):
return '%(username)s (%(email)s)' % {
'username': self.username,
'email': self.email
}
| Add items per page as a preference to the forumm user model | ## Code Before:
from django.contrib.auth.models import AbstractUser, Group
from django.db import models
class ForumUser(AbstractUser):
def __unicode__(self):
return '%(username)s (%(email)s)' % {
'username': self.username,
'email': self.email
}
## Instruction:
Add items per page as a preference to the forumm user model
## Code After:
from django.contrib.auth.models import AbstractUser, Group
from django.db import models
class ForumUser(AbstractUser):
items_per_page = models.PositiveSmallIntegerField(blank=True, null=True)
def __unicode__(self):
return '%(username)s (%(email)s)' % {
'username': self.username,
'email': self.email
}
|
a86eaffa53a18389ea628f37c76900cc24c701f6 | opps/contrib/logging/admin.py | opps/contrib/logging/admin.py | from django.contrib import admin
from .models import Logging
class LoggingAdmin(admin.ModelAdmin):
model = Logging
raw_id_fields = ('user',)
exclude = ('site_iid', 'site_domain')
admin.site.register(Logging, LoggingAdmin)
| from django.contrib import admin
from .models import Logging
class LoggingAdmin(admin.ModelAdmin):
model = Logging
raw_id_fields = ('user',)
exclude = ('site_iid', 'site_domain', 'mirror_site')
admin.site.register(Logging, LoggingAdmin)
| Add field mirror_site at exclude on LoggingAdmin | Add field mirror_site at exclude on LoggingAdmin
| Python | mit | YACOWS/opps,williamroot/opps,YACOWS/opps,williamroot/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,opps/opps,williamroot/opps,opps/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps | from django.contrib import admin
from .models import Logging
class LoggingAdmin(admin.ModelAdmin):
model = Logging
raw_id_fields = ('user',)
- exclude = ('site_iid', 'site_domain')
+ exclude = ('site_iid', 'site_domain', 'mirror_site')
admin.site.register(Logging, LoggingAdmin)
| Add field mirror_site at exclude on LoggingAdmin | ## Code Before:
from django.contrib import admin
from .models import Logging
class LoggingAdmin(admin.ModelAdmin):
model = Logging
raw_id_fields = ('user',)
exclude = ('site_iid', 'site_domain')
admin.site.register(Logging, LoggingAdmin)
## Instruction:
Add field mirror_site at exclude on LoggingAdmin
## Code After:
from django.contrib import admin
from .models import Logging
class LoggingAdmin(admin.ModelAdmin):
model = Logging
raw_id_fields = ('user',)
exclude = ('site_iid', 'site_domain', 'mirror_site')
admin.site.register(Logging, LoggingAdmin)
|
714fd7d0c173672f636e8d051b24046b10d3f481 | format_json.py | format_json.py |
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
json.dump(oyster, f, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
|
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
json.dump(oyster, f, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
| Make this work for non-ASCII chars as well. | Make this work for non-ASCII chars as well.
| Python | mit | nbeaver/cmd-oysters,nbeaver/cmd-oysters |
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
- json.dump(oyster, f, indent=4, separators=(',', ': '), sort_keys=True)
+ json.dump(oyster, f, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
| Make this work for non-ASCII chars as well. | ## Code Before:
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
json.dump(oyster, f, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
## Instruction:
Make this work for non-ASCII chars as well.
## Code After:
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
json.dump(oyster, f, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
|
4de23cffa16c71e287efba7d32ba375feeb9bc13 | format_json.py | format_json.py |
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
|
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
| Truncate the file before writing more data. | Truncate the file before writing more data.
| Python | mit | nbeaver/cmd-oysters,nbeaver/cmd-oysters |
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
+ fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
| Truncate the file before writing more data. | ## Code Before:
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
## Instruction:
Truncate the file before writing more data.
## Code After:
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
|
860cea2b6d183414d794eb2e2d44beb7728e2d4b | hasjob/models/location.py | hasjob/models/location.py |
from . import db, BaseScopedNameMixin
from flask import url_for
from .board import Board

__all__ = ['Location']


class Location(BaseScopedNameMixin, db.Model):
"""
A location where jobs are listed, using geonameid for primary key. Scoped to a board
"""
__tablename__ = 'location'
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
geonameid = db.synonym('id')
board_id = db.Column(None, db.ForeignKey('board.id'), nullable=False, primary_key=True, index=True)
parent = db.synonym('board_id')
board = db.relationship(Board, backref=db.backref('locations', lazy='dynamic', cascade='all, delete-orphan'))
#: Landing page description
description = db.Column(db.UnicodeText, nullable=True)
    __table_args__ = (db.UniqueConstraint('board_id', 'name'),)

    def url_for(self, action='view', **kwargs):
subdomain = self.board.name if self.board.not_root else None
if action == 'view':
return url_for('browse_by_location', location=self.name, subdomain=subdomain, **kwargs)
elif action == 'edit':
            return url_for('location_edit', name=self.name, subdomain=subdomain, **kwargs)

    @classmethod
def get(cls, name, board):
return cls.query.filter_by(name=name, board=board).one_or_none()
|
from . import db, BaseScopedNameMixin
from flask import url_for
from .board import Board

__all__ = ['Location']


class Location(BaseScopedNameMixin, db.Model):
"""
A location where jobs are listed, using geonameid for primary key. Scoped to a board
"""
__tablename__ = 'location'
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
geonameid = db.synonym('id')
board_id = db.Column(None, db.ForeignKey('board.id'), nullable=False, primary_key=True, index=True)
board = db.relationship(Board, backref=db.backref('locations', lazy='dynamic', cascade='all, delete-orphan'))
parent = db.synonym('board')
#: Landing page description
description = db.Column(db.UnicodeText, nullable=True)
    __table_args__ = (db.UniqueConstraint('board_id', 'name'),)

    def url_for(self, action='view', **kwargs):
subdomain = self.board.name if self.board.not_root else None
if action == 'view':
return url_for('browse_by_location', location=self.name, subdomain=subdomain, **kwargs)
elif action == 'edit':
            return url_for('location_edit', name=self.name, subdomain=subdomain, **kwargs)

    @classmethod
def get(cls, name, board):
return cls.query.filter_by(name=name, board=board).one_or_none()
| Fix parent synonym for Location model | Fix parent synonym for Location model
| Python | agpl-3.0 | hasgeek/hasjob,hasgeek/hasjob,hasgeek/hasjob,hasgeek/hasjob |
from . import db, BaseScopedNameMixin
from flask import url_for
from .board import Board

__all__ = ['Location']


class Location(BaseScopedNameMixin, db.Model):
"""
A location where jobs are listed, using geonameid for primary key. Scoped to a board
"""
__tablename__ = 'location'
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
geonameid = db.synonym('id')
board_id = db.Column(None, db.ForeignKey('board.id'), nullable=False, primary_key=True, index=True)
- parent = db.synonym('board_id')
board = db.relationship(Board, backref=db.backref('locations', lazy='dynamic', cascade='all, delete-orphan'))
+ parent = db.synonym('board')
#: Landing page description
description = db.Column(db.UnicodeText, nullable=True)
    __table_args__ = (db.UniqueConstraint('board_id', 'name'),)

    def url_for(self, action='view', **kwargs):
subdomain = self.board.name if self.board.not_root else None
if action == 'view':
return url_for('browse_by_location', location=self.name, subdomain=subdomain, **kwargs)
elif action == 'edit':
            return url_for('location_edit', name=self.name, subdomain=subdomain, **kwargs)

    @classmethod
def get(cls, name, board):
return cls.query.filter_by(name=name, board=board).one_or_none()
| Fix parent synonym for Location model | ## Code Before:
from . import db, BaseScopedNameMixin
from flask import url_for
from .board import Board
__all__ = ['Location']
class Location(BaseScopedNameMixin, db.Model):
"""
A location where jobs are listed, using geonameid for primary key. Scoped to a board
"""
__tablename__ = 'location'
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
geonameid = db.synonym('id')
board_id = db.Column(None, db.ForeignKey('board.id'), nullable=False, primary_key=True, index=True)
parent = db.synonym('board_id')
board = db.relationship(Board, backref=db.backref('locations', lazy='dynamic', cascade='all, delete-orphan'))
#: Landing page description
description = db.Column(db.UnicodeText, nullable=True)
__table_args__ = (db.UniqueConstraint('board_id', 'name'),)
def url_for(self, action='view', **kwargs):
subdomain = self.board.name if self.board.not_root else None
if action == 'view':
return url_for('browse_by_location', location=self.name, subdomain=subdomain, **kwargs)
elif action == 'edit':
return url_for('location_edit', name=self.name, subdomain=subdomain, **kwargs)
@classmethod
def get(cls, name, board):
return cls.query.filter_by(name=name, board=board).one_or_none()
## Instruction:
Fix parent synonym for Location model
## Code After:
from . import db, BaseScopedNameMixin
from flask import url_for
from .board import Board
__all__ = ['Location']
class Location(BaseScopedNameMixin, db.Model):
"""
A location where jobs are listed, using geonameid for primary key. Scoped to a board
"""
__tablename__ = 'location'
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
geonameid = db.synonym('id')
board_id = db.Column(None, db.ForeignKey('board.id'), nullable=False, primary_key=True, index=True)
board = db.relationship(Board, backref=db.backref('locations', lazy='dynamic', cascade='all, delete-orphan'))
parent = db.synonym('board')
#: Landing page description
description = db.Column(db.UnicodeText, nullable=True)
__table_args__ = (db.UniqueConstraint('board_id', 'name'),)
def url_for(self, action='view', **kwargs):
subdomain = self.board.name if self.board.not_root else None
if action == 'view':
return url_for('browse_by_location', location=self.name, subdomain=subdomain, **kwargs)
elif action == 'edit':
return url_for('location_edit', name=self.name, subdomain=subdomain, **kwargs)
@classmethod
def get(cls, name, board):
return cls.query.filter_by(name=name, board=board).one_or_none()
|
401f98ad74792e9a5d9354dec8c24dc9637d1f5e | tests/gsim/pezeshk_2011_test.py | tests/gsim/pezeshk_2011_test.py |
from openquake.hazardlib.gsim.pezeshk_2011 import Pezeshk2011
from tests.gsim.utils import BaseGSIMTestCase


class Pezeshk2011TestCase(BaseGSIMTestCase):
GSIM_CLASS = Pezeshk2011
# Test data were obtained from a tool given by the authors
# The data of the values of the mean PGA and SA are in g's.
def test_mean(self):
self.check('PEZE11/PZ11_MEAN.csv',
                   max_discrep_percentage=0.5)

    def test_std_total(self):
self.check('PEZE11/PZ11_STD_TOTAL.csv',
max_discrep_percentage=0.5)
|
from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011
from tests.gsim.utils import BaseGSIMTestCase


class Pezeshk2011EtAlTestCase(BaseGSIMTestCase):
GSIM_CLASS = PezeshkEtAl2011
# Test data were obtained from a tool given by the authors
# The data of the values of the mean PGA and SA are in g's.
def test_mean(self):
self.check('PEZE11/PZ11_MEAN.csv',
                   max_discrep_percentage=0.5)

    def test_std_total(self):
self.check('PEZE11/PZ11_STD_TOTAL.csv',
max_discrep_percentage=0.5)
| Add implementation of GMPE Pezeshk et al. 2011 for ENA | Add implementation of GMPE Pezeshk et al. 2011 for ENA
| Python | agpl-3.0 | vup1120/oq-hazardlib,gem/oq-engine,g-weatherill/oq-hazardlib,gem/oq-hazardlib,gem/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-engine,gem/oq-engine,rcgee/oq-hazardlib,mmpagani/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,vup1120/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,larsbutler/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,larsbutler/oq-hazardlib,silviacanessa/oq-hazardlib,gem/oq-engine,larsbutler/oq-hazardlib,g-weatherill/oq-hazardlib,rcgee/oq-hazardlib,vup1120/oq-hazardlib,ROB-Seismology/oq-hazardlib,gem/oq-engine,mmpagani/oq-hazardlib,mmpagani/oq-hazardlib |
- from openquake.hazardlib.gsim.pezeshk_2011 import Pezeshk2011
+ from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011
from tests.gsim.utils import BaseGSIMTestCase


- class Pezeshk2011TestCase(BaseGSIMTestCase):
+ class Pezeshk2011EtAlTestCase(BaseGSIMTestCase):
- GSIM_CLASS = Pezeshk2011
+ GSIM_CLASS = PezeshkEtAl2011
# Test data were obtained from a tool given by the authors
# The data of the values of the mean PGA and SA are in g's.
def test_mean(self):
self.check('PEZE11/PZ11_MEAN.csv',
                   max_discrep_percentage=0.5)

    def test_std_total(self):
self.check('PEZE11/PZ11_STD_TOTAL.csv',
max_discrep_percentage=0.5)
| Add implementation of GMPE Pezeshk et al. 2011 for ENA
from openquake.hazardlib.gsim.pezeshk_2011 import Pezeshk2011
from tests.gsim.utils import BaseGSIMTestCase
class Pezeshk2011TestCase(BaseGSIMTestCase):
GSIM_CLASS = Pezeshk2011
# Test data were obtained from a tool given by the authors
# The data of the values of the mean PGA and SA are in g's.
def test_mean(self):
self.check('PEZE11/PZ11_MEAN.csv',
max_discrep_percentage=0.5)
def test_std_total(self):
self.check('PEZE11/PZ11_STD_TOTAL.csv',
max_discrep_percentage=0.5)
## Instruction:
Add implementation of gmpe Pezeshk et al 2011 for ENA
## Code After:
from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011
from tests.gsim.utils import BaseGSIMTestCase
class Pezeshk2011EtAlTestCase(BaseGSIMTestCase):
GSIM_CLASS = PezeshkEtAl2011
# Test data were obtained from a tool given by the authors
# The data of the values of the mean PGA and SA are in g's.
def test_mean(self):
self.check('PEZE11/PZ11_MEAN.csv',
max_discrep_percentage=0.5)
def test_std_total(self):
self.check('PEZE11/PZ11_STD_TOTAL.csv',
max_discrep_percentage=0.5)
|
87771bda7fbf46519097ba433a7b4fd3f2cbaa7e | office_lunch_order/office_lunch_order_app/tests.py | office_lunch_order/office_lunch_order_app/tests.py | from django.test import TestCase, Client

c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
print(response.status_code) # 302 found | from django.test import TestCase, Client

c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.get('/officelunchorder/add_order/25/') # existing order_id
response.status_code # 302 found
response = c.get('/officelunchorder/order_details/25/') # existing order_id
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
response.status_code # 302 found
| Test add_order and order_details with an existing order_id URL | Test add_order and order_details with an existing order_id URL
| Python | epl-1.0 | MariuszKorotko/Office_Lunch_Order,MariuszKorotko/Office_Lunch_Order | from django.test import TestCase, Client

c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
+ response = c.get('/officelunchorder/add_order/25/') # existing order_id
+ response.status_code # 302 found
+ response = c.get('/officelunchorder/order_details/25/') # existing order_id
+ response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
- print(response.status_code) # 302 found
+ response.status_code # 302 found
+
+ | Test add_order and order_details with an existing order_id URL
from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
print(response.status_code) # 302 found
## Instruction:
Test add_order and order details with existing order_id url
## Code After:
from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.get('/officelunchorder/add_order/25/') # existing order_id
response.status_code # 302 found
response = c.get('/officelunchorder/order_details/25/') # existing order_id
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
response.status_code # 302 found
|