The dataset schema (column names with per-column string statistics), reconstructed from the flattened column dump:

| Column       | Type          | Stats          |
|--------------|---------------|----------------|
| commit       | stringlengths | 40 to 40       |
| old_file     | stringlengths | 4 to 118       |
| new_file     | stringlengths | 4 to 118       |
| old_contents | stringlengths | 10 to 2.94k    |
| new_contents | stringlengths | 21 to 3.18k    |
| subject      | stringlengths | 16 to 444      |
| message      | stringlengths | 17 to 2.63k    |
| lang         | stringclasses | 1 value        |
| license      | stringclasses | 13 values      |
| repos        | stringlengths | 5 to 43k       |
| ndiff        | stringlengths | 51 to 3.32k    |
| instruction  | stringlengths | 16 to 444      |
| content      | stringlengths | 133 to 4.32k   |
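For concreteness, here is one way a row with this schema could be loaded and inspected. This is a minimal sketch, not part of the dataset itself: it assumes the data is published as a Hugging Face dataset with a `train` split, and the hub ID `user/commit-edits` is a hypothetical placeholder, since no actual hub path is given above.

```python
from datasets import load_dataset

# "user/commit-edits" is a hypothetical placeholder ID; the real hub path
# is not stated in the schema above.
ds = load_dataset("user/commit-edits", split="train")

row = ds[0]
# Each row pairs one file's contents before and after a single commit.
print(row["commit"])             # 40-character commit SHA
print(row["old_file"])           # path of the file that was changed
print(row["subject"])            # first line of the commit message
print(row["lang"])               # always "Python" (1 value per the schema)
print(row["old_contents"][:80])  # file contents before the commit
print(row["new_contents"][:80])  # file contents after the commit
```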
commit: a8818e2058fdfaec7f283a5115619d42d23b7dde
old_file: anchorhub/builtin/github/writer.py
new_file: anchorhub/builtin/github/writer.py
old_contents:
```python
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)
```
new_contents:
```python
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    setext = MarkdownSetextWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, setext, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)
```
subject: Use Setext strategy in GitHub built in Writer
message: Use Setext strategy in GitHub built in Writer
lang: Python
license: apache-2.0
repos: samjabrahams/anchorhub
ndiff:
```diff
  from anchorhub.writer import Writer
  from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
      MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
  import anchorhub.builtin.github.switches as ghswitches


  def make_github_markdown_writer(opts):
      """
      Creates a Writer object used for parsing and writing Markdown files with
      a GitHub style anchor transformation

      :param opts:
      :return: A Writer object designed for parsing, modifying, and writing
          AnchorHub tags to converted anchors in Markdown files using GitHub
          style anchors
      """
      assert hasattr(opts, 'wrapper_regex')
      atx = MarkdownATXWriterStrategy(opts)
+     setext = MarkdownSetextWriterStrategy(opts)
      inline = MarkdownInlineLinkWriterStrategy(opts)
      code_block_switch = ghswitches.code_block_switch

-     strategies = [atx, inline]
+     strategies = [atx, setext, inline]
      switches = [code_block_switch]

      return Writer(strategies, switches=switches)
```
instruction: Use Setext strategy in GitHub built in Writer
content:
```
## Code Before:
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)

## Instruction:
Use Setext strategy in GitHub built in Writer

## Code After:
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    setext = MarkdownSetextWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, setext, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)
```
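As this first record shows, the `content` column is a prompt-style concatenation of the other columns. A sketch of how such a field could be assembled is below; the exact whitespace in the stored strings may differ, so treat this as illustrative rather than as the generator actually used for this dataset.

```python
def build_content(row):
    # Mirrors the visible "## Code Before / ## Instruction / ## Code After"
    # layout of the content column.
    return (
        "## Code Before:\n" + row["old_contents"] + "\n"
        "## Instruction:\n" + row["instruction"] + "\n"
        "## Code After:\n" + row["new_contents"] + "\n"
    )
```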
commit: c154d79ba13d95f3240efd9eb4725cf9fc16060f
old_file: forms.py
new_file: forms.py
old_contents:
```python
from flask_wtf import Form
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email


class Login(Form):
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
```
new_contents:
```python
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email


class Login(FlaskForm):
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
```
subject: Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
message: Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
lang: Python
license: mit
repos: openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
ndiff:
```diff
- from flask_wtf import Form
+ from flask_wtf import FlaskForm
  from wtforms import StringField, PasswordField
  from wtforms.validators import DataRequired, Email


- class Login(Form):
+ class Login(FlaskForm):
      username = StringField('Username', validators=[DataRequired()])
      password = PasswordField('Password', validators=[DataRequired()])
```
instruction: Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
content:
```
## Code Before:
from flask_wtf import Form
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email


class Login(Form):
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])

## Instruction:
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm

## Code After:
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email


class Login(FlaskForm):
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
```
commit: ce95e50b7cb3ef9bbabddb033352aacb96b9237a
old_file: pywikibot/families/wikivoyage_family.py
new_file: pywikibot/families/wikivoyage_family.py
old_contents:
```python
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2016
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals

__version__ = '$Id$'

# The new wikivoyage family that is hosted at wikimedia

from pywikibot import family


class Family(family.SubdomainFamily, family.WikimediaFamily):

    """Family class for Wikivoyage."""

    name = 'wikivoyage'

    def __init__(self):
        """Constructor."""
        self.languages_by_size = [
            'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'es', 'he',
            'zh', 'vi', 'sv', 'el', 'ro', 'uk',
        ]

        super(Family, self).__init__()

        # Global bot allowed languages on
        # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
        self.cross_allowed = ['es', 'ru', ]
```
new_contents:
```python
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2016
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals

from pywikibot import family

__version__ = '$Id$'


class Family(family.SubdomainFamily, family.WikimediaFamily):

    """Family class for Wikivoyage."""

    name = 'wikivoyage'

    def __init__(self):
        """Constructor."""
        self.languages_by_size = [
            'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'fi', 'es',
            'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk',
        ]

        super(Family, self).__init__()

        # Global bot allowed languages on
        # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
        self.cross_allowed = ['es', 'ru', ]
```
subject: Add fi:wikivoyage and sort by current article count
message:
Add fi:wikivoyage and sort by current article count

Fix also pycodestyle (former PEP8) E402 problem

Bug: T153470
Change-Id: Id9bc980c7a9cfb21063597a3d5eae11c31d8040c
lang: Python
license: mit
repos: Darkdadaah/pywikibot-core,magul/pywikibot-core,jayvdb/pywikibot-core,hasteur/g13bot_tools_new,happy5214/pywikibot-core,magul/pywikibot-core,happy5214/pywikibot-core,Darkdadaah/pywikibot-core,npdoty/pywikibot,wikimedia/pywikibot-core,PersianWikipedia/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core,hasteur/g13bot_tools_new,npdoty/pywikibot,wikimedia/pywikibot-core
ndiff:
```diff
  """Family module for Wikivoyage."""
  #
  # (C) Pywikibot team, 2012-2016
  #
  # Distributed under the terms of the MIT license.
  #
+ # The new wikivoyage family that is hosted at wikimedia
  from __future__ import absolute_import, unicode_literals
+ 
+ from pywikibot import family
+ 
  __version__ = '$Id$'
- 
- # The new wikivoyage family that is hosted at wikimedia
- 
- from pywikibot import family


  class Family(family.SubdomainFamily, family.WikimediaFamily):

      """Family class for Wikivoyage."""

      name = 'wikivoyage'

      def __init__(self):
          """Constructor."""
          self.languages_by_size = [
-             'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'es', 'he',
+             'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'fi', 'es',
-             'zh', 'vi', 'sv', 'el', 'ro', 'uk',
+             'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk',
          ]

          super(Family, self).__init__()

          # Global bot allowed languages on
          # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
          self.cross_allowed = ['es', 'ru', ]
```
instruction: Add fi:wikivoyage and sort by current article count
content:
```
## Code Before:
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2016
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals

__version__ = '$Id$'

# The new wikivoyage family that is hosted at wikimedia

from pywikibot import family


class Family(family.SubdomainFamily, family.WikimediaFamily):

    """Family class for Wikivoyage."""

    name = 'wikivoyage'

    def __init__(self):
        """Constructor."""
        self.languages_by_size = [
            'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'es', 'he',
            'zh', 'vi', 'sv', 'el', 'ro', 'uk',
        ]

        super(Family, self).__init__()

        # Global bot allowed languages on
        # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
        self.cross_allowed = ['es', 'ru', ]

## Instruction:
Add fi:wikivoyage and sort by current article count

## Code After:
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2016
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals

from pywikibot import family

__version__ = '$Id$'


class Family(family.SubdomainFamily, family.WikimediaFamily):

    """Family class for Wikivoyage."""

    name = 'wikivoyage'

    def __init__(self):
        """Constructor."""
        self.languages_by_size = [
            'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'fi', 'es',
            'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk',
        ]

        super(Family, self).__init__()

        # Global bot allowed languages on
        # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
        self.cross_allowed = ['es', 'ru', ]
```
commit: 38216f9d1b875c31b97c80bb9217557e67c92ff3
old_file: spicedham/backend.py
new_file: spicedham/backend.py
old_contents:
```python
class BaseBackend(object):
    """
    A base class for backend plugins.
    """

    def __init__(self, config):
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classifier, key, default=None):
        """
        Gets the value held by the classifier, key composite key.
        If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, classifier, keys, default=None):
        """
        Given a list of key, classifier pairs get all values.
        If key, classifier doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.get_key(classifier, key, default)
                for key in keys]

    def set_key_list(self, classifier, key_value_pairs):
        """
        Given a list of pairs of key, value and a classifier set them all.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.set_key(classifier, key, value)
                for key, value in key_value_pairs]

    def set_key(self, classifier, key, value):
        """
        Set the value held by the classifier, key composite key.
        """
        raise NotImplementedError()
```
new_contents:
```python
class BaseBackend(object):
    """
    A base class for backend plugins.
    """

    def __init__(self, config):
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classification_type, classifier, key, default=None):
        """
        Gets the value held by the classifier, key composite key.
        If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, classification_type, classifier, keys, default=None):
        """
        Given a list of key, classifier pairs get all values.
        If key, classifier doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.get_key(classification_type, classifier, key, default)
                for classifier, key in izip(repeat(classifier), keys)]

    def set_key_list(self, classification_type, classifier, key_value_pairs):
        """
        Given a list of pairs of key, value and a classifier set them all.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.set_key(classification_type, classifier, key, value)
                for classifier, key, value
                in izip(repeat(classifier), key_value_pairs)]

    def set_key(self, classification_type, classifier, key, value):
        """
        Set the value held by the classifier, key composite key.
        """
        raise NotImplementedError()
```
subject: Add classifier type to the base class
message: Add classifier type to the base class
lang: Python
license: mpl-2.0
repos: mozilla/spicedham,mozilla/spicedham
ndiff:
```diff
  class BaseBackend(object):
      """
      A base class for backend plugins.
      """

      def __init__(self, config):
          pass

      def reset(self):
          """
          Resets the training data to a blank slate.
          """
          raise NotImplementedError()
+ 
-     def get_key(self, classifier, key, default=None):
+     def get_key(self, classification_type, classifier, key, default=None):
          """
          Gets the value held by the classifier, key composite key.
          If it doesn't exist, return default.
          """
          raise NotImplementedError()

-     def get_key_list(self, classifier, keys, default=None):
+     def get_key_list(self, classification_type, classifier, keys, default=None):
          """
          Given a list of key, classifier pairs get all values.
          If key, classifier doesn't exist, return default.
          Subclasses can override this to make more efficient queries for bulk
          requests.
          """
-         return [self.get_key(classifier, key, default)
+         return [self.get_key(classification_type, classifier, key, default)
-                 for key in keys]
+                 for classifier, key in izip(repeat(classifier), keys)]

-     def set_key_list(self, classifier, key_value_pairs):
+     def set_key_list(self, classification_type, classifier, key_value_pairs):
          """
          Given a list of pairs of key, value and a classifier set them all.
          Subclasses can override this to make more efficient queries for bulk
          requests.
          """
-         return [self.set_key(classifier, key, value)
+         return [self.set_key(classification_type, classifier, key, value)
-                 for key, value in key_value_pairs]
+                 for classifier, key, value
+                 in izip(repeat(classifier), key_value_pairs)]

-     def set_key(self, classifier, key, value):
+     def set_key(self, classification_type, classifier, key, value):
          """
          Set the value held by the classifier, key composite key.
          """
          raise NotImplementedError()
```
instruction: Add classifier type to the base class
content:
```
## Code Before:
class BaseBackend(object):
    """
    A base class for backend plugins.
    """

    def __init__(self, config):
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classifier, key, default=None):
        """
        Gets the value held by the classifier, key composite key.
        If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, classifier, keys, default=None):
        """
        Given a list of key, classifier pairs get all values.
        If key, classifier doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.get_key(classifier, key, default)
                for key in keys]

    def set_key_list(self, classifier, key_value_pairs):
        """
        Given a list of pairs of key, value and a classifier set them all.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.set_key(classifier, key, value)
                for key, value in key_value_pairs]

    def set_key(self, classifier, key, value):
        """
        Set the value held by the classifier, key composite key.
        """
        raise NotImplementedError()

## Instruction:
Add classifier type to the base class

## Code After:
class BaseBackend(object):
    """
    A base class for backend plugins.
    """

    def __init__(self, config):
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classification_type, classifier, key, default=None):
        """
        Gets the value held by the classifier, key composite key.
        If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, classification_type, classifier, keys, default=None):
        """
        Given a list of key, classifier pairs get all values.
        If key, classifier doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.get_key(classification_type, classifier, key, default)
                for classifier, key in izip(repeat(classifier), keys)]

    def set_key_list(self, classification_type, classifier, key_value_pairs):
        """
        Given a list of pairs of key, value and a classifier set them all.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.set_key(classification_type, classifier, key, value)
                for classifier, key, value
                in izip(repeat(classifier), key_value_pairs)]

    def set_key(self, classification_type, classifier, key, value):
        """
        Set the value held by the classifier, key composite key.
        """
        raise NotImplementedError()
```
commit: ba2f2d7e53f0ffc58c882d78f1b8bc9a468eb164
old_file: predicates.py
new_file: predicates.py
old_contents:
```python
class OneOf:
    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        if candidate in self.members:
            return True
        return "%s not in %s" % (candidate, self.members)

    def __repr__(self):
        return "one of %s" % ', '.join(self.members)

def oneof(*members):
    return OneOf(members)

class InRange:
    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        if self.start <= candidate <= self.end:
            return True
        return "%s not between %s and %s" % (candidate, self.start, self.end)

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)

def inrange(start, end):
    return InRange(start, end)
```
new_contents:
```python
class OneOf:
    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        if candidate in self.members:
            return True
        return "%s not in %s" % (candidate, self.members)

    def __repr__(self):
        return "one of %s" % ', '.join(map(repr, self.members))

def oneof(*members):
    return OneOf(members)

class InRange:
    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        if self.start <= candidate <= self.end:
            return True
        return "%s not between %s and %s" % (candidate, self.start, self.end)

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)

def inrange(start, end):
    return InRange(start, end)
```
subject: Fix problem rendering oneof() predicate when the members aren't strings
message: Fix problem rendering oneof() predicate when the members aren't strings
lang: Python
license: mit
repos: mrozekma/pytypecheck
ndiff:
```diff
  class OneOf:
      def __init__(self, members):
          self.members = members

      def __call__(self, candidate):
          if candidate in self.members:
              return True
          return "%s not in %s" % (candidate, self.members)

      def __repr__(self):
-         return "one of %s" % ', '.join(self.members)
+         return "one of %s" % ', '.join(map(repr, self.members))

  def oneof(*members):
      return OneOf(members)

  class InRange:
      def __init__(self, start, end):
          self.start = start
          self.end = end

      def __call__(self, candidate):
          if self.start <= candidate <= self.end:
              return True
          return "%s not between %s and %s" % (candidate, self.start, self.end)

      def __repr__(self):
          return "between %s and %s" % (self.start, self.end)

  def inrange(start, end):
      return InRange(start, end)
```
instruction: Fix problem rendering oneof() predicate when the members aren't strings
content:
```
## Code Before:
class OneOf:
    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        if candidate in self.members:
            return True
        return "%s not in %s" % (candidate, self.members)

    def __repr__(self):
        return "one of %s" % ', '.join(self.members)

def oneof(*members):
    return OneOf(members)

class InRange:
    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        if self.start <= candidate <= self.end:
            return True
        return "%s not between %s and %s" % (candidate, self.start, self.end)

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)

def inrange(start, end):
    return InRange(start, end)

## Instruction:
Fix problem rendering oneof() predicate when the members aren't strings

## Code After:
class OneOf:
    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        if candidate in self.members:
            return True
        return "%s not in %s" % (candidate, self.members)

    def __repr__(self):
        return "one of %s" % ', '.join(map(repr, self.members))

def oneof(*members):
    return OneOf(members)

class InRange:
    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        if self.start <= candidate <= self.end:
            return True
        return "%s not between %s and %s" % (candidate, self.start, self.end)

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)

def inrange(start, end):
    return InRange(start, end)
```
commit: 7955e777d6ba3bbbd104bd3916f131ab7fa8f8b5
old_file: asyncmongo/__init__.py
new_file: asyncmongo/__init__.py
old_contents:
```python
try:
    import bson
except ImportError:
    raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver")

# also update in setup.py
version = "1.3"
version_info = (1, 3)

ASCENDING = 1
"""Ascending sort order."""

DESCENDING = -1
"""Descending sort order."""

GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`"""

from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError,
                    DataError, IntegrityError, ProgrammingError, NotSupportedError)

from client import Client
```
new_contents:
```python
try:
    import bson
except ImportError:
    raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver")

# also update in setup.py
version = "1.3"
version_info = (1, 3)

ASCENDING = 1
"""Ascending sort order."""

DESCENDING = -1
"""Descending sort order."""

GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`"""

TEXT = '{ $meta: "textScore" }'
"""TEXT Index sort order."""

from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError,
                    DataError, IntegrityError, ProgrammingError, NotSupportedError)

from client import Client
```
subject: Support Sort Order For TEXT Index
message: Support Sort Order For TEXT Index
lang: Python
license: apache-2.0
repos: RealGeeks/asyncmongo
ndiff:
```diff
  try:
      import bson
  except ImportError:
      raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver")

  # also update in setup.py
  version = "1.3"
  version_info = (1, 3)

  ASCENDING = 1
  """Ascending sort order."""

  DESCENDING = -1
  """Descending sort order."""

  GEO2D = "2d"
  """Index specifier for a 2-dimensional `geospatial index`"""

+ TEXT = '{ $meta: "textScore" }'
+ """TEXT Index sort order."""

  from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError,
                      DataError, IntegrityError, ProgrammingError, NotSupportedError)

  from client import Client
```
instruction: Support Sort Order For TEXT Index
content:
```
## Code Before:
try:
    import bson
except ImportError:
    raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver")

# also update in setup.py
version = "1.3"
version_info = (1, 3)

ASCENDING = 1
"""Ascending sort order."""

DESCENDING = -1
"""Descending sort order."""

GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`"""

from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError,
                    DataError, IntegrityError, ProgrammingError, NotSupportedError)

from client import Client

## Instruction:
Support Sort Order For TEXT Index

## Code After:
try:
    import bson
except ImportError:
    raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver")

# also update in setup.py
version = "1.3"
version_info = (1, 3)

ASCENDING = 1
"""Ascending sort order."""

DESCENDING = -1
"""Descending sort order."""

GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`"""

TEXT = '{ $meta: "textScore" }'
"""TEXT Index sort order."""

from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError,
                    DataError, IntegrityError, ProgrammingError, NotSupportedError)

from client import Client
```
commit: 26efd98c88a627f76ebd0865053353eb7a30e3bb
old_file: .glerbl/repo_conf.py
new_file: .glerbl/repo_conf.py
old_contents:
```python
checks = {
    'pre-commit': [
        # BEFORE_COMMIT in the root of the working tree can be used as
        # reminder to do something before the next commit.
        "no_before_commit",
        # We only allow ASCII filenames.
        "no_non_ascii_filenames",
        # We don't allow trailing whitespaces.
        "no_trailing_whitespace",
        # Python files must conform to PEP8
        "python_pep8",
        # Python files must not have trailing semicolons
        "python_no_trailing_semicolon"
    ]
}
```
new_contents:
```python
import sys
import os

dirname = os.path.dirname(__file__)

python_path = os.path.join(os.path.dirname(dirname), "selenium_test", "lib")
if "PYTHONPATH" not in os.environ:
    os.environ["PYTHONPATH"] = python_path
else:
    os.environ["PYTHONPATH"] = python_path + ":" + os.environ["PYTHONPATH"]

checks = {
    'pre-commit': [
        # BEFORE_COMMIT in the root of the working tree can be used as
        # reminder to do something before the next commit.
        "no_before_commit",
        # We only allow ASCII filenames.
        "no_non_ascii_filenames",
        # We don't allow trailing whitespaces.
        "no_trailing_whitespace",
        # Python files must conform to PEP8
        "python_pep8",
        # Python files must not have trailing semicolons
        "python_no_trailing_semicolon"
    ]
}
```
subject: Modify PYTHONPATH so that pylint is able to find wedutil.
message: Modify PYTHONPATH so that pylint is able to find wedutil.
lang: Python
license: mpl-2.0
repos: mangalam-research/wed,slattery/wed,lddubeau/wed,slattery/wed,mangalam-research/wed,slattery/wed,mangalam-research/wed,lddubeau/wed,mangalam-research/wed,lddubeau/wed,lddubeau/wed
ndiff:
```diff
+ import sys
+ import os
+ 
+ dirname = os.path.dirname(__file__)
+ 
+ python_path = os.path.join(os.path.dirname(dirname), "selenium_test", "lib")
+ if "PYTHONPATH" not in os.environ:
+     os.environ["PYTHONPATH"] = python_path
+ else:
+     os.environ["PYTHONPATH"] = python_path + ":" + os.environ["PYTHONPATH"]
+ 
  checks = {
      'pre-commit': [
          # BEFORE_COMMIT in the root of the working tree can be used as
          # reminder to do something before the next commit.
          "no_before_commit",
          # We only allow ASCII filenames.
          "no_non_ascii_filenames",
          # We don't allow trailing whitespaces.
          "no_trailing_whitespace",
          # Python files must conform to PEP8
          "python_pep8",
          # Python files must not have trailing semicolons
          "python_no_trailing_semicolon"
      ]
  }
```
instruction: Modify PYTHONPATH so that pylint is able to find wedutil.
content:
```
## Code Before:
checks = {
    'pre-commit': [
        # BEFORE_COMMIT in the root of the working tree can be used as
        # reminder to do something before the next commit.
        "no_before_commit",
        # We only allow ASCII filenames.
        "no_non_ascii_filenames",
        # We don't allow trailing whitespaces.
        "no_trailing_whitespace",
        # Python files must conform to PEP8
        "python_pep8",
        # Python files must not have trailing semicolons
        "python_no_trailing_semicolon"
    ]
}

## Instruction:
Modify PYTHONPATH so that pylint is able to find wedutil.

## Code After:
import sys
import os

dirname = os.path.dirname(__file__)

python_path = os.path.join(os.path.dirname(dirname), "selenium_test", "lib")
if "PYTHONPATH" not in os.environ:
    os.environ["PYTHONPATH"] = python_path
else:
    os.environ["PYTHONPATH"] = python_path + ":" + os.environ["PYTHONPATH"]

checks = {
    'pre-commit': [
        # BEFORE_COMMIT in the root of the working tree can be used as
        # reminder to do something before the next commit.
        "no_before_commit",
        # We only allow ASCII filenames.
        "no_non_ascii_filenames",
        # We don't allow trailing whitespaces.
        "no_trailing_whitespace",
        # Python files must conform to PEP8
        "python_pep8",
        # Python files must not have trailing semicolons
        "python_no_trailing_semicolon"
    ]
}
```
commit: 7608d0e89781f70fcb49e7dc3ee5cd57a094f18c
old_file: rx/__init__.py
new_file: rx/__init__.py
old_contents:
```python
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer

from . import checkedobserver
from . import linq
from . import backpressure

try:
    from asyncio import Future
except ImportError:
    Future = None

# Rx configuration dictionary
config = {
    "Future" : Future
}
```
new_contents:
```python
from threading import Lock

from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer

from . import checkedobserver
from . import linq
from . import backpressure

try:
    from asyncio import Future
except ImportError:
    Future = None

# Rx configuration dictionary
config = {
    "Future" : Future,
    "Lock" : Lock
}
```
subject: Make it possible to set custom Lock
message: Make it possible to set custom Lock
lang: Python
license: mit
repos: ReactiveX/RxPY,ReactiveX/RxPY,dbrattli/RxPY
ndiff:
```diff
+ from threading import Lock
+ 
  from .observable import Observable
  from .anonymousobservable import AnonymousObservable
  from .observer import Observer

  from . import checkedobserver
  from . import linq
  from . import backpressure

  try:
      from asyncio import Future
  except ImportError:
      Future = None

  # Rx configuration dictionary
  config = {
-     "Future" : Future
+     "Future" : Future,
+     "Lock" : Lock
  }
```
instruction: Make it possible to set custom Lock
content:
```
## Code Before:
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer

from . import checkedobserver
from . import linq
from . import backpressure

try:
    from asyncio import Future
except ImportError:
    Future = None

# Rx configuration dictionary
config = {
    "Future" : Future
}

## Instruction:
Make it possible to set custom Lock

## Code After:
from threading import Lock

from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer

from . import checkedobserver
from . import linq
from . import backpressure

try:
    from asyncio import Future
except ImportError:
    Future = None

# Rx configuration dictionary
config = {
    "Future" : Future,
    "Lock" : Lock
}
```
commit: 0aa61fb32df9ae3ef9c465f4b246edf04897cd14
old_file: staticfiles/views.py
new_file: staticfiles/views.py
old_contents:
```python
from django.views.static import serve as django_serve

from staticfiles.resolvers import resolve


def serve(request, path, show_indexes=False):
    """
    Serve static files from locations inferred from INSTALLED_APPS and
    STATICFILES_DIRS.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'staticfiles.views.serve')

    in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd
    like to serve a basic index of the directory. This index view will use
    the template hardcoded below, but if you'd like to override it, you can
    create a template called ``static/directory_index``.
    """
    return django_serve(request, path='', document_root=resolve(path),
                        show_indexes=show_indexes)
```
new_contents:
```python
from django import http
from django.views.static import serve as django_serve

from staticfiles.resolvers import resolve


def serve(request, path, show_indexes=False):
    """
    Serve static files from locations inferred from INSTALLED_APPS and
    STATICFILES_DIRS.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'staticfiles.views.serve')

    in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd
    like to serve a basic index of the directory. This index view will use
    the template hardcoded below, but if you'd like to override it, you can
    create a template called ``static/directory_index``.
    """
    absolute_path = resolve(path)
    if not absolute_path:
        raise http.Http404('%r could not be resolved to a static file.' % path)
    return django_serve(request, path='', document_root=absolute_path,
                        show_indexes=show_indexes)
```
subject: Make the staticfiles serve view raise a 404 for paths which could not be resolved.
message: Make the staticfiles serve view raise a 404 for paths which could not be resolved.
lang: Python
license: bsd-3-clause
repos: tusbar/django-staticfiles,jezdez-archive/django-staticfiles,tusbar/django-staticfiles
ndiff:
```diff
+ from django import http
  from django.views.static import serve as django_serve

  from staticfiles.resolvers import resolve


  def serve(request, path, show_indexes=False):
      """
      Serve static files from locations inferred from INSTALLED_APPS and
      STATICFILES_DIRS.

      To use, put a URL pattern such as::

          (r'^(?P<path>.*)$', 'staticfiles.views.serve')

      in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd
      like to serve a basic index of the directory. This index view will use
      the template hardcoded below, but if you'd like to override it, you can
      create a template called ``static/directory_index``.
      """
+     absolute_path = resolve(path)
+     if not absolute_path:
+         raise http.Http404('%r could not be resolved to a static file.' % path)
-     return django_serve(request, path='', document_root=resolve(path),
+     return django_serve(request, path='', document_root=absolute_path,
                          show_indexes=show_indexes)
```
instruction: Make the staticfiles serve view raise a 404 for paths which could not be resolved.
content:
```
## Code Before:
from django.views.static import serve as django_serve

from staticfiles.resolvers import resolve


def serve(request, path, show_indexes=False):
    """
    Serve static files from locations inferred from INSTALLED_APPS and
    STATICFILES_DIRS.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'staticfiles.views.serve')

    in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd
    like to serve a basic index of the directory. This index view will use
    the template hardcoded below, but if you'd like to override it, you can
    create a template called ``static/directory_index``.
    """
    return django_serve(request, path='', document_root=resolve(path),
                        show_indexes=show_indexes)

## Instruction:
Make the staticfiles serve view raise a 404 for paths which could not be resolved.

## Code After:
from django import http
from django.views.static import serve as django_serve

from staticfiles.resolvers import resolve


def serve(request, path, show_indexes=False):
    """
    Serve static files from locations inferred from INSTALLED_APPS and
    STATICFILES_DIRS.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'staticfiles.views.serve')

    in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd
    like to serve a basic index of the directory. This index view will use
    the template hardcoded below, but if you'd like to override it, you can
    create a template called ``static/directory_index``.
    """
    absolute_path = resolve(path)
    if not absolute_path:
        raise http.Http404('%r could not be resolved to a static file.' % path)
    return django_serve(request, path='', document_root=absolute_path,
                        show_indexes=show_indexes)
```
commit: 979c56f882178ce49194850bd9e78c9dea4692dd
old_file: chardet/__init__.py
new_file: chardet/__init__.py
old_contents:
```python
from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION


def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{0}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    detector.close()
    return detector.result
```
new_contents:
```python
from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION


def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{0}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    return detector.close()
```
subject: Remove unnecessary line from detect
message: Remove unnecessary line from detect
lang: Python
license: lgpl-2.1
repos: ddboline/chardet,chardet/chardet,chardet/chardet,ddboline/chardet
ndiff:
```diff
  from .compat import PY2, PY3
  from .universaldetector import UniversalDetector
  from .version import __version__, VERSION


  def detect(byte_str):
      """
      Detect the encoding of the given byte string.

      :param byte_str: The byte sequence to examine.
      :type byte_str: ``bytes`` or ``bytearray``
      """
      if not isinstance(byte_str, bytearray):
          if not isinstance(byte_str, bytes):
              raise TypeError('Expected object of type bytes or bytearray, got: '
                              '{0}'.format(type(byte_str)))
          else:
              byte_str = bytearray(byte_str)
      detector = UniversalDetector()
      detector.feed(byte_str)
-     detector.close()
+     return detector.close()
-     return detector.result
```
instruction: Remove unnecessary line from detect
content:
```
## Code Before:
from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION


def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{0}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    detector.close()
    return detector.result

## Instruction:
Remove unnecessary line from detect

## Code After:
from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION


def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{0}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    return detector.close()
```
commit: 9ddc63eb0e1e3612ac4a1ea5b95e405ca0915b52
old_file: setup.py
new_file: setup.py
old_contents:
```python
from distutils.core import setup

setup(name="sysops-api",
      version="1.0",
      description="LinkedIn Redis / Cfengine API",
      author = "Mike Svoboda",
      author_email = "msvoboda@linkedin.com",
      py_modules=['CacheExtractor', 'RedisFinder'],
      data_files=[('/usr/local/bin', ['./scripts/extract_sysops_cache.py']),
                  ('/usr/local/bin', ['./scripts/extract_sysops_api_to_disk.py']),
                  ('/usr/local/bin', ['./scripts/extract_sysctl_live_vs_persistant_entries.py']),
                  ('/usr/local/bin', ['./scripts/extract_user_account_access.py']),
                  ('/usr/local/bin', ['./scripts/extract_user_sudo_privileges.py'])],
      package_dir={'': 'src'},
      packages = ['seco'],
      )
```
new_contents:
```python
from distutils.core import setup

setup(name="sysops-api",
      version="1.0",
      description="LinkedIn Redis / Cfengine API",
      author="Mike Svoboda",
      author_email="msvoboda@linkedin.com",
      py_modules=['CacheExtractor', 'RedisFinder'],
      scripts=['scripts/extract_sysops_cache.py',
               'scripts/extract_sysops_api_to_disk.py',
               'scripts/extract_sysctl_live_vs_persistant_entries.py',
               'scripts/extract_user_account_access.py',
               'scripts/extract_user_sudo_privileges.py'],
      package_dir={'': 'src'},
      packages=['seco'],
      )
```
subject: Install scripts properly rather than as datafiles
message:
Install scripts properly rather than as datafiles

- also fix whitespace
lang: Python
license: apache-2.0
repos: linkedin/sysops-api,linkedin/sysops-api,slietz/sysops-api,slietz/sysops-api
ndiff:
```diff
  from distutils.core import setup

  setup(name="sysops-api",
        version="1.0",
        description="LinkedIn Redis / Cfengine API",
-       author = "Mike Svoboda",
+       author="Mike Svoboda",
-       author_email = "msvoboda@linkedin.com",
+       author_email="msvoboda@linkedin.com",
        py_modules=['CacheExtractor', 'RedisFinder'],
-       data_files=[('/usr/local/bin', ['./scripts/extract_sysops_cache.py']),
+       scripts=['scripts/extract_sysops_cache.py',
-                   ('/usr/local/bin', ['./scripts/extract_sysops_api_to_disk.py']),
+                'scripts/extract_sysops_api_to_disk.py',
-                   ('/usr/local/bin', ['./scripts/extract_sysctl_live_vs_persistant_entries.py']),
+                'scripts/extract_sysctl_live_vs_persistant_entries.py',
-                   ('/usr/local/bin', ['./scripts/extract_user_account_access.py']),
+                'scripts/extract_user_account_access.py',
-                   ('/usr/local/bin', ['./scripts/extract_user_sudo_privileges.py'])],
+                'scripts/extract_user_sudo_privileges.py'],
        package_dir={'': 'src'},
-       packages = ['seco'],
+       packages=['seco'],
        )
```
instruction: Install scripts properly rather than as datafiles
content:
```
## Code Before:
from distutils.core import setup

setup(name="sysops-api",
      version="1.0",
      description="LinkedIn Redis / Cfengine API",
      author = "Mike Svoboda",
      author_email = "msvoboda@linkedin.com",
      py_modules=['CacheExtractor', 'RedisFinder'],
      data_files=[('/usr/local/bin', ['./scripts/extract_sysops_cache.py']),
                  ('/usr/local/bin', ['./scripts/extract_sysops_api_to_disk.py']),
                  ('/usr/local/bin', ['./scripts/extract_sysctl_live_vs_persistant_entries.py']),
                  ('/usr/local/bin', ['./scripts/extract_user_account_access.py']),
                  ('/usr/local/bin', ['./scripts/extract_user_sudo_privileges.py'])],
      package_dir={'': 'src'},
      packages = ['seco'],
      )

## Instruction:
Install scripts properly rather than as datafiles

## Code After:
from distutils.core import setup

setup(name="sysops-api",
      version="1.0",
      description="LinkedIn Redis / Cfengine API",
      author="Mike Svoboda",
      author_email="msvoboda@linkedin.com",
      py_modules=['CacheExtractor', 'RedisFinder'],
      scripts=['scripts/extract_sysops_cache.py',
               'scripts/extract_sysops_api_to_disk.py',
               'scripts/extract_sysctl_live_vs_persistant_entries.py',
               'scripts/extract_user_account_access.py',
               'scripts/extract_user_sudo_privileges.py'],
      package_dir={'': 'src'},
      packages=['seco'],
      )
```
commit: 2727fccdb3672e1c7b28e4ba94ec743b53298f26
old_file: src/main.py
new_file: src/main.py
old_contents:
```python
'''
Created on Aug 12, 2017

@author: Aditya

This is the main file and will import other modules/codes written for python tkinter demonstration
'''

import program1 as p1
import program2 as p2
import program3 as p3
import program4 as p4
import program5 as p5
import program6 as p6
import program7 as p7
import program8 as p8
import program9 as p9
import program10 as p10
import program11 as p11
import program12 as p12

def main():
    p1.sayhello()
    p2.HelloAppLaunch()
    p3.GreetingAppLaunch()
    p4.launchButtonApp()
    p5.launchButton2App()
    p6.launchEntryApp()
    p7.launchSimpleCalenderApp()
    p8.ControlledPorgressApp()
    p9.DisplayAppLaunch()
    p10.launchTopLevelApp()
    p11.launchPanedWindowApp()
    p12.launchNoteBookApp()

if __name__ == '__main__':main()
```
new_contents:
```python
'''
Created on Aug 12, 2017

@author: Aditya

This is the main file and will import other modules/codes written for python tkinter demonstration
'''

import program1 as p1
import program2 as p2
import program3 as p3
import program4 as p4
import program5 as p5
import program6 as p6
import program7 as p7
import program8 as p8
import program9 as p9
import program10 as p10
import program11 as p11
import program12 as p12
import program13 as p13

def main():
    p1.sayhello()
    p2.HelloAppLaunch()
    p3.GreetingAppLaunch()
    p4.launchButtonApp()
    p5.launchButton2App()
    p6.launchEntryApp()
    p7.launchSimpleCalenderApp()
    p8.ControlledPorgressApp()
    p9.DisplayAppLaunch()
    p10.launchTopLevelApp()
    p11.launchPanedWindowApp()
    p12.launchNoteBookApp()
    p13.launchApp()

if __name__ == '__main__':main()
```
subject: Include Text App in Main
message: Include Text App in Main
lang: Python
license: mit
repos: deshadi/python-gui-demos
ndiff:
```diff
  '''
  Created on Aug 12, 2017

  @author: Aditya

  This is the main file and will import other modules/codes written for python tkinter demonstration
  '''

  import program1 as p1
  import program2 as p2
  import program3 as p3
  import program4 as p4
  import program5 as p5
  import program6 as p6
  import program7 as p7
  import program8 as p8
  import program9 as p9
  import program10 as p10
  import program11 as p11
  import program12 as p12
+ import program13 as p13

  def main():
      p1.sayhello()
      p2.HelloAppLaunch()
      p3.GreetingAppLaunch()
      p4.launchButtonApp()
      p5.launchButton2App()
      p6.launchEntryApp()
      p7.launchSimpleCalenderApp()
      p8.ControlledPorgressApp()
      p9.DisplayAppLaunch()
      p10.launchTopLevelApp()
      p11.launchPanedWindowApp()
      p12.launchNoteBookApp()
+     p13.launchApp()

  if __name__ == '__main__':main()
```
instruction: Include Text App in Main
content:
```
## Code Before:
'''
Created on Aug 12, 2017

@author: Aditya

This is the main file and will import other modules/codes written for python tkinter demonstration
'''

import program1 as p1
import program2 as p2
import program3 as p3
import program4 as p4
import program5 as p5
import program6 as p6
import program7 as p7
import program8 as p8
import program9 as p9
import program10 as p10
import program11 as p11
import program12 as p12

def main():
    p1.sayhello()
    p2.HelloAppLaunch()
    p3.GreetingAppLaunch()
    p4.launchButtonApp()
    p5.launchButton2App()
    p6.launchEntryApp()
    p7.launchSimpleCalenderApp()
    p8.ControlledPorgressApp()
    p9.DisplayAppLaunch()
    p10.launchTopLevelApp()
    p11.launchPanedWindowApp()
    p12.launchNoteBookApp()

if __name__ == '__main__':main()

## Instruction:
Include Text App in Main

## Code After:
'''
Created on Aug 12, 2017

@author: Aditya

This is the main file and will import other modules/codes written for python tkinter demonstration
'''

import program1 as p1
import program2 as p2
import program3 as p3
import program4 as p4
import program5 as p5
import program6 as p6
import program7 as p7
import program8 as p8
import program9 as p9
import program10 as p10
import program11 as p11
import program12 as p12
import program13 as p13

def main():
    p1.sayhello()
    p2.HelloAppLaunch()
    p3.GreetingAppLaunch()
    p4.launchButtonApp()
    p5.launchButton2App()
    p6.launchEntryApp()
    p7.launchSimpleCalenderApp()
    p8.ControlledPorgressApp()
    p9.DisplayAppLaunch()
    p10.launchTopLevelApp()
    p11.launchPanedWindowApp()
    p12.launchNoteBookApp()
    p13.launchApp()

if __name__ == '__main__':main()
```
commit: 1e327401d9c020bb7941b20ff51890ad1729973d
old_file: tests.py
new_file: tests.py
old_contents:
```python
import pytest
from django.contrib.auth import get_user_model

from seleniumlogin import force_login

pytestmark = [pytest.mark.django_db(transaction=True)]


def test_non_authenticated_user_cannot_access_test_page(selenium, live_server):
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'fail' in selenium.page_source


def test_authenticated_user_can_access_blank_login_page(selenium, live_server):
    User = get_user_model()
    user = User.objects.create_user(username='selenium', password='password')
    force_login(user, selenium, live_server.url)
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'success' in selenium.page_source
```
new_contents:
```python
import pytest
from django.contrib.auth import get_user_model

from seleniumlogin import force_login

pytestmark = [pytest.mark.django_db(transaction=True)]


def test_non_authenticated_user_cannot_access_test_page(selenium, live_server):
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'fail' in selenium.page_source


def test_authenticated_user_can_access_test_page(selenium, live_server):
    User = get_user_model()
    user = User.objects.create_user(username='selenium', password='password')
    force_login(user, selenium, live_server.url)
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'success' in selenium.page_source
```
subject: Rename test. The test tries to access a test page, not a blank page
message: Rename test. The test tries to access a test page, not a blank page
lang: Python
license: mit
repos: feffe/django-selenium-login,feffe/django-selenium-login
ndiff:
```diff
  import pytest
  from django.contrib.auth import get_user_model

  from seleniumlogin import force_login

  pytestmark = [pytest.mark.django_db(transaction=True)]


  def test_non_authenticated_user_cannot_access_test_page(selenium, live_server):
      selenium.get('{}/test/login_required/'.format(live_server.url))
      assert 'fail' in selenium.page_source


- def test_authenticated_user_can_access_blank_login_page(selenium, live_server):
+ def test_authenticated_user_can_access_test_page(selenium, live_server):
      User = get_user_model()
      user = User.objects.create_user(username='selenium', password='password')
      force_login(user, selenium, live_server.url)
      selenium.get('{}/test/login_required/'.format(live_server.url))
      assert 'success' in selenium.page_source
```
instruction: Rename test. The test tries to access a test page, not a blank page
content:
```
## Code Before:
import pytest
from django.contrib.auth import get_user_model

from seleniumlogin import force_login

pytestmark = [pytest.mark.django_db(transaction=True)]


def test_non_authenticated_user_cannot_access_test_page(selenium, live_server):
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'fail' in selenium.page_source


def test_authenticated_user_can_access_blank_login_page(selenium, live_server):
    User = get_user_model()
    user = User.objects.create_user(username='selenium', password='password')
    force_login(user, selenium, live_server.url)
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'success' in selenium.page_source

## Instruction:
Rename test. The test tries to access a test page, not a blank page

## Code After:
import pytest
from django.contrib.auth import get_user_model

from seleniumlogin import force_login

pytestmark = [pytest.mark.django_db(transaction=True)]


def test_non_authenticated_user_cannot_access_test_page(selenium, live_server):
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'fail' in selenium.page_source


def test_authenticated_user_can_access_test_page(selenium, live_server):
    User = get_user_model()
    user = User.objects.create_user(username='selenium', password='password')
    force_login(user, selenium, live_server.url)
    selenium.get('{}/test/login_required/'.format(live_server.url))
    assert 'success' in selenium.page_source
```
commit: 741545dcf58fdfaf882d797d3ce4f7607ca0dad4
old_file: kobo/client/commands/cmd_resubmit_tasks.py
new_file: kobo/client/commands/cmd_resubmit_tasks.py
old_contents:
```python
from __future__ import print_function
import sys

from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand


class Resubmit_Tasks(ClientCommand):
    """resubmit failed tasks"""
    enabled = True

    def options(self):
        self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
        self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")

    def run(self, *args, **kwargs):
        if len(args) == 0:
            self.parser.error("At least one task id must be specified.")

        username = kwargs.pop("username", None)
        password = kwargs.pop("password", None)

        tasks = args
        self.set_hub(username, password)
        resubmitted_tasks = []
        failed = False
        for task_id in tasks:
            try:
                resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
                resubmitted_tasks.append(resubmitted_id)
            except Exception as ex:
                failed = True
                print(ex)

        TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)

        if failed:
            sys.exit(1)
```
new_contents:
```python
from __future__ import print_function
import sys

from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand


class Resubmit_Tasks(ClientCommand):
    """resubmit failed tasks"""
    enabled = True

    def options(self):
        self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
        self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
        self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.")

    def run(self, *args, **kwargs):
        if len(args) == 0:
            self.parser.error("At least one task id must be specified.")

        username = kwargs.pop("username", None)
        password = kwargs.pop("password", None)

        tasks = args
        self.set_hub(username, password)
        resubmitted_tasks = []
        failed = False
        for task_id in tasks:
            try:
                resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
                resubmitted_tasks.append(resubmitted_id)
            except Exception as ex:
                failed = True
                print(ex)

        if not kwargs.get('nowait'):
            TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)

        if failed:
            sys.exit(1)
```
subject: Add --nowait option to resubmit-tasks cmd
message:
Add --nowait option to resubmit-tasks cmd

In some use cases, waiting till the tasks finish is undesirable.
Nowait option should be provided.
lang: Python
license: lgpl-2.1
repos: release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo
ndiff:
```diff
  from __future__ import print_function
  import sys

  from kobo.client.task_watcher import TaskWatcher
  from kobo.client import ClientCommand


  class Resubmit_Tasks(ClientCommand):
      """resubmit failed tasks"""
      enabled = True

      def options(self):
          self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
          self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
+         self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.")

      def run(self, *args, **kwargs):
          if len(args) == 0:
              self.parser.error("At least one task id must be specified.")

          username = kwargs.pop("username", None)
          password = kwargs.pop("password", None)

          tasks = args
          self.set_hub(username, password)
          resubmitted_tasks = []
          failed = False
          for task_id in tasks:
              try:
                  resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
                  resubmitted_tasks.append(resubmitted_id)
              except Exception as ex:
                  failed = True
                  print(ex)

+         if not kwargs.get('nowait'):
-         TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
+             TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)

          if failed:
              sys.exit(1)
```
instruction: Add --nowait option to resubmit-tasks cmd
content:
```
## Code Before:
from __future__ import print_function
import sys

from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand


class Resubmit_Tasks(ClientCommand):
    """resubmit failed tasks"""
    enabled = True

    def options(self):
        self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
        self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")

    def run(self, *args, **kwargs):
        if len(args) == 0:
            self.parser.error("At least one task id must be specified.")

        username = kwargs.pop("username", None)
        password = kwargs.pop("password", None)

        tasks = args
        self.set_hub(username, password)
        resubmitted_tasks = []
        failed = False
        for task_id in tasks:
            try:
                resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
                resubmitted_tasks.append(resubmitted_id)
            except Exception as ex:
                failed = True
                print(ex)

        TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)

        if failed:
            sys.exit(1)

## Instruction:
Add --nowait option to resubmit-tasks cmd

## Code After:
from __future__ import print_function
import sys

from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand


class Resubmit_Tasks(ClientCommand):
    """resubmit failed tasks"""
    enabled = True

    def options(self):
        self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
        self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
        self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.")

    def run(self, *args, **kwargs):
        if len(args) == 0:
            self.parser.error("At least one task id must be specified.")

        username = kwargs.pop("username", None)
        password = kwargs.pop("password", None)

        tasks = args
        self.set_hub(username, password)
        resubmitted_tasks = []
        failed = False
        for task_id in tasks:
            try:
                resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
                resubmitted_tasks.append(resubmitted_id)
            except Exception as ex:
                failed = True
                print(ex)

        if not kwargs.get('nowait'):
            TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)

        if failed:
            sys.exit(1)
```
8e7a92bce03ca472bc78bb9df5e2c9cf063c29b7
temba/campaigns/tasks.py
temba/campaigns/tasks.py
from __future__ import unicode_literals from datetime import datetime from django.utils import timezone from djcelery_transactions import task from redis_cache import get_redis_connection from .models import Campaign, EventFire from django.conf import settings import redis from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT from temba.utils.queues import push_task @task(track_started=True, name='check_campaigns_task') # pragma: no cover def check_campaigns_task(sched_id=None): """ See if any event fires need to be triggered """ logger = check_campaigns_task.get_logger() # get a lock r = get_redis_connection() key = 'check_campaigns' # only do this if we aren't already checking campaigns if not r.get(key): with r.lock(key, timeout=3600): # for each that needs to be fired for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'): try: push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id)) except: # pragma: no cover logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
from __future__ import unicode_literals

from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task


@task(track_started=True, name='check_campaigns_task')  # pragma: no cover
def check_campaigns_task(sched_id=None):
    """
    See if any event fires need to be triggered
    """
    logger = check_campaigns_task.get_logger()

    # get a lock
    r = get_redis_connection()
    key = 'check_campaigns'

    # only do this if we aren't already checking campaigns
    if not r.get(key):
        with r.lock(key, timeout=3600):
            # for each that needs to be fired
            for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
                try:
                    push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
                except:  # pragma: no cover
                    logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
Use correct field to get org from
Use correct field to get org from
Python
agpl-3.0
harrissoerja/rapidpro,pulilab/rapidpro,pulilab/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,harrissoerja/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,Thapelo-Tsotetsi/rapidpro,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,praekelt/rapidpro,harrissoerja/rapidpro,praekelt/rapidpro,reyrodrigues/EU-SMS,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,ewheeler/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,praekelt/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,praekelt/rapidpro
from __future__ import unicode_literals

from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task


@task(track_started=True, name='check_campaigns_task')  # pragma: no cover
def check_campaigns_task(sched_id=None):
    """
    See if any event fires need to be triggered
    """
    logger = check_campaigns_task.get_logger()

    # get a lock
    r = get_redis_connection()
    key = 'check_campaigns'

    # only do this if we aren't already checking campaigns
    if not r.get(key):
        with r.lock(key, timeout=3600):
            # for each that needs to be fired
-            for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
+            for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
                try:
-                    push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
+                    push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
                except:  # pragma: no cover
                    logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
Use correct field to get org from
## Code Before:
from __future__ import unicode_literals

from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task


@task(track_started=True, name='check_campaigns_task')  # pragma: no cover
def check_campaigns_task(sched_id=None):
    """
    See if any event fires need to be triggered
    """
    logger = check_campaigns_task.get_logger()

    # get a lock
    r = get_redis_connection()
    key = 'check_campaigns'

    # only do this if we aren't already checking campaigns
    if not r.get(key):
        with r.lock(key, timeout=3600):
            # for each that needs to be fired
            for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
                try:
                    push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
                except:  # pragma: no cover
                    logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
## Instruction: Use correct field to get org from
## Code After:
from __future__ import unicode_literals

from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task


@task(track_started=True, name='check_campaigns_task')  # pragma: no cover
def check_campaigns_task(sched_id=None):
    """
    See if any event fires need to be triggered
    """
    logger = check_campaigns_task.get_logger()

    # get a lock
    r = get_redis_connection()
    key = 'check_campaigns'

    # only do this if we aren't already checking campaigns
    if not r.get(key):
        with r.lock(key, timeout=3600):
            # for each that needs to be fired
            for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
                try:
                    push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
                except:  # pragma: no cover
                    logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
1e2086b868861034d89138349c4da909f380f19e
feedback/views.py
feedback/views.py
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView

from .models import Feedback


class FeedbackSerializer(serializers.ModelSerializer):
    class Meta:
        model = Feedback


@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
    @csrf_exempt
    def post(self, request, format=None):
        if self.request.user.is_authenticated():
            user = self.request.user
        else:
            user = None
        if 'user' in request.data:
            del request.data['user']
        user_agent = request.data.get('user_agent')
        if not user_agent:
            user_agent = request.META.get('HTTP_USER_AGENT', None)
        serializer = FeedbackSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=user, user_agent=user_agent)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView

from .models import Feedback


class FeedbackSerializer(serializers.ModelSerializer):
    class Meta:
        model = Feedback
        fields = '__all__'


@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
    @csrf_exempt
    def post(self, request, format=None):
        if self.request.user.is_authenticated():
            user = self.request.user
        else:
            user = None
        if 'user' in request.data:
            del request.data['user']
        user_agent = request.data.get('user_agent')
        if not user_agent:
            user_agent = request.META.get('HTTP_USER_AGENT', None)
        serializer = FeedbackSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=user, user_agent=user_agent)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Make feedback compatible with DRF >3.3.0
Make feedback compatible with DRF >3.3.0
Python
mit
City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView

from .models import Feedback


class FeedbackSerializer(serializers.ModelSerializer):
    class Meta:
        model = Feedback
+        fields = '__all__'


@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
    @csrf_exempt
    def post(self, request, format=None):
        if self.request.user.is_authenticated():
            user = self.request.user
        else:
            user = None
        if 'user' in request.data:
            del request.data['user']
        user_agent = request.data.get('user_agent')
        if not user_agent:
            user_agent = request.META.get('HTTP_USER_AGENT', None)
        serializer = FeedbackSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=user, user_agent=user_agent)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Make feedback compatible with DRF >3.3.0
## Code Before:
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView

from .models import Feedback


class FeedbackSerializer(serializers.ModelSerializer):
    class Meta:
        model = Feedback


@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
    @csrf_exempt
    def post(self, request, format=None):
        if self.request.user.is_authenticated():
            user = self.request.user
        else:
            user = None
        if 'user' in request.data:
            del request.data['user']
        user_agent = request.data.get('user_agent')
        if not user_agent:
            user_agent = request.META.get('HTTP_USER_AGENT', None)
        serializer = FeedbackSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=user, user_agent=user_agent)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
## Instruction: Make feedback compatible with DRF >3.3.0
## Code After:
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView

from .models import Feedback


class FeedbackSerializer(serializers.ModelSerializer):
    class Meta:
        model = Feedback
        fields = '__all__'


@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
    @csrf_exempt
    def post(self, request, format=None):
        if self.request.user.is_authenticated():
            user = self.request.user
        else:
            user = None
        if 'user' in request.data:
            del request.data['user']
        user_agent = request.data.get('user_agent')
        if not user_agent:
            user_agent = request.META.get('HTTP_USER_AGENT', None)
        serializer = FeedbackSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=user, user_agent=user_agent)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
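Newer DRF releases require a `ModelSerializer` `Meta` to declare either `fields` or `exclude`, which is why the record adds `fields = '__all__'`. A sketch of the stricter explicit-whitelist alternative (field names illustrative):

```python
from rest_framework import serializers

from .models import Feedback  # the model from the record above


class FeedbackSerializer(serializers.ModelSerializer):
    class Meta:
        model = Feedback
        # Explicit whitelist: new model columns stay private until
        # deliberately exposed, unlike fields = '__all__'.
        fields = ('id', 'user_agent')  # illustrative field names
```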
90bdcad66a6f29c9e3d731b5b09b0a2ba477ae2f
tviit/urls.py
tviit/urls.py
from django.conf.urls import include, url
from django.contrib import admin

from . import views

urlpatterns = [
    url(r'^', views.IndexView.as_view(), name='tviit_index'),
]
from django.conf.urls import include, url
from django.contrib import admin

from . import views

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='tviit_index'),
    url(r'create/$', views.create_tviit, name="create_tviit"),
]
Create url-patterns for tviit creation
Create url-patterns for tviit creation
Python
mit
DeWaster/Tviserrys,DeWaster/Tviserrys
from django.conf.urls import include, url
from django.contrib import admin

from . import views

urlpatterns = [
-    url(r'^', views.IndexView.as_view(), name='tviit_index'),
+    url(r'^$', views.IndexView.as_view(), name='tviit_index'),
+    url(r'create/$', views.create_tviit, name="create_tviit"),
]
Create url-patterns for tviit creation
## Code Before:
from django.conf.urls import include, url
from django.contrib import admin

from . import views

urlpatterns = [
    url(r'^', views.IndexView.as_view(), name='tviit_index'),
]
## Instruction: Create url-patterns for tviit creation
## Code After:
from django.conf.urls import include, url
from django.contrib import admin

from . import views

urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='tviit_index'),
    url(r'create/$', views.create_tviit, name="create_tviit"),
]
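Named patterns such as `create_tviit` above are resolved with `reverse()`, which is what keeps templates and redirects decoupled from raw paths; a minimal sketch (`django.urls` on current Django, `django.core.urlresolvers` on releases contemporary with this record):

```python
from django.urls import reverse

create_url = reverse('create_tviit')  # -> '/create/' under this urlconf
index_url = reverse('tviit_index')    # -> '/'
```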
881222a49c6b3e8792adf5754c61992bd12c7b28
tests/test_conduction.py
tests/test_conduction.py
"""Test Mongo Conduction.""" import logging import pymongo from mockupdb import go from pymongo.errors import OperationFailure from conduction.server import get_mockup, main_loop from tests import unittest # unittest2 on Python 2.6. class ConductionTest(unittest.TestCase): def setUp(self): self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False) # Quiet. logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL) self.mockup.run() self.loop_future = go(main_loop, self.mockup) # Cleanups are LIFO: Stop the server, wait for the loop to exit. self.addCleanup(self.loop_future) self.addCleanup(self.mockup.stop) self.conduction = pymongo.MongoClient(self.mockup.uri).test def test_bad_command_name(self): with self.assertRaises(OperationFailure): self.conduction.command('foo') if __name__ == '__main__': unittest.main()
"""Test Mongo Conduction.""" import logging import pymongo from mockupdb import go from pymongo.errors import OperationFailure from conduction.server import get_mockup, main_loop from tests import unittest # unittest2 on Python 2.6. class ConductionTest(unittest.TestCase): def setUp(self): self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False) # Quiet. logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL) self.mockup.run() self.loop_future = go(main_loop, self.mockup) # Cleanups are LIFO: Stop the server, wait for the loop to exit. self.addCleanup(self.loop_future) self.addCleanup(self.mockup.stop) # Any database name will do. self.conduction = pymongo.MongoClient(self.mockup.uri).conduction def test_root_uri(self): reply = self.conduction.command('get', '/') self.assertIn('links', reply) self.assertIn('service', reply) def test_bad_command_name(self): with self.assertRaises(OperationFailure) as context: self.conduction.command('foo') self.assertIn('unrecognized: {"foo": 1}', str(context.exception)) def test_server_id_404(self): with self.assertRaises(OperationFailure) as context: self.conduction.command({'post': '/v1/servers/'}) self.assertIn('404 Not Found', str(context.exception)) if __name__ == '__main__': unittest.main()
Test root URI and 404s.
Test root URI and 404s.
Python
apache-2.0
ajdavis/mongo-conduction
"""Test Mongo Conduction.""" import logging import pymongo from mockupdb import go from pymongo.errors import OperationFailure from conduction.server import get_mockup, main_loop from tests import unittest # unittest2 on Python 2.6. class ConductionTest(unittest.TestCase): def setUp(self): self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False) # Quiet. logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL) self.mockup.run() self.loop_future = go(main_loop, self.mockup) # Cleanups are LIFO: Stop the server, wait for the loop to exit. self.addCleanup(self.loop_future) self.addCleanup(self.mockup.stop) + # Any database name will do. - self.conduction = pymongo.MongoClient(self.mockup.uri).test + self.conduction = pymongo.MongoClient(self.mockup.uri).conduction + + def test_root_uri(self): + reply = self.conduction.command('get', '/') + self.assertIn('links', reply) + self.assertIn('service', reply) def test_bad_command_name(self): - with self.assertRaises(OperationFailure): + with self.assertRaises(OperationFailure) as context: self.conduction.command('foo') + + self.assertIn('unrecognized: {"foo": 1}', + str(context.exception)) + + def test_server_id_404(self): + with self.assertRaises(OperationFailure) as context: + self.conduction.command({'post': '/v1/servers/'}) + + self.assertIn('404 Not Found', str(context.exception)) if __name__ == '__main__': unittest.main()
Test root URI and 404s.
## Code Before:
"""Test Mongo Conduction."""
import logging

import pymongo
from mockupdb import go
from pymongo.errors import OperationFailure

from conduction.server import get_mockup, main_loop
from tests import unittest  # unittest2 on Python 2.6.


class ConductionTest(unittest.TestCase):
    def setUp(self):
        self.mockup = get_mockup(releases={}, env=None,
                                 port=None, verbose=False)

        # Quiet.
        logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL)
        self.mockup.run()
        self.loop_future = go(main_loop, self.mockup)

        # Cleanups are LIFO: Stop the server, wait for the loop to exit.
        self.addCleanup(self.loop_future)
        self.addCleanup(self.mockup.stop)

        self.conduction = pymongo.MongoClient(self.mockup.uri).test

    def test_bad_command_name(self):
        with self.assertRaises(OperationFailure):
            self.conduction.command('foo')


if __name__ == '__main__':
    unittest.main()
## Instruction: Test root URI and 404s.
## Code After:
"""Test Mongo Conduction."""
import logging

import pymongo
from mockupdb import go
from pymongo.errors import OperationFailure

from conduction.server import get_mockup, main_loop
from tests import unittest  # unittest2 on Python 2.6.


class ConductionTest(unittest.TestCase):
    def setUp(self):
        self.mockup = get_mockup(releases={}, env=None,
                                 port=None, verbose=False)

        # Quiet.
        logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL)
        self.mockup.run()
        self.loop_future = go(main_loop, self.mockup)

        # Cleanups are LIFO: Stop the server, wait for the loop to exit.
        self.addCleanup(self.loop_future)
        self.addCleanup(self.mockup.stop)

        # Any database name will do.
        self.conduction = pymongo.MongoClient(self.mockup.uri).conduction

    def test_root_uri(self):
        reply = self.conduction.command('get', '/')
        self.assertIn('links', reply)
        self.assertIn('service', reply)

    def test_bad_command_name(self):
        with self.assertRaises(OperationFailure) as context:
            self.conduction.command('foo')

        self.assertIn('unrecognized: {"foo": 1}',
                      str(context.exception))

    def test_server_id_404(self):
        with self.assertRaises(OperationFailure) as context:
            self.conduction.command({'post': '/v1/servers/'})

        self.assertIn('404 Not Found', str(context.exception))


if __name__ == '__main__':
    unittest.main()
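The tests above switch from a bare `assertRaises` to `assertRaises(...) as context` so the error message can be inspected. The same pattern in isolation, runnable as-is:

```python
import unittest


class MessageAssertions(unittest.TestCase):
    def test_error_message(self):
        with self.assertRaises(ValueError) as context:
            int('not a number')
        # The caught exception object supports fine-grained checks.
        self.assertIn('invalid literal', str(context.exception))


if __name__ == '__main__':
    unittest.main()
```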
b8350e91d7bd1e3a775ed230820c96a180a2ad02
tests/test_solver.py
tests/test_solver.py
from tinyik import Link, Joint, FKSolver

from .utils import x, y, z, theta, approx_eq


def test_forward_kinematics():
    fk = FKSolver([
        Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])
    ])
    assert all(fk.solve([0., 0.]) == [2., 0., 0.])

    assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
    assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
from tinyik import Link, Joint, FKSolver, CCDFKSolver, CCDIKSolver

from .utils import x, y, z, theta, approx_eq


components = [Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])]
predicted = [2., 0., 0.]


def test_fk():
    fk = FKSolver(components)
    assert all(fk.solve([0., 0.]) == predicted)

    assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
    assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])


def test_ccd_fk():
    fk = CCDFKSolver(components)
    assert all(fk.solve([0., 0.]) == predicted)

    assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
    assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])


def test_ccd_ik():
    fk = CCDFKSolver(components)
    ik = CCDIKSolver(fk)
    assert approx_eq(ik.solve([0., 0.], [x, y, -z]), [theta, theta])
    assert approx_eq(ik.solve([0., 0.], [x, -y, z]), [-theta, -theta])
Add tests for CCD IK solver
Add tests for CCD IK solver
Python
mit
lanius/tinyik
- from tinyik import Link, Joint, FKSolver
+ from tinyik import Link, Joint, FKSolver, CCDFKSolver, CCDIKSolver

from .utils import x, y, z, theta, approx_eq


- def test_forward_kinematics():
-     fk = FKSolver([
-         Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])
+ components = [Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])]
-     ])
+ predicted = [2., 0., 0.]
+
+
+ def test_fk():
+     fk = FKSolver(components)
-     assert all(fk.solve([0., 0.]) == [2., 0., 0.])
+     assert all(fk.solve([0., 0.]) == predicted)

    assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
    assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
+
+
+ def test_ccd_fk():
+     fk = CCDFKSolver(components)
+     assert all(fk.solve([0., 0.]) == predicted)
+
+     assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
+     assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
+
+
+ def test_ccd_ik():
+     fk = CCDFKSolver(components)
+     ik = CCDIKSolver(fk)
+     assert approx_eq(ik.solve([0., 0.], [x, y, -z]), [theta, theta])
+     assert approx_eq(ik.solve([0., 0.], [x, -y, z]), [-theta, -theta])
+
Add tests for CCD IK solver
## Code Before:
from tinyik import Link, Joint, FKSolver

from .utils import x, y, z, theta, approx_eq


def test_forward_kinematics():
    fk = FKSolver([
        Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])
    ])
    assert all(fk.solve([0., 0.]) == [2., 0., 0.])

    assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
    assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
## Instruction: Add tests for CCD IK solver
## Code After:
from tinyik import Link, Joint, FKSolver, CCDFKSolver, CCDIKSolver

from .utils import x, y, z, theta, approx_eq


components = [Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])]
predicted = [2., 0., 0.]


def test_fk():
    fk = FKSolver(components)
    assert all(fk.solve([0., 0.]) == predicted)

    assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
    assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])


def test_ccd_fk():
    fk = CCDFKSolver(components)
    assert all(fk.solve([0., 0.]) == predicted)

    assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
    assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])


def test_ccd_ik():
    fk = CCDFKSolver(components)
    ik = CCDIKSolver(fk)
    assert approx_eq(ik.solve([0., 0.], [x, y, -z]), [theta, theta])
    assert approx_eq(ik.solve([0., 0.], [x, -y, z]), [-theta, -theta])
35594a4f8c549d507c7d7030141ae511aed57c09
workflowmax/__init__.py
workflowmax/__init__.py
from .api import WorkflowMax  # noqa

__version__ = "0.1.0"
from .api import WorkflowMax  # noqa
from .credentials import Credentials  # noqa

__version__ = "0.1.0"
Add Credentials to root namespace
Add Credentials to root namespace
Python
bsd-3-clause
ABASystems/pyworkflowmax
from .api import WorkflowMax  # noqa
+ from .credentials import Credentials  # noqa

__version__ = "0.1.0"
Add Credentials to root namespace
## Code Before:
from .api import WorkflowMax  # noqa

__version__ = "0.1.0"
## Instruction: Add Credentials to root namespace
## Code After:
from .api import WorkflowMax  # noqa
from .credentials import Credentials  # noqa

__version__ = "0.1.0"
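Re-exporting from a package `__init__` is what makes `from workflowmax import Credentials` possible without touching submodules. The general shape of the pattern (package and module names illustrative):

```python
# mypackage/__init__.py
from .api import Client               # noqa  (re-export for callers)
from .credentials import Credentials  # noqa  (re-export for callers)

__all__ = ['Client', 'Credentials']
__version__ = '0.1.0'
```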
ab5aac0c9b0e075901c4cd8dd5d134e79f0e0110
brasileirao/spiders/results_spider.py
brasileirao/spiders/results_spider.py
import scrapy
import scrapy.selector
from brasileirao.items import BrasileiraoItem
import hashlib


class ResultsSpider(scrapy.Spider):
    name = "results"
    start_urls = [
        'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/',
    ]

    def parse(self, response):
        actual_round = 0
        for rodada in response.css('.rodadas .confrontos li'):
            actual_round += 1
            for game in rodada.css(".confronto"):
                home_team = game.css(".partida .time1")
                away_team = game.css(".partida .time2")
                item = BrasileiraoItem()
                item['rodada'] = actual_round
                item['home_team'] = home_team.css("abbr::attr(title)").extract_first().encode('utf8')
                item['away_team'] = away_team.css("abbr::attr(title)").extract_first().encode('utf8')
                item['home_score'] = home_team.css(".gols::text").extract_first()
                item['away_score'] = away_team.css(".gols::text").extract_first()
                item['date'] = game.css(".info-partida time::attr(datetime)").extract_first()
                id = item['home_team'] + item['away_team']
                item['id'] = hashlib.md5(id).hexdigest()
                yield item
import scrapy
import scrapy.selector
from brasileirao.items import BrasileiraoItem
import hashlib


class ResultsSpider(scrapy.Spider):
    name = "results"
    start_urls = [
        'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/',
    ]

    def parse(self, response):
        actual_round = 0
        for rodada in response.css('.rodadas .confrontos li'):
            actual_round += 1
            for game in rodada.css(".confronto"):
                home_team = game.css(".partida .time1")
                away_team = game.css(".partida .time2")
                item = BrasileiraoItem()
                item['rodada'] = actual_round
                item['home_team'] = home_team.css("abbr::attr(title)").extract_first()
                item['away_team'] = away_team.css("abbr::attr(title)").extract_first()
                item['home_score'] = home_team.css(".gols::text").extract_first()
                item['away_score'] = away_team.css(".gols::text").extract_first()
                item['date'] = game.css(".info-partida time::attr(datetime)").extract_first()
                id = item['home_team'] + item['away_team']
                item['id'] = hashlib.md5(id).hexdigest()
                yield item
Set utf-8 as default encoding.
Set utf-8 as default encoding.
Python
mit
pghilardi/live-football-client
+
import scrapy
import scrapy.selector
from brasileirao.items import BrasileiraoItem
import hashlib


class ResultsSpider(scrapy.Spider):
    name = "results"
    start_urls = [
        'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/',
    ]

    def parse(self, response):
        actual_round = 0
        for rodada in response.css('.rodadas .confrontos li'):
            actual_round += 1
            for game in rodada.css(".confronto"):
                home_team = game.css(".partida .time1")
                away_team = game.css(".partida .time2")
                item = BrasileiraoItem()
                item['rodada'] = actual_round
-                item['home_team'] = home_team.css("abbr::attr(title)").extract_first().encode('utf8')
+                item['home_team'] = home_team.css("abbr::attr(title)").extract_first()
-                item['away_team'] = away_team.css("abbr::attr(title)").extract_first().encode('utf8')
+                item['away_team'] = away_team.css("abbr::attr(title)").extract_first()
                item['home_score'] = home_team.css(".gols::text").extract_first()
                item['away_score'] = away_team.css(".gols::text").extract_first()
                item['date'] = game.css(".info-partida time::attr(datetime)").extract_first()
                id = item['home_team'] + item['away_team']
                item['id'] = hashlib.md5(id).hexdigest()
                yield item
Set utf-8 as default encoding.
## Code Before:
import scrapy
import scrapy.selector
from brasileirao.items import BrasileiraoItem
import hashlib


class ResultsSpider(scrapy.Spider):
    name = "results"
    start_urls = [
        'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/',
    ]

    def parse(self, response):
        actual_round = 0
        for rodada in response.css('.rodadas .confrontos li'):
            actual_round += 1
            for game in rodada.css(".confronto"):
                home_team = game.css(".partida .time1")
                away_team = game.css(".partida .time2")
                item = BrasileiraoItem()
                item['rodada'] = actual_round
                item['home_team'] = home_team.css("abbr::attr(title)").extract_first().encode('utf8')
                item['away_team'] = away_team.css("abbr::attr(title)").extract_first().encode('utf8')
                item['home_score'] = home_team.css(".gols::text").extract_first()
                item['away_score'] = away_team.css(".gols::text").extract_first()
                item['date'] = game.css(".info-partida time::attr(datetime)").extract_first()
                id = item['home_team'] + item['away_team']
                item['id'] = hashlib.md5(id).hexdigest()
                yield item
## Instruction: Set utf-8 as default encoding.
## Code After:
import scrapy
import scrapy.selector
from brasileirao.items import BrasileiraoItem
import hashlib


class ResultsSpider(scrapy.Spider):
    name = "results"
    start_urls = [
        'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/',
    ]

    def parse(self, response):
        actual_round = 0
        for rodada in response.css('.rodadas .confrontos li'):
            actual_round += 1
            for game in rodada.css(".confronto"):
                home_team = game.css(".partida .time1")
                away_team = game.css(".partida .time2")
                item = BrasileiraoItem()
                item['rodada'] = actual_round
                item['home_team'] = home_team.css("abbr::attr(title)").extract_first()
                item['away_team'] = away_team.css("abbr::attr(title)").extract_first()
                item['home_score'] = home_team.css(".gols::text").extract_first()
                item['away_score'] = away_team.css(".gols::text").extract_first()
                item['date'] = game.css(".info-partida time::attr(datetime)").extract_first()
                id = item['home_team'] + item['away_team']
                item['id'] = hashlib.md5(id).hexdigest()
                yield item
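Dropping `.encode('utf8')` leaves the item fields as text, so any byte-oriented sink — like the md5 call in this record — needs a single explicit encode at the boundary; on Python 3, `hashlib.md5` rejects `str` outright. A sketch of that one conversion point (team names illustrative):

```python
import hashlib

home_team, away_team = u'Flamengo', u'Santos'  # text fields, as yielded
key = (home_team + away_team).encode('utf-8')  # bytes only where required
print(hashlib.md5(key).hexdigest())
```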
a3c1822dd2942de4b6bf5cac14039e6789babf85
wafer/pages/admin.py
wafer/pages/admin.py
from django.contrib import admin

from wafer.pages.models import File, Page


class PageAdmin(admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}
    list_display = ('name', 'slug', 'get_people_display_names',
                    'get_in_schedule')


admin.site.register(Page, PageAdmin)
admin.site.register(File)
from django.contrib import admin

from wafer.pages.models import File, Page

from reversion.admin import VersionAdmin


class PageAdmin(VersionAdmin, admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}
    list_display = ('name', 'slug', 'get_people_display_names',
                    'get_in_schedule')


admin.site.register(Page, PageAdmin)
admin.site.register(File)
Add reversion support to Pages
Add reversion support to Pages
Python
isc
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
from django.contrib import admin

from wafer.pages.models import File, Page

+ from reversion.admin import VersionAdmin
+

- class PageAdmin(admin.ModelAdmin):
+ class PageAdmin(VersionAdmin, admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}
    list_display = ('name', 'slug', 'get_people_display_names',
                    'get_in_schedule')


admin.site.register(Page, PageAdmin)
admin.site.register(File)
Add reversion support to Pages
## Code Before:
from django.contrib import admin

from wafer.pages.models import File, Page


class PageAdmin(admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}
    list_display = ('name', 'slug', 'get_people_display_names',
                    'get_in_schedule')


admin.site.register(Page, PageAdmin)
admin.site.register(File)
## Instruction: Add reversion support to Pages
## Code After:
from django.contrib import admin

from wafer.pages.models import File, Page

from reversion.admin import VersionAdmin


class PageAdmin(VersionAdmin, admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}
    list_display = ('name', 'slug', 'get_people_display_names',
                    'get_in_schedule')


admin.site.register(Page, PageAdmin)
admin.site.register(File)
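`VersionAdmin` registers the model with django-reversion and records a revision on each admin save, which is all "reversion support" requires here. A minimal sketch with an illustrative model:

```python
from django.contrib import admin
from reversion.admin import VersionAdmin

from myapp.models import Article  # illustrative model


@admin.register(Article)
class ArticleAdmin(VersionAdmin, admin.ModelAdmin):
    # VersionAdmin comes first in the MRO so its save hooks run.
    list_display = ('title',)
```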
4076fb322814848d802d1f925d163e90b3d629a9
selenium_testcase/testcases/forms.py
selenium_testcase/testcases/forms.py
from __future__ import absolute_import

from selenium.webdriver.common.by import By

from .utils import wait_for


class FormTestMixin:

    # default search element
    form_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
        (By.XPATH, '//form[@action="{}"]',),
        (By.XPATH, '//form[@name="{}"]',),
        (By.XPATH, '//form/*',),
    )

    @wait_for
    def get_form(self, *args, **kwargs):
        """ Return form element or None. """
        return self.find_element(
            self.form_search_list, *args, **kwargs)

    input_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
    )

    @wait_for
    def set_input(self, field, value, **kwargs):
        input = self.find_element(
            self.input_search_list, field, **kwargs)
        input.clear()
        input.send_keys(value)
        return input
from __future__ import absolute_import

from selenium.webdriver.common.by import By

from .utils import wait_for


class FormTestMixin:

    # default search element
    form_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
        (By.XPATH, '//form[@action="{}"]',),
        (By.XPATH, '//form[@name="{}"]',),
        (By.XPATH, '//form',),
    )

    @wait_for
    def get_form(self, *args, **kwargs):
        """ Return form element or None. """
        return self.find_element(
            self.form_search_list, *args, **kwargs)

    input_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
    )

    @wait_for
    def get_input(self, field, **kwargs):
        """ Return matching input field. """
        return self.find_element(
            self.input_search_list, field, **kwargs)

    def set_input(self, field, value, **kwargs):
        """ Clear the field and enter value. """
        element = self.get_input(field, **kwargs)
        element.clear()
        element.send_keys(value)
        return element
Split get_input from set_input in FormTestMixin.
Split get_input from set_input in FormTestMixin.

To reduce side effects, this commit moves @wait_for onto the new get_input method; set_input now operates immediately.
Python
bsd-3-clause
nimbis/django-selenium-testcase,nimbis/django-selenium-testcase
from __future__ import absolute_import

from selenium.webdriver.common.by import By

from .utils import wait_for


class FormTestMixin:

    # default search element
    form_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
        (By.XPATH, '//form[@action="{}"]',),
        (By.XPATH, '//form[@name="{}"]',),
-        (By.XPATH, '//form/*',),
+        (By.XPATH, '//form',),
    )

    @wait_for
    def get_form(self, *args, **kwargs):
        """ Return form element or None. """
        return self.find_element(
            self.form_search_list, *args, **kwargs)

    input_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
    )

    @wait_for
+    def get_input(self, field, **kwargs):
+        """ Return matching input field. """
+        return self.find_element(
+            self.input_search_list, field, **kwargs)
+
    def set_input(self, field, value, **kwargs):
-        input = self.find_element(
-            self.input_search_list, field, **kwargs)
+        """ Clear the field and enter value. """
+        element = self.get_input(field, **kwargs)
-        input.clear()
+        element.clear()
-        input.send_keys(value)
+        element.send_keys(value)
-        return input
+        return element
Split get_input from set_input in FormTestMixin.
## Code Before:
from __future__ import absolute_import

from selenium.webdriver.common.by import By

from .utils import wait_for


class FormTestMixin:

    # default search element
    form_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
        (By.XPATH, '//form[@action="{}"]',),
        (By.XPATH, '//form[@name="{}"]',),
        (By.XPATH, '//form/*',),
    )

    @wait_for
    def get_form(self, *args, **kwargs):
        """ Return form element or None. """
        return self.find_element(
            self.form_search_list, *args, **kwargs)

    input_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
    )

    @wait_for
    def set_input(self, field, value, **kwargs):
        input = self.find_element(
            self.input_search_list, field, **kwargs)
        input.clear()
        input.send_keys(value)
        return input
## Instruction: Split get_input from set_input in FormTestMixin.
## Code After:
from __future__ import absolute_import

from selenium.webdriver.common.by import By

from .utils import wait_for


class FormTestMixin:

    # default search element
    form_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
        (By.XPATH, '//form[@action="{}"]',),
        (By.XPATH, '//form[@name="{}"]',),
        (By.XPATH, '//form',),
    )

    @wait_for
    def get_form(self, *args, **kwargs):
        """ Return form element or None. """
        return self.find_element(
            self.form_search_list, *args, **kwargs)

    input_search_list = (
        (By.ID, '{}',),
        (By.NAME, '{}',),
    )

    @wait_for
    def get_input(self, field, **kwargs):
        """ Return matching input field. """
        return self.find_element(
            self.input_search_list, field, **kwargs)

    def set_input(self, field, value, **kwargs):
        """ Clear the field and enter value. """
        element = self.get_input(field, **kwargs)
        element.clear()
        element.send_keys(value)
        return element
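The split keeps the retry wrapper on the idempotent lookup (`get_input`) and lets the mutation run exactly once. The project's own `wait_for` is not shown in this record; a generic retry decorator of the same shape (an assumption, not the library's code):

```python
import functools
import time


def wait_for(fn, timeout=5.0, interval=0.25):
    """Retry fn until it returns a truthy value or the timeout expires."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        deadline = time.time() + timeout
        while True:
            result = fn(*args, **kwargs)
            if result or time.time() >= deadline:
                return result
            time.sleep(interval)
    return wrapper
```

Wrapping `set_input` itself in such a loop could clear and retype the field several times, which is exactly the side effect the split avoids.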
149a8091333766068cac445db770ea73055d8647
simuvex/procedures/stubs/UserHook.py
simuvex/procedures/stubs/UserHook.py
import simuvex


class UserHook(simuvex.SimProcedure):
    NO_RET = True

    # pylint: disable=arguments-differ
    def run(self, user_func=None, user_kwargs=None, default_return_addr=None):
        result = user_func(self.state, **user_kwargs)
        if result is None:
            self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook')
        else:
            for state in result:
                self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
import simuvex


class UserHook(simuvex.SimProcedure):
    NO_RET = True

    # pylint: disable=arguments-differ
    def run(self, user_func=None, user_kwargs=None, default_return_addr=None, length=None):
        result = user_func(self.state, **user_kwargs)
        if result is None:
            self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook')
        else:
            for state in result:
                self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
Make the userhook take the length arg b/c why not
Make the userhook take the length arg b/c why not
Python
bsd-2-clause
axt/angr,schieb/angr,tyb0807/angr,chubbymaggie/angr,chubbymaggie/simuvex,chubbymaggie/angr,chubbymaggie/simuvex,f-prettyland/angr,axt/angr,angr/angr,f-prettyland/angr,tyb0807/angr,schieb/angr,axt/angr,chubbymaggie/angr,f-prettyland/angr,iamahuman/angr,tyb0807/angr,iamahuman/angr,angr/angr,iamahuman/angr,angr/angr,schieb/angr,chubbymaggie/simuvex,angr/simuvex
import simuvex class UserHook(simuvex.SimProcedure): NO_RET = True # pylint: disable=arguments-differ - def run(self, user_func=None, user_kwargs=None, default_return_addr=None): + def run(self, user_func=None, user_kwargs=None, default_return_addr=None, length=None): result = user_func(self.state, **user_kwargs) if result is None: self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook') else: for state in result: self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
Make the userhook take the length arg b/c why not
## Code Before:
import simuvex


class UserHook(simuvex.SimProcedure):
    NO_RET = True

    # pylint: disable=arguments-differ
    def run(self, user_func=None, user_kwargs=None, default_return_addr=None):
        result = user_func(self.state, **user_kwargs)
        if result is None:
            self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook')
        else:
            for state in result:
                self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
## Instruction: Make the userhook take the length arg b/c why not
## Code After:
import simuvex


class UserHook(simuvex.SimProcedure):
    NO_RET = True

    # pylint: disable=arguments-differ
    def run(self, user_func=None, user_kwargs=None, default_return_addr=None, length=None):
        result = user_func(self.state, **user_kwargs)
        if result is None:
            self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook')
        else:
            for state in result:
                self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
8528beef5d10355af07f641b4987df3cd64a7b0f
sprockets/mixins/metrics/__init__.py
sprockets/mixins/metrics/__init__.py
from .influxdb import InfluxDBMixin
from .statsd import StatsdMixin

version_info = (1, 0, 0)
__version__ = '.'.join(str(v) for v in version_info)
__all__ = ['InfluxDBMixin', 'StatsdMixin']
try:
    from .influxdb import InfluxDBMixin
    from .statsd import StatsdMixin
except ImportError as error:
    def InfluxDBMixin(*args, **kwargs):
        raise error

    def StatsdMixin(*args, **kwargs):
        raise error

version_info = (1, 0, 0)
__version__ = '.'.join(str(v) for v in version_info)
__all__ = ['InfluxDBMixin', 'StatsdMixin']
Make it safe to import __version__.
Make it safe to import __version__.
Python
bsd-3-clause
sprockets/sprockets.mixins.metrics
+ try:
-     from .influxdb import InfluxDBMixin
+     from .influxdb import InfluxDBMixin
-     from .statsd import StatsdMixin
+     from .statsd import StatsdMixin
+ except ImportError as error:
+     def InfluxDBMixin(*args, **kwargs):
+         raise error
+
+     def StatsdMixin(*args, **kwargs):
+         raise error

version_info = (1, 0, 0)
__version__ = '.'.join(str(v) for v in version_info)
__all__ = ['InfluxDBMixin', 'StatsdMixin']
Make it safe to import __version__.
## Code Before:
from .influxdb import InfluxDBMixin
from .statsd import StatsdMixin

version_info = (1, 0, 0)
__version__ = '.'.join(str(v) for v in version_info)
__all__ = ['InfluxDBMixin', 'StatsdMixin']
## Instruction: Make it safe to import __version__.
## Code After:
try:
    from .influxdb import InfluxDBMixin
    from .statsd import StatsdMixin
except ImportError as error:
    def InfluxDBMixin(*args, **kwargs):
        raise error

    def StatsdMixin(*args, **kwargs):
        raise error

version_info = (1, 0, 0)
__version__ = '.'.join(str(v) for v in version_info)
__all__ = ['InfluxDBMixin', 'StatsdMixin']
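The try/except shim keeps `__version__` importable by setup tooling when the optional dependencies are missing, surfacing the original `ImportError` only when a stub is actually called. One Python 3 wrinkle: the `as error` name is cleared when the except block exits, so a portable version of the pattern rebinds it first (sketch, module names illustrative):

```python
try:
    from heavy_dependency import RealClient  # illustrative optional import
except ImportError as error:
    _import_error = error  # rebind: 'error' itself is unset after the block

    def RealClient(*args, **kwargs):
        raise _import_error

__version__ = '1.0.0'  # importable either way
```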
7b1d520278b8fe33b68103d26f9aa7bb945f6791
cryptography/hazmat/backends/__init__.py
cryptography/hazmat/backends/__init__.py
from cryptography.hazmat.backends import openssl
from cryptography.hazmat.bindings.commoncrypto.binding import (
    Binding as CommonCryptoBinding
)

_ALL_BACKENDS = [openssl.backend]

if CommonCryptoBinding.is_available():
    from cryptography.hazmat.backends import commoncrypto
    _ALL_BACKENDS.append(commoncrypto.backend)


def default_backend():
    return openssl.backend
from cryptography.hazmat.backends import openssl
from cryptography.hazmat.backends.multibackend import MultiBackend
from cryptography.hazmat.bindings.commoncrypto.binding import (
    Binding as CommonCryptoBinding
)

_ALL_BACKENDS = [openssl.backend]

if CommonCryptoBinding.is_available():
    from cryptography.hazmat.backends import commoncrypto
    _ALL_BACKENDS.append(commoncrypto.backend)

_default_backend = MultiBackend(_ALL_BACKENDS)


def default_backend():
    return _default_backend
Make the default backend be a multi-backend
Make the default backend be a multi-backend
Python
bsd-3-clause
bwhmather/cryptography,Ayrx/cryptography,bwhmather/cryptography,Lukasa/cryptography,Ayrx/cryptography,bwhmather/cryptography,kimvais/cryptography,skeuomorf/cryptography,dstufft/cryptography,kimvais/cryptography,Lukasa/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,Lukasa/cryptography,sholsapp/cryptography,Hasimir/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,dstufft/cryptography,sholsapp/cryptography,Hasimir/cryptography,Hasimir/cryptography,skeuomorf/cryptography,kimvais/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,kimvais/cryptography,sholsapp/cryptography,bwhmather/cryptography
from cryptography.hazmat.backends import openssl
+ from cryptography.hazmat.backends.multibackend import MultiBackend
from cryptography.hazmat.bindings.commoncrypto.binding import (
    Binding as CommonCryptoBinding
)

_ALL_BACKENDS = [openssl.backend]

if CommonCryptoBinding.is_available():
    from cryptography.hazmat.backends import commoncrypto
    _ALL_BACKENDS.append(commoncrypto.backend)

+ _default_backend = MultiBackend(_ALL_BACKENDS)
+

def default_backend():
-     return openssl.backend
+     return _default_backend
Make the default backend be a multi-backend
## Code Before:
from cryptography.hazmat.backends import openssl
from cryptography.hazmat.bindings.commoncrypto.binding import (
    Binding as CommonCryptoBinding
)

_ALL_BACKENDS = [openssl.backend]

if CommonCryptoBinding.is_available():
    from cryptography.hazmat.backends import commoncrypto
    _ALL_BACKENDS.append(commoncrypto.backend)


def default_backend():
    return openssl.backend
## Instruction: Make the default backend be a multi-backend
## Code After:
from cryptography.hazmat.backends import openssl
from cryptography.hazmat.backends.multibackend import MultiBackend
from cryptography.hazmat.bindings.commoncrypto.binding import (
    Binding as CommonCryptoBinding
)

_ALL_BACKENDS = [openssl.backend]

if CommonCryptoBinding.is_available():
    from cryptography.hazmat.backends import commoncrypto
    _ALL_BACKENDS.append(commoncrypto.backend)

_default_backend = MultiBackend(_ALL_BACKENDS)


def default_backend():
    return _default_backend
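A multi-backend presents several providers behind one object, delegating each call to a backend that can handle it. The real `MultiBackend` is more involved; a generic dispatcher sketch of the idea (method resolution and names are illustrative, not cryptography's API):

```python
class FirstSupportingBackend(object):
    def __init__(self, backends):
        self._backends = list(backends)

    def call(self, method, *args, **kwargs):
        # Delegate to the first backend implementing the method.
        for backend in self._backends:
            if hasattr(backend, method):
                return getattr(backend, method)(*args, **kwargs)
        raise NotImplementedError(method)
```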
5957999c52f939691cbe6b8dd5aa929980a24501
tests/unit/test_start.py
tests/unit/test_start.py
import pytest

from iwant_bot import start


def test_add():
    assert start.add_numbers(0, 0) == 0
    assert start.add_numbers(1, 1) == 2
from iwant_bot import start


def test_add():
    assert start.add_numbers(0, 0) == 0
    assert start.add_numbers(1, 1) == 2
Remove the unused pytest import
Remove the unused pytest import
Python
mit
kiwicom/iwant-bot
- import pytest
-
-
from iwant_bot import start


def test_add():
    assert start.add_numbers(0, 0) == 0
    assert start.add_numbers(1, 1) == 2
Remove the unused pytest import
## Code Before:
import pytest

from iwant_bot import start


def test_add():
    assert start.add_numbers(0, 0) == 0
    assert start.add_numbers(1, 1) == 2
## Instruction: Remove the unused pytest import
## Code After:
from iwant_bot import start


def test_add():
    assert start.add_numbers(0, 0) == 0
    assert start.add_numbers(1, 1) == 2
f5d4da9fa71dbb59a9459e376fde8840037bf39a
account_banking_sepa_credit_transfer/__init__.py
account_banking_sepa_credit_transfer/__init__.py
from . import wizard
from . import models
from . import wizard
Remove import models from init in sepa_credit_transfer
Remove import models from init in sepa_credit_transfer
Python
agpl-3.0
open-synergy/bank-payment,sergio-incaser/bank-payment,hbrunn/bank-payment,sergio-teruel/bank-payment,ndtran/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,sergiocorato/bank-payment,damdam-s/bank-payment,CompassionCH/bank-payment,CompassionCH/bank-payment,incaser/bank-payment,Antiun/bank-payment,sergio-teruel/bank-payment,damdam-s/bank-payment,syci/bank-payment,sergio-incaser/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,Antiun/bank-payment,sergiocorato/bank-payment,ndtran/bank-payment,acsone/bank-payment,syci/bank-payment,diagramsoftware/bank-payment
from . import wizard
- from . import models
Remove import models from init in sepa_credit_transfer
## Code Before:
from . import wizard
from . import models
## Instruction: Remove import models from init in sepa_credit_transfer
## Code After:
from . import wizard
39dbbac659e9ae9c1bbad8a979cc99ef6eafaeff
models.py
models.py
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)


class FoodMenu(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return self.result


class FoodServices(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return self.result
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)


class FoodMenu(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return "<FoodMenu('%s')>" % (self.result)


class FoodServices(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return "<FoodServices('%s')>" % (self.result)
Include class name in model representations
Include class name in model representations
Python
mit
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)


class FoodMenu(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
-        return self.result
+        return "<FoodMenu('%s')>" % (self.result)


class FoodServices(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
-        return self.result
+        return "<FoodServices('%s')>" % (self.result)
Include class name in model representations
## Code Before:
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)


class FoodMenu(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return self.result


class FoodServices(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return self.result
## Instruction: Include class name in model representations
## Code After:
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)


class FoodMenu(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return "<FoodMenu('%s')>" % (self.result)


class FoodServices(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    result = db.Column(db.Text)

    def __init__(self, result):
        self.result = result

    def __repr__(self):
        return "<FoodServices('%s')>" % (self.result)
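Hardcoding the class name in each `__repr__`, as the record does, can be avoided with `type(self).__name__`; a sketch of the same angle-bracket convention, runnable as-is:

```python
class Sample(object):
    def __init__(self, result):
        self.result = result

    def __repr__(self):
        # Type plus state makes containers of objects self-describing.
        return "<%s(%r)>" % (type(self).__name__, self.result)


print([Sample('a'), Sample('b')])  # [<Sample('a')>, <Sample('b')>]
```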
e16c65ec8c774cc27f9f7aa43e88521c3854b6b7
ella/imports/management/commands/fetchimports.py
ella/imports/management/commands/fetchimports.py
from django.core.management.base import BaseCommand
from optparse import make_option


class Command(BaseCommand):
    help = 'Fetch all registered imports'

    def handle(self, *test_labels, **options):
        from ella.imports.models import fetch_all
        fetch_all()
from django.core.management.base import NoArgsCommand
from optparse import make_option

import sys


class Command(NoArgsCommand):
    help = 'Fetch all registered imports'

    def handle(self, *test_labels, **options):
        from ella.imports.models import fetch_all
        errors = fetch_all()
        if errors:
            sys.exit(errors)
Return exit code (count of errors)
Return exit code (count of errors)

git-svn-id: 6ce22b13eace8fe533dbb322c2bb0986ea4cd3e6@520 2d143e24-0a30-0410-89d7-a2e95868dc81
Python
bsd-3-clause
MichalMaM/ella,MichalMaM/ella,WhiskeyMedia/ella,whalerock/ella,ella/ella,whalerock/ella,WhiskeyMedia/ella,petrlosa/ella,petrlosa/ella,whalerock/ella
- from django.core.management.base import BaseCommand
+ from django.core.management.base import NoArgsCommand
from optparse import make_option

+ import sys

- class Command(BaseCommand):
+ class Command(NoArgsCommand):
    help = 'Fetch all registered imports'

    def handle(self, *test_labels, **options):
        from ella.imports.models import fetch_all
-        fetch_all()
+        errors = fetch_all()
+        if errors:
+            sys.exit(errors)
Return exit code (count of errors)
## Code Before:
from django.core.management.base import BaseCommand
from optparse import make_option


class Command(BaseCommand):
    help = 'Fetch all registered imports'

    def handle(self, *test_labels, **options):
        from ella.imports.models import fetch_all
        fetch_all()
## Instruction: Return exit code (count of errors)
## Code After:
from django.core.management.base import NoArgsCommand
from optparse import make_option

import sys


class Command(NoArgsCommand):
    help = 'Fetch all registered imports'

    def handle(self, *test_labels, **options):
        from ella.imports.models import fetch_all
        errors = fetch_all()
        if errors:
            sys.exit(errors)
45c400e02fbeb5b455e27fef81e47e45f274eaec
core/forms.py
core/forms.py
from django import forms


class GameForm(forms.Form):

    amount = forms.IntegerField()

    def __init__(self, *args, **kwargs):
        super(GameForm, self).__init__(*args, **kwargs)
        for name, field in self.fields.items():
            if isinstance(field, forms.IntegerField):
                self.fields[name].widget.input_type = "number"
            if field.required:
                self.fields[name].widget.attrs["required"] = ""
from django import forms


class GameForm(forms.Form):

    amount = forms.IntegerField(initial=100)

    def __init__(self, *args, **kwargs):
        super(GameForm, self).__init__(*args, **kwargs)
        for name, field in self.fields.items():
            if isinstance(field, forms.IntegerField):
                self.fields[name].widget.input_type = "number"
            if field.required:
                self.fields[name].widget.attrs["required"] = ""
Add a default bet amount.
Add a default bet amount.
Python
bsd-2-clause
stephenmcd/gamblor,stephenmcd/gamblor
from django import forms


class GameForm(forms.Form):

-    amount = forms.IntegerField()
+    amount = forms.IntegerField(initial=100)

    def __init__(self, *args, **kwargs):
        super(GameForm, self).__init__(*args, **kwargs)
        for name, field in self.fields.items():
            if isinstance(field, forms.IntegerField):
                self.fields[name].widget.input_type = "number"
            if field.required:
                self.fields[name].widget.attrs["required"] = ""
Add a default bet amount.
## Code Before:
from django import forms


class GameForm(forms.Form):

    amount = forms.IntegerField()

    def __init__(self, *args, **kwargs):
        super(GameForm, self).__init__(*args, **kwargs)
        for name, field in self.fields.items():
            if isinstance(field, forms.IntegerField):
                self.fields[name].widget.input_type = "number"
            if field.required:
                self.fields[name].widget.attrs["required"] = ""
## Instruction: Add a default bet amount.
## Code After:
from django import forms


class GameForm(forms.Form):

    amount = forms.IntegerField(initial=100)

    def __init__(self, *args, **kwargs):
        super(GameForm, self).__init__(*args, **kwargs)
        for name, field in self.fields.items():
            if isinstance(field, forms.IntegerField):
                self.fields[name].widget.input_type = "number"
            if field.required:
                self.fields[name].widget.attrs["required"] = ""
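`initial=100` only pre-populates unbound forms; it is not a fallback for missing submitted data. A sketch of the distinction, assuming a configured Django project:

```python
from django import forms


class BetForm(forms.Form):
    amount = forms.IntegerField(initial=100)


unbound = BetForm()          # renders with value="100"
bound = BetForm({})          # submitted empty: initial does NOT apply
assert not bound.is_valid()  # amount is still required
```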
fcc571d2f4c35ac8f0e94e51e6ac94a0c051062d
src/rinoh/__init__.py
src/rinoh/__init__.py
import os
import sys

from importlib import import_module

from .version import __version__, __release_date__

if sys.version_info < (3, 3):
    print('rinohtype requires Python 3.3 or higher')
    sys.exit(1)

CORE_MODULES = ['annotation', 'color', 'dimension', 'document', 'draw', 'float',
                'flowable', 'highlight', 'index', 'inline', 'layout', 'number',
                'paper', 'paragraph', 'reference', 'structure', 'style',
                'table', 'text']

__all__ = CORE_MODULES + ['font', 'frontend', 'backend', 'styleds', 'styles']

DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')

# create proxies for the core classes/constants at the top level for easy access
for name in CORE_MODULES:
    module = import_module('.' + name, __name__)
    module_dict, module_all = module.__dict__, module.__all__
    globals().update({name: module_dict[name] for name in module_all})
    __all__ += module_all
import os
import sys

from importlib import import_module

from .version import __version__, __release_date__

if sys.version_info < (3, 3):
    print('rinohtype requires Python 3.3 or higher')
    sys.exit(1)

CORE_MODULES = ['annotation', 'attribute', 'color', 'dimension', 'document',
                'draw', 'element', 'float', 'flowable', 'highlight', 'index',
                'inline', 'layout', 'number', 'paper', 'paragraph',
                'reference', 'structure', 'style', 'table', 'template', 'text']

__all__ = CORE_MODULES + ['font', 'fonts', 'frontend', 'backend', 'resource',
                          'styleds', 'styles', 'stylesheets', 'templates',
                          'strings', 'language']

DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')

# create proxies for the core classes/constants at the top level for easy access
for name in CORE_MODULES:
    module = import_module('.' + name, __name__)
    module_dict, module_all = module.__dict__, module.__all__
    globals().update({name: module_dict[name] for name in module_all})
    __all__ += module_all
Update the top-level rinoh package
Update the top-level rinoh package

Make all symbols and modules relevant to users available directly from the rinoh package.
Python
agpl-3.0
brechtm/rinohtype,brechtm/rinohtype,brechtm/rinohtype
import os
import sys

from importlib import import_module

from .version import __version__, __release_date__

if sys.version_info < (3, 3):
    print('rinohtype requires Python 3.3 or higher')
    sys.exit(1)

- CORE_MODULES = ['annotation', 'color', 'dimension', 'document', 'draw', 'float',
+ CORE_MODULES = ['annotation', 'attribute', 'color', 'dimension', 'document',
-                'flowable', 'highlight', 'index', 'inline', 'layout', 'number',
-                'paper', 'paragraph', 'reference', 'structure', 'style',
-                'table', 'text']
+                'draw', 'element', 'float', 'flowable', 'highlight', 'index',
+                'inline', 'layout', 'number', 'paper', 'paragraph',
+                'reference', 'structure', 'style', 'table', 'template', 'text']

- __all__ = CORE_MODULES + ['font', 'frontend', 'backend', 'styleds', 'styles']
+ __all__ = CORE_MODULES + ['font', 'fonts', 'frontend', 'backend', 'resource',
+                          'styleds', 'styles', 'stylesheets', 'templates',
+                          'strings', 'language']

DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')

# create proxies for the core classes/constants at the top level for easy access
for name in CORE_MODULES:
    module = import_module('.' + name, __name__)
    module_dict, module_all = module.__dict__, module.__all__
    globals().update({name: module_dict[name] for name in module_all})
    __all__ += module_all
Update the top-level rinoh package
## Code Before:
import os
import sys

from importlib import import_module

from .version import __version__, __release_date__

if sys.version_info < (3, 3):
    print('rinohtype requires Python 3.3 or higher')
    sys.exit(1)

CORE_MODULES = ['annotation', 'color', 'dimension', 'document', 'draw', 'float',
                'flowable', 'highlight', 'index', 'inline', 'layout', 'number',
                'paper', 'paragraph', 'reference', 'structure', 'style',
                'table', 'text']

__all__ = CORE_MODULES + ['font', 'frontend', 'backend', 'styleds', 'styles']

DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')

# create proxies for the core classes/constants at the top level for easy access
for name in CORE_MODULES:
    module = import_module('.' + name, __name__)
    module_dict, module_all = module.__dict__, module.__all__
    globals().update({name: module_dict[name] for name in module_all})
    __all__ += module_all
## Instruction: Update the top-level rinoh package
## Code After:
import os
import sys

from importlib import import_module

from .version import __version__, __release_date__

if sys.version_info < (3, 3):
    print('rinohtype requires Python 3.3 or higher')
    sys.exit(1)

CORE_MODULES = ['annotation', 'attribute', 'color', 'dimension', 'document',
                'draw', 'element', 'float', 'flowable', 'highlight', 'index',
                'inline', 'layout', 'number', 'paper', 'paragraph',
                'reference', 'structure', 'style', 'table', 'template', 'text']

__all__ = CORE_MODULES + ['font', 'fonts', 'frontend', 'backend', 'resource',
                          'styleds', 'styles', 'stylesheets', 'templates',
                          'strings', 'language']

DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')

# create proxies for the core classes/constants at the top level for easy access
for name in CORE_MODULES:
    module = import_module('.' + name, __name__)
    module_dict, module_all = module.__dict__, module.__all__
    globals().update({name: module_dict[name] for name in module_all})
    __all__ += module_all
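The proxy loop above lifts every submodule's `__all__` into the package namespace so users can import everything from the package root. The same trick in a stripped-down form (submodule names illustrative):

```python
from importlib import import_module

SUBMODULES = ['shapes', 'colors']  # illustrative submodules of this package
__all__ = list(SUBMODULES)

for _name in SUBMODULES:
    _module = import_module('.' + _name, __name__)
    # Promote each submodule's public names to the package root.
    globals().update({n: getattr(_module, n) for n in _module.__all__})
    __all__ += _module.__all__
```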
f9293d838a21f495ea9b56cbe0f6f75533360aed
pyinfra/api/config.py
pyinfra/api/config.py
import six

from pyinfra import logger


class Config(object):
    '''
    The default/base configuration options for a pyinfra deploy.
    '''

    state = None

    # % of hosts which have to fail for all operations to stop
    FAIL_PERCENT = None

    # Seconds to timeout SSH connections
    CONNECT_TIMEOUT = 10

    # Temporary directory (on the remote side) to use for caching any files/downloads
    TEMP_DIR = '/tmp'

    # Gevent pool size (defaults to #of target hosts)
    PARALLEL = None

    # Specify a minimum required pyinfra version for a deploy
    MIN_PYINFRA_VERSION = None

    # All these can be overridden inside individual operation calls:

    # Switch to this user (from ssh_user) using su before executing operations
    SU_USER = None
    USE_SU_LOGIN = False

    # Use sudo and optional user
    SUDO = False
    SUDO_USER = None
    PRESERVE_SUDO_ENV = False
    USE_SUDO_LOGIN = False
    USE_SUDO_PASSWORD = False

    # Only show errors, but don't count as failure
    IGNORE_ERRORS = False

    # Shell to use to execute commands
    SHELL = None

    def __init__(self, **kwargs):
        # Always apply some env
        env = kwargs.pop('ENV', {})
        self.ENV = env

        # Replace TIMEOUT -> CONNECT_TIMEOUT
        if 'TIMEOUT' in kwargs:
            logger.warning((
                'Config.TIMEOUT is deprecated, '
                'please use Config.CONNECT_TIMEOUT instead'
            ))
            kwargs['CONNECT_TIMEOUT'] = kwargs.pop('TIMEOUT')

        # Apply kwargs
        for key, value in six.iteritems(kwargs):
            setattr(self, key, value)
import six


class Config(object):
    '''
    The default/base configuration options for a pyinfra deploy.
    '''

    state = None

    # % of hosts which have to fail for all operations to stop
    FAIL_PERCENT = None

    # Seconds to timeout SSH connections
    CONNECT_TIMEOUT = 10

    # Temporary directory (on the remote side) to use for caching any files/downloads
    TEMP_DIR = '/tmp'

    # Gevent pool size (defaults to #of target hosts)
    PARALLEL = None

    # Specify a minimum required pyinfra version for a deploy
    MIN_PYINFRA_VERSION = None

    # All these can be overridden inside individual operation calls:

    # Switch to this user (from ssh_user) using su before executing operations
    SU_USER = None
    USE_SU_LOGIN = False

    # Use sudo and optional user
    SUDO = False
    SUDO_USER = None
    PRESERVE_SUDO_ENV = False
    USE_SUDO_LOGIN = False
    USE_SUDO_PASSWORD = False

    # Only show errors, but don't count as failure
    IGNORE_ERRORS = False

    # Shell to use to execute commands
    SHELL = None

    def __init__(self, **kwargs):
        # Always apply some env
        env = kwargs.pop('ENV', {})
        self.ENV = env

        # Apply kwargs
        for key, value in six.iteritems(kwargs):
            setattr(self, key, value)
Remove support for deprecated `Config.TIMEOUT`.
Remove support for deprecated `Config.TIMEOUT`.
Python
mit
Fizzadar/pyinfra,Fizzadar/pyinfra
  import six
- 
- from pyinfra import logger


  class Config(object):
      '''
      The default/base configuration options for a pyinfra deploy.
      '''

      state = None

      # % of hosts which have to fail for all operations to stop
      FAIL_PERCENT = None

      # Seconds to timeout SSH connections
      CONNECT_TIMEOUT = 10

      # Temporary directory (on the remote side) to use for caching any files/downloads
      TEMP_DIR = '/tmp'

      # Gevent pool size (defaults to #of target hosts)
      PARALLEL = None

      # Specify a minimum required pyinfra version for a deploy
      MIN_PYINFRA_VERSION = None

      # All these can be overridden inside individual operation calls:

      # Switch to this user (from ssh_user) using su before executing operations
      SU_USER = None
      USE_SU_LOGIN = False

      # Use sudo and optional user
      SUDO = False
      SUDO_USER = None
      PRESERVE_SUDO_ENV = False
      USE_SUDO_LOGIN = False
      USE_SUDO_PASSWORD = False

      # Only show errors, but don't count as failure
      IGNORE_ERRORS = False

      # Shell to use to execute commands
      SHELL = None

      def __init__(self, **kwargs):
          # Always apply some env
          env = kwargs.pop('ENV', {})
          self.ENV = env

-         # Replace TIMEOUT -> CONNECT_TIMEOUT
-         if 'TIMEOUT' in kwargs:
-             logger.warning((
-                 'Config.TIMEOUT is deprecated, '
-                 'please use Config.CONNECT_TIMEOUT instead'
-             ))
-             kwargs['CONNECT_TIMEOUT'] = kwargs.pop('TIMEOUT')
- 
          # Apply kwargs
          for key, value in six.iteritems(kwargs):
              setattr(self, key, value)
Remove support for deprecated `Config.TIMEOUT`.
## Code Before:
import six

from pyinfra import logger


class Config(object):
    '''
    The default/base configuration options for a pyinfra deploy.
    '''

    state = None

    # % of hosts which have to fail for all operations to stop
    FAIL_PERCENT = None

    # Seconds to timeout SSH connections
    CONNECT_TIMEOUT = 10

    # Temporary directory (on the remote side) to use for caching any files/downloads
    TEMP_DIR = '/tmp'

    # Gevent pool size (defaults to #of target hosts)
    PARALLEL = None

    # Specify a minimum required pyinfra version for a deploy
    MIN_PYINFRA_VERSION = None

    # All these can be overridden inside individual operation calls:

    # Switch to this user (from ssh_user) using su before executing operations
    SU_USER = None
    USE_SU_LOGIN = False

    # Use sudo and optional user
    SUDO = False
    SUDO_USER = None
    PRESERVE_SUDO_ENV = False
    USE_SUDO_LOGIN = False
    USE_SUDO_PASSWORD = False

    # Only show errors, but don't count as failure
    IGNORE_ERRORS = False

    # Shell to use to execute commands
    SHELL = None

    def __init__(self, **kwargs):
        # Always apply some env
        env = kwargs.pop('ENV', {})
        self.ENV = env

        # Replace TIMEOUT -> CONNECT_TIMEOUT
        if 'TIMEOUT' in kwargs:
            logger.warning((
                'Config.TIMEOUT is deprecated, '
                'please use Config.CONNECT_TIMEOUT instead'
            ))
            kwargs['CONNECT_TIMEOUT'] = kwargs.pop('TIMEOUT')

        # Apply kwargs
        for key, value in six.iteritems(kwargs):
            setattr(self, key, value)

## Instruction:
Remove support for deprecated `Config.TIMEOUT`.

## Code After:
import six


class Config(object):
    '''
    The default/base configuration options for a pyinfra deploy.
    '''

    state = None

    # % of hosts which have to fail for all operations to stop
    FAIL_PERCENT = None

    # Seconds to timeout SSH connections
    CONNECT_TIMEOUT = 10

    # Temporary directory (on the remote side) to use for caching any files/downloads
    TEMP_DIR = '/tmp'

    # Gevent pool size (defaults to #of target hosts)
    PARALLEL = None

    # Specify a minimum required pyinfra version for a deploy
    MIN_PYINFRA_VERSION = None

    # All these can be overridden inside individual operation calls:

    # Switch to this user (from ssh_user) using su before executing operations
    SU_USER = None
    USE_SU_LOGIN = False

    # Use sudo and optional user
    SUDO = False
    SUDO_USER = None
    PRESERVE_SUDO_ENV = False
    USE_SUDO_LOGIN = False
    USE_SUDO_PASSWORD = False

    # Only show errors, but don't count as failure
    IGNORE_ERRORS = False

    # Shell to use to execute commands
    SHELL = None

    def __init__(self, **kwargs):
        # Always apply some env
        env = kwargs.pop('ENV', {})
        self.ENV = env

        # Apply kwargs
        for key, value in six.iteritems(kwargs):
            setattr(self, key, value)
22f3d6d6fdc3e5f07ead782828b406c9a27d0199
UDPSender.py
UDPSender.py
from can import Listener
import socket


class UDPSender(Listener):
    dataConvert = {"0x600": {"String":"RPM:", "Slot":0, "Conversion":1},
                   "0x601": {"String":"OIL:", "Slot":2, "Conversion":(1/81.92)}}

    def __init__(self, IP="10.0.0.4", PORT=5555):
        self.ip = IP
        self.port = PORT
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def on_message_received(self, msg):
        udpMessage = self.can_to_udp_message(msg)
        if udpMessage:
            self.sock.sendto(udpMessage.encode(), (self.ip, self.port))

    def can_to_udp_message(self, msg):
        hexId = msg.arbritation_id
        if self.dataConvert.get(hexId):
            dataId = self.dataConvert[hexId]["String"]
            dataSlot = self.dataConvert[hexId]["Slot"]
            dataConversion = self.dataConvert[hexID]["Conversion"]
            data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion
            udpMessage = dataId + data
            return udpMessage
        else:
            return None

    def __del__(self):
        self.sock.close()
from can import Listener
from socket import socket


class UDPSender(Listener):
    dataConvert = {"0x600": {"String":"RPM:", "Slot":0, "Conversion":1},
                   "0x601": {"String":"OIL:", "Slot":2, "Conversion":(1/81.92)}}

    def __init__(self, IP="10.0.0.4", PORT=5555):
        self.ip = IP
        self.port = PORT
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def on_message_received(self, msg):
        udpMessage = self.can_to_udp_message(msg)
        if udpMessage:
            self.sock.sendto(udpMessage.encode(), (self.ip, self.port))

    def can_to_udp_message(self, msg):
        hexId = msg.arbritation_id
        if self.dataConvert.get(hexId):
            dataId = self.dataConvert[hexId]["String"]
            dataSlot = self.dataConvert[hexId]["Slot"]
            dataConversion = self.dataConvert[hexID]["Conversion"]
            data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion
            udpMessage = dataId + data
            return udpMessage
        else:
            return None

    def __del__(self):
        self.sock.close()
Change of import of libraries.
Change of import of libraries.

Tried to fix issue displayed below.

[root@alarm BeagleDash]# python3.3 CANtoUDP.py
Traceback (most recent call last):
  File "CANtoUDP.py", line 10, in <module>
    listeners = [csv, UDPSender()]
TypeError: 'module' object is not callable
Exception AttributeError: "'super' object has no attribute '__del__'" in <bound method CSVWriter.__del__ of <can.CAN.CSVWriter object at 0xb6867730>> ignored
Python
mit
TAURacing/BeagleDash
  from can import Listener
- import socket
+ from socket import socket


  class UDPSender(Listener):
      dataConvert = {"0x600": {"String":"RPM:", "Slot":0, "Conversion":1},
                     "0x601": {"String":"OIL:", "Slot":2, "Conversion":(1/81.92)}}

      def __init__(self, IP="10.0.0.4", PORT=5555):
          self.ip = IP
          self.port = PORT
          self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

      def on_message_received(self, msg):
          udpMessage = self.can_to_udp_message(msg)
          if udpMessage:
              self.sock.sendto(udpMessage.encode(), (self.ip, self.port))

      def can_to_udp_message(self, msg):
          hexId = msg.arbritation_id
          if self.dataConvert.get(hexId):
              dataId = self.dataConvert[hexId]["String"]
              dataSlot = self.dataConvert[hexId]["Slot"]
              dataConversion = self.dataConvert[hexID]["Conversion"]
              data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion
              udpMessage = dataId + data
              return udpMessage
          else:
              return None

      def __del__(self):
          self.sock.close()
Change of import of libraries.
## Code Before:
from can import Listener
import socket


class UDPSender(Listener):
    dataConvert = {"0x600": {"String":"RPM:", "Slot":0, "Conversion":1},
                   "0x601": {"String":"OIL:", "Slot":2, "Conversion":(1/81.92)}}

    def __init__(self, IP="10.0.0.4", PORT=5555):
        self.ip = IP
        self.port = PORT
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def on_message_received(self, msg):
        udpMessage = self.can_to_udp_message(msg)
        if udpMessage:
            self.sock.sendto(udpMessage.encode(), (self.ip, self.port))

    def can_to_udp_message(self, msg):
        hexId = msg.arbritation_id
        if self.dataConvert.get(hexId):
            dataId = self.dataConvert[hexId]["String"]
            dataSlot = self.dataConvert[hexId]["Slot"]
            dataConversion = self.dataConvert[hexID]["Conversion"]
            data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion
            udpMessage = dataId + data
            return udpMessage
        else:
            return None

    def __del__(self):
        self.sock.close()

## Instruction:
Change of import of libraries.

## Code After:
from can import Listener
from socket import socket


class UDPSender(Listener):
    dataConvert = {"0x600": {"String":"RPM:", "Slot":0, "Conversion":1},
                   "0x601": {"String":"OIL:", "Slot":2, "Conversion":(1/81.92)}}

    def __init__(self, IP="10.0.0.4", PORT=5555):
        self.ip = IP
        self.port = PORT
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def on_message_received(self, msg):
        udpMessage = self.can_to_udp_message(msg)
        if udpMessage:
            self.sock.sendto(udpMessage.encode(), (self.ip, self.port))

    def can_to_udp_message(self, msg):
        hexId = msg.arbritation_id
        if self.dataConvert.get(hexId):
            dataId = self.dataConvert[hexId]["String"]
            dataSlot = self.dataConvert[hexId]["Slot"]
            dataConversion = self.dataConvert[hexID]["Conversion"]
            data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion
            udpMessage = dataId + data
            return udpMessage
        else:
            return None

    def __del__(self):
        self.sock.close()
1da2c0e00d43c4fb9a7039e98401d333d387a057
saleor/search/views.py
saleor/search/views.py
from __future__ import unicode_literals

from django.core.paginator import Paginator, InvalidPage
from django.conf import settings
from django.http import Http404
from django.shortcuts import render

from .forms import SearchForm
from ..product.utils import products_with_details


def paginate_results(results, get_data, paginate_by=25):
    paginator = Paginator(results, paginate_by)
    page_number = get_data.get('page', 1)
    try:
        page = paginator.page(page_number)
    except InvalidPage:
        raise Http404('No such page!')
    return page


def search(request):
    form = SearchForm(data=request.GET or None)
    if form.is_valid():
        visible_products = products_with_details(request.user)
        results = form.search(model_or_queryset=visible_products)
        page = paginate_results(results, request.GET, settings.PAGINATE_BY)
    else:
        page = form.no_query_found()
    query = form.cleaned_data['q']
    ctx = {
        'query': query,
        'results': page,
        'query_string': '?q=%s' % query}
    return render(request, 'search/results.html', ctx)
from __future__ import unicode_literals

from django.core.paginator import Paginator, InvalidPage
from django.conf import settings
from django.http import Http404
from django.shortcuts import render

from .forms import SearchForm
from ..product.utils import products_with_details


def paginate_results(results, get_data, paginate_by=25):
    paginator = Paginator(results, paginate_by)
    page_number = get_data.get('page', 1)
    try:
        page = paginator.page(page_number)
    except InvalidPage:
        raise Http404('No such page!')
    return page


def search(request):
    form = SearchForm(data=request.GET or None)
    if form.is_valid():
        visible_products = products_with_details(request.user)
        results = form.search(model_or_queryset=visible_products)
        page = paginate_results(results, request.GET, settings.PAGINATE_BY)
    else:
        page = []
    query = form.cleaned_data.get('q', '')
    ctx = {
        'query': query,
        'results': page,
        'query_string': '?q=%s' % query}
    return render(request, 'search/results.html', ctx)
Fix empty search results logic
Fix empty search results logic
Python
bsd-3-clause
mociepka/saleor,jreigel/saleor,itbabu/saleor,maferelo/saleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,tfroehlich82/saleor,jreigel/saleor,KenMutemi/saleor,itbabu/saleor,car3oon/saleor,maferelo/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,itbabu/saleor,mociepka/saleor,car3oon/saleor,UITools/saleor,UITools/saleor,UITools/saleor,mociepka/saleor,tfroehlich82/saleor,tfroehlich82/saleor,jreigel/saleor,UITools/saleor
  from __future__ import unicode_literals

  from django.core.paginator import Paginator, InvalidPage
  from django.conf import settings
  from django.http import Http404
  from django.shortcuts import render

  from .forms import SearchForm
  from ..product.utils import products_with_details


  def paginate_results(results, get_data, paginate_by=25):
      paginator = Paginator(results, paginate_by)
      page_number = get_data.get('page', 1)
      try:
          page = paginator.page(page_number)
      except InvalidPage:
          raise Http404('No such page!')
      return page


  def search(request):
      form = SearchForm(data=request.GET or None)
      if form.is_valid():
          visible_products = products_with_details(request.user)
          results = form.search(model_or_queryset=visible_products)
          page = paginate_results(results, request.GET, settings.PAGINATE_BY)
      else:
-         page = form.no_query_found()
+         page = []
-     query = form.cleaned_data['q']
+     query = form.cleaned_data.get('q', '')
      ctx = {
          'query': query,
          'results': page,
          'query_string': '?q=%s' % query}
      return render(request, 'search/results.html', ctx)
Fix empty search results logic
## Code Before:
from __future__ import unicode_literals

from django.core.paginator import Paginator, InvalidPage
from django.conf import settings
from django.http import Http404
from django.shortcuts import render

from .forms import SearchForm
from ..product.utils import products_with_details


def paginate_results(results, get_data, paginate_by=25):
    paginator = Paginator(results, paginate_by)
    page_number = get_data.get('page', 1)
    try:
        page = paginator.page(page_number)
    except InvalidPage:
        raise Http404('No such page!')
    return page


def search(request):
    form = SearchForm(data=request.GET or None)
    if form.is_valid():
        visible_products = products_with_details(request.user)
        results = form.search(model_or_queryset=visible_products)
        page = paginate_results(results, request.GET, settings.PAGINATE_BY)
    else:
        page = form.no_query_found()
    query = form.cleaned_data['q']
    ctx = {
        'query': query,
        'results': page,
        'query_string': '?q=%s' % query}
    return render(request, 'search/results.html', ctx)

## Instruction:
Fix empty search results logic

## Code After:
from __future__ import unicode_literals

from django.core.paginator import Paginator, InvalidPage
from django.conf import settings
from django.http import Http404
from django.shortcuts import render

from .forms import SearchForm
from ..product.utils import products_with_details


def paginate_results(results, get_data, paginate_by=25):
    paginator = Paginator(results, paginate_by)
    page_number = get_data.get('page', 1)
    try:
        page = paginator.page(page_number)
    except InvalidPage:
        raise Http404('No such page!')
    return page


def search(request):
    form = SearchForm(data=request.GET or None)
    if form.is_valid():
        visible_products = products_with_details(request.user)
        results = form.search(model_or_queryset=visible_products)
        page = paginate_results(results, request.GET, settings.PAGINATE_BY)
    else:
        page = []
    query = form.cleaned_data.get('q', '')
    ctx = {
        'query': query,
        'results': page,
        'query_string': '?q=%s' % query}
    return render(request, 'search/results.html', ctx)
6c9b0b0c7e78524ea889f8a89c2eba8acb57f782
gaphor/ui/iconname.py
gaphor/ui/iconname.py
from gaphor import UML
import re
from functools import singledispatch


TO_KEBAB = re.compile(r"([a-z])([A-Z]+)")


def to_kebab_case(s):
    return TO_KEBAB.sub("\\1-\\2", s).lower()


@singledispatch
def get_icon_name(element):
    """
    Get an icon name for a UML model element.
    """
    return "gaphor-" + to_kebab_case(element.__class__.__name__)


@get_icon_name.register(UML.Class)
def get_name_for_class(element):
    if element.extension:
        return "gaphor-metaclass"
    else:
        return "gaphor-class"


@get_icon_name.register(UML.Property)
def get_name_for_property(element):
    if element.association:
        return "gaphor-association-end"
    else:
        return "gaphor-property"
from gaphor import UML
import re
from functools import singledispatch


TO_KEBAB = re.compile(r"([a-z])([A-Z]+)")


def to_kebab_case(s):
    return TO_KEBAB.sub("\\1-\\2", s).lower()


@singledispatch
def get_icon_name(element):
    """
    Get an icon name for a UML model element.
    """
    return "gaphor-" + to_kebab_case(element.__class__.__name__)


@get_icon_name.register(UML.Class)
def get_name_for_class(element):
    if isinstance(element, UML.Stereotype):
        return "gaphor-stereotype"
    elif element.extension:
        return "gaphor-metaclass"
    else:
        return "gaphor-class"


@get_icon_name.register(UML.Property)
def get_name_for_property(element):
    if element.association:
        return "gaphor-association-end"
    else:
        return "gaphor-property"
Fix stereotype icon in namespace view
Fix stereotype icon in namespace view
Python
lgpl-2.1
amolenaar/gaphor,amolenaar/gaphor
  from gaphor import UML
  import re
  from functools import singledispatch


  TO_KEBAB = re.compile(r"([a-z])([A-Z]+)")


  def to_kebab_case(s):
      return TO_KEBAB.sub("\\1-\\2", s).lower()


  @singledispatch
  def get_icon_name(element):
      """
      Get an icon name for a UML model element.
      """
      return "gaphor-" + to_kebab_case(element.__class__.__name__)


  @get_icon_name.register(UML.Class)
  def get_name_for_class(element):
+     if isinstance(element, UML.Stereotype):
+         return "gaphor-stereotype"
-     if element.extension:
+     elif element.extension:
          return "gaphor-metaclass"
      else:
          return "gaphor-class"


  @get_icon_name.register(UML.Property)
  def get_name_for_property(element):
      if element.association:
          return "gaphor-association-end"
      else:
          return "gaphor-property"
Fix stereotype icon in namespace view
## Code Before:
from gaphor import UML
import re
from functools import singledispatch


TO_KEBAB = re.compile(r"([a-z])([A-Z]+)")


def to_kebab_case(s):
    return TO_KEBAB.sub("\\1-\\2", s).lower()


@singledispatch
def get_icon_name(element):
    """
    Get an icon name for a UML model element.
    """
    return "gaphor-" + to_kebab_case(element.__class__.__name__)


@get_icon_name.register(UML.Class)
def get_name_for_class(element):
    if element.extension:
        return "gaphor-metaclass"
    else:
        return "gaphor-class"


@get_icon_name.register(UML.Property)
def get_name_for_property(element):
    if element.association:
        return "gaphor-association-end"
    else:
        return "gaphor-property"

## Instruction:
Fix stereotype icon in namespace view

## Code After:
from gaphor import UML
import re
from functools import singledispatch


TO_KEBAB = re.compile(r"([a-z])([A-Z]+)")


def to_kebab_case(s):
    return TO_KEBAB.sub("\\1-\\2", s).lower()


@singledispatch
def get_icon_name(element):
    """
    Get an icon name for a UML model element.
    """
    return "gaphor-" + to_kebab_case(element.__class__.__name__)


@get_icon_name.register(UML.Class)
def get_name_for_class(element):
    if isinstance(element, UML.Stereotype):
        return "gaphor-stereotype"
    elif element.extension:
        return "gaphor-metaclass"
    else:
        return "gaphor-class"


@get_icon_name.register(UML.Property)
def get_name_for_property(element):
    if element.association:
        return "gaphor-association-end"
    else:
        return "gaphor-property"
bb34b21ebd2378f944498708ac4f13d16aa61aa1
src/mist/io/tests/api/features/steps/backends.py
src/mist/io/tests/api/features/steps/backends.py
from behave import *


@given(u'"{text}" backend added')
def given_backend(context, text):
    backends = context.client.list_backends()
    for backend in backends:
        if text in backend['title']:
            return


@when(u'I list backends')
def list_backends(context):
    context.backends = context.client.list_backends()
from behave import *


@given(u'"{text}" backend added through api')
def given_backend(context, text):
    backends = context.client.list_backends()
    for backend in backends:
        if text in backend['title']:
            return


@when(u'I list backends')
def list_backends(context):
    context.backends = context.client.list_backends()
Rename Behave steps for api tests
Rename Behave steps for api tests
Python
agpl-3.0
johnnyWalnut/mist.io,DimensionDataCBUSydney/mist.io,zBMNForks/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,munkiat/mist.io,kelonye/mist.io,kelonye/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,johnnyWalnut/mist.io,zBMNForks/mist.io,DimensionDataCBUSydney/mist.io,DimensionDataCBUSydney/mist.io,munkiat/mist.io,zBMNForks/mist.io,munkiat/mist.io,johnnyWalnut/mist.io,afivos/mist.io,munkiat/mist.io,kelonye/mist.io
  from behave import *


- @given(u'"{text}" backend added')
+ @given(u'"{text}" backend added through api')
  def given_backend(context, text):
      backends = context.client.list_backends()
      for backend in backends:
          if text in backend['title']:
              return


  @when(u'I list backends')
  def list_backends(context):
      context.backends = context.client.list_backends()
Rename Behave steps for api tests
## Code Before:
from behave import *


@given(u'"{text}" backend added')
def given_backend(context, text):
    backends = context.client.list_backends()
    for backend in backends:
        if text in backend['title']:
            return


@when(u'I list backends')
def list_backends(context):
    context.backends = context.client.list_backends()

## Instruction:
Rename Behave steps for api tests

## Code After:
from behave import *


@given(u'"{text}" backend added through api')
def given_backend(context, text):
    backends = context.client.list_backends()
    for backend in backends:
        if text in backend['title']:
            return


@when(u'I list backends')
def list_backends(context):
    context.backends = context.client.list_backends()
6f42f03f950e4c3967eb1efd7feb9364c9fbaf1f
google.py
google.py
import os

from werkzeug.contrib.fixers import ProxyFix
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from raven.contrib.flask import Sentry

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
sentry = Sentry(app)
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")
app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID")
app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET")
google_bp = make_google_blueprint(scope=["profile", "email"])
app.register_blueprint(google_bp, url_prefix="/login")


@app.route("/")
def index():
    if not google.authorized:
        return redirect(url_for("google.login"))
    resp = google.get("/plus/v1/people/me")
    assert resp.ok, resp.text
    return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"])


if __name__ == "__main__":
    app.run()
import os

from werkzeug.contrib.fixers import ProxyFix
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from raven.contrib.flask import Sentry

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
sentry = Sentry(app)
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")
app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID")
app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET")
google_bp = make_google_blueprint(scope=["profile", "email"])
app.register_blueprint(google_bp, url_prefix="/login")


@app.route("/")
def index():
    if not google.authorized:
        return redirect(url_for("google.login"))
    resp = google.get("/oauth2/v1/userinfo")
    assert resp.ok, resp.text
    return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"])


if __name__ == "__main__":
    app.run()
Use userinfo URI for user profile info
Use userinfo URI for user profile info
Python
mit
singingwolfboy/flask-dance-google
  import os

  from werkzeug.contrib.fixers import ProxyFix
  from flask import Flask, redirect, url_for
  from flask_dance.contrib.google import make_google_blueprint, google
  from raven.contrib.flask import Sentry

  app = Flask(__name__)
  app.wsgi_app = ProxyFix(app.wsgi_app)
  sentry = Sentry(app)
  app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")
  app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID")
  app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET")
  google_bp = make_google_blueprint(scope=["profile", "email"])
  app.register_blueprint(google_bp, url_prefix="/login")


  @app.route("/")
  def index():
      if not google.authorized:
          return redirect(url_for("google.login"))
-     resp = google.get("/plus/v1/people/me")
+     resp = google.get("/oauth2/v1/userinfo")
      assert resp.ok, resp.text
      return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"])


  if __name__ == "__main__":
      app.run()
Use userinfo URI for user profile info
## Code Before:
import os

from werkzeug.contrib.fixers import ProxyFix
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from raven.contrib.flask import Sentry

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
sentry = Sentry(app)
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")
app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID")
app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET")
google_bp = make_google_blueprint(scope=["profile", "email"])
app.register_blueprint(google_bp, url_prefix="/login")


@app.route("/")
def index():
    if not google.authorized:
        return redirect(url_for("google.login"))
    resp = google.get("/plus/v1/people/me")
    assert resp.ok, resp.text
    return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"])


if __name__ == "__main__":
    app.run()

## Instruction:
Use userinfo URI for user profile info

## Code After:
import os

from werkzeug.contrib.fixers import ProxyFix
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from raven.contrib.flask import Sentry

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
sentry = Sentry(app)
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")
app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID")
app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET")
google_bp = make_google_blueprint(scope=["profile", "email"])
app.register_blueprint(google_bp, url_prefix="/login")


@app.route("/")
def index():
    if not google.authorized:
        return redirect(url_for("google.login"))
    resp = google.get("/oauth2/v1/userinfo")
    assert resp.ok, resp.text
    return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"])


if __name__ == "__main__":
    app.run()
ad73789f74106a2d6014a2f737578494d2d21fbf
virtool/api/processes.py
virtool/api/processes.py
import virtool.http.routes
import virtool.utils
from virtool.api.utils import json_response

routes = virtool.http.routes.Routes()


@routes.get("/api/processes")
async def find(req):
    db = req.app["db"]

    documents = [virtool.utils.base_processor(d) async for d in db.processes.find()]

    return json_response(documents)


@routes.get("/api/processes/{process_id}")
async def get(req):
    db = req.app["db"]

    process_id = req.match_info["process_id"]

    document = await db.processes.find_one(process_id)

    return json_response(virtool.utils.base_processor(document))


@routes.get("/api/processes/software_update")
async def get_software_update(req):
    db = req.app["db"]

    document = await db.processes.find_one({"type": "software_update"})

    return json_response(virtool.utils.base_processor(document))


@routes.get("/api/processes/hmm_install")
async def get_hmm_install(req):
    db = req.app["db"]

    document = await db.processes.find_one({"type": "hmm_install"})

    return json_response(virtool.utils.base_processor(document))
import virtool.http.routes
import virtool.utils
from virtool.api.utils import json_response

routes = virtool.http.routes.Routes()


@routes.get("/api/processes")
async def find(req):
    db = req.app["db"]

    documents = [virtool.utils.base_processor(d) async for d in db.processes.find()]

    return json_response(documents)


@routes.get("/api/processes/{process_id}")
async def get(req):
    db = req.app["db"]

    process_id = req.match_info["process_id"]

    document = await db.processes.find_one(process_id)

    return json_response(virtool.utils.base_processor(document))
Remove specific process API GET endpoints
Remove specific process API GET endpoints
Python
mit
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
  import virtool.http.routes
  import virtool.utils
  from virtool.api.utils import json_response

  routes = virtool.http.routes.Routes()


  @routes.get("/api/processes")
  async def find(req):
      db = req.app["db"]

      documents = [virtool.utils.base_processor(d) async for d in db.processes.find()]

      return json_response(documents)


  @routes.get("/api/processes/{process_id}")
  async def get(req):
      db = req.app["db"]

      process_id = req.match_info["process_id"]

      document = await db.processes.find_one(process_id)

      return json_response(virtool.utils.base_processor(document))

- 
- @routes.get("/api/processes/software_update")
- async def get_software_update(req):
-     db = req.app["db"]
- 
-     document = await db.processes.find_one({"type": "software_update"})
- 
-     return json_response(virtool.utils.base_processor(document))
- 
- 
- @routes.get("/api/processes/hmm_install")
- async def get_hmm_install(req):
-     db = req.app["db"]
- 
-     document = await db.processes.find_one({"type": "hmm_install"})
- 
-     return json_response(virtool.utils.base_processor(document))
- 
Remove specific process API GET endpoints
## Code Before:
import virtool.http.routes
import virtool.utils
from virtool.api.utils import json_response

routes = virtool.http.routes.Routes()


@routes.get("/api/processes")
async def find(req):
    db = req.app["db"]

    documents = [virtool.utils.base_processor(d) async for d in db.processes.find()]

    return json_response(documents)


@routes.get("/api/processes/{process_id}")
async def get(req):
    db = req.app["db"]

    process_id = req.match_info["process_id"]

    document = await db.processes.find_one(process_id)

    return json_response(virtool.utils.base_processor(document))


@routes.get("/api/processes/software_update")
async def get_software_update(req):
    db = req.app["db"]

    document = await db.processes.find_one({"type": "software_update"})

    return json_response(virtool.utils.base_processor(document))


@routes.get("/api/processes/hmm_install")
async def get_hmm_install(req):
    db = req.app["db"]

    document = await db.processes.find_one({"type": "hmm_install"})

    return json_response(virtool.utils.base_processor(document))

## Instruction:
Remove specific process API GET endpoints

## Code After:
import virtool.http.routes
import virtool.utils
from virtool.api.utils import json_response

routes = virtool.http.routes.Routes()


@routes.get("/api/processes")
async def find(req):
    db = req.app["db"]

    documents = [virtool.utils.base_processor(d) async for d in db.processes.find()]

    return json_response(documents)


@routes.get("/api/processes/{process_id}")
async def get(req):
    db = req.app["db"]

    process_id = req.match_info["process_id"]

    document = await db.processes.find_one(process_id)

    return json_response(virtool.utils.base_processor(document))
d7f3ea41bc3d252d786a339fc34337f01e1cc3eb
django_dbq/migrations/0001_initial.py
django_dbq/migrations/0001_initial.py
from __future__ import unicode_literals

from django.db import models, migrations
import jsonfield.fields
import uuid

try:
    from django.db.models import UUIDField
except ImportError:
    from django_dbq.fields import UUIDField


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
                ('created', models.DateTimeField(db_index=True, auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=100)),
                ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
                ('next_task', models.CharField(max_length=100, blank=True)),
                ('workspace', jsonfield.fields.JSONField(null=True)),
                ('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
    ]
from __future__ import unicode_literals

from django.db import models, migrations
import jsonfield.fields
import uuid

from django.db.models import UUIDField


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
                ('created', models.DateTimeField(db_index=True, auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=100)),
                ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
                ('next_task', models.CharField(max_length=100, blank=True)),
                ('workspace', jsonfield.fields.JSONField(null=True)),
                ('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
    ]
Remove reference to old UUIDfield in django migration
Remove reference to old UUIDfield in django migration
Python
bsd-2-clause
dabapps/django-db-queue
  from __future__ import unicode_literals

  from django.db import models, migrations
  import jsonfield.fields
  import uuid

- try:
-     from django.db.models import UUIDField
+ from django.db.models import UUIDField
- except ImportError:
-     from django_dbq.fields import UUIDField


  class Migration(migrations.Migration):

      dependencies = [
      ]

      operations = [
          migrations.CreateModel(
              name='Job',
              fields=[
                  ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
                  ('created', models.DateTimeField(db_index=True, auto_now_add=True)),
                  ('modified', models.DateTimeField(auto_now=True)),
                  ('name', models.CharField(max_length=100)),
                  ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
                  ('next_task', models.CharField(max_length=100, blank=True)),
                  ('workspace', jsonfield.fields.JSONField(null=True)),
                  ('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
              ],
              options={
                  'ordering': ['-created'],
              },
          ),
      ]
Remove reference to old UUIDfield in django migration
## Code Before:
from __future__ import unicode_literals

from django.db import models, migrations
import jsonfield.fields
import uuid

try:
    from django.db.models import UUIDField
except ImportError:
    from django_dbq.fields import UUIDField


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
                ('created', models.DateTimeField(db_index=True, auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=100)),
                ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
                ('next_task', models.CharField(max_length=100, blank=True)),
                ('workspace', jsonfield.fields.JSONField(null=True)),
                ('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
    ]

## Instruction:
Remove reference to old UUIDfield in django migration

## Code After:
from __future__ import unicode_literals

from django.db import models, migrations
import jsonfield.fields
import uuid

from django.db.models import UUIDField


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
                ('created', models.DateTimeField(db_index=True, auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=100)),
                ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
                ('next_task', models.CharField(max_length=100, blank=True)),
                ('workspace', jsonfield.fields.JSONField(null=True)),
                ('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
    ]
a5130e32bffa1dbc4d83f349fc3653b690154d71
vumi/workers/vas2nets/workers.py
vumi/workers/vas2nets/workers.py
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred

from vumi.message import Message
from vumi.service import Worker


class EchoWorker(Worker):

    @inlineCallbacks
    def startWorker(self):
        """called by the Worker class when the AMQP connections been established"""
        self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
        self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
                                           self.handle_inbound_message)

    def handle_inbound_message(self, message):
        log.msg("Received: %s" % (message.payload,))
        """Reply to the message with the same content"""
        data = message.payload
        reply = {
            'to_msisdn': data['from_msisdn'],
            'from_msisdn': data['to_msisdn'],
            'message': data['message'],
            'id': data['transport_message_id'],
            'transport_network_id': data['transport_network_id'],
        }
        return self.publisher.publish_message(Message(**reply))

    def stopWorker(self):
        """shutdown"""
        pass
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred

from vumi.message import Message
from vumi.service import Worker


class EchoWorker(Worker):

    @inlineCallbacks
    def startWorker(self):
        """called by the Worker class when the AMQP connections been established"""
        self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
        self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
                                           self.handle_inbound_message)

    def handle_inbound_message(self, message):
        log.msg("Received: %s" % (message.payload,))
        """Reply to the message with the same content"""
        data = message.payload
        reply = {
            'to_msisdn': data['from_msisdn'],
            'from_msisdn': data['to_msisdn'],
            'message': data['message'],
            'id': data['transport_message_id'],
            'transport_network_id': data['transport_network_id'],
            'transport_keyword': data['transport_keyword'],
        }
        return self.publisher.publish_message(Message(**reply))

    def stopWorker(self):
        """shutdown"""
        pass
Add keyword to echo worker.
Add keyword to echo worker.
Python
bsd-3-clause
TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi
  from twisted.python import log
  from twisted.internet.defer import inlineCallbacks, Deferred

  from vumi.message import Message
  from vumi.service import Worker


  class EchoWorker(Worker):

      @inlineCallbacks
      def startWorker(self):
          """called by the Worker class when the AMQP connections been established"""
          self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
          self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
                                             self.handle_inbound_message)

      def handle_inbound_message(self, message):
          log.msg("Received: %s" % (message.payload,))
          """Reply to the message with the same content"""
          data = message.payload
          reply = {
              'to_msisdn': data['from_msisdn'],
              'from_msisdn': data['to_msisdn'],
              'message': data['message'],
              'id': data['transport_message_id'],
              'transport_network_id': data['transport_network_id'],
+             'transport_keyword': data['transport_keyword'],
          }
          return self.publisher.publish_message(Message(**reply))

      def stopWorker(self):
          """shutdown"""
          pass
Add keyword to echo worker.
## Code Before:
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred

from vumi.message import Message
from vumi.service import Worker


class EchoWorker(Worker):

    @inlineCallbacks
    def startWorker(self):
        """called by the Worker class when the AMQP connections been established"""
        self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
        self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
                                           self.handle_inbound_message)

    def handle_inbound_message(self, message):
        log.msg("Received: %s" % (message.payload,))
        """Reply to the message with the same content"""
        data = message.payload
        reply = {
            'to_msisdn': data['from_msisdn'],
            'from_msisdn': data['to_msisdn'],
            'message': data['message'],
            'id': data['transport_message_id'],
            'transport_network_id': data['transport_network_id'],
        }
        return self.publisher.publish_message(Message(**reply))

    def stopWorker(self):
        """shutdown"""
        pass

## Instruction:
Add keyword to echo worker.

## Code After:
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred

from vumi.message import Message
from vumi.service import Worker


class EchoWorker(Worker):

    @inlineCallbacks
    def startWorker(self):
        """called by the Worker class when the AMQP connections been established"""
        self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
        self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
                                           self.handle_inbound_message)

    def handle_inbound_message(self, message):
        log.msg("Received: %s" % (message.payload,))
        """Reply to the message with the same content"""
        data = message.payload
        reply = {
            'to_msisdn': data['from_msisdn'],
            'from_msisdn': data['to_msisdn'],
            'message': data['message'],
            'id': data['transport_message_id'],
            'transport_network_id': data['transport_network_id'],
            'transport_keyword': data['transport_keyword'],
        }
        return self.publisher.publish_message(Message(**reply))

    def stopWorker(self):
        """shutdown"""
        pass
bbf22dc68202d81a8c7e94fbb8e61d819d808115
wisely_project/pledges/models.py
wisely_project/pledges/models.py
from django.utils import timezone
from django.db import models

from users.models import Course, BaseModel, User


class Pledge(BaseModel):
    user = models.ForeignKey(User)
    course = models.ForeignKey(Course)
    money = models.DecimalField(max_digits=8, decimal_places=2)
    pledge_date = models.DateTimeField('date pledged', default=timezone.now())
    complete_date = models.DateTimeField('date completed', null=True)
    is_active = models.BooleanField(default=True)
    is_complete = models.BooleanField(default=False)


class Follower(BaseModel):
    pledge = models.ForeignKey(Pledge)
    email = models.EmailField(default='', blank=True)
from django.utils import timezone
from django.db import models

from users.models import Course, BaseModel, UserProfile


class Pledge(BaseModel):
    user = models.ForeignKey(UserProfile)
    course = models.ForeignKey(Course)
    money = models.DecimalField(max_digits=8, decimal_places=2)
    pledge_date = models.DateTimeField('date pledged', default=timezone.now())
    complete_date = models.DateTimeField('date completed', null=True)
    is_active = models.BooleanField(default=True)
    is_complete = models.BooleanField(default=False)


class Follower(BaseModel):
    pledge = models.ForeignKey(Pledge)
    email = models.EmailField(default='', blank=True)
Make pledge foreignkey to userprofile
Make pledge foreignkey to userprofile
Python
mit
TejasM/wisely,TejasM/wisely,TejasM/wisely
  from django.utils import timezone
  from django.db import models

- from users.models import Course, BaseModel, User
+ from users.models import Course, BaseModel, UserProfile


  class Pledge(BaseModel):
-     user = models.ForeignKey(User)
+     user = models.ForeignKey(UserProfile)
      course = models.ForeignKey(Course)
      money = models.DecimalField(max_digits=8, decimal_places=2)
      pledge_date = models.DateTimeField('date pledged', default=timezone.now())
      complete_date = models.DateTimeField('date completed', null=True)
      is_active = models.BooleanField(default=True)
      is_complete = models.BooleanField(default=False)


  class Follower(BaseModel):
      pledge = models.ForeignKey(Pledge)
      email = models.EmailField(default='', blank=True)
Make pledge foreignkey to userprofile
## Code Before:
from django.utils import timezone
from django.db import models

from users.models import Course, BaseModel, User


class Pledge(BaseModel):
    user = models.ForeignKey(User)
    course = models.ForeignKey(Course)
    money = models.DecimalField(max_digits=8, decimal_places=2)
    pledge_date = models.DateTimeField('date pledged', default=timezone.now())
    complete_date = models.DateTimeField('date completed', null=True)
    is_active = models.BooleanField(default=True)
    is_complete = models.BooleanField(default=False)


class Follower(BaseModel):
    pledge = models.ForeignKey(Pledge)
    email = models.EmailField(default='', blank=True)

## Instruction:
Make pledge foreignkey to userprofile

## Code After:
from django.utils import timezone
from django.db import models

from users.models import Course, BaseModel, UserProfile


class Pledge(BaseModel):
    user = models.ForeignKey(UserProfile)
    course = models.ForeignKey(Course)
    money = models.DecimalField(max_digits=8, decimal_places=2)
    pledge_date = models.DateTimeField('date pledged', default=timezone.now())
    complete_date = models.DateTimeField('date completed', null=True)
    is_active = models.BooleanField(default=True)
    is_complete = models.BooleanField(default=False)


class Follower(BaseModel):
    pledge = models.ForeignKey(Pledge)
    email = models.EmailField(default='', blank=True)
8eca7b30865e4d02fd440f55ad3215dee6fab8a1
gee_asset_manager/batch_remover.py
gee_asset_manager/batch_remover.py
import fnmatch
import logging
import sys

import ee


def delete(asset_path):
    root = asset_path[:asset_path.rfind('/')]
    all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
    filtered_names = fnmatch.filter(all_assets_names, asset_path)
    if not filtered_names:
        logging.warning('Nothing to remove. Exiting.')
        sys.exit(1)
    else:
        for path in filtered_names:
            __delete_recursive(path)
            logging.info('Collection %s removed', path)


def __delete_recursive(asset_path):
    info = ee.data.getInfo(asset_path)
    if not info:
        logging.warning('Nothing to delete.')
        sys.exit(1)
    elif info['type'] == 'Image':
        pass
    elif info['type'] == 'Folder':
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            logging.info('Removing items in %s folder', item['id'])
            delete(item['id'])
    else:
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            ee.data.deleteAsset(item['id'])
    ee.data.deleteAsset(asset_path)
import fnmatch
import logging
import sys

import ee


def delete(asset_path):
    root_idx = asset_path.rfind('/')
    if root_idx == -1:
        logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow')
        sys.exit(1)
    root = asset_path[:root_idx]
    all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
    filtered_names = fnmatch.filter(all_assets_names, asset_path)
    if not filtered_names:
        logging.warning('Nothing to remove. Exiting.')
        sys.exit(1)
    else:
        for path in filtered_names:
            __delete_recursive(path)
            logging.info('Collection %s removed', path)


def __delete_recursive(asset_path):
    info = ee.data.getInfo(asset_path)
    if not info:
        logging.warning('Nothing to delete.')
        sys.exit(1)
    elif info['type'] == 'Image':
        pass
    elif info['type'] == 'Folder':
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            logging.info('Removing items in %s folder', item['id'])
            delete(item['id'])
    else:
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            ee.data.deleteAsset(item['id'])
    ee.data.deleteAsset(asset_path)
Add warning when removing an asset without full path
Add warning when removing an asset without full path
Python
apache-2.0
tracek/gee_asset_manager
  import fnmatch
  import logging
  import sys

  import ee


  def delete(asset_path):
-     root = asset_path[:asset_path.rfind('/')]
+     root_idx = asset_path.rfind('/')
+     if root_idx == -1:
+         logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow')
+         sys.exit(1)
+     root = asset_path[:root_idx]
      all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
      filtered_names = fnmatch.filter(all_assets_names, asset_path)
      if not filtered_names:
          logging.warning('Nothing to remove. Exiting.')
          sys.exit(1)
      else:
          for path in filtered_names:
              __delete_recursive(path)
              logging.info('Collection %s removed', path)


  def __delete_recursive(asset_path):
      info = ee.data.getInfo(asset_path)
      if not info:
          logging.warning('Nothing to delete.')
          sys.exit(1)
      elif info['type'] == 'Image':
          pass
      elif info['type'] == 'Folder':
          items_in_destination = ee.data.getList({'id': asset_path})
          for item in items_in_destination:
              logging.info('Removing items in %s folder', item['id'])
              delete(item['id'])
      else:
          items_in_destination = ee.data.getList({'id': asset_path})
          for item in items_in_destination:
              ee.data.deleteAsset(item['id'])
      ee.data.deleteAsset(asset_path)
Add warning when removing an asset without full path
## Code Before:
import fnmatch
import logging
import sys

import ee


def delete(asset_path):
    root = asset_path[:asset_path.rfind('/')]
    all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
    filtered_names = fnmatch.filter(all_assets_names, asset_path)
    if not filtered_names:
        logging.warning('Nothing to remove. Exiting.')
        sys.exit(1)
    else:
        for path in filtered_names:
            __delete_recursive(path)
            logging.info('Collection %s removed', path)


def __delete_recursive(asset_path):
    info = ee.data.getInfo(asset_path)
    if not info:
        logging.warning('Nothing to delete.')
        sys.exit(1)
    elif info['type'] == 'Image':
        pass
    elif info['type'] == 'Folder':
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            logging.info('Removing items in %s folder', item['id'])
            delete(item['id'])
    else:
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            ee.data.deleteAsset(item['id'])
    ee.data.deleteAsset(asset_path)

## Instruction:
Add warning when removing an asset without full path

## Code After:
import fnmatch
import logging
import sys

import ee


def delete(asset_path):
    root_idx = asset_path.rfind('/')
    if root_idx == -1:
        logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow')
        sys.exit(1)
    root = asset_path[:root_idx]
    all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
    filtered_names = fnmatch.filter(all_assets_names, asset_path)
    if not filtered_names:
        logging.warning('Nothing to remove. Exiting.')
        sys.exit(1)
    else:
        for path in filtered_names:
            __delete_recursive(path)
            logging.info('Collection %s removed', path)


def __delete_recursive(asset_path):
    info = ee.data.getInfo(asset_path)
    if not info:
        logging.warning('Nothing to delete.')
        sys.exit(1)
    elif info['type'] == 'Image':
        pass
    elif info['type'] == 'Folder':
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            logging.info('Removing items in %s folder', item['id'])
            delete(item['id'])
    else:
        items_in_destination = ee.data.getList({'id': asset_path})
        for item in items_in_destination:
            ee.data.deleteAsset(item['id'])
    ee.data.deleteAsset(asset_path)
18a874f312a57b4b9b7a5ce5cf9857585f0f0fef
truffe2/app/utils.py
truffe2/app/utils.py
def add_current_unit(request):
    """Template context processor to add current unit"""
    return {'CURRENT_UNIT': get_current_unit(request)}


def get_current_unit(request):
    """Return the current unit"""
    from units.models import Unit

    current_unit_pk = request.session.get('current_unit_pk', 1)

    try:
        current_unit = Unit.objects.get(pk=current_unit_pk)
    except Unit.DoesNotExist:
        current_unit = Unit.objects.get(pk=1)

    return current_unit


def update_current_unit(request, unit_pk):
    """Update the current unit"""
    request.session['current_unit_pk'] = unit_pk
from django.conf import settings


def add_current_unit(request):
    """Template context processor to add current unit"""
    return {'CURRENT_UNIT': get_current_unit(request)}


def get_current_unit(request):
    """Return the current unit"""
    from units.models import Unit

    current_unit_pk = request.session.get('current_unit_pk', 1)

    try:
        current_unit = Unit.objects.get(pk=current_unit_pk)
    except Unit.DoesNotExist:
        try:
            current_unit = Unit.objects.get(pk=settings.ROOT_UNIT_PK)
        except:
            current_unit = None

    return current_unit


def update_current_unit(request, unit_pk):
    """Update the current unit"""
    request.session['current_unit_pk'] = unit_pk
Fix error if no units
Fix error if no units
Python
bsd-2-clause
agepoly/truffe2,ArcaniteSolutions/truffe2,ArcaniteSolutions/truffe2,agepoly/truffe2,agepoly/truffe2,ArcaniteSolutions/truffe2,agepoly/truffe2,ArcaniteSolutions/truffe2
+ from django.conf import settings
+ 
+ 
  def add_current_unit(request):
      """Template context processor to add current unit"""
      return {'CURRENT_UNIT': get_current_unit(request)}


  def get_current_unit(request):
      """Return the current unit"""
      from units.models import Unit

      current_unit_pk = request.session.get('current_unit_pk', 1)

      try:
          current_unit = Unit.objects.get(pk=current_unit_pk)
      except Unit.DoesNotExist:
+         try:
-         current_unit = Unit.objects.get(pk=1)
+             current_unit = Unit.objects.get(pk=settings.ROOT_UNIT_PK)
+         except:
+             current_unit = None

      return current_unit


  def update_current_unit(request, unit_pk):
      """Update the current unit"""
      request.session['current_unit_pk'] = unit_pk
Fix error if no units
## Code Before:
def add_current_unit(request):
    """Template context processor to add current unit"""
    return {'CURRENT_UNIT': get_current_unit(request)}


def get_current_unit(request):
    """Return the current unit"""
    from units.models import Unit

    current_unit_pk = request.session.get('current_unit_pk', 1)

    try:
        current_unit = Unit.objects.get(pk=current_unit_pk)
    except Unit.DoesNotExist:
        current_unit = Unit.objects.get(pk=1)

    return current_unit


def update_current_unit(request, unit_pk):
    """Update the current unit"""
    request.session['current_unit_pk'] = unit_pk

## Instruction:
Fix error if no units

## Code After:
from django.conf import settings


def add_current_unit(request):
    """Template context processor to add current unit"""
    return {'CURRENT_UNIT': get_current_unit(request)}


def get_current_unit(request):
    """Return the current unit"""
    from units.models import Unit

    current_unit_pk = request.session.get('current_unit_pk', 1)

    try:
        current_unit = Unit.objects.get(pk=current_unit_pk)
    except Unit.DoesNotExist:
        try:
            current_unit = Unit.objects.get(pk=settings.ROOT_UNIT_PK)
        except:
            current_unit = None

    return current_unit


def update_current_unit(request, unit_pk):
    """Update the current unit"""
    request.session['current_unit_pk'] = unit_pk
4c1bf1757baa5beec50377724961c528f5985864
ptest/screencapturer.py
ptest/screencapturer.py
import threading import traceback import plogger __author__ = 'karl.gong' def take_screen_shot(): current_thread = threading.currentThread() active_browser = current_thread.get_property("browser") if active_browser is not None: while True: try: active_browser.switch_to.alert.dismiss() except Exception: break try: screen_shot = active_browser.get_screenshot_as_png() except Exception as e: plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc())) return current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot else: pass # todo: take screen shot for desktop
import threading import traceback import StringIO import plogger try: from PIL import ImageGrab except ImportError: PIL_installed = False else: PIL_installed = True try: import wx except ImportError: wxpython_installed = False else: wxpython_installed = True __author__ = 'karl.gong' def take_screen_shot(): current_thread = threading.currentThread() active_browser = current_thread.get_property("browser") if active_browser is not None: while True: try: active_browser.switch_to.alert.dismiss() except Exception: break def capture_screen(): return active_browser.get_screenshot_as_png() elif PIL_installed: def capture_screen(): output = StringIO.StringIO() ImageGrab.grab().save(output, format="png") return output.getvalue() elif wxpython_installed: def capture_screen(): app = wx.App(False) screen = wx.ScreenDC() width, height = screen.GetSize() bmp = wx.EmptyBitmap(width, height) mem = wx.MemoryDC(bmp) mem.Blit(0, 0, width, height, screen, 0, 0) output = StringIO.StringIO() bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG) return output.getvalue() else: return try: current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen() except Exception as e: plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
Support capturing screenshots for non-Selenium tests
Support capturing screenshots for non-Selenium tests
Python
apache-2.0
KarlGong/ptest,KarlGong/ptest
import threading import traceback + import StringIO import plogger + + try: + from PIL import ImageGrab + except ImportError: + PIL_installed = False + else: + PIL_installed = True + + try: + import wx + except ImportError: + wxpython_installed = False + else: + wxpython_installed = True __author__ = 'karl.gong' def take_screen_shot(): current_thread = threading.currentThread() active_browser = current_thread.get_property("browser") if active_browser is not None: while True: try: active_browser.switch_to.alert.dismiss() except Exception: break - try: + def capture_screen(): - screen_shot = active_browser.get_screenshot_as_png() + return active_browser.get_screenshot_as_png() - except Exception as e: - plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc())) + elif PIL_installed: + def capture_screen(): + output = StringIO.StringIO() + ImageGrab.grab().save(output, format="png") + return output.getvalue() + elif wxpython_installed: + def capture_screen(): + app = wx.App(False) + screen = wx.ScreenDC() + width, height = screen.GetSize() + bmp = wx.EmptyBitmap(width, height) + mem = wx.MemoryDC(bmp) + mem.Blit(0, 0, width, height, screen, 0, 0) + output = StringIO.StringIO() + bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG) + return output.getvalue() + else: - return + return + try: - current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot + current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen() + except Exception as e: + plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc())) - - else: - pass # todo: take screen shot for desktop
Support capturing screenshots for non-Selenium tests
## Code Before: import threading import traceback import plogger __author__ = 'karl.gong' def take_screen_shot(): current_thread = threading.currentThread() active_browser = current_thread.get_property("browser") if active_browser is not None: while True: try: active_browser.switch_to.alert.dismiss() except Exception: break try: screen_shot = active_browser.get_screenshot_as_png() except Exception as e: plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc())) return current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot else: pass # todo: take screen shot for desktop ## Instruction: Support capturing screenshots for non-Selenium tests ## Code After: import threading import traceback import StringIO import plogger try: from PIL import ImageGrab except ImportError: PIL_installed = False else: PIL_installed = True try: import wx except ImportError: wxpython_installed = False else: wxpython_installed = True __author__ = 'karl.gong' def take_screen_shot(): current_thread = threading.currentThread() active_browser = current_thread.get_property("browser") if active_browser is not None: while True: try: active_browser.switch_to.alert.dismiss() except Exception: break def capture_screen(): return active_browser.get_screenshot_as_png() elif PIL_installed: def capture_screen(): output = StringIO.StringIO() ImageGrab.grab().save(output, format="png") return output.getvalue() elif wxpython_installed: def capture_screen(): app = wx.App(False) screen = wx.ScreenDC() width, height = screen.GetSize() bmp = wx.EmptyBitmap(width, height) mem = wx.MemoryDC(bmp) mem.Blit(0, 0, width, height, screen, 0, 0) output = StringIO.StringIO() bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG) return output.getvalue() else: return try: current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen() except Exception as e: plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
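This patch relies on import-time feature detection: each optional backend is probed with try/except ImportError plus a boolean flag, and the capture strategy is chosen from whichever flag is set. A minimal sketch of that probe-and-flag idiom on Python 3, where io.BytesIO replaces the Python 2 StringIO used above; Pillow's ImageGrab is platform-dependent, so the fallback simply returns None:

```python
try:
    from PIL import ImageGrab     # optional backend, probed at import time
except ImportError:
    PIL_installed = False
else:
    PIL_installed = True

def capture_screen():
    """Return a PNG screenshot as bytes, or None when no backend is available."""
    if not PIL_installed:
        return None
    import io
    output = io.BytesIO()
    ImageGrab.grab().save(output, format="PNG")
    return output.getvalue()
```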
c82f0f10ea8b96377ebed8a6859ff3cd8ed4cd3f
python/turbodbc/exceptions.py
python/turbodbc/exceptions.py
from __future__ import absolute_import from functools import wraps from exceptions import StandardError from turbodbc_intern import Error as InternError class Error(StandardError): pass class InterfaceError(Error): pass class DatabaseError(Error): pass def translate_exceptions(f): @wraps(f) def wrapper(*args, **kwds): try: return f(*args, **kwds) except InternError as e: raise DatabaseError(str(e)) return wrapper
from __future__ import absolute_import from functools import wraps from turbodbc_intern import Error as InternError # Python 2/3 compatibility try: from exceptions import StandardError as _BaseError except ImportError: _BaseError = Exception class Error(_BaseError): pass class InterfaceError(Error): pass class DatabaseError(Error): pass def translate_exceptions(f): @wraps(f) def wrapper(*args, **kwds): try: return f(*args, **kwds) except InternError as e: raise DatabaseError(str(e)) return wrapper
Fix Python 2/3 exception base class compatibility
Fix Python 2/3 exception base class compatibility
Python
mit
blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc
from __future__ import absolute_import from functools import wraps - from exceptions import StandardError from turbodbc_intern import Error as InternError + # Python 2/3 compatibility + try: + from exceptions import StandardError as _BaseError + except ImportError: + _BaseError = Exception + + - class Error(StandardError): + class Error(_BaseError): pass class InterfaceError(Error): pass class DatabaseError(Error): pass def translate_exceptions(f): @wraps(f) def wrapper(*args, **kwds): try: return f(*args, **kwds) except InternError as e: raise DatabaseError(str(e)) return wrapper
Fix Python 2/3 exception base class compatibility
## Code Before: from __future__ import absolute_import from functools import wraps from exceptions import StandardError from turbodbc_intern import Error as InternError class Error(StandardError): pass class InterfaceError(Error): pass class DatabaseError(Error): pass def translate_exceptions(f): @wraps(f) def wrapper(*args, **kwds): try: return f(*args, **kwds) except InternError as e: raise DatabaseError(str(e)) return wrapper ## Instruction: Fix Python 2/3 exception base class compatibility ## Code After: from __future__ import absolute_import from functools import wraps from turbodbc_intern import Error as InternError # Python 2/3 compatibility try: from exceptions import StandardError as _BaseError except ImportError: _BaseError = Exception class Error(_BaseError): pass class InterfaceError(Error): pass class DatabaseError(Error): pass def translate_exceptions(f): @wraps(f) def wrapper(*args, **kwds): try: return f(*args, **kwds) except InternError as e: raise DatabaseError(str(e)) return wrapper
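Besides the base-class shim, the module's translate_exceptions decorator is the piece worth seeing in action: it re-raises a foreign exception type as the package's own. A self-contained sketch with ValueError standing in for the driver-level InternError:

```python
from functools import wraps

class DatabaseError(Exception):
    pass

def translate_exceptions(f):
    """Re-raise any ValueError from f as DatabaseError (ValueError is a stand-in)."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except ValueError as e:
            raise DatabaseError(str(e))
    return wrapper

@translate_exceptions
def parse_row(text):
    return int(text)

try:
    parse_row('not-a-number')
except DatabaseError as e:
    print('translated:', e)
```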
8abdce9c60c9d2ead839e0065d35128ec16a82a1
chatterbot/__main__.py
chatterbot/__main__.py
import sys if __name__ == '__main__': import chatterbot if '--version' in sys.argv: print(chatterbot.__version__) if 'list_nltk_data' in sys.argv: import nltk.data print('\n'.join(nltk.data.path))
import sys if __name__ == '__main__': import chatterbot if '--version' in sys.argv: print(chatterbot.__version__) if 'list_nltk_data' in sys.argv: import os import nltk.data data_directories = [] # Find each data directory in the NLTK path that has content for path in nltk.data.path: if os.path.exists(path): if os.listdir(path): data_directories.append(path) print(os.linesep.join(data_directories))
Add command line utility to find NLTK data
Add command line utility to find NLTK data
Python
bsd-3-clause
gunthercox/ChatterBot,vkosuri/ChatterBot
import sys if __name__ == '__main__': import chatterbot if '--version' in sys.argv: print(chatterbot.__version__) if 'list_nltk_data' in sys.argv: + import os import nltk.data - print('\n'.join(nltk.data.path)) + data_directories = [] + # Find each data directory in the NLTK path that has content + for path in nltk.data.path: + if os.path.exists(path): + if os.listdir(path): + data_directories.append(path) + + print(os.linesep.join(data_directories)) +
Add command line utility to find NLTK data
## Code Before: import sys if __name__ == '__main__': import chatterbot if '--version' in sys.argv: print(chatterbot.__version__) if 'list_nltk_data' in sys.argv: import nltk.data print('\n'.join(nltk.data.path)) ## Instruction: Add command line utility to find NLTK data ## Code After: import sys if __name__ == '__main__': import chatterbot if '--version' in sys.argv: print(chatterbot.__version__) if 'list_nltk_data' in sys.argv: import os import nltk.data data_directories = [] # Find each data directory in the NLTK path that has content for path in nltk.data.path: if os.path.exists(path): if os.listdir(path): data_directories.append(path) print(os.linesep.join(data_directories))
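The new branch in this record boils down to filtering a search path to directories that exist and are non-empty. Pulled out as a reusable helper; the sample paths are arbitrary and may or may not exist on a given machine:

```python
import os

def non_empty_dirs(search_path):
    """Keep only entries that exist on disk and contain at least one item."""
    return [path for path in search_path
            if os.path.exists(path) and os.listdir(path)]

print(os.linesep.join(non_empty_dirs(['/usr/share', '/no/such/dir'])))
```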
210c7b7fb421a7c083b9d292370b15c0ece17fa7
source/bark/__init__.py
source/bark/__init__.py
from .handler.distribute import Distribute #: Top level handler responsible for relaying all logs to other handlers. handle = Distribute()
from .handler.distribute import Distribute #: Top level handler responsible for relaying all logs to other handlers. handler = Distribute() handlers = handler.handlers handle = handler.handle
Correct handler reference variable name and add convenient accessors.
Correct handler reference variable name and add convenient accessors.
Python
apache-2.0
4degrees/mill,4degrees/sawmill
from .handler.distribute import Distribute #: Top level handler responsible for relaying all logs to other handlers. - handle = Distribute() + handler = Distribute() + handlers = handler.handlers + handle = handler.handle +
Correct handler reference variable name and add convenient accessors.
## Code Before: from .handler.distribute import Distribute #: Top level handler responsible for relaying all logs to other handlers. handle = Distribute() ## Instruction: Correct handler reference variable name and add convenient accessors. ## Code After: from .handler.distribute import Distribute #: Top level handler responsible for relaying all logs to other handlers. handler = Distribute() handlers = handler.handlers handle = handler.handle
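The convenience accessors added here are just attribute assignments: binding handler.handle to a module-level name gives callers a function-style entry point without hiding the object. In miniature:

```python
class Distribute:
    def __init__(self):
        self.handlers = []
    def handle(self, log):
        for h in self.handlers:
            h(log)

handler = Distribute()
handle = handler.handle            # bound method, usable as a free function
handler.handlers.append(print)
handle({"message": "hi"})          # dispatches to the registered handler
```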
1c78dfa0e0d1905910476b4052e42de287a70b74
runtests.py
runtests.py
import os import sys import string def main(): """ Executes the tests. Requires the CherryPy live server to be installed. """ command = "python manage.py test" options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver" apps = [] if len(sys.argv) > 1: apps = sys.argv[1:] os.system(command + " " + string.join(apps, " ") + " " + options) if __name__ == "__main__": main()
import os import sys import string def main(): """ Executes the tests. Requires the CherryPy live server to be installed. """ command = "python manage.py test" options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver --noinput" apps = [] if len(sys.argv) > 1: apps = sys.argv[1:] os.system(command + " " + string.join(apps, " ") + " " + options) if __name__ == "__main__": main()
Update to the run tests script to force database deletion if the test database exists.
Update to the run tests script to force database deletion if the test database exists.
Python
mit
jtakayama/makahiki-draft,jtakayama/ics691-setupbooster,csdl/makahiki,yongwen/makahiki,yongwen/makahiki,jtakayama/makahiki-draft,yongwen/makahiki,justinslee/Wai-Not-Makahiki,csdl/makahiki,jtakayama/makahiki-draft,yongwen/makahiki,csdl/makahiki,csdl/makahiki,jtakayama/makahiki-draft,jtakayama/ics691-setupbooster,jtakayama/ics691-setupbooster
import os import sys import string def main(): """ Executes the tests. Requires the CherryPy live server to be installed. """ command = "python manage.py test" - options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver" + options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver --noinput" apps = [] if len(sys.argv) > 1: apps = sys.argv[1:] os.system(command + " " + string.join(apps, " ") + " " + options) if __name__ == "__main__": main()
Update to the run tests script to force database deletion if the test database exists.
## Code Before: import os import sys import string def main(): """ Executes the tests. Requires the CherryPy live server to be installed. """ command = "python manage.py test" options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver" apps = [] if len(sys.argv) > 1: apps = sys.argv[1:] os.system(command + " " + string.join(apps, " ") + " " + options) if __name__ == "__main__": main() ## Instruction: Update to the run tests script to force database deletion if the test database exists. ## Code After: import os import sys import string def main(): """ Executes the tests. Requires the CherryPy live server to be installed. """ command = "python manage.py test" options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver --noinput" apps = [] if len(sys.argv) > 1: apps = sys.argv[1:] os.system(command + " " + string.join(apps, " ") + " " + options) if __name__ == "__main__": main()
78b2978c3e0e56c4c75a3a6b532e02c995ca69ed
openedx/core/djangoapps/user_api/permissions/views.py
openedx/core/djangoapps/user_api/permissions/views.py
from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework import permissions from django.db import transaction from django.utils.translation import ugettext as _ from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from openedx.core.lib.api.parsers import MergePatchParser from openedx.core.lib.api.permissions import IsUserInUrlOrStaff from ..errors import UserNotFound, UserNotAuthorized class PermissionsView(APIView): authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser) parser_classes = (MergePatchParser,) def get(self, request): """ GET /api/user/v1/ """ try: is_staff = request.user.is_staff except UserNotAuthorized: return Response(status=status.HTTP_403_FORBIDDEN) except UserNotFound: return Response(status=status.HTTP_404_NOT_FOUND) return Response(is_staff)
from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from openedx.core.lib.api.parsers import MergePatchParser from ..errors import UserNotFound, UserNotAuthorized class PermissionsView(APIView): authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser) parser_classes = (MergePatchParser,) def get(self, request): """ GET /api/user/v1/ """ try: is_staff = request.user.is_staff except UserNotAuthorized: return Response(status=status.HTTP_403_FORBIDDEN) except UserNotFound: return Response(status=status.HTTP_404_NOT_FOUND) return Response(is_staff)
Remove unused import and redundant comment
Remove unused import and redundant comment
Python
agpl-3.0
mbareta/edx-platform-ft,mbareta/edx-platform-ft,mbareta/edx-platform-ft,mbareta/edx-platform-ft
from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status - from rest_framework import permissions - - from django.db import transaction - from django.utils.translation import ugettext as _ from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from openedx.core.lib.api.parsers import MergePatchParser - from openedx.core.lib.api.permissions import IsUserInUrlOrStaff from ..errors import UserNotFound, UserNotAuthorized class PermissionsView(APIView): authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser) parser_classes = (MergePatchParser,) def get(self, request): """ GET /api/user/v1/ """ try: is_staff = request.user.is_staff except UserNotAuthorized: return Response(status=status.HTTP_403_FORBIDDEN) except UserNotFound: return Response(status=status.HTTP_404_NOT_FOUND) return Response(is_staff)
Remove unused import and redundant comment
## Code Before: from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework import permissions from django.db import transaction from django.utils.translation import ugettext as _ from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from openedx.core.lib.api.parsers import MergePatchParser from openedx.core.lib.api.permissions import IsUserInUrlOrStaff from ..errors import UserNotFound, UserNotAuthorized class PermissionsView(APIView): authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser) parser_classes = (MergePatchParser,) def get(self, request): """ GET /api/user/v1/ """ try: is_staff = request.user.is_staff except UserNotAuthorized: return Response(status=status.HTTP_403_FORBIDDEN) except UserNotFound: return Response(status=status.HTTP_404_NOT_FOUND) return Response(is_staff) ## Instruction: Remove unused import and redundant comment ## Code After: from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from openedx.core.lib.api.parsers import MergePatchParser from ..errors import UserNotFound, UserNotAuthorized class PermissionsView(APIView): authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser) parser_classes = (MergePatchParser,) def get(self, request): """ GET /api/user/v1/ """ try: is_staff = request.user.is_staff except UserNotAuthorized: return Response(status=status.HTTP_403_FORBIDDEN) except UserNotFound: return Response(status=status.HTTP_404_NOT_FOUND) return Response(is_staff)
cadee051a462de765bab59ac42d6b372fa49c033
examples/logfile.py
examples/logfile.py
from __future__ import unicode_literals, print_function from twisted.internet.task import react from eliot.logwriter import ThreadedFileWriter from eliot import Message, Logger, addDestination _logger = Logger() def main(reactor): print("Logging to example-eliot.log...") logWriter = ThreadedFileWriter(open("example-eliot.log", "ab"), reactor) addDestination(logWriter) # Manually start the service. Normally we'd register ThreadedFileWriter # with the usual Twisted Service/Application infrastructure. logWriter.startService() # Log a message: Message.new(value="hello", another=1).write(_logger) # Manually stop the service. done = logWriter.stopService() return done if __name__ == '__main__': react(main, [])
from __future__ import unicode_literals, print_function from twisted.internet.task import react from eliot.logwriter import ThreadedFileWriter from eliot import Message, Logger _logger = Logger() def main(reactor): print("Logging to example-eliot.log...") logWriter = ThreadedFileWriter(open("example-eliot.log", "ab"), reactor) # Manually start the service, which will add it as a # destination. Normally we'd register ThreadedFileWriter with the usual # Twisted Service/Application infrastructure. logWriter.startService() # Log a message: Message.new(value="hello", another=1).write(_logger) # Manually stop the service. done = logWriter.stopService() return done if __name__ == '__main__': react(main, [])
Fix bug where the service was added as a destination one time too many.
Fix bug where the service was added as a destination one time too many.
Python
apache-2.0
iffy/eliot,ClusterHQ/eliot,ScatterHQ/eliot,ScatterHQ/eliot,ScatterHQ/eliot
from __future__ import unicode_literals, print_function from twisted.internet.task import react from eliot.logwriter import ThreadedFileWriter - from eliot import Message, Logger, addDestination + from eliot import Message, Logger _logger = Logger() def main(reactor): print("Logging to example-eliot.log...") logWriter = ThreadedFileWriter(open("example-eliot.log", "ab"), reactor) - addDestination(logWriter) - # Manually start the service. Normally we'd register ThreadedFileWriter + # Manually start the service, which will add it as a + # destination. Normally we'd register ThreadedFileWriter with the usual - # with the usual Twisted Service/Application infrastructure. + # Twisted Service/Application infrastructure. logWriter.startService() # Log a message: Message.new(value="hello", another=1).write(_logger) # Manually stop the service. done = logWriter.stopService() return done if __name__ == '__main__': react(main, [])
Fix bug where the service was added as a destination one time too many.
## Code Before: from __future__ import unicode_literals, print_function from twisted.internet.task import react from eliot.logwriter import ThreadedFileWriter from eliot import Message, Logger, addDestination _logger = Logger() def main(reactor): print("Logging to example-eliot.log...") logWriter = ThreadedFileWriter(open("example-eliot.log", "ab"), reactor) addDestination(logWriter) # Manually start the service. Normally we'd register ThreadedFileWriter # with the usual Twisted Service/Application infrastructure. logWriter.startService() # Log a message: Message.new(value="hello", another=1).write(_logger) # Manually stop the service. done = logWriter.stopService() return done if __name__ == '__main__': react(main, []) ## Instruction: Fix bug where the service was added as a destination one time too many. ## Code After: from __future__ import unicode_literals, print_function from twisted.internet.task import react from eliot.logwriter import ThreadedFileWriter from eliot import Message, Logger _logger = Logger() def main(reactor): print("Logging to example-eliot.log...") logWriter = ThreadedFileWriter(open("example-eliot.log", "ab"), reactor) # Manually start the service, which will add it as a # destination. Normally we'd register ThreadedFileWriter with the usual # Twisted Service/Application infrastructure. logWriter.startService() # Log a message: Message.new(value="hello", another=1).write(_logger) # Manually stop the service. done = logWriter.stopService() return done if __name__ == '__main__': react(main, [])
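The bug pattern in this record is double registration: the example added the writer as a destination by hand even though startService already does so, so each message would be delivered twice. The generic defense is to make registration idempotent, sketched here without Twisted:

```python
destinations = []

def add_destination(dest):
    """Register a destination at most once, however often this is called."""
    if dest not in destinations:
        destinations.append(dest)

add_destination(print)
add_destination(print)            # duplicate call is a no-op
assert destinations == [print]
```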
9f10dbdabe61ed841c0def319f021a4735f39217
src/sct/templates/__init__.py
src/sct/templates/__init__.py
''' Copyright 2014 Universitatea de Vest din Timișoara Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. @author: Marian Neagul <marian@info.uvt.ro> @contact: marian@info.uvt.ro @copyright: 2014 Universitatea de Vest din Timișoara '''
from sct.templates.hadoop import HadoopServer, HadoopWorker TEMPLATES = { 'hadoop-server': { 'max-node-count': 1, 'cloudinit': HadoopServer }, 'hadoop-worker': { 'max-node-count': None, 'cloudinit': HadoopWorker } } def get_available_templates(): return TEMPLATES.keys() def get_template(name): if name not in TEMPLATES: raise NameError("No such template %s" % name) else: return TEMPLATES.get(name)
Add provisional (needs to be replaced with pkg_resources entry point discovery) template registry
Add provisional (needs to be replaced with pkg_resources entry point discovery) template registry
Python
apache-2.0
mneagul/scape-cloud-toolkit,mneagul/scape-cloud-toolkit,mneagul/scape-cloud-toolkit
- ''' - Copyright 2014 Universitatea de Vest din Timișoara - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - @author: Marian Neagul <marian@info.uvt.ro> - @contact: marian@info.uvt.ro - @copyright: 2014 Universitatea de Vest din Timișoara - ''' + + from sct.templates.hadoop import HadoopServer, HadoopWorker + + TEMPLATES = { + 'hadoop-server': { + 'max-node-count': 1, + 'cloudinit': HadoopServer + }, + 'hadoop-worker': { + 'max-node-count': None, + 'cloudinit': HadoopWorker + } + } + + def get_available_templates(): + return TEMPLATES.keys() + + def get_template(name): + if name not in TEMPLATES: + raise NameError("No such template %s" % name) + else: + return TEMPLATES.get(name) + + + +
Add provisional (needs to be replaced with pkg_resources entry point discovery) template registry
## Code Before: ''' Copyright 2014 Universitatea de Vest din Timișoara Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. @author: Marian Neagul <marian@info.uvt.ro> @contact: marian@info.uvt.ro @copyright: 2014 Universitatea de Vest din Timișoara ''' ## Instruction: Add provisional (needs to be replaced with pkg_resources entry point discovery) template registry ## Code After: from sct.templates.hadoop import HadoopServer, HadoopWorker TEMPLATES = { 'hadoop-server': { 'max-node-count': 1, 'cloudinit': HadoopServer }, 'hadoop-worker': { 'max-node-count': None, 'cloudinit': HadoopWorker } } def get_available_templates(): return TEMPLATES.keys() def get_template(name): if name not in TEMPLATES: raise NameError("No such template %s" % name) else: return TEMPLATES.get(name)
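The registry added here is only a dict plus a lookup that fails loudly, and the commit message itself flags it as provisional until pkg_resources entry-point discovery replaces it. Exercising the lookup, with the cloudinit classes omitted since they do not matter for the control flow:

```python
TEMPLATES = {
    'hadoop-server': {'max-node-count': 1},
    'hadoop-worker': {'max-node-count': None},
}

def get_available_templates():
    return TEMPLATES.keys()

def get_template(name):
    if name not in TEMPLATES:
        raise NameError("No such template %s" % name)
    return TEMPLATES.get(name)

print(sorted(get_available_templates()))   # ['hadoop-server', 'hadoop-worker']
print(get_template('hadoop-worker'))       # {'max-node-count': None}
try:
    get_template('spark-master')
except NameError as exc:
    print(exc)                             # No such template spark-master
```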
e9e4c622ff667e475986e1544ec78b0604b8a511
girder_worker/tasks.py
girder_worker/tasks.py
import core from girder_worker.utils import JobStatus from .app import app def _cleanup(*args, **kwargs): core.events.trigger('cleanup') @app.task(name='girder_worker.run', bind=True, after_return=_cleanup) def run(tasks, *pargs, **kwargs): jobInfo = kwargs.pop('jobInfo', {}) retval = 0 kwargs['_job_manager'] = task.job_manager \ if hasattr(task, 'job_manager') else None kwargs['status'] = JobStatus.RUNNING return core.run(*pargs, **kwargs) @app.task(name='girder_worker.convert') def convert(*pargs, **kwargs): return core.convert(*pargs, **kwargs) @app.task(name='girder_worker.validators') def validators(*pargs, **kwargs): _type, _format = pargs nodes = [] for (node, data) in core.format.conv_graph.nodes(data=True): if ((_type is None) or (_type == node.type)) and \ ((_format is None) or (_format == node.format)): nodes.append({'type': node.type, 'format': node.format, 'validator': data}) return nodes
import core from girder_worker.utils import JobStatus from .app import app def _cleanup(*args, **kwargs): core.events.trigger('cleanup') @app.task(name='girder_worker.run', bind=True, after_return=_cleanup) def run(task, *pargs, **kwargs): kwargs['_job_manager'] = task.job_manager \ if hasattr(task, 'job_manager') else None kwargs['status'] = JobStatus.RUNNING return core.run(*pargs, **kwargs) @app.task(name='girder_worker.convert') def convert(*pargs, **kwargs): return core.convert(*pargs, **kwargs) @app.task(name='girder_worker.validators') def validators(*pargs, **kwargs): _type, _format = pargs nodes = [] for (node, data) in core.format.conv_graph.nodes(data=True): if ((_type is None) or (_type == node.type)) and \ ((_format is None) or (_format == node.format)): nodes.append({'type': node.type, 'format': node.format, 'validator': data}) return nodes
Fix typo from bad conflict resolution during merge
Fix typo from bad conflict resolution during merge
Python
apache-2.0
girder/girder_worker,girder/girder_worker,girder/girder_worker
import core from girder_worker.utils import JobStatus from .app import app def _cleanup(*args, **kwargs): core.events.trigger('cleanup') @app.task(name='girder_worker.run', bind=True, after_return=_cleanup) - def run(tasks, *pargs, **kwargs): + def run(task, *pargs, **kwargs): - jobInfo = kwargs.pop('jobInfo', {}) - retval = 0 - kwargs['_job_manager'] = task.job_manager \ if hasattr(task, 'job_manager') else None kwargs['status'] = JobStatus.RUNNING return core.run(*pargs, **kwargs) @app.task(name='girder_worker.convert') def convert(*pargs, **kwargs): return core.convert(*pargs, **kwargs) @app.task(name='girder_worker.validators') def validators(*pargs, **kwargs): _type, _format = pargs nodes = [] for (node, data) in core.format.conv_graph.nodes(data=True): if ((_type is None) or (_type == node.type)) and \ ((_format is None) or (_format == node.format)): nodes.append({'type': node.type, 'format': node.format, 'validator': data}) return nodes
Fix typo from bad conflict resolution during merge
## Code Before: import core from girder_worker.utils import JobStatus from .app import app def _cleanup(*args, **kwargs): core.events.trigger('cleanup') @app.task(name='girder_worker.run', bind=True, after_return=_cleanup) def run(tasks, *pargs, **kwargs): jobInfo = kwargs.pop('jobInfo', {}) retval = 0 kwargs['_job_manager'] = task.job_manager \ if hasattr(task, 'job_manager') else None kwargs['status'] = JobStatus.RUNNING return core.run(*pargs, **kwargs) @app.task(name='girder_worker.convert') def convert(*pargs, **kwargs): return core.convert(*pargs, **kwargs) @app.task(name='girder_worker.validators') def validators(*pargs, **kwargs): _type, _format = pargs nodes = [] for (node, data) in core.format.conv_graph.nodes(data=True): if ((_type is None) or (_type == node.type)) and \ ((_format is None) or (_format == node.format)): nodes.append({'type': node.type, 'format': node.format, 'validator': data}) return nodes ## Instruction: Fix typo from bad conflict resolution during merge ## Code After: import core from girder_worker.utils import JobStatus from .app import app def _cleanup(*args, **kwargs): core.events.trigger('cleanup') @app.task(name='girder_worker.run', bind=True, after_return=_cleanup) def run(task, *pargs, **kwargs): kwargs['_job_manager'] = task.job_manager \ if hasattr(task, 'job_manager') else None kwargs['status'] = JobStatus.RUNNING return core.run(*pargs, **kwargs) @app.task(name='girder_worker.convert') def convert(*pargs, **kwargs): return core.convert(*pargs, **kwargs) @app.task(name='girder_worker.validators') def validators(*pargs, **kwargs): _type, _format = pargs nodes = [] for (node, data) in core.format.conv_graph.nodes(data=True): if ((_type is None) or (_type == node.type)) and \ ((_format is None) or (_format == node.format)): nodes.append({'type': node.type, 'format': node.format, 'validator': data}) return nodes
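The merge typo fixed here matters because of how Celery's bind=True works: the task instance is passed as the first positional argument, so the body's hasattr(task, 'job_manager') only resolves if the parameter is actually named task (or self). A toy stand-in for the binding makes that concrete, no Celery required:

```python
def bind(func):
    """Crude imitation of Celery's bind=True: hand the task object to func first."""
    class Task(object):
        job_manager = 'manager'
        def apply(self, *args, **kwargs):
            return func(self, *args, **kwargs)
    return Task()

@bind
def run(task, *pargs, **kwargs):
    # Resolves only because the first parameter is named `task`.
    return task.job_manager if hasattr(task, 'job_manager') else None

print(run.apply())    # -> manager
```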
fd48211548c8c2d5daec0994155ddb7e8d226882
tests/test_anki_sync.py
tests/test_anki_sync.py
import pytest import os import rememberberry from rememberscript import RememberMachine, FileStorage from rememberberry.testing import tmp_data_path, assert_replies, get_isolated_story @pytest.mark.asyncio @tmp_data_path('/tmp/data/', delete=True) async def test_anki_account(): storage = FileStorage() m, storage = get_isolated_story('login_anki', storage) await assert_replies(m.reply(''), 'What is your Anki username?') await assert_replies(m.reply('ajshdkajhsdkajshd'), 'And now the password') await assert_replies(m.reply('jkdhskjhgdksjhg'), 'Authentication with ankiweb failed, try again?', 'What is your Anki username?') await assert_replies(m.reply('ankitest8080@gmail.com'), 'And now the password') await assert_replies(m.reply('ankitest'), 'Authentication worked, now I\'ll try to sync your account', 'Syncing anki database', 'Syncing media files (this may take a while)', 'Syncing done', 'Great, you\'re all synced up!', 'enter init')
import pytest import os import rememberberry from rememberscript import RememberMachine, FileStorage from rememberberry.testing import tmp_data_path, assert_replies, get_isolated_story @pytest.mark.asyncio @tmp_data_path('/tmp/data/', delete=True) async def test_anki_account(): storage = FileStorage() storage['username'] = 'alice' m, storage = get_isolated_story('login_anki', storage) await assert_replies(m.reply(''), 'What is your Anki username?') await assert_replies(m.reply('ajshdkajhsdkajshd'), 'And now the password') await assert_replies(m.reply('jkdhskjhgdksjhg'), 'Authentication with ankiweb failed, try again?', 'What is your Anki username?') await assert_replies(m.reply('ankitest8080@gmail.com'), 'And now the password') await assert_replies(m.reply('ankitest'), 'Authentication worked, now I\'ll try to sync your account', 'Syncing anki database', 'Syncing media files (this may take a while)', 'Syncing done', 'Great, you\'re all synced up!', 'enter init')
Fix missing username in test
Fix missing username in test
Python
agpl-3.0
rememberberry/rememberberry-server,rememberberry/rememberberry-server
import pytest import os import rememberberry from rememberscript import RememberMachine, FileStorage from rememberberry.testing import tmp_data_path, assert_replies, get_isolated_story @pytest.mark.asyncio @tmp_data_path('/tmp/data/', delete=True) async def test_anki_account(): storage = FileStorage() + storage['username'] = 'alice' m, storage = get_isolated_story('login_anki', storage) await assert_replies(m.reply(''), 'What is your Anki username?') await assert_replies(m.reply('ajshdkajhsdkajshd'), 'And now the password') await assert_replies(m.reply('jkdhskjhgdksjhg'), 'Authentication with ankiweb failed, try again?', 'What is your Anki username?') await assert_replies(m.reply('ankitest8080@gmail.com'), 'And now the password') await assert_replies(m.reply('ankitest'), 'Authentication worked, now I\'ll try to sync your account', 'Syncing anki database', 'Syncing media files (this may take a while)', 'Syncing done', 'Great, you\'re all synced up!', 'enter init')
Fix missing username in test
## Code Before: import pytest import os import rememberberry from rememberscript import RememberMachine, FileStorage from rememberberry.testing import tmp_data_path, assert_replies, get_isolated_story @pytest.mark.asyncio @tmp_data_path('/tmp/data/', delete=True) async def test_anki_account(): storage = FileStorage() m, storage = get_isolated_story('login_anki', storage) await assert_replies(m.reply(''), 'What is your Anki username?') await assert_replies(m.reply('ajshdkajhsdkajshd'), 'And now the password') await assert_replies(m.reply('jkdhskjhgdksjhg'), 'Authentication with ankiweb failed, try again?', 'What is your Anki username?') await assert_replies(m.reply('ankitest8080@gmail.com'), 'And now the password') await assert_replies(m.reply('ankitest'), 'Authentication worked, now I\'ll try to sync your account', 'Syncing anki database', 'Syncing media files (this may take a while)', 'Syncing done', 'Great, you\'re all synced up!', 'enter init') ## Instruction: Fix missing username in test ## Code After: import pytest import os import rememberberry from rememberscript import RememberMachine, FileStorage from rememberberry.testing import tmp_data_path, assert_replies, get_isolated_story @pytest.mark.asyncio @tmp_data_path('/tmp/data/', delete=True) async def test_anki_account(): storage = FileStorage() storage['username'] = 'alice' m, storage = get_isolated_story('login_anki', storage) await assert_replies(m.reply(''), 'What is your Anki username?') await assert_replies(m.reply('ajshdkajhsdkajshd'), 'And now the password') await assert_replies(m.reply('jkdhskjhgdksjhg'), 'Authentication with ankiweb failed, try again?', 'What is your Anki username?') await assert_replies(m.reply('ankitest8080@gmail.com'), 'And now the password') await assert_replies(m.reply('ankitest'), 'Authentication worked, now I\'ll try to sync your account', 'Syncing anki database', 'Syncing media files (this may take a while)', 'Syncing done', 'Great, you\'re all synced up!', 'enter init')
2c38fea1434f8591957c2707359412151c4b6c43
tests/test_timezones.py
tests/test_timezones.py
import unittest import datetime from garage.timezones import TimeZone class TimeZoneTest(unittest.TestCase): def test_time_zone(self): utc = datetime.datetime(2000, 1, 2, 3, 4, 0, 0, TimeZone.UTC) cst = utc.astimezone(TimeZone.CST) print('xxx', utc, cst) self.assertEqual(2000, cst.year) self.assertEqual(1, cst.month) self.assertEqual(2, cst.day) self.assertEqual(11, cst.hour) self.assertEqual(4, cst.minute) self.assertEqual(0, cst.second) self.assertEqual(0, cst.microsecond) if __name__ == '__main__': unittest.main()
import unittest import datetime from garage.timezones import TimeZone class TimeZoneTest(unittest.TestCase): def test_time_zone(self): utc = datetime.datetime(2000, 1, 2, 3, 4, 0, 0, TimeZone.UTC) cst = utc.astimezone(TimeZone.CST) self.assertEqual(2000, cst.year) self.assertEqual(1, cst.month) self.assertEqual(2, cst.day) self.assertEqual(11, cst.hour) self.assertEqual(4, cst.minute) self.assertEqual(0, cst.second) self.assertEqual(0, cst.microsecond) if __name__ == '__main__': unittest.main()
Remove print in unit test
Remove print in unit test
Python
mit
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
import unittest import datetime from garage.timezones import TimeZone class TimeZoneTest(unittest.TestCase): def test_time_zone(self): utc = datetime.datetime(2000, 1, 2, 3, 4, 0, 0, TimeZone.UTC) cst = utc.astimezone(TimeZone.CST) - print('xxx', utc, cst) self.assertEqual(2000, cst.year) self.assertEqual(1, cst.month) self.assertEqual(2, cst.day) self.assertEqual(11, cst.hour) self.assertEqual(4, cst.minute) self.assertEqual(0, cst.second) self.assertEqual(0, cst.microsecond) if __name__ == '__main__': unittest.main()
Remove print in unit test
## Code Before: import unittest import datetime from garage.timezones import TimeZone class TimeZoneTest(unittest.TestCase): def test_time_zone(self): utc = datetime.datetime(2000, 1, 2, 3, 4, 0, 0, TimeZone.UTC) cst = utc.astimezone(TimeZone.CST) print('xxx', utc, cst) self.assertEqual(2000, cst.year) self.assertEqual(1, cst.month) self.assertEqual(2, cst.day) self.assertEqual(11, cst.hour) self.assertEqual(4, cst.minute) self.assertEqual(0, cst.second) self.assertEqual(0, cst.microsecond) if __name__ == '__main__': unittest.main() ## Instruction: Remove print in unit test ## Code After: import unittest import datetime from garage.timezones import TimeZone class TimeZoneTest(unittest.TestCase): def test_time_zone(self): utc = datetime.datetime(2000, 1, 2, 3, 4, 0, 0, TimeZone.UTC) cst = utc.astimezone(TimeZone.CST) self.assertEqual(2000, cst.year) self.assertEqual(1, cst.month) self.assertEqual(2, cst.day) self.assertEqual(11, cst.hour) self.assertEqual(4, cst.minute) self.assertEqual(0, cst.second) self.assertEqual(0, cst.microsecond) if __name__ == '__main__': unittest.main()
b1d3a0c79a52ca1987ea08a546213e1135539927
tools/bots/ddc_tests.py
tools/bots/ddc_tests.py
import os import os.path import shutil import sys import subprocess import bot import bot_utils utils = bot_utils.GetUtils() BUILD_OS = utils.GuessOS() (bot_name, _) = bot.GetBotName() CHANNEL = bot_utils.GetChannelFromName(bot_name) if __name__ == '__main__': with utils.ChangedWorkingDirectory('pkg/dev_compiler'): dart_exe = utils.CheckedInSdkExecutable() # These two calls mirror pkg/dev_compiler/tool/test.sh. bot.RunProcess([dart_exe, 'tool/build_pkgs.dart', 'test']) bot.RunProcess([dart_exe, 'test/all_tests.dart']) # These mirror pkg/dev_compiler/tool/browser_test.sh. bot.RunProcess(['npm', 'install']) bot.RunProcess(['npm', 'test'])
import os import os.path import shutil import sys import subprocess import bot import bot_utils utils = bot_utils.GetUtils() BUILD_OS = utils.GuessOS() (bot_name, _) = bot.GetBotName() CHANNEL = bot_utils.GetChannelFromName(bot_name) if __name__ == '__main__': with utils.ChangedWorkingDirectory('pkg/dev_compiler'): dart_exe = utils.CheckedInSdkExecutable() # These two calls mirror pkg/dev_compiler/tool/test.sh. bot.RunProcess([dart_exe, 'tool/build_pkgs.dart', 'test']) bot.RunProcess([dart_exe, 'test/all_tests.dart']) # These mirror pkg/dev_compiler/tool/browser_test.sh. bot.RunProcess(['npm', 'install']) bot.RunProcess(['npm', 'test'], {'CHROME_BIN': 'chrome'})
Set CHROME_BIN on DDC bot
Set CHROME_BIN on DDC bot Noticed the Linux bot is failing on this: https://build.chromium.org/p/client.dart.fyi/builders/ddc-linux-release-be/builds/1724/steps/ddc%20tests/logs/stdio R=whesse@google.com Review-Url: https://codereview.chromium.org/2640093002 .
Python
bsd-3-clause
dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk
import os import os.path import shutil import sys import subprocess import bot import bot_utils utils = bot_utils.GetUtils() BUILD_OS = utils.GuessOS() (bot_name, _) = bot.GetBotName() CHANNEL = bot_utils.GetChannelFromName(bot_name) if __name__ == '__main__': with utils.ChangedWorkingDirectory('pkg/dev_compiler'): dart_exe = utils.CheckedInSdkExecutable() # These two calls mirror pkg/dev_compiler/tool/test.sh. bot.RunProcess([dart_exe, 'tool/build_pkgs.dart', 'test']) bot.RunProcess([dart_exe, 'test/all_tests.dart']) # These mirror pkg/dev_compiler/tool/browser_test.sh. bot.RunProcess(['npm', 'install']) - bot.RunProcess(['npm', 'test']) + bot.RunProcess(['npm', 'test'], {'CHROME_BIN': 'chrome'})
Set CHROME_BIN on DDC bot
## Code Before: import os import os.path import shutil import sys import subprocess import bot import bot_utils utils = bot_utils.GetUtils() BUILD_OS = utils.GuessOS() (bot_name, _) = bot.GetBotName() CHANNEL = bot_utils.GetChannelFromName(bot_name) if __name__ == '__main__': with utils.ChangedWorkingDirectory('pkg/dev_compiler'): dart_exe = utils.CheckedInSdkExecutable() # These two calls mirror pkg/dev_compiler/tool/test.sh. bot.RunProcess([dart_exe, 'tool/build_pkgs.dart', 'test']) bot.RunProcess([dart_exe, 'test/all_tests.dart']) # These mirror pkg/dev_compiler/tool/browser_test.sh. bot.RunProcess(['npm', 'install']) bot.RunProcess(['npm', 'test']) ## Instruction: Set CHROME_BIN on DDC bot ## Code After: import os import os.path import shutil import sys import subprocess import bot import bot_utils utils = bot_utils.GetUtils() BUILD_OS = utils.GuessOS() (bot_name, _) = bot.GetBotName() CHANNEL = bot_utils.GetChannelFromName(bot_name) if __name__ == '__main__': with utils.ChangedWorkingDirectory('pkg/dev_compiler'): dart_exe = utils.CheckedInSdkExecutable() # These two calls mirror pkg/dev_compiler/tool/test.sh. bot.RunProcess([dart_exe, 'tool/build_pkgs.dart', 'test']) bot.RunProcess([dart_exe, 'test/all_tests.dart']) # These mirror pkg/dev_compiler/tool/browser_test.sh. bot.RunProcess(['npm', 'install']) bot.RunProcess(['npm', 'test'], {'CHROME_BIN': 'chrome'})
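bot.RunProcess is the SDK's own helper, and its second argument here appears to be an environment overlay. The portable standard-library analogue is subprocess with an env built from os.environ plus the extra variable; printenv assumes a POSIX system:

```python
import os
import subprocess

env = dict(os.environ, CHROME_BIN='chrome')                   # inherit, then override
subprocess.check_call(['printenv', 'CHROME_BIN'], env=env)    # prints: chrome
```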
c143bc14be8d486d313056c0d1313e03ac438284
examples/ex_aps_parser.py
examples/ex_aps_parser.py
from __future__ import print_function import os import glob import pyingest.parsers.aps as aps import pyingest.parsers.arxiv as arxiv import pyingest.serializers.classic import traceback import json import xmltodict from datetime import datetime input_list = 'bibc.2.out' testfile=[] xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/' xmltail = '/fulltext.xml' with open(input_list,'rU') as fi: for l in fi.readlines(): doi = l.strip().split('\t')[1] (a,b) = doi.split('/') b = b.replace('.','/') infile = xmldir + a + '/' + b + xmltail testfile.append(infile) for f in testfile: fnord = f[92:] if os.path.isfile(f): print("found! ",fnord) with open(f, 'rU') as fp: parser = aps.APSJATSParser() document = parser.parse(fp) serializer = pyingest.serializers.classic.Tagged() outputfp = open('aps.tag', 'a') serializer.write(document, outputfp) outputfp.close() #except: # print "ERROR!\n%s\n"%f # traceback.print_exc() # pass else: print("not found :( ", fnord)
from __future__ import print_function import os import glob import pyingest.parsers.aps as aps import pyingest.parsers.arxiv as arxiv import pyingest.serializers.classic import traceback import json import xmltodict from datetime import datetime import sys input_list = 'bibc.2.out' testfile=[] xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/' xmltail = '/fulltext.xml' if sys.version_info > (3,): open_mode = 'r' else: open_mode = 'rU' with open(input_list, open_mode) as fi: for l in fi.readlines(): doi = l.strip().split('\t')[1] (a,b) = doi.split('/') b = b.replace('.', '/') infile = xmldir + a + '/' + b + xmltail testfile.append(infile) for f in testfile: fnord = f[92:] if os.path.isfile(f): print("found! ", fnord) with open(f, open_mode) as fp: parser = aps.APSJATSParser() document = parser.parse(fp) serializer = pyingest.serializers.classic.Tagged() outputfp = open('aps.tag', 'a') serializer.write(document, outputfp) outputfp.close() #except: # print "ERROR!\n%s\n"%f # traceback.print_exc() # pass else: print("not found :( ", fnord)
Use open mode syntax on example file
Use open mode syntax on example file
Python
mit
adsabs/adsabs-pyingest,adsabs/adsabs-pyingest,adsabs/adsabs-pyingest
from __future__ import print_function import os import glob import pyingest.parsers.aps as aps import pyingest.parsers.arxiv as arxiv import pyingest.serializers.classic import traceback import json import xmltodict from datetime import datetime + import sys input_list = 'bibc.2.out' testfile=[] xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/' xmltail = '/fulltext.xml' + + if sys.version_info > (3,): + open_mode = 'r' + else: + open_mode = 'rU' + - with open(input_list,'rU') as fi: + with open(input_list, open_mode) as fi: for l in fi.readlines(): doi = l.strip().split('\t')[1] (a,b) = doi.split('/') - b = b.replace('.','/') + b = b.replace('.', '/') infile = xmldir + a + '/' + b + xmltail testfile.append(infile) for f in testfile: fnord = f[92:] if os.path.isfile(f): - print("found! ",fnord) + print("found! ", fnord) - with open(f, 'rU') as fp: + with open(f, open_mode) as fp: parser = aps.APSJATSParser() document = parser.parse(fp) serializer = pyingest.serializers.classic.Tagged() outputfp = open('aps.tag', 'a') serializer.write(document, outputfp) outputfp.close() #except: # print "ERROR!\n%s\n"%f # traceback.print_exc() # pass else: print("not found :( ", fnord)
Use open mode syntax on example file
## Code Before: from __future__ import print_function import os import glob import pyingest.parsers.aps as aps import pyingest.parsers.arxiv as arxiv import pyingest.serializers.classic import traceback import json import xmltodict from datetime import datetime input_list = 'bibc.2.out' testfile=[] xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/' xmltail = '/fulltext.xml' with open(input_list,'rU') as fi: for l in fi.readlines(): doi = l.strip().split('\t')[1] (a,b) = doi.split('/') b = b.replace('.','/') infile = xmldir + a + '/' + b + xmltail testfile.append(infile) for f in testfile: fnord = f[92:] if os.path.isfile(f): print("found! ",fnord) with open(f, 'rU') as fp: parser = aps.APSJATSParser() document = parser.parse(fp) serializer = pyingest.serializers.classic.Tagged() outputfp = open('aps.tag', 'a') serializer.write(document, outputfp) outputfp.close() #except: # print "ERROR!\n%s\n"%f # traceback.print_exc() # pass else: print("not found :( ", fnord) ## Instruction: Use open mode syntax on example file ## Code After: from __future__ import print_function import os import glob import pyingest.parsers.aps as aps import pyingest.parsers.arxiv as arxiv import pyingest.serializers.classic import traceback import json import xmltodict from datetime import datetime import sys input_list = 'bibc.2.out' testfile=[] xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/' xmltail = '/fulltext.xml' if sys.version_info > (3,): open_mode = 'r' else: open_mode = 'rU' with open(input_list, open_mode) as fi: for l in fi.readlines(): doi = l.strip().split('\t')[1] (a,b) = doi.split('/') b = b.replace('.', '/') infile = xmldir + a + '/' + b + xmltail testfile.append(infile) for f in testfile: fnord = f[92:] if os.path.isfile(f): print("found! ", fnord) with open(f, open_mode) as fp: parser = aps.APSJATSParser() document = parser.parse(fp) serializer = pyingest.serializers.classic.Tagged() outputfp = open('aps.tag', 'a') serializer.write(document, outputfp) outputfp.close() #except: # print "ERROR!\n%s\n"%f # traceback.print_exc() # pass else: print("not found :( ", fnord)
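The version gate in this record exists because the 'U' (universal newlines) flag is a Python 2 concern: Python 3 text mode translates newlines by default, deprecates 'U', and newer releases reject it outright. The gate in isolation:

```python
import sys

open_mode = 'r' if sys.version_info > (3,) else 'rU'

with open(__file__, open_mode) as fh:   # read this script back with the chosen mode
    print(open_mode, '->', fh.readline().rstrip())
```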
bc593f1716a8e36e65cf75a58e524e77d38d5d9c
notation/statistics.py
notation/statistics.py
def mean(values): return float(sum(values)) / len(values) def median(values): middle = (len(values) - 1) // 2 if len(values) % 2: return values[middle] else: return mean(values[middle:middle + 2])
def mean(values): return float(sum(values)) / len(values) def quantile(p): def bound_quantile(values): ix = int(len(values) * p) if len(values) % 2: return values[ix] elif ix < 1: return values[0] else: return mean(values[ix - 1:ix + 1]) return bound_quantile Q0 = min Q1 = quantile(0.25) Q2 = median = quantile(0.5) Q3 = quantile(0.75) Q4 = max
Add a rudimentary quantile factory function.
Add a rudimentary quantile factory function.
Python
isc
debrouwere/python-ballpark
+ def mean(values): return float(sum(values)) / len(values) - def median(values): - middle = (len(values) - 1) // 2 + def quantile(p): + def bound_quantile(values): + ix = int(len(values) * p) - if len(values) % 2: + if len(values) % 2: - return values[middle] + return values[ix] + elif ix < 1: + return values[0] - else: + else: - return mean(values[middle:middle + 2]) + return mean(values[ix - 1:ix + 1]) + return bound_quantile + + Q0 = min + Q1 = quantile(0.25) + Q2 = median = quantile(0.5) + Q3 = quantile(0.75) + Q4 = max +
Add a rudimentary quantile factory function.
## Code Before: def mean(values): return float(sum(values)) / len(values) def median(values): middle = (len(values) - 1) // 2 if len(values) % 2: return values[middle] else: return mean(values[middle:middle + 2]) ## Instruction: Add a rudimentary quantile factory function. ## Code After: def mean(values): return float(sum(values)) / len(values) def quantile(p): def bound_quantile(values): ix = int(len(values) * p) if len(values) % 2: return values[ix] elif ix < 1: return values[0] else: return mean(values[ix - 1:ix + 1]) return bound_quantile Q0 = min Q1 = quantile(0.25) Q2 = median = quantile(0.5) Q3 = quantile(0.75) Q4 = max
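quantile here is a closure factory: each call bakes a fixed p into bound_quantile, so the quartiles become plain module-level functions. A quick check on a small, already-sorted sample; the implementation indexes directly, so it assumes sorted input:

```python
def mean(values):
    return float(sum(values)) / len(values)

def quantile(p):
    def bound_quantile(values):
        ix = int(len(values) * p)
        if len(values) % 2:
            return values[ix]
        elif ix < 1:
            return values[0]
        else:
            return mean(values[ix - 1:ix + 1])
    return bound_quantile

Q1, Q2, Q3 = quantile(0.25), quantile(0.5), quantile(0.75)
data = [1, 2, 3, 4, 5]                 # odd length: direct indexing branch
print(Q1(data), Q2(data), Q3(data))    # -> 2 3 4
```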
ec235e290b4428dec2db03a19d678eba52f02fb5
keyring/getpassbackend.py
keyring/getpassbackend.py
"""Specific support for getpass.""" import os import getpass from keyring.core import get_password as original_get_password def get_password(prompt='Password: ', stream=None, service_name='Python', username=None): if username is None: username = getpass.getuser() return original_get_password(service_name, username)
"""Specific support for getpass.""" import os import getpass import keyring.core def get_password(prompt='Password: ', stream=None, service_name='Python', username=None): if username is None: username = getpass.getuser() return keyring.core.get_password(service_name, username)
Use module namespaces to distinguish names instead of 'original_' prefix
Use module namespaces to distinguish names instead of 'original_' prefix
Python
mit
jaraco/keyring
"""Specific support for getpass.""" import os import getpass - from keyring.core import get_password as original_get_password + import keyring.core def get_password(prompt='Password: ', stream=None, service_name='Python', username=None): if username is None: username = getpass.getuser() - return original_get_password(service_name, username) + return keyring.core.get_password(service_name, username)
Use module namespaces to distinguish names instead of 'original_' prefix
## Code Before: """Specific support for getpass.""" import os import getpass from keyring.core import get_password as original_get_password def get_password(prompt='Password: ', stream=None, service_name='Python', username=None): if username is None: username = getpass.getuser() return original_get_password(service_name, username) ## Instruction: Use module namespaces to distinguish names instead of 'original_' prefix ## Code After: """Specific support for getpass.""" import os import getpass import keyring.core def get_password(prompt='Password: ', stream=None, service_name='Python', username=None): if username is None: username = getpass.getuser() return keyring.core.get_password(service_name, username)
4a711a2709ec5d8a8e04bb0f735fcfaa319cffdf
designate/objects/validation_error.py
designate/objects/validation_error.py
import six from designate.objects import base class ValidationError(base.DesignateObject): FIELDS = { 'path': {}, 'message': {}, 'validator': {}, 'validator_value': {}, 'raw': {}, } @classmethod def from_js_error(cls, js_error): """Convert a JSON Schema ValidationError instance into a ValidationError instance. """ e = cls() e.path = list(getattr(js_error, 'releative_path', js_error.path)) e.message = six.text_type(js_error) e.validator = js_error.validator e.validator_value = js_error.validator_value e.raw = js_error._contents() return e class ValidationErrorList(base.ListObjectMixin, base.DesignateObject): LIST_ITEM_TYPE = ValidationError
from designate.objects import base class ValidationError(base.DesignateObject): FIELDS = { 'path': {}, 'message': {}, 'validator': {}, 'validator_value': {}, 'raw': {}, } @classmethod def from_js_error(cls, js_error): """Convert a JSON Schema ValidationError instance into a ValidationError instance. """ e = cls() e.path = list(getattr(js_error, 'releative_path', js_error.path)) e.message = js_error.message e.validator = js_error.validator e.validator_value = js_error.validator_value e.raw = js_error._contents() return e class ValidationErrorList(base.ListObjectMixin, base.DesignateObject): LIST_ITEM_TYPE = ValidationError
Fix the displayed error message in V2 API
Fix the displayed error message in V2 API

Change-Id: I07c3f1ed79fa507dbe9b76eb8f5964475516754c
Python
apache-2.0
tonyli71/designate,openstack/designate,ionrock/designate,ionrock/designate,ramsateesh/designate,grahamhayes/designate,cneill/designate-testing,muraliselva10/designate,muraliselva10/designate,cneill/designate-testing,openstack/designate,tonyli71/designate,muraliselva10/designate,grahamhayes/designate,ionrock/designate,tonyli71/designate,grahamhayes/designate,openstack/designate,ramsateesh/designate,cneill/designate-testing,ramsateesh/designate
- import six
-
  from designate.objects import base


  class ValidationError(base.DesignateObject):
      FIELDS = {
          'path': {},
          'message': {},
          'validator': {},
          'validator_value': {},
          'raw': {},
      }

      @classmethod
      def from_js_error(cls, js_error):
          """Convert a JSON Schema ValidationError instance into a
          ValidationError instance.
          """
          e = cls()

          e.path = list(getattr(js_error, 'releative_path', js_error.path))
-         e.message = six.text_type(js_error)
+         e.message = js_error.message
          e.validator = js_error.validator
          e.validator_value = js_error.validator_value
          e.raw = js_error._contents()

          return e


  class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
      LIST_ITEM_TYPE = ValidationError
Fix the displayed error message in V2 API
## Code Before:
import six

from designate.objects import base


class ValidationError(base.DesignateObject):
    FIELDS = {
        'path': {},
        'message': {},
        'validator': {},
        'validator_value': {},
        'raw': {},
    }

    @classmethod
    def from_js_error(cls, js_error):
        """Convert a JSON Schema ValidationError instance into a
        ValidationError instance.
        """
        e = cls()

        e.path = list(getattr(js_error, 'releative_path', js_error.path))
        e.message = six.text_type(js_error)
        e.validator = js_error.validator
        e.validator_value = js_error.validator_value
        e.raw = js_error._contents()

        return e


class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
    LIST_ITEM_TYPE = ValidationError

## Instruction:
Fix the displayed error message in V2 API

## Code After:
from designate.objects import base


class ValidationError(base.DesignateObject):
    FIELDS = {
        'path': {},
        'message': {},
        'validator': {},
        'validator_value': {},
        'raw': {},
    }

    @classmethod
    def from_js_error(cls, js_error):
        """Convert a JSON Schema ValidationError instance into a
        ValidationError instance.
        """
        e = cls()

        e.path = list(getattr(js_error, 'releative_path', js_error.path))
        e.message = js_error.message
        e.validator = js_error.validator
        e.validator_value = js_error.validator_value
        e.raw = js_error._contents()

        return e


class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
    LIST_ITEM_TYPE = ValidationError
6fc2e75426eb34755bf6dbedbd21a4345d9c5738
plugins/websites.py
plugins/websites.py
import re

from smartbot import utils


class Plugin:
    def on_message(self, bot, msg, reply):
        match = re.findall(r"(https?://[^\s]+)", msg["message"], re.IGNORECASE)
        for i, url in enumerate(match):
            title = utils.web.get_title(url)
            if title:
                reply("[{0}]: {1}".format(i, title))

    def on_help(self):
        return "Echos the titles of websites for any HTTP(S) URL."

import io
import re
import unittest

from smartbot import utils


class Plugin:
    def on_message(self, bot, msg, reply):
        match = re.findall(r"(https?://[^\s]+)", msg["message"], re.IGNORECASE)
        for i, url in enumerate(match):
            title = utils.web.get_title(url)
            if title:
                reply("[{0}]: {1}".format(i, title))

    def on_help(self):
        return "Echos the titles of websites for any HTTP(S) URL."


class Test(unittest.TestCase):
    def setUp(self):
        self.plugin = Plugin()

    def test_message(self):
        self.plugin.on_message(None, {"message": "http://tomleese.me.uk"}, lambda x: self.assertEqual("[0]: Tom Leese", x))

    def test_help(self):
        self.assertTrue(self.plugin.on_help())
Add tests for website plugin
Add tests for website plugin
Python
mit
Muzer/smartbot,Cyanogenoid/smartbot,thomasleese/smartbot-old,tomleese/smartbot
+ import io
  import re
+ import unittest

  from smartbot import utils


  class Plugin:
      def on_message(self, bot, msg, reply):
          match = re.findall(r"(https?://[^\s]+)", msg["message"], re.IGNORECASE)
          for i, url in enumerate(match):
              title = utils.web.get_title(url)
              if title:
                  reply("[{0}]: {1}".format(i, title))

      def on_help(self):
          return "Echos the titles of websites for any HTTP(S) URL."
+
+
+ class Test(unittest.TestCase):
+     def setUp(self):
+         self.plugin = Plugin()
+
+     def test_message(self):
+         self.plugin.on_message(None, {"message": "http://tomleese.me.uk"}, lambda x: self.assertEqual("[0]: Tom Leese", x))
+
+     def test_help(self):
+         self.assertTrue(self.plugin.on_help())
+
Add tests for website plugin
## Code Before:
import re

from smartbot import utils


class Plugin:
    def on_message(self, bot, msg, reply):
        match = re.findall(r"(https?://[^\s]+)", msg["message"], re.IGNORECASE)
        for i, url in enumerate(match):
            title = utils.web.get_title(url)
            if title:
                reply("[{0}]: {1}".format(i, title))

    def on_help(self):
        return "Echos the titles of websites for any HTTP(S) URL."

## Instruction:
Add tests for website plugin

## Code After:
import io
import re
import unittest

from smartbot import utils


class Plugin:
    def on_message(self, bot, msg, reply):
        match = re.findall(r"(https?://[^\s]+)", msg["message"], re.IGNORECASE)
        for i, url in enumerate(match):
            title = utils.web.get_title(url)
            if title:
                reply("[{0}]: {1}".format(i, title))

    def on_help(self):
        return "Echos the titles of websites for any HTTP(S) URL."


class Test(unittest.TestCase):
    def setUp(self):
        self.plugin = Plugin()

    def test_message(self):
        self.plugin.on_message(None, {"message": "http://tomleese.me.uk"}, lambda x: self.assertEqual("[0]: Tom Leese", x))

    def test_help(self):
        self.assertTrue(self.plugin.on_help())
5f2ab0dcaec5a7826ff0652e7c052971083a8398
openid/test/datadriven.py
openid/test/datadriven.py
import unittest


class DataDrivenTestCase(unittest.TestCase):
    cases = []

    @classmethod
    def generateCases(cls):
        return cls.cases

    @classmethod
    def loadTests(cls):
        tests = []
        for case in cls.generateCases():
            if isinstance(case, tuple):
                test = cls(*case)
            elif isinstance(case, dict):
                test = cls(**case)
            else:
                test = cls(case)
            tests.append(test)
        return tests

    def __init__(self, description):
        super(DataDrivenTestCase, self).__init__(self, 'runOneTest')
        self.description = description

    def shortDescription(self):
        return '%s for %s' % (self.__class__.__name__, self.description)


def loadTests(module_name):
    loader = unittest.defaultTestLoader
    this_module = __import__(module_name, {}, {}, [None])

    tests = []
    for name in dir(this_module):
        obj = getattr(this_module, name)
        if isinstance(obj, unittest.TestCase):
            if hasattr(obj, 'loadTests'):
                tests.extend(obj.loadTests())
            else:
                tests.append(loader.loadTestsFromTestCase(obj))

    return unittest.TestSuite(tests)

import unittest


class DataDrivenTestCase(unittest.TestCase):
    cases = []

    @classmethod
    def generateCases(cls):
        return cls.cases

    @classmethod
    def loadTests(cls):
        tests = []
        for case in cls.generateCases():
            if isinstance(case, tuple):
                test = cls(*case)
            elif isinstance(case, dict):
                test = cls(**case)
            else:
                test = cls(case)
            tests.append(test)
        return tests

    def __init__(self, description):
        super(DataDrivenTestCase, self).__init__(self, 'runOneTest')
        self.description = description

    def shortDescription(self):
        return '%s for %s' % (self.__class__.__name__, self.description)


def loadTests(module_name):
    loader = unittest.defaultTestLoader
    tests = loader.loadTestsFromName(module_name)
    if not tests:
        raise AssertionError("No tests for {0}".format(module_name))
    return unittest.TestSuite(tests)
Replace ad-hoc pain with builtin methods
Replace ad-hoc pain with builtin methods
Python
apache-2.0
moreati/python3-openid,isagalaev/sm-openid,moreati/python3-openid,moreati/python3-openid,necaris/python3-openid,misli/python3-openid,necaris/python3-openid,misli/python3-openid,misli/python3-openid
  import unittest


  class DataDrivenTestCase(unittest.TestCase):
      cases = []

      @classmethod
      def generateCases(cls):
          return cls.cases

      @classmethod
      def loadTests(cls):
          tests = []
          for case in cls.generateCases():
              if isinstance(case, tuple):
                  test = cls(*case)
              elif isinstance(case, dict):
                  test = cls(**case)
              else:
                  test = cls(case)
              tests.append(test)
          return tests

      def __init__(self, description):
          super(DataDrivenTestCase, self).__init__(self, 'runOneTest')
          self.description = description

      def shortDescription(self):
          return '%s for %s' % (self.__class__.__name__, self.description)


  def loadTests(module_name):
      loader = unittest.defaultTestLoader
+     tests = loader.loadTestsFromName(module_name)
+     if not tests:
+         raise AssertionError("No tests for {0}".format(module_name))
-     this_module = __import__(module_name, {}, {}, [None])
-
-     tests = []
-     for name in dir(this_module):
-         obj = getattr(this_module, name)
-         if isinstance(obj, unittest.TestCase):
-             if hasattr(obj, 'loadTests'):
-                 tests.extend(obj.loadTests())
-             else:
-                 tests.append(loader.loadTestsFromTestCase(obj))
-
      return unittest.TestSuite(tests)
Replace ad-hoc pain with builtin methods
## Code Before:
import unittest


class DataDrivenTestCase(unittest.TestCase):
    cases = []

    @classmethod
    def generateCases(cls):
        return cls.cases

    @classmethod
    def loadTests(cls):
        tests = []
        for case in cls.generateCases():
            if isinstance(case, tuple):
                test = cls(*case)
            elif isinstance(case, dict):
                test = cls(**case)
            else:
                test = cls(case)
            tests.append(test)
        return tests

    def __init__(self, description):
        super(DataDrivenTestCase, self).__init__(self, 'runOneTest')
        self.description = description

    def shortDescription(self):
        return '%s for %s' % (self.__class__.__name__, self.description)


def loadTests(module_name):
    loader = unittest.defaultTestLoader
    this_module = __import__(module_name, {}, {}, [None])

    tests = []
    for name in dir(this_module):
        obj = getattr(this_module, name)
        if isinstance(obj, unittest.TestCase):
            if hasattr(obj, 'loadTests'):
                tests.extend(obj.loadTests())
            else:
                tests.append(loader.loadTestsFromTestCase(obj))

    return unittest.TestSuite(tests)

## Instruction:
Replace ad-hoc pain with builtin methods

## Code After:
import unittest


class DataDrivenTestCase(unittest.TestCase):
    cases = []

    @classmethod
    def generateCases(cls):
        return cls.cases

    @classmethod
    def loadTests(cls):
        tests = []
        for case in cls.generateCases():
            if isinstance(case, tuple):
                test = cls(*case)
            elif isinstance(case, dict):
                test = cls(**case)
            else:
                test = cls(case)
            tests.append(test)
        return tests

    def __init__(self, description):
        super(DataDrivenTestCase, self).__init__(self, 'runOneTest')
        self.description = description

    def shortDescription(self):
        return '%s for %s' % (self.__class__.__name__, self.description)


def loadTests(module_name):
    loader = unittest.defaultTestLoader
    tests = loader.loadTestsFromName(module_name)
    if not tests:
        raise AssertionError("No tests for {0}".format(module_name))
    return unittest.TestSuite(tests)
89d8ee0b91c9fd579dcf965e9e07f18954625c72
xero/api.py
xero/api.py
from .manager import Manager


class Xero(object):
    """An ORM-like interface to the Xero API"""

    OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
                   u'Currencies', u'Invoices', u'Items', u'Organisation',
                   u'Payments', u'TaxRates', u'TrackingCategories')

    def __init__(self, credentials):
        # Iterate through the list of objects we support, for
        # each of them create an attribute on our self that is
        # the lowercase name of the object and attach it to an
        # instance of a Manager object to operate on it
        for name in self.OBJECT_LIST:
            setattr(self, name.lower(), Manager(name, credentials.oauth))

from .manager import Manager


class Xero(object):
    """An ORM-like interface to the Xero API"""

    OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
                   u'Currencies', u'Invoices', u'Items', u'Organisation',
                   u'Payments', u'TaxRates', u'TrackingCategories', u'ManualJournals')

    def __init__(self, credentials):
        # Iterate through the list of objects we support, for
        # each of them create an attribute on our self that is
        # the lowercase name of the object and attach it to an
        # instance of a Manager object to operate on it
        for name in self.OBJECT_LIST:
            setattr(self, name.lower(), Manager(name, credentials.oauth))
Add support for manual journals
Add support for manual journals
Python
bsd-3-clause
wegotpop/pyxero,jarekwg/pyxero,jaymcconnell/pyxero,opendesk/pyxero,thisismyrobot/pyxero,freakboy3742/pyxero,MJMortimer/pyxero,unomena/pyxero,schinckel/pyxero,unomena/pyxeropos,jacobg/pyxero,direvus/pyxero
  from .manager import Manager


  class Xero(object):
      """An ORM-like interface to the Xero API"""

      OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
                     u'Currencies', u'Invoices', u'Items', u'Organisation',
-                    u'Payments', u'TaxRates', u'TrackingCategories')
+                    u'Payments', u'TaxRates', u'TrackingCategories', u'ManualJournals')

      def __init__(self, credentials):
          # Iterate through the list of objects we support, for
          # each of them create an attribute on our self that is
          # the lowercase name of the object and attach it to an
          # instance of a Manager object to operate on it
          for name in self.OBJECT_LIST:
              setattr(self, name.lower(), Manager(name, credentials.oauth))
Add support for manual journals
## Code Before:
from .manager import Manager


class Xero(object):
    """An ORM-like interface to the Xero API"""

    OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
                   u'Currencies', u'Invoices', u'Items', u'Organisation',
                   u'Payments', u'TaxRates', u'TrackingCategories')

    def __init__(self, credentials):
        # Iterate through the list of objects we support, for
        # each of them create an attribute on our self that is
        # the lowercase name of the object and attach it to an
        # instance of a Manager object to operate on it
        for name in self.OBJECT_LIST:
            setattr(self, name.lower(), Manager(name, credentials.oauth))

## Instruction:
Add support for manual journals

## Code After:
from .manager import Manager


class Xero(object):
    """An ORM-like interface to the Xero API"""

    OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
                   u'Currencies', u'Invoices', u'Items', u'Organisation',
                   u'Payments', u'TaxRates', u'TrackingCategories', u'ManualJournals')

    def __init__(self, credentials):
        # Iterate through the list of objects we support, for
        # each of them create an attribute on our self that is
        # the lowercase name of the object and attach it to an
        # instance of a Manager object to operate on it
        for name in self.OBJECT_LIST:
            setattr(self, name.lower(), Manager(name, credentials.oauth))
fb9591c4a2801bfe5f5380c3e33aa44a25db3591
customforms/models.py
customforms/models.py
from django.utils.translation import ugettext as _
from django.db import models


class Form(models.Model):
    title = models.CharField(_("Title"), max_length=255)

    def __unicode__(self):
        return u'%s' % self.title

    class Meta:
        ordering = ('title', )


class Question(models.Model):
    form = models.ForeignKey(Form)
    title = models.CharField(
        _("Title"), max_length=255, default=_("Question Title"))
    help_text = models.TextField(blank=True, null=True)
    CHOICES = [
        ('C', _('Checkbox')),
        ('R', _('Radio')),
        ('S', _('Select')),
        ('T', _('Text')),
    ]
    question_type = models.CharField(
        max_length=1, choices=CHOICES, default="T")
    required = models.BooleanField(default=False)
    position = models.PositiveIntegerField(default=0)

    def __unicode__(self):
        return u'%s' % (self.title, )

    class Meta:
        ordering = ('form', 'position', )


class Choice(models.Model):
    question = models.ForeignKey(Question)
    title = models.CharField(max_length=200,)
    position = models.PositiveIntegerField(default=0)

    class Meta:
        ordering = ('position', )

    def __unicode__(self):
        return u'%s' % (self.title, )

from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models


class Form(models.Model):
    title = models.CharField(_("Title"), max_length=255)

    def __unicode__(self):
        return u'%s' % self.title

    class Meta:
        ordering = ('title', )

    def get_absolute_url(self):
        return reverse('customforms.views.view_form', args=[str(self.id)])


class Question(models.Model):
    form = models.ForeignKey(Form)
    title = models.CharField(
        _("Title"), max_length=255, default=_("Question Title"))
    help_text = models.TextField(blank=True, null=True)
    CHOICES = [
        ('C', _('Checkbox')),
        ('R', _('Radio')),
        ('S', _('Select')),
        ('T', _('Text')),
    ]
    question_type = models.CharField(
        max_length=1, choices=CHOICES, default="T")
    required = models.BooleanField(default=False)
    position = models.PositiveIntegerField(default=0)

    def __unicode__(self):
        return u'%s' % (self.title, )

    class Meta:
        ordering = ('form', 'position', )

    def get_absolute_url(self):
        return reverse('customforms.views.view_form', args=[str(self.form.id)])


class Choice(models.Model):
    question = models.ForeignKey(Question)
    title = models.CharField(max_length=200,)
    position = models.PositiveIntegerField(default=0)

    class Meta:
        ordering = ('position', )

    def __unicode__(self):
        return u'%s' % (self.title, )
Add absolute URLs to form and question admin
Add absolute URLs to form and question admin
Python
apache-2.0
cschwede/django-customforms
+ from django.core.urlresolvers import reverse
  from django.utils.translation import ugettext as _
  from django.db import models


  class Form(models.Model):
      title = models.CharField(_("Title"), max_length=255)

      def __unicode__(self):
          return u'%s' % self.title

      class Meta:
          ordering = ('title', )

+     def get_absolute_url(self):
+         return reverse('customforms.views.view_form', args=[str(self.id)])

  class Question(models.Model):
      form = models.ForeignKey(Form)
      title = models.CharField(
          _("Title"), max_length=255, default=_("Question Title"))
      help_text = models.TextField(blank=True, null=True)
      CHOICES = [
          ('C', _('Checkbox')),
          ('R', _('Radio')),
          ('S', _('Select')),
          ('T', _('Text')),
      ]
      question_type = models.CharField(
          max_length=1, choices=CHOICES, default="T")
      required = models.BooleanField(default=False)
      position = models.PositiveIntegerField(default=0)

      def __unicode__(self):
          return u'%s' % (self.title, )

      class Meta:
          ordering = ('form', 'position', )

+     def get_absolute_url(self):
+         return reverse('customforms.views.view_form', args=[str(self.form.id)])
+

  class Choice(models.Model):
      question = models.ForeignKey(Question)
      title = models.CharField(max_length=200,)
      position = models.PositiveIntegerField(default=0)

      class Meta:
          ordering = ('position', )

      def __unicode__(self):
          return u'%s' % (self.title, )
Add absolute URLs to form and question admin
## Code Before:
from django.utils.translation import ugettext as _
from django.db import models


class Form(models.Model):
    title = models.CharField(_("Title"), max_length=255)

    def __unicode__(self):
        return u'%s' % self.title

    class Meta:
        ordering = ('title', )


class Question(models.Model):
    form = models.ForeignKey(Form)
    title = models.CharField(
        _("Title"), max_length=255, default=_("Question Title"))
    help_text = models.TextField(blank=True, null=True)
    CHOICES = [
        ('C', _('Checkbox')),
        ('R', _('Radio')),
        ('S', _('Select')),
        ('T', _('Text')),
    ]
    question_type = models.CharField(
        max_length=1, choices=CHOICES, default="T")
    required = models.BooleanField(default=False)
    position = models.PositiveIntegerField(default=0)

    def __unicode__(self):
        return u'%s' % (self.title, )

    class Meta:
        ordering = ('form', 'position', )


class Choice(models.Model):
    question = models.ForeignKey(Question)
    title = models.CharField(max_length=200,)
    position = models.PositiveIntegerField(default=0)

    class Meta:
        ordering = ('position', )

    def __unicode__(self):
        return u'%s' % (self.title, )

## Instruction:
Add absolute URLs to form and question admin

## Code After:
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models


class Form(models.Model):
    title = models.CharField(_("Title"), max_length=255)

    def __unicode__(self):
        return u'%s' % self.title

    class Meta:
        ordering = ('title', )

    def get_absolute_url(self):
        return reverse('customforms.views.view_form', args=[str(self.id)])


class Question(models.Model):
    form = models.ForeignKey(Form)
    title = models.CharField(
        _("Title"), max_length=255, default=_("Question Title"))
    help_text = models.TextField(blank=True, null=True)
    CHOICES = [
        ('C', _('Checkbox')),
        ('R', _('Radio')),
        ('S', _('Select')),
        ('T', _('Text')),
    ]
    question_type = models.CharField(
        max_length=1, choices=CHOICES, default="T")
    required = models.BooleanField(default=False)
    position = models.PositiveIntegerField(default=0)

    def __unicode__(self):
        return u'%s' % (self.title, )

    class Meta:
        ordering = ('form', 'position', )

    def get_absolute_url(self):
        return reverse('customforms.views.view_form', args=[str(self.form.id)])


class Choice(models.Model):
    question = models.ForeignKey(Question)
    title = models.CharField(max_length=200,)
    position = models.PositiveIntegerField(default=0)

    class Meta:
        ordering = ('position', )

    def __unicode__(self):
        return u'%s' % (self.title, )
d6ff777c7fb3f645c021da1319bb5d78d13aa9db
meshnet/interface.py
meshnet/interface.py
import serial
import struct

from siphashc import siphash


def _hash(key: str, sender: int, receiver: int, msg_type: int, data: bytes):
    packed_data = struct.pack(">h>hBs", sender, receiver, msg_type, data)
    return struct.pack("Q", siphash(key, packed_data))


class SerialMessage(object):
    def __init__(self):
        pass

    def serialize(self):
        pass


class Connection(object):

    def __init__(self, device):
        self._device = device
        self._conn = None

    def connect(self):
        self._conn = serial.Serial(self._device, 115200)

import serial
import struct

from siphashc import siphash


def _hash(key: bytes, sender: int, receiver: int, msg_type: int, data: bytes):
    packed_data = struct.pack(">hhB", sender, receiver, msg_type) + data
    return struct.pack(">Q", siphash(key, packed_data))


class SerialMessage(object):
    def __init__(self):
        pass

    def serialize(self):
        pass


class Connection(object):
    def __init__(self, device):
        self._device = device
        self._conn = None

    def connect(self):
        self._conn = serial.Serial(self._device, 115200)
Fix python siphashing to match c implementation
Fix python siphashing to match c implementation Signed-off-by: Jan Losinski <577c4104c61edf9f052c616c0c23e67bef4a9955@wh2.tu-dresden.de>
Python
bsd-3-clause
janLo/automation_mesh,janLo/automation_mesh,janLo/automation_mesh
  import serial
  import struct

  from siphashc import siphash

+
- def _hash(key: str, sender: int, receiver: int, msg_type: int, data: bytes):
+ def _hash(key: bytes, sender: int, receiver: int, msg_type: int, data: bytes):
-     packed_data = struct.pack(">h>hBs", sender, receiver, msg_type, data)
+     packed_data = struct.pack(">hhB", sender, receiver, msg_type) + data
-     return struct.pack("Q", siphash(key, packed_data))
+     return struct.pack(">Q", siphash(key, packed_data))


  class SerialMessage(object):
      def __init__(self):
          pass

      def serialize(self):
          pass

-
-
  class Connection(object):
-
      def __init__(self, device):
          self._device = device
          self._conn = None

      def connect(self):
          self._conn = serial.Serial(self._device, 115200)
Fix python siphashing to match c implementation
## Code Before:
import serial
import struct

from siphashc import siphash


def _hash(key: str, sender: int, receiver: int, msg_type: int, data: bytes):
    packed_data = struct.pack(">h>hBs", sender, receiver, msg_type, data)
    return struct.pack("Q", siphash(key, packed_data))


class SerialMessage(object):
    def __init__(self):
        pass

    def serialize(self):
        pass


class Connection(object):

    def __init__(self, device):
        self._device = device
        self._conn = None

    def connect(self):
        self._conn = serial.Serial(self._device, 115200)

## Instruction:
Fix python siphashing to match c implementation

## Code After:
import serial
import struct

from siphashc import siphash


def _hash(key: bytes, sender: int, receiver: int, msg_type: int, data: bytes):
    packed_data = struct.pack(">hhB", sender, receiver, msg_type) + data
    return struct.pack(">Q", siphash(key, packed_data))


class SerialMessage(object):
    def __init__(self):
        pass

    def serialize(self):
        pass


class Connection(object):
    def __init__(self, device):
        self._device = device
        self._conn = None

    def connect(self):
        self._conn = serial.Serial(self._device, 115200)
b2bab786c4af3dcca7d35b1e6ecff8699e542ec4
pytest_girder/pytest_girder/plugin.py
pytest_girder/pytest_girder/plugin.py
from .fixtures import *  # noqa


def pytest_addoption(parser):
    group = parser.getgroup('girder')
    group.addoption('--mock-db', action='store_true', default=False,
                    help='Whether or not to mock the database using mongomock.')
    group.addoption('--mongo-uri', action='store', default='mongodb://localhost:27017',
                    help=('The base URI to the MongoDB instance to use for database connections, '
                          'default is mongodb://localhost:27017'))
    group.addoption('--drop-db', action='store', default='both',
                    choices=('both', 'pre', 'post', 'never'),
                    help='When to destroy testing databases, default is both '
                         '(before and after running tests)')

import os

from .fixtures import *  # noqa


def pytest_configure(config):
    """
    Create the necessary directories for coverage. This is necessary because neither coverage nor
    pytest-cov have support for making the data_file directory before running.
    """
    covPlugin = config.pluginmanager.get_plugin('_cov')

    if covPlugin is not None:
        covPluginConfig = covPlugin.cov_controller.cov.config
        covDataFileDir = os.path.dirname(covPluginConfig.data_file)

        try:
            os.makedirs(covDataFileDir)
        except OSError:
            pass


def pytest_addoption(parser):
    group = parser.getgroup('girder')
    group.addoption('--mock-db', action='store_true', default=False,
                    help='Whether or not to mock the database using mongomock.')
    group.addoption('--mongo-uri', action='store', default='mongodb://localhost:27017',
                    help=('The base URI to the MongoDB instance to use for database connections, '
                          'default is mongodb://localhost:27017'))
    group.addoption('--drop-db', action='store', default='both',
                    choices=('both', 'pre', 'post', 'never'),
                    help='When to destroy testing databases, default is both '
                         '(before and after running tests)')
Add a pytest hook for creating the coverage data_file directory
Add a pytest hook for creating the coverage data_file directory
Python
apache-2.0
jbeezley/girder,jbeezley/girder,girder/girder,kotfic/girder,jbeezley/girder,data-exp-lab/girder,Xarthisius/girder,data-exp-lab/girder,girder/girder,RafaelPalomar/girder,jbeezley/girder,girder/girder,kotfic/girder,manthey/girder,kotfic/girder,girder/girder,RafaelPalomar/girder,Xarthisius/girder,RafaelPalomar/girder,Xarthisius/girder,data-exp-lab/girder,manthey/girder,manthey/girder,RafaelPalomar/girder,data-exp-lab/girder,RafaelPalomar/girder,Kitware/girder,manthey/girder,data-exp-lab/girder,Xarthisius/girder,Kitware/girder,Xarthisius/girder,kotfic/girder,Kitware/girder,kotfic/girder,Kitware/girder
+ import os
  from .fixtures import *  # noqa
+
+
+ def pytest_configure(config):
+     """
+     Create the necessary directories for coverage. This is necessary because neither coverage nor
+     pytest-cov have support for making the data_file directory before running.
+     """
+     covPlugin = config.pluginmanager.get_plugin('_cov')
+
+     if covPlugin is not None:
+         covPluginConfig = covPlugin.cov_controller.cov.config
+         covDataFileDir = os.path.dirname(covPluginConfig.data_file)
+
+         try:
+             os.makedirs(covDataFileDir)
+         except OSError:
+             pass


  def pytest_addoption(parser):
      group = parser.getgroup('girder')
      group.addoption('--mock-db', action='store_true', default=False,
                      help='Whether or not to mock the database using mongomock.')
      group.addoption('--mongo-uri', action='store', default='mongodb://localhost:27017',
                      help=('The base URI to the MongoDB instance to use for database connections, '
                            'default is mongodb://localhost:27017'))
      group.addoption('--drop-db', action='store', default='both',
                      choices=('both', 'pre', 'post', 'never'),
                      help='When to destroy testing databases, default is both '
                           '(before and after running tests)')
Add a pytest hook for creating the coverage data_file directory
## Code Before:
from .fixtures import *  # noqa


def pytest_addoption(parser):
    group = parser.getgroup('girder')
    group.addoption('--mock-db', action='store_true', default=False,
                    help='Whether or not to mock the database using mongomock.')
    group.addoption('--mongo-uri', action='store', default='mongodb://localhost:27017',
                    help=('The base URI to the MongoDB instance to use for database connections, '
                          'default is mongodb://localhost:27017'))
    group.addoption('--drop-db', action='store', default='both',
                    choices=('both', 'pre', 'post', 'never'),
                    help='When to destroy testing databases, default is both '
                         '(before and after running tests)')

## Instruction:
Add a pytest hook for creating the coverage data_file directory

## Code After:
import os

from .fixtures import *  # noqa


def pytest_configure(config):
    """
    Create the necessary directories for coverage. This is necessary because neither coverage nor
    pytest-cov have support for making the data_file directory before running.
    """
    covPlugin = config.pluginmanager.get_plugin('_cov')

    if covPlugin is not None:
        covPluginConfig = covPlugin.cov_controller.cov.config
        covDataFileDir = os.path.dirname(covPluginConfig.data_file)

        try:
            os.makedirs(covDataFileDir)
        except OSError:
            pass


def pytest_addoption(parser):
    group = parser.getgroup('girder')
    group.addoption('--mock-db', action='store_true', default=False,
                    help='Whether or not to mock the database using mongomock.')
    group.addoption('--mongo-uri', action='store', default='mongodb://localhost:27017',
                    help=('The base URI to the MongoDB instance to use for database connections, '
                          'default is mongodb://localhost:27017'))
    group.addoption('--drop-db', action='store', default='both',
                    choices=('both', 'pre', 'post', 'never'),
                    help='When to destroy testing databases, default is both '
                         '(before and after running tests)')
931e2d1e8ba3fd6b129a6d74e3a1ad9984c1938a
benchmarks/benchmarks/bench_random.py
benchmarks/benchmarks/bench_random.py
from __future__ import absolute_import, division, print_function

from .common import Benchmark

import numpy as np


class Random(Benchmark):
    params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
              'poisson 10']

    def setup(self, name):
        items = name.split()
        name = items.pop(0)
        params = [float(x) for x in items]

        self.func = getattr(np.random, name)
        self.params = tuple(params) + ((100, 100),)

    def time_rng(self, name):
        self.func(*self.params)


class Shuffle(Benchmark):
    def setup(self):
        self.a = np.arange(100000)

    def time_100000(self):
        np.random.shuffle(self.a)

from __future__ import absolute_import, division, print_function

from .common import Benchmark

import numpy as np
from numpy.lib import NumpyVersion


class Random(Benchmark):
    params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
              'poisson 10']

    def setup(self, name):
        items = name.split()
        name = items.pop(0)
        params = [float(x) for x in items]

        self.func = getattr(np.random, name)
        self.params = tuple(params) + ((100, 100),)

    def time_rng(self, name):
        self.func(*self.params)


class Shuffle(Benchmark):
    def setup(self):
        self.a = np.arange(100000)

    def time_100000(self):
        np.random.shuffle(self.a)


class Randint(Benchmark):

    def time_randint_fast(self):
        """Compare to uint32 below"""
        np.random.randint(0, 2**30, size=10**5)

    def time_randint_slow(self):
        """Compare to uint32 below"""
        np.random.randint(0, 2**30 + 1, size=10**5)


class Randint_dtype(Benchmark):
    high = {
        'bool': 1,
        'uint8': 2**7,
        'uint16': 2**15,
        'uint32': 2**31,
        'uint64': 2**63
    }

    param_names = ['dtype']
    params = ['bool', 'uint8', 'uint16', 'uint32', 'uint64']

    def setup(self, name):
        if NumpyVersion(np.__version__) < '1.11.0.dev0':
            raise NotImplementedError

    def time_randint_fast(self, name):
        high = self.high[name]
        np.random.randint(0, high, size=10**5, dtype=name)

    def time_randint_slow(self, name):
        high = self.high[name]
        np.random.randint(0, high + 1, size=10**5, dtype=name)
Add benchmark tests for numpy.random.randint.
ENH: Add benchmark tests for numpy.random.randint.

This add benchmarks randint. There is one set of benchmarks for the
default dtype, 'l', that can be tracked back, and another set for the
new dtypes 'bool', 'uint8', 'uint16', 'uint32', and 'uint64'.
Python
bsd-3-clause
shoyer/numpy,Dapid/numpy,jakirkham/numpy,WarrenWeckesser/numpy,chatcannon/numpy,WarrenWeckesser/numpy,b-carter/numpy,anntzer/numpy,ssanderson/numpy,simongibbons/numpy,nbeaver/numpy,SiccarPoint/numpy,numpy/numpy,Eric89GXL/numpy,kiwifb/numpy,seberg/numpy,rgommers/numpy,ESSS/numpy,shoyer/numpy,anntzer/numpy,utke1/numpy,dwillmer/numpy,grlee77/numpy,ddasilva/numpy,charris/numpy,tacaswell/numpy,simongibbons/numpy,endolith/numpy,solarjoe/numpy,numpy/numpy,WarrenWeckesser/numpy,stuarteberg/numpy,SiccarPoint/numpy,mhvk/numpy,ahaldane/numpy,rgommers/numpy,bringingheavendown/numpy,anntzer/numpy,ContinuumIO/numpy,Eric89GXL/numpy,kiwifb/numpy,bringingheavendown/numpy,MSeifert04/numpy,solarjoe/numpy,ahaldane/numpy,jakirkham/numpy,maniteja123/numpy,anntzer/numpy,ssanderson/numpy,tacaswell/numpy,WarrenWeckesser/numpy,ContinuumIO/numpy,maniteja123/numpy,njase/numpy,jakirkham/numpy,maniteja123/numpy,drasmuss/numpy,tynn/numpy,shoyer/numpy,endolith/numpy,madphysicist/numpy,stuarteberg/numpy,madphysicist/numpy,jakirkham/numpy,abalkin/numpy,Dapid/numpy,pbrod/numpy,ContinuumIO/numpy,pdebuyl/numpy,pbrod/numpy,mattip/numpy,gmcastil/numpy,rherault-insa/numpy,stuarteberg/numpy,ESSS/numpy,njase/numpy,jonathanunderwood/numpy,jorisvandenbossche/numpy,gfyoung/numpy,b-carter/numpy,jorisvandenbossche/numpy,grlee77/numpy,jonathanunderwood/numpy,pizzathief/numpy,seberg/numpy,drasmuss/numpy,skwbc/numpy,skwbc/numpy,grlee77/numpy,Eric89GXL/numpy,AustereCuriosity/numpy,gfyoung/numpy,SiccarPoint/numpy,pbrod/numpy,rherault-insa/numpy,dwillmer/numpy,ddasilva/numpy,charris/numpy,simongibbons/numpy,chiffa/numpy,chatcannon/numpy,simongibbons/numpy,argriffing/numpy,mhvk/numpy,shoyer/numpy,njase/numpy,grlee77/numpy,pbrod/numpy,WarrenWeckesser/numpy,pizzathief/numpy,pizzathief/numpy,SiccarPoint/numpy,dwillmer/numpy,MSeifert04/numpy,MSeifert04/numpy,seberg/numpy,joferkington/numpy,MSeifert04/numpy,skwbc/numpy,joferkington/numpy,nbeaver/numpy,pdebuyl/numpy,abalkin/numpy,bertrand-l/numpy,madphysicist/numpy,pdebuyl/numpy,bertrand-l/numpy,rherault-insa/numpy,rgommers/numpy,gmcastil/numpy,dwillmer/numpy,tacaswell/numpy,drasmuss/numpy,seberg/numpy,chiffa/numpy,jakirkham/numpy,endolith/numpy,pbrod/numpy,mhvk/numpy,pdebuyl/numpy,mhvk/numpy,charris/numpy,argriffing/numpy,gfyoung/numpy,chatcannon/numpy,pizzathief/numpy,AustereCuriosity/numpy,stuarteberg/numpy,charris/numpy,MSeifert04/numpy,bringingheavendown/numpy,joferkington/numpy,shoyer/numpy,numpy/numpy,jorisvandenbossche/numpy,Dapid/numpy,simongibbons/numpy,mhvk/numpy,mattip/numpy,jorisvandenbossche/numpy,endolith/numpy,ESSS/numpy,behzadnouri/numpy,chiffa/numpy,kiwifb/numpy,argriffing/numpy,jorisvandenbossche/numpy,joferkington/numpy,behzadnouri/numpy,AustereCuriosity/numpy,utke1/numpy,tynn/numpy,grlee77/numpy,ssanderson/numpy,behzadnouri/numpy,madphysicist/numpy,mattip/numpy,Eric89GXL/numpy,ahaldane/numpy,jonathanunderwood/numpy,abalkin/numpy,ahaldane/numpy,madphysicist/numpy,solarjoe/numpy,utke1/numpy,gmcastil/numpy,ddasilva/numpy,numpy/numpy,tynn/numpy,b-carter/numpy,pizzathief/numpy,mattip/numpy,ahaldane/numpy,bertrand-l/numpy,rgommers/numpy,nbeaver/numpy
  from __future__ import absolute_import, division, print_function

  from .common import Benchmark

  import numpy as np
+ from numpy.lib import NumpyVersion


  class Random(Benchmark):
      params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
                'poisson 10']

      def setup(self, name):
          items = name.split()
          name = items.pop(0)
          params = [float(x) for x in items]

          self.func = getattr(np.random, name)
          self.params = tuple(params) + ((100, 100),)

      def time_rng(self, name):
          self.func(*self.params)


  class Shuffle(Benchmark):
      def setup(self):
          self.a = np.arange(100000)

      def time_100000(self):
          np.random.shuffle(self.a)
+
+
+ class Randint(Benchmark):
+
+     def time_randint_fast(self):
+         """Compare to uint32 below"""
+         np.random.randint(0, 2**30, size=10**5)
+
+     def time_randint_slow(self):
+         """Compare to uint32 below"""
+         np.random.randint(0, 2**30 + 1, size=10**5)
+
+
+ class Randint_dtype(Benchmark):
+     high = {
+         'bool': 1,
+         'uint8': 2**7,
+         'uint16': 2**15,
+         'uint32': 2**31,
+         'uint64': 2**63
+     }
+
+     param_names = ['dtype']
+     params = ['bool', 'uint8', 'uint16', 'uint32', 'uint64']
+
+     def setup(self, name):
+         if NumpyVersion(np.__version__) < '1.11.0.dev0':
+             raise NotImplementedError
+
+     def time_randint_fast(self, name):
+         high = self.high[name]
+         np.random.randint(0, high, size=10**5, dtype=name)
+
+     def time_randint_slow(self, name):
+         high = self.high[name]
+         np.random.randint(0, high + 1, size=10**5, dtype=name)
+
+
Add benchmark tests for numpy.random.randint.
## Code Before:
from __future__ import absolute_import, division, print_function

from .common import Benchmark

import numpy as np


class Random(Benchmark):
    params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
              'poisson 10']

    def setup(self, name):
        items = name.split()
        name = items.pop(0)
        params = [float(x) for x in items]

        self.func = getattr(np.random, name)
        self.params = tuple(params) + ((100, 100),)

    def time_rng(self, name):
        self.func(*self.params)


class Shuffle(Benchmark):
    def setup(self):
        self.a = np.arange(100000)

    def time_100000(self):
        np.random.shuffle(self.a)

## Instruction:
Add benchmark tests for numpy.random.randint.

## Code After:
from __future__ import absolute_import, division, print_function

from .common import Benchmark

import numpy as np
from numpy.lib import NumpyVersion


class Random(Benchmark):
    params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
              'poisson 10']

    def setup(self, name):
        items = name.split()
        name = items.pop(0)
        params = [float(x) for x in items]

        self.func = getattr(np.random, name)
        self.params = tuple(params) + ((100, 100),)

    def time_rng(self, name):
        self.func(*self.params)


class Shuffle(Benchmark):
    def setup(self):
        self.a = np.arange(100000)

    def time_100000(self):
        np.random.shuffle(self.a)


class Randint(Benchmark):

    def time_randint_fast(self):
        """Compare to uint32 below"""
        np.random.randint(0, 2**30, size=10**5)

    def time_randint_slow(self):
        """Compare to uint32 below"""
        np.random.randint(0, 2**30 + 1, size=10**5)


class Randint_dtype(Benchmark):
    high = {
        'bool': 1,
        'uint8': 2**7,
        'uint16': 2**15,
        'uint32': 2**31,
        'uint64': 2**63
    }

    param_names = ['dtype']
    params = ['bool', 'uint8', 'uint16', 'uint32', 'uint64']

    def setup(self, name):
        if NumpyVersion(np.__version__) < '1.11.0.dev0':
            raise NotImplementedError

    def time_randint_fast(self, name):
        high = self.high[name]
        np.random.randint(0, high, size=10**5, dtype=name)

    def time_randint_slow(self, name):
        high = self.high[name]
        np.random.randint(0, high + 1, size=10**5, dtype=name)
ca8e15d50b816c29fc2a0df27d0266826e38b5b8
cellcounter/statistics/serializers.py
cellcounter/statistics/serializers.py
from rest_framework.serializers import ModelSerializer

from .models import CountInstance


class CountInstanceSerializer(ModelSerializer):
    class Meta:
        model = CountInstance

from rest_framework.serializers import ModelSerializer

from .models import CountInstance


class CountInstanceSerializer(ModelSerializer):
    class Meta:
        model = CountInstance
        fields = ('count_total',)
Update serializer to deal with new model
Update serializer to deal with new model
Python
mit
cellcounter/cellcounter,haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter,haematologic/cellcounter,cellcounter/cellcounter
  from rest_framework.serializers import ModelSerializer

  from .models import CountInstance


  class CountInstanceSerializer(ModelSerializer):
      class Meta:
          model = CountInstance
+         fields = ('count_total',)
Update serializer to deal with new model
## Code Before:
from rest_framework.serializers import ModelSerializer

from .models import CountInstance


class CountInstanceSerializer(ModelSerializer):
    class Meta:
        model = CountInstance

## Instruction:
Update serializer to deal with new model

## Code After:
from rest_framework.serializers import ModelSerializer

from .models import CountInstance


class CountInstanceSerializer(ModelSerializer):
    class Meta:
        model = CountInstance
        fields = ('count_total',)
c1b433e5ed4c06b956b4d27f6da4e8b1dab54aaf
services/cloudwatch/sample.py
services/cloudwatch/sample.py
'''
===================================
Boto 3 - CloudWatch Service Example
===================================

This application implements the CloudWatch service that lets you gets
information from Amazon Cloud Watch.

See the README for more details.
'''

import boto3

'''
Define your AWS credentials:
'''
AWS_ACCESS_KEY_ID = 'AKIAJM7BQ4WBJJSVU2JQ'
AWS_SECRET_ACCESS_KEY = 'Fq9GmwWEsvbcdHuh4McD+ZUmfowPKrnzFmhczV2U'

'''
Connection to AWS.
'''
client = boto3.client('cloudwatch',
                      aws_access_key_id=AWS_ACCESS_KEY_ID,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY)

# Main program:
if __name__ == '__main__':
    print_results()

'''
===================================
Boto 3 - CloudWatch Service Example
===================================

This application implements the CloudWatch service that lets you gets
information from Amazon Cloud Watch.

See the README for more details.
'''

import boto3

'''
Define your AWS credentials:
'''
AWS_ACCESS_KEY_ID = '<YOUR ACCESS KEY ID>'
AWS_SECRET_ACCESS_KEY = '<YOUR SECRET ACCESS KEY>'

'''
Connection to AWS.
'''
client = boto3.client('cloudwatch',
                      aws_access_key_id=AWS_ACCESS_KEY_ID,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY)

# Main program:
if __name__ == '__main__':
    print_results()
Fix issue in cloudwacth service credentials
Fix issue in cloudwacth service credentials
Python
mit
rolandovillca/aws_samples_boto3_sdk
  '''
  ===================================
  Boto 3 - CloudWatch Service Example
  ===================================

  This application implements the CloudWatch service that lets you gets
  information from Amazon Cloud Watch.

  See the README for more details.
  '''

  import boto3

  '''
  Define your AWS credentials:
  '''
- AWS_ACCESS_KEY_ID = 'AKIAJM7BQ4WBJJSVU2JQ'
- AWS_SECRET_ACCESS_KEY = 'Fq9GmwWEsvbcdHuh4McD+ZUmfowPKrnzFmhczV2U'
+ AWS_ACCESS_KEY_ID = '<YOUR ACCESS KEY ID>'
+ AWS_SECRET_ACCESS_KEY = '<YOUR SECRET ACCESS KEY>'

  '''
  Connection to AWS.
  '''
  client = boto3.client('cloudwatch',
                        aws_access_key_id=AWS_ACCESS_KEY_ID,
                        aws_secret_access_key=AWS_SECRET_ACCESS_KEY)

  # Main program:
  if __name__ == '__main__':
      print_results()
Fix issue in cloudwacth service credentials
## Code Before:
'''
===================================
Boto 3 - CloudWatch Service Example
===================================

This application implements the CloudWatch service that lets you gets
information from Amazon Cloud Watch.

See the README for more details.
'''

import boto3

'''
Define your AWS credentials:
'''
AWS_ACCESS_KEY_ID = 'AKIAJM7BQ4WBJJSVU2JQ'
AWS_SECRET_ACCESS_KEY = 'Fq9GmwWEsvbcdHuh4McD+ZUmfowPKrnzFmhczV2U'

'''
Connection to AWS.
'''
client = boto3.client('cloudwatch',
                      aws_access_key_id=AWS_ACCESS_KEY_ID,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY)

# Main program:
if __name__ == '__main__':
    print_results()

## Instruction:
Fix issue in cloudwacth service credentials

## Code After:
'''
===================================
Boto 3 - CloudWatch Service Example
===================================

This application implements the CloudWatch service that lets you gets
information from Amazon Cloud Watch.

See the README for more details.
'''

import boto3

'''
Define your AWS credentials:
'''
AWS_ACCESS_KEY_ID = '<YOUR ACCESS KEY ID>'
AWS_SECRET_ACCESS_KEY = '<YOUR SECRET ACCESS KEY>'

'''
Connection to AWS.
'''
client = boto3.client('cloudwatch',
                      aws_access_key_id=AWS_ACCESS_KEY_ID,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY)

# Main program:
if __name__ == '__main__':
    print_results()
6f7dba3beccca655b84879ccd0f3071d15536b2f
test/utils.py
test/utils.py
import string
import random

def generate_string(str_len=6, src=string.ascii_lowercase):
    return "".join(random.choice(src) for x in xrange(str_len))

def lorem_ipsum():
    words_count = random.randint(20, 50)
    lorem = list([])
    for i in xrange(words_count):
        word_length = random.randint(4, 8)
        lorem.append(generate_string(str_len=word_length))
    return " ".join(lorem)

import string
import random


def generate_string(str_len=6, src=string.ascii_lowercase):
    return "".join(random.choice(src) for x in xrange(str_len))


def lorem_ipsum(words_count=30):
    lorem = list([])
    for i in xrange(words_count):
        word_length = random.randint(4, 8)
        lorem.append(generate_string(str_len=word_length))
    return " ".join(lorem)
Add word_count parameter for lorem_ipsum generator
Add word_count parameter for lorem_ipsum generator
Python
mit
sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/Rynda
  import string
  import random

+
  def generate_string(str_len=6, src=string.ascii_lowercase):
      return "".join(random.choice(src) for x in xrange(str_len))

- def lorem_ipsum():
-     words_count = random.randint(20, 50)
+
+ def lorem_ipsum(words_count=30):
      lorem = list([])
      for i in xrange(words_count):
          word_length = random.randint(4, 8)
          lorem.append(generate_string(str_len=word_length))
      return " ".join(lorem)
Add word_count parameter for lorem_ipsum generator
## Code Before:
import string
import random

def generate_string(str_len=6, src=string.ascii_lowercase):
    return "".join(random.choice(src) for x in xrange(str_len))

def lorem_ipsum():
    words_count = random.randint(20, 50)
    lorem = list([])
    for i in xrange(words_count):
        word_length = random.randint(4, 8)
        lorem.append(generate_string(str_len=word_length))
    return " ".join(lorem)

## Instruction:
Add word_count parameter for lorem_ipsum generator

## Code After:
import string
import random


def generate_string(str_len=6, src=string.ascii_lowercase):
    return "".join(random.choice(src) for x in xrange(str_len))


def lorem_ipsum(words_count=30):
    lorem = list([])
    for i in xrange(words_count):
        word_length = random.randint(4, 8)
        lorem.append(generate_string(str_len=word_length))
    return " ".join(lorem)
d80f7a89b5bc23802ad5ec9bb8cc6ad523976718
test_gitnl.py
test_gitnl.py
from __future__ import print_function, division, absolute_import

import unittest

import gitnl


class GitnlTestCase(unittest.TestCase):
    """Tests from 'gitnl.py'."""

    def test_push_remotename_branchfrom(self):
        desired = 'push remotename branchfrom'
        actual = gitnl.parse_to_git('push my branch branchfrom to a remote called remotename')
        self.assertEqual(actual, desired)


if __name__ == '__main__':
    unittest.main()

from __future__ import print_function, division, absolute_import

import unittest

import gitnl


class GitnlTestCase(unittest.TestCase):
    """Tests from 'gitnl.py'."""

    def test_push_remotename_branchfrom(self):
        desired = 'push remotename branchfrom'
        actual = gitnl.parse_to_git('push my branch branchfrom to a remote called remotename')
        self.assertEqual(actual, desired)

    def test_rename_branch(self):
        desired = 'branch -m old_branch new_branch'
        actual = gitnl.parse_to_git('branch rename branch old_branch to new_branch')
        self.assertEqual(actual, desired)


if __name__ == '__main__':
    unittest.main()
Add rename branch locally test
Add rename branch locally test
Python
mit
eteq/gitnl,eteq/gitnl
  from __future__ import print_function, division, absolute_import

  import unittest

  import gitnl


  class GitnlTestCase(unittest.TestCase):
      """Tests from 'gitnl.py'."""

      def test_push_remotename_branchfrom(self):
          desired = 'push remotename branchfrom'
          actual = gitnl.parse_to_git('push my branch branchfrom to a remote called remotename')
          self.assertEqual(actual, desired)

+     def test_rename_branch(self):
+         desired = 'branch -m old_branch new_branch'
+         actual = gitnl.parse_to_git('branch rename branch old_branch to new_branch')
+         self.assertEqual(actual, desired)
+
  if __name__ == '__main__':
      unittest.main()
Add rename branch locally test
## Code Before:
from __future__ import print_function, division, absolute_import

import unittest

import gitnl


class GitnlTestCase(unittest.TestCase):
    """Tests from 'gitnl.py'."""

    def test_push_remotename_branchfrom(self):
        desired = 'push remotename branchfrom'
        actual = gitnl.parse_to_git('push my branch branchfrom to a remote called remotename')
        self.assertEqual(actual, desired)


if __name__ == '__main__':
    unittest.main()

## Instruction:
Add rename branch locally test

## Code After:
from __future__ import print_function, division, absolute_import

import unittest

import gitnl


class GitnlTestCase(unittest.TestCase):
    """Tests from 'gitnl.py'."""

    def test_push_remotename_branchfrom(self):
        desired = 'push remotename branchfrom'
        actual = gitnl.parse_to_git('push my branch branchfrom to a remote called remotename')
        self.assertEqual(actual, desired)

    def test_rename_branch(self):
        desired = 'branch -m old_branch new_branch'
        actual = gitnl.parse_to_git('branch rename branch old_branch to new_branch')
        self.assertEqual(actual, desired)


if __name__ == '__main__':
    unittest.main()
fb213097e838ddfa40d9f71f1705d7af661cfbdf
tests/unit.py
tests/unit.py
import unittest

from github2.issues import Issue
from github2.client import Github


class ReprTests(unittest.TestCase):
    """__repr__ must return strings, not unicode objects."""

    def test_issue(self):
        """Issues can have non-ASCII characters in the title."""
        i = Issue(title=u'abcdé')
        self.assertEqual(str, type(repr(i)))


class RateLimits(unittest.TestCase):
    """
    How should we handle actual API calls such that tests can run?
    Perhaps the library should support a ~/.python_github2.conf from which
    to get the auth?
    """

    def test_delays(self):
        import datetime
        USERNAME = ''
        API_KEY = ''
        client = Github(username=USERNAME, api_token=API_KEY,
                        requests_per_second=.5)
        client.users.show('defunkt')
        start = datetime.datetime.now()
        client.users.show('mojombo')
        end = datetime.datetime.now()
        self.assertGreaterEqual((end - start).total_seconds(), 2.0,
            "Expected .5 reqs per second to require a 2 second delay between "
            "calls.")

import unittest

from github2.issues import Issue
from github2.client import Github


class ReprTests(unittest.TestCase):
    """__repr__ must return strings, not unicode objects."""

    def test_issue(self):
        """Issues can have non-ASCII characters in the title."""
        i = Issue(title=u'abcdé')
        self.assertEqual(str, type(repr(i)))


class RateLimits(unittest.TestCase):
    """
    How should we handle actual API calls such that tests can run?
    Perhaps the library should support a ~/.python_github2.conf from which
    to get the auth?
    """

    def test_delays(self):
        import datetime
        USERNAME = ''
        API_KEY = ''
        client = Github(username=USERNAME, api_token=API_KEY,
                        requests_per_second=.5)
        client.users.show('defunkt')
        start = datetime.datetime.now()
        client.users.show('mojombo')
        end = datetime.datetime.now()

        delta = end - start
        delta_seconds = delta.days * 24 * 60 * 60 + delta.seconds

        self.assertTrue(delta_seconds >= 2,
            "Expected .5 reqs per second to require a 2 second delay between "
            "calls.")
Allow tests to be run with Python <2.6.
Allow tests to be run with Python <2.6.
Python
bsd-3-clause
ask/python-github2
  import unittest

  from github2.issues import Issue
  from github2.client import Github


  class ReprTests(unittest.TestCase):
      """__repr__ must return strings, not unicode objects."""

      def test_issue(self):
          """Issues can have non-ASCII characters in the title."""
          i = Issue(title=u'abcdé')
          self.assertEqual(str, type(repr(i)))


  class RateLimits(unittest.TestCase):
      """
      How should we handle actual API calls such that tests can run?
      Perhaps the library should support a ~/.python_github2.conf from which
      to get the auth?
      """

      def test_delays(self):
          import datetime
          USERNAME = ''
          API_KEY = ''
          client = Github(username=USERNAME, api_token=API_KEY,
                          requests_per_second=.5)
          client.users.show('defunkt')
          start = datetime.datetime.now()
          client.users.show('mojombo')
          end = datetime.datetime.now()
-         self.assertGreaterEqual((end - start).total_seconds(), 2.0,
+
+         delta = end - start
+         delta_seconds = delta.days * 24 * 60 * 60 + delta.seconds
+
+         self.assertTrue(delta_seconds >= 2,
              "Expected .5 reqs per second to require a 2 second delay between "
              "calls.")
Allow tests to be run with Python <2.6.
## Code Before:
import unittest

from github2.issues import Issue
from github2.client import Github


class ReprTests(unittest.TestCase):
    """__repr__ must return strings, not unicode objects."""

    def test_issue(self):
        """Issues can have non-ASCII characters in the title."""
        i = Issue(title=u'abcdé')
        self.assertEqual(str, type(repr(i)))


class RateLimits(unittest.TestCase):
    """
    How should we handle actual API calls such that tests can run?
    Perhaps the library should support a ~/.python_github2.conf from which
    to get the auth?
    """

    def test_delays(self):
        import datetime
        USERNAME = ''
        API_KEY = ''
        client = Github(username=USERNAME, api_token=API_KEY,
                        requests_per_second=.5)
        client.users.show('defunkt')
        start = datetime.datetime.now()
        client.users.show('mojombo')
        end = datetime.datetime.now()
        self.assertGreaterEqual((end - start).total_seconds(), 2.0,
            "Expected .5 reqs per second to require a 2 second delay between "
            "calls.")

## Instruction:
Allow tests to be run with Python <2.6.

## Code After:
import unittest

from github2.issues import Issue
from github2.client import Github


class ReprTests(unittest.TestCase):
    """__repr__ must return strings, not unicode objects."""

    def test_issue(self):
        """Issues can have non-ASCII characters in the title."""
        i = Issue(title=u'abcdé')
        self.assertEqual(str, type(repr(i)))


class RateLimits(unittest.TestCase):
    """
    How should we handle actual API calls such that tests can run?
    Perhaps the library should support a ~/.python_github2.conf from which
    to get the auth?
    """

    def test_delays(self):
        import datetime
        USERNAME = ''
        API_KEY = ''
        client = Github(username=USERNAME, api_token=API_KEY,
                        requests_per_second=.5)
        client.users.show('defunkt')
        start = datetime.datetime.now()
        client.users.show('mojombo')
        end = datetime.datetime.now()

        delta = end - start
        delta_seconds = delta.days * 24 * 60 * 60 + delta.seconds

        self.assertTrue(delta_seconds >= 2,
            "Expected .5 reqs per second to require a 2 second delay between "
            "calls.")
15996286496d913c25290362ba2dba2d349bd5f6
imageManagerUtils/settings.py
imageManagerUtils/settings.py
import os
import sys
import subprocess

# This path is the location of the caller script
MAIN_SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))

# Set up the path to settings.sh
settings_path = os.path.join(MAIN_SCRIPT_PATH, 'settings.sh')
if not os.path.isfile(settings_path):
    print('Cannot find settings.sh in ' + MAIN_SCRIPT_PATH)
    exit(1)

# This is a tricky way to read bash envs in the script
env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True)
# Transform to list of python strings (utf-8 encodings)
env_str = env_str.decode('utf-8').split('\n')
# Transform from a list to a list of pairs and filter out invalid formats
env_list = [kv.split('=') for kv in env_str if len(kv.split('=')) == 2]
# Transform from a list to a dictionary
env_dict = {kv[0]: kv[1] for kv in env_list}
# Update the os.environ globally
os.environ.update(env_dict)

import os
import sys
import subprocess

# This path is the location of the caller script
MAIN_SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))

# Set up the path to settings.sh
settings_path = os.path.join(MAIN_SCRIPT_PATH, 'settings.sh')
if not os.path.isfile(settings_path):
    print('Cannot find settings.sh in ' + MAIN_SCRIPT_PATH)
    exit(1)

# This is a tricky way to read bash envs in the script
env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True, executable='/bin/bash')
# Transform to list of python strings (utf-8 encodings)
env_str = env_str.decode('utf-8').split('\n')
# Transform from a list to a list of pairs and filter out invalid formats
env_list = [kv.split('=') for kv in env_str if len(kv.split('=')) == 2]
# Transform from a list to a dictionary
env_dict = {kv[0]: kv[1] for kv in env_list}
# Update the os.environ globally
os.environ.update(env_dict)
Fix bug of invoking /bin/sh on several OSs
Fix bug of invoking /bin/sh on several OSs
Python
mit
snippits/qemu_image,snippits/qemu_image,snippits/qemu_image
  import os
  import sys
  import subprocess

  # This path is the location of the caller script
  MAIN_SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))

  # Set up the path to settings.sh
  settings_path = os.path.join(MAIN_SCRIPT_PATH, 'settings.sh')
  if not os.path.isfile(settings_path):
      print('Cannot find settings.sh in ' + MAIN_SCRIPT_PATH)
      exit(1)

  # This is a tricky way to read bash envs in the script
- env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True)
+ env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True, executable='/bin/bash')
  # Transform to list of python strings (utf-8 encodings)
  env_str = env_str.decode('utf-8').split('\n')
  # Transform from a list to a list of pairs and filter out invalid formats
  env_list = [kv.split('=') for kv in env_str if len(kv.split('=')) == 2]
  # Transform from a list to a dictionary
  env_dict = {kv[0]: kv[1] for kv in env_list}
  # Update the os.environ globally
  os.environ.update(env_dict)
Fix bug of invoking /bin/sh on several OSs
## Code Before:
import os
import sys
import subprocess

# This path is the location of the caller script
MAIN_SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))

# Set up the path to settings.sh
settings_path = os.path.join(MAIN_SCRIPT_PATH, 'settings.sh')
if not os.path.isfile(settings_path):
    print('Cannot find settings.sh in ' + MAIN_SCRIPT_PATH)
    exit(1)

# This is a tricky way to read bash envs in the script
env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True)
# Transform to list of python strings (utf-8 encodings)
env_str = env_str.decode('utf-8').split('\n')
# Transform from a list to a list of pairs and filter out invalid formats
env_list = [kv.split('=') for kv in env_str if len(kv.split('=')) == 2]
# Transform from a list to a dictionary
env_dict = {kv[0]: kv[1] for kv in env_list}
# Update the os.environ globally
os.environ.update(env_dict)

## Instruction:
Fix bug of invoking /bin/sh on several OSs

## Code After:
import os
import sys
import subprocess

# This path is the location of the caller script
MAIN_SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))

# Set up the path to settings.sh
settings_path = os.path.join(MAIN_SCRIPT_PATH, 'settings.sh')
if not os.path.isfile(settings_path):
    print('Cannot find settings.sh in ' + MAIN_SCRIPT_PATH)
    exit(1)

# This is a tricky way to read bash envs in the script
env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True, executable='/bin/bash')
# Transform to list of python strings (utf-8 encodings)
env_str = env_str.decode('utf-8').split('\n')
# Transform from a list to a list of pairs and filter out invalid formats
env_list = [kv.split('=') for kv in env_str if len(kv.split('=')) == 2]
# Transform from a list to a dictionary
env_dict = {kv[0]: kv[1] for kv in env_list}
# Update the os.environ globally
os.environ.update(env_dict)
59b015bb3e45497b7ec86bf1799e8442a30b65da
py/PMUtil.py
py/PMUtil.py
from __future__ import absolute_import, division, print_function
import sys
import time
import datetime


def timeStamp():
    '''Return time stamp'''
    t = time.time()
    fmt = '[%Y-%m-%d %H:%M:%S]'
    return datetime.datetime.fromtimestamp(t).strftime(fmt)


def printStatus(msg):
    '''Print status message'''
    print('{} {}'.format(timeStamp(), msg), file=sys.stderr)
    sys.stderr.flush()
from __future__ import absolute_import, division, print_function
import sys
import time
import datetime


def timeStamp():
    '''Return time stamp'''
    t = time.time()
    fmt = '[%Y-%m-%d %H:%M:%S]'
    return datetime.datetime.fromtimestamp(t).strftime(fmt)


def printStatus(msg):
    '''Print status message'''
    print('{} {}'.format(timeStamp(), msg), file=sys.stderr)
    sys.stderr.flush()


def exitScript(num=1):
    '''Exit script'''
    sys.exit(num)
Exit method. - (New) Added exit method.
Exit method. - (New) Added exit method.
Python
mit
dacuevas/PMAnalyzer,dacuevas/PMAnalyzer,dacuevas/PMAnalyzer,dacuevas/PMAnalyzer
from __future__ import absolute_import, division, print_function
import sys
import time
import datetime


def timeStamp():
    '''Return time stamp'''
    t = time.time()
    fmt = '[%Y-%m-%d %H:%M:%S]'
    return datetime.datetime.fromtimestamp(t).strftime(fmt)


def printStatus(msg):
    '''Print status message'''
    print('{} {}'.format(timeStamp(), msg), file=sys.stderr)
    sys.stderr.flush()
+
+
+ def exitScript(num=1):
+     '''Exit script'''
+     sys.exit(num)
+
Exit method. - (New) Added exit method.
## Code Before:
from __future__ import absolute_import, division, print_function
import sys
import time
import datetime


def timeStamp():
    '''Return time stamp'''
    t = time.time()
    fmt = '[%Y-%m-%d %H:%M:%S]'
    return datetime.datetime.fromtimestamp(t).strftime(fmt)


def printStatus(msg):
    '''Print status message'''
    print('{} {}'.format(timeStamp(), msg), file=sys.stderr)
    sys.stderr.flush()
## Instruction:
Exit method. - (New) Added exit method.
## Code After:
from __future__ import absolute_import, division, print_function
import sys
import time
import datetime


def timeStamp():
    '''Return time stamp'''
    t = time.time()
    fmt = '[%Y-%m-%d %H:%M:%S]'
    return datetime.datetime.fromtimestamp(t).strftime(fmt)


def printStatus(msg):
    '''Print status message'''
    print('{} {}'.format(timeStamp(), msg), file=sys.stderr)
    sys.stderr.flush()


def exitScript(num=1):
    '''Exit script'''
    sys.exit(num)
764f8d9d7818076555cde5fcad29f3052b523771
company/autocomplete_light_registry.py
company/autocomplete_light_registry.py
import autocomplete_light

from .models import Company


class CompanyAutocomplete(autocomplete_light.AutocompleteModelBase):
    search_fields = ['^name']
    model = Company

autocomplete_light.register(CompanyAutocomplete)
import autocomplete_light

from .models import Company


class CompanyAutocomplete(autocomplete_light.AutocompleteModelBase):
    search_fields = ['name', 'official_name', 'common_name']
    model = Company

autocomplete_light.register(CompanyAutocomplete)
Add more search fields to autocomplete
Add more search fields to autocomplete
Python
bsd-3-clause
KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend
import autocomplete_light

from .models import Company


class CompanyAutocomplete(autocomplete_light.AutocompleteModelBase):
-     search_fields = ['^name']
+     search_fields = ['name', 'official_name', 'common_name']
    model = Company

autocomplete_light.register(CompanyAutocomplete)
Add more search fields to autocomplete
## Code Before:
import autocomplete_light

from .models import Company


class CompanyAutocomplete(autocomplete_light.AutocompleteModelBase):
    search_fields = ['^name']
    model = Company

autocomplete_light.register(CompanyAutocomplete)
## Instruction:
Add more search fields to autocomplete
## Code After:
import autocomplete_light

from .models import Company


class CompanyAutocomplete(autocomplete_light.AutocompleteModelBase):
    search_fields = ['name', 'official_name', 'common_name']
    model = Company

autocomplete_light.register(CompanyAutocomplete)
6f822cf46957d038588e7a71eb91f8ca9f9c95f1
scaffolder/commands/install.py
scaffolder/commands/install.py
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand


class InstallCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-t",
            "--target",
            dest="target_dir",
            default='~/.cookiejar',
            help='Project Templates directory.',
            metavar="TEMPLATES_DIR"
        ),
    )

    def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
        help = 'install: Installs a Project Template.'
        parser = OptionParser(
            version=self.get_version(),
            option_list=self.get_option_list(),
            usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
        )
        aliases = ('tmp',)
        BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)

    def run(self, *args, **options):
        src = args[0]
        tgt = options.get('target_dir')
        manager = TemplateManager()
        manager.install(src=src, dest=tgt)
from optparse import make_option
from optparse import OptionParser
from scaffolder import get_minion_path
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand


class InstallCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-t",
            "--target",
            dest="target_dir",
            default=get_minion_path('weaver'),
            help='Project Templates directory.',
            metavar="TEMPLATES_DIR"
        ),
    )

    def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
        help = 'install: Installs a Project Template.'
        parser = OptionParser(
            version=self.get_version(),
            option_list=self.get_option_list(),
            usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
        )
        aliases = ('tmp',)
        BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)

    def run(self, *args, **options):
        src = args[0]
        tgt = options.get('target_dir')
        manager = TemplateManager()
        manager.install(src=src, dest=tgt)
Use get_minion_path to get default dir.
InstallCommand: Use get_minion_path to get default dir.
Python
mit
goliatone/minions
from optparse import make_option
from optparse import OptionParser
+ from scaffolder import get_minion_path
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand


class InstallCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-t",
            "--target",
            dest="target_dir",
-             default='~/.cookiejar',
+             default=get_minion_path('weaver'),
            help='Project Templates directory.',
            metavar="TEMPLATES_DIR"
        ),
    )

    def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
        help = 'install: Installs a Project Template.'
        parser = OptionParser(
            version=self.get_version(),
            option_list=self.get_option_list(),
            usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
        )
        aliases = ('tmp',)
        BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)

    def run(self, *args, **options):
        src = args[0]
        tgt = options.get('target_dir')
        manager = TemplateManager()
        manager.install(src=src, dest=tgt)
Use get_minion_path to get default dir.
## Code Before:
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand


class InstallCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-t",
            "--target",
            dest="target_dir",
            default='~/.cookiejar',
            help='Project Templates directory.',
            metavar="TEMPLATES_DIR"
        ),
    )

    def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
        help = 'install: Installs a Project Template.'
        parser = OptionParser(
            version=self.get_version(),
            option_list=self.get_option_list(),
            usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
        )
        aliases = ('tmp',)
        BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)

    def run(self, *args, **options):
        src = args[0]
        tgt = options.get('target_dir')
        manager = TemplateManager()
        manager.install(src=src, dest=tgt)
## Instruction:
Use get_minion_path to get default dir.
## Code After:
from optparse import make_option
from optparse import OptionParser
from scaffolder import get_minion_path
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand


class InstallCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-t",
            "--target",
            dest="target_dir",
            default=get_minion_path('weaver'),
            help='Project Templates directory.',
            metavar="TEMPLATES_DIR"
        ),
    )

    def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
        help = 'install: Installs a Project Template.'
        parser = OptionParser(
            version=self.get_version(),
            option_list=self.get_option_list(),
            usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
        )
        aliases = ('tmp',)
        BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)

    def run(self, *args, **options):
        src = args[0]
        tgt = options.get('target_dir')
        manager = TemplateManager()
        manager.install(src=src, dest=tgt)
95d9bb3a9500d80b5064c5fb4d5bd7b30406d1ae
conanfile.py
conanfile.py
from conans import ConanFile, CMake


class GrpccbConan(ConanFile):
    name = "grpc_cb_core"
    version = "0.2"
    license = "Apache-2.0"
    url = "https://github.com/jinq0123/grpc_cb_core"
    description = "C++ gRPC core library with callback interface."
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    requires = "grpc/1.17.2@inexorgame/stable",
    generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
    build_requires = "PremakeGen/0.1@memsharded/testing"
    exports_sources = "src*", "include*", "CMakeLists.txt"

    def build(self):
        cmake = CMake(self)
        self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def package(self):
        self.copy("include/*")
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.dylib*", dst="lib", keep_path=False)
        self.copy("*.so", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)

    def package_info(self):
        self.cpp_info.libs = ["grpc_cb_core"]
from conans import ConanFile, CMake


class GrpccbConan(ConanFile):
    name = "grpc_cb_core"
    version = "0.2"
    license = "Apache-2.0"
    url = "https://github.com/jinq0123/grpc_cb_core"
    description = "C++ gRPC core library with callback interface."
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    requires = "grpc/1.44.0@",
    generators = "cmake", "premake" # The builtin premake generator
    exports_sources = "src*", "include*", "CMakeLists.txt"

    def build(self):
        cmake = CMake(self)
        self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def package(self):
        self.copy("include/*")
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.dylib*", dst="lib", keep_path=False)
        self.copy("*.so", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)

    def package_info(self):
        self.cpp_info.libs = ["grpc_cb_core"]
Fix update remote to ConanCenter and grpc to highest buildable/supported version
Fix update remote to ConanCenter and grpc to highest buildable/supported version
Python
apache-2.0
jinq0123/grpc_cb_core,jinq0123/grpc_cb_core,jinq0123/grpc_cb_core
from conans import ConanFile, CMake


class GrpccbConan(ConanFile):
    name = "grpc_cb_core"
    version = "0.2"
    license = "Apache-2.0"
    url = "https://github.com/jinq0123/grpc_cb_core"
    description = "C++ gRPC core library with callback interface."
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=False"
-     requires = "grpc/1.17.2@inexorgame/stable",
+     requires = "grpc/1.44.0@",
+     generators = "cmake", "premake" # The builtin premake generator
-     generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
-     build_requires = "PremakeGen/0.1@memsharded/testing"
    exports_sources = "src*", "include*", "CMakeLists.txt"

    def build(self):
        cmake = CMake(self)
        self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def package(self):
        self.copy("include/*")
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.dylib*", dst="lib", keep_path=False)
        self.copy("*.so", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)

    def package_info(self):
        self.cpp_info.libs = ["grpc_cb_core"]
Fix update remote to ConanCenter and grpc to highest buildable/supported version
## Code Before:
from conans import ConanFile, CMake


class GrpccbConan(ConanFile):
    name = "grpc_cb_core"
    version = "0.2"
    license = "Apache-2.0"
    url = "https://github.com/jinq0123/grpc_cb_core"
    description = "C++ gRPC core library with callback interface."
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    requires = "grpc/1.17.2@inexorgame/stable",
    generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
    build_requires = "PremakeGen/0.1@memsharded/testing"
    exports_sources = "src*", "include*", "CMakeLists.txt"

    def build(self):
        cmake = CMake(self)
        self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def package(self):
        self.copy("include/*")
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.dylib*", dst="lib", keep_path=False)
        self.copy("*.so", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)

    def package_info(self):
        self.cpp_info.libs = ["grpc_cb_core"]
## Instruction:
Fix update remote to ConanCenter and grpc to highest buildable/supported version
## Code After:
from conans import ConanFile, CMake


class GrpccbConan(ConanFile):
    name = "grpc_cb_core"
    version = "0.2"
    license = "Apache-2.0"
    url = "https://github.com/jinq0123/grpc_cb_core"
    description = "C++ gRPC core library with callback interface."
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    requires = "grpc/1.44.0@",
    generators = "cmake", "premake" # The builtin premake generator
    exports_sources = "src*", "include*", "CMakeLists.txt"

    def build(self):
        cmake = CMake(self)
        self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def package(self):
        self.copy("include/*")
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.dylib*", dst="lib", keep_path=False)
        self.copy("*.so", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)

    def package_info(self):
        self.cpp_info.libs = ["grpc_cb_core"]
306e6939c5b369f4a4ef4bb4d16948dc1f027f53
tests/test_initial_ismaster.py
tests/test_initial_ismaster.py
import time

from mockupdb import MockupDB, wait_until
from pymongo import MongoClient

from tests import unittest


class TestInitialIsMaster(unittest.TestCase):
    def test_initial_ismaster(self):
        server = MockupDB()
        server.run()
        self.addCleanup(server.stop)

        start = time.time()
        client = MongoClient(server.uri)
        self.addCleanup(client.close)

        # A single ismaster is enough for the client to be connected.
        self.assertIsNone(client.address)
        server.receives('ismaster').ok()
        wait_until(lambda: client.address is not None,
                   'update address', timeout=1)

        # At least 10 seconds before next heartbeat.
        server.receives('ismaster').ok()
        self.assertGreaterEqual(time.time() - start, 10)


if __name__ == '__main__':
    unittest.main()
import time

from mockupdb import MockupDB, wait_until
from pymongo import MongoClient

from tests import unittest


class TestInitialIsMaster(unittest.TestCase):
    def test_initial_ismaster(self):
        server = MockupDB()
        server.run()
        self.addCleanup(server.stop)

        start = time.time()
        client = MongoClient(server.uri)
        self.addCleanup(client.close)

        # A single ismaster is enough for the client to be connected.
        self.assertFalse(client.nodes)
        server.receives('ismaster').ok(ismaster=True)
        wait_until(lambda: client.nodes,
                   'update nodes', timeout=1)

        # At least 10 seconds before next heartbeat.
        server.receives('ismaster').ok(ismaster=True)
        self.assertGreaterEqual(time.time() - start, 10)


if __name__ == '__main__':
    unittest.main()
Update for PYTHON 985: MongoClient properties now block until connected.
Update for PYTHON 985: MongoClient properties now block until connected.
Python
apache-2.0
ajdavis/pymongo-mockup-tests
import time

from mockupdb import MockupDB, wait_until
from pymongo import MongoClient

from tests import unittest


class TestInitialIsMaster(unittest.TestCase):
    def test_initial_ismaster(self):
        server = MockupDB()
        server.run()
        self.addCleanup(server.stop)

        start = time.time()
        client = MongoClient(server.uri)
        self.addCleanup(client.close)

        # A single ismaster is enough for the client to be connected.
-         self.assertIsNone(client.address)
+         self.assertFalse(client.nodes)
-         server.receives('ismaster').ok()
+         server.receives('ismaster').ok(ismaster=True)
-         wait_until(lambda: client.address is not None,
+         wait_until(lambda: client.nodes,
-                    'update address', timeout=1)
+                    'update nodes', timeout=1)

        # At least 10 seconds before next heartbeat.
-         server.receives('ismaster').ok()
+         server.receives('ismaster').ok(ismaster=True)
        self.assertGreaterEqual(time.time() - start, 10)


if __name__ == '__main__':
    unittest.main()
Update for PYTHON 985: MongoClient properties now block until connected.
## Code Before:
import time

from mockupdb import MockupDB, wait_until
from pymongo import MongoClient

from tests import unittest


class TestInitialIsMaster(unittest.TestCase):
    def test_initial_ismaster(self):
        server = MockupDB()
        server.run()
        self.addCleanup(server.stop)

        start = time.time()
        client = MongoClient(server.uri)
        self.addCleanup(client.close)

        # A single ismaster is enough for the client to be connected.
        self.assertIsNone(client.address)
        server.receives('ismaster').ok()
        wait_until(lambda: client.address is not None,
                   'update address', timeout=1)

        # At least 10 seconds before next heartbeat.
        server.receives('ismaster').ok()
        self.assertGreaterEqual(time.time() - start, 10)


if __name__ == '__main__':
    unittest.main()
## Instruction:
Update for PYTHON 985: MongoClient properties now block until connected.
## Code After:
import time

from mockupdb import MockupDB, wait_until
from pymongo import MongoClient

from tests import unittest


class TestInitialIsMaster(unittest.TestCase):
    def test_initial_ismaster(self):
        server = MockupDB()
        server.run()
        self.addCleanup(server.stop)

        start = time.time()
        client = MongoClient(server.uri)
        self.addCleanup(client.close)

        # A single ismaster is enough for the client to be connected.
        self.assertFalse(client.nodes)
        server.receives('ismaster').ok(ismaster=True)
        wait_until(lambda: client.nodes,
                   'update nodes', timeout=1)

        # At least 10 seconds before next heartbeat.
        server.receives('ismaster').ok(ismaster=True)
        self.assertGreaterEqual(time.time() - start, 10)


if __name__ == '__main__':
    unittest.main()
af5e90cb544e2e37819302f5750084fc17f7ee12
make_example.py
make_example.py
import os
import sys
import yaml
import subprocess


class SDBUSPlus(object):
    def __init__(self, path):
        self.path = path

    def __call__(self, *a, **kw):
        args = [
            os.path.join(self.path, 'sdbus++'),
            '-t',
            os.path.join(self.path, 'templates')
        ]

        subprocess.call(args + list(a), **kw)


if __name__ == '__main__':
    sdbusplus = None
    for p in os.environ.get('PATH', "").split(os.pathsep):
        if os.path.exists(os.path.join(p, 'sdbus++')):
            sdbusplus = SDBUSPlus(p)
            break

    if sdbusplus is None:
        sys.stderr.write('Cannot find sdbus++\n')
        sys.exit(1)

    genfiles = {
        'server-cpp': lambda x: '%s.cpp' % x,
        'server-header': lambda x: os.path.join(
            os.path.join(*x.split('.')), 'server.hpp')
    }

    with open(os.path.join('example', 'interfaces.yaml'), 'r') as fd:
        interfaces = yaml.load(fd.read())

    for i in interfaces:
        for process, f in genfiles.iteritems():
            dest = f(i)
            parent = os.path.dirname(dest)
            if parent and not os.path.exists(parent):
                os.makedirs(parent)

            with open(dest, 'w') as fd:
                sdbusplus(
                    '-r',
                    os.path.join('example', 'interfaces'),
                    'interface',
                    process,
                    i,
                    stdout=fd)


# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
import os
import sys
import yaml
import subprocess

if __name__ == '__main__':
    genfiles = {
        'server-cpp': lambda x: '%s.cpp' % x,
        'server-header': lambda x: os.path.join(
            os.path.join(*x.split('.')), 'server.hpp')
    }

    with open(os.path.join('example', 'interfaces.yaml'), 'r') as fd:
        interfaces = yaml.load(fd.read())

    for i in interfaces:
        for process, f in genfiles.iteritems():
            dest = f(i)
            parent = os.path.dirname(dest)
            if parent and not os.path.exists(parent):
                os.makedirs(parent)

            with open(dest, 'w') as fd:
                subprocess.call([
                    'sdbus++',
                    '-r',
                    os.path.join('example', 'interfaces'),
                    'interface',
                    process,
                    i],
                    stdout=fd)


# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
Remove sdbus++ template search workaround
Remove sdbus++ template search workaround sdbus++ was fixed upstream to find its templates automatically. Change-Id: I29020b9d1ea4ae8baaca5fe869625a3d96cd6eaf Signed-off-by: Brad Bishop <713d098c0be4c8fd2bf36a94cd08699466677ecd@fuzziesquirrel.com>
Python
apache-2.0
openbmc/phosphor-inventory-manager,openbmc/phosphor-inventory-manager
import os
import sys
import yaml
import subprocess

- class SDBUSPlus(object):
-     def __init__(self, path):
-         self.path = path
-
-     def __call__(self, *a, **kw):
-         args = [
-             os.path.join(self.path, 'sdbus++'),
-             '-t',
-             os.path.join(self.path, 'templates')
-         ]
-
-         subprocess.call(args + list(a), **kw)
-
-
if __name__ == '__main__':
-     sdbusplus = None
-     for p in os.environ.get('PATH', "").split(os.pathsep):
-         if os.path.exists(os.path.join(p, 'sdbus++')):
-             sdbusplus = SDBUSPlus(p)
-             break
-
-     if sdbusplus is None:
-         sys.stderr.write('Cannot find sdbus++\n')
-         sys.exit(1)
-
    genfiles = {
        'server-cpp': lambda x: '%s.cpp' % x,
        'server-header': lambda x: os.path.join(
            os.path.join(*x.split('.')), 'server.hpp')
    }

    with open(os.path.join('example', 'interfaces.yaml'), 'r') as fd:
        interfaces = yaml.load(fd.read())

    for i in interfaces:
        for process, f in genfiles.iteritems():
            dest = f(i)
            parent = os.path.dirname(dest)
            if parent and not os.path.exists(parent):
                os.makedirs(parent)

            with open(dest, 'w') as fd:
+                 subprocess.call([
-                 sdbusplus(
+                     'sdbus++',
                    '-r',
                    os.path.join('example', 'interfaces'),
                    'interface',
                    process,
-                     i,
+                     i],
                    stdout=fd)


# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
Remove sdbus++ template search workaround
## Code Before:
import os
import sys
import yaml
import subprocess


class SDBUSPlus(object):
    def __init__(self, path):
        self.path = path

    def __call__(self, *a, **kw):
        args = [
            os.path.join(self.path, 'sdbus++'),
            '-t',
            os.path.join(self.path, 'templates')
        ]

        subprocess.call(args + list(a), **kw)


if __name__ == '__main__':
    sdbusplus = None
    for p in os.environ.get('PATH', "").split(os.pathsep):
        if os.path.exists(os.path.join(p, 'sdbus++')):
            sdbusplus = SDBUSPlus(p)
            break

    if sdbusplus is None:
        sys.stderr.write('Cannot find sdbus++\n')
        sys.exit(1)

    genfiles = {
        'server-cpp': lambda x: '%s.cpp' % x,
        'server-header': lambda x: os.path.join(
            os.path.join(*x.split('.')), 'server.hpp')
    }

    with open(os.path.join('example', 'interfaces.yaml'), 'r') as fd:
        interfaces = yaml.load(fd.read())

    for i in interfaces:
        for process, f in genfiles.iteritems():
            dest = f(i)
            parent = os.path.dirname(dest)
            if parent and not os.path.exists(parent):
                os.makedirs(parent)

            with open(dest, 'w') as fd:
                sdbusplus(
                    '-r',
                    os.path.join('example', 'interfaces'),
                    'interface',
                    process,
                    i,
                    stdout=fd)


# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
## Instruction:
Remove sdbus++ template search workaround
## Code After:
import os
import sys
import yaml
import subprocess

if __name__ == '__main__':
    genfiles = {
        'server-cpp': lambda x: '%s.cpp' % x,
        'server-header': lambda x: os.path.join(
            os.path.join(*x.split('.')), 'server.hpp')
    }

    with open(os.path.join('example', 'interfaces.yaml'), 'r') as fd:
        interfaces = yaml.load(fd.read())

    for i in interfaces:
        for process, f in genfiles.iteritems():
            dest = f(i)
            parent = os.path.dirname(dest)
            if parent and not os.path.exists(parent):
                os.makedirs(parent)

            with open(dest, 'w') as fd:
                subprocess.call([
                    'sdbus++',
                    '-r',
                    os.path.join('example', 'interfaces'),
                    'interface',
                    process,
                    i],
                    stdout=fd)


# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
1e07e9424a1ac69e1e660e6a6f1e58bba15472c1
make_spectra.py
make_spectra.py
import halospectra as hs
import randspectra as rs
import sys

snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base="/home/spb/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n256"
savedir="/home/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6/snapdir_"+str(snapnum).rjust(3,'0')

#halo = hs.HaloSpectra(snapnum, base,3, savefile="halo_spectra_DLA.hdf5", savedir=savedir)
halo = rs.RandSpectra(snapnum, base,numlos=3000,savedir=savedir, savefile="rand_spectra_DLA.hdf5")
halo.get_tau("Si",2,2)
halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
import halospectra as hs
import randspectra as rs
import sys

snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base="/home/spb/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n256"
savedir="/home/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6/snapdir_"+str(snapnum).rjust(3,'0')

#halo = hs.HaloSpectra(snapnum, base,3, savefile="halo_spectra_DLA.hdf5", savedir=savedir)
halo = rs.RandSpectra(snapnum, base,numlos=10000,savedir=savedir, savefile="rand_spectra.hdf5")
#halo.get_observer_tau("Si",2)
halo.get_tau("H",1,1)
#halo.get_col_density("Z",-1)
#halo.get_col_density("H",-1)
halo.save_file()
Implement saving and loading the observer tau
Implement saving and loading the observer tau
Python
mit
sbird/vw_spectra
import halospectra as hs
import randspectra as rs
import sys

snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base="/home/spb/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n256"
savedir="/home/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6/snapdir_"+str(snapnum).rjust(3,'0')

#halo = hs.HaloSpectra(snapnum, base,3, savefile="halo_spectra_DLA.hdf5", savedir=savedir)
- halo = rs.RandSpectra(snapnum, base,numlos=3000,savedir=savedir, savefile="rand_spectra_DLA.hdf5")
+ halo = rs.RandSpectra(snapnum, base,numlos=10000,savedir=savedir, savefile="rand_spectra.hdf5")
- halo.get_tau("Si",2,2)
+ #halo.get_observer_tau("Si",2)
halo.get_tau("H",1,1)
- halo.get_col_density("Z",-1)
+ #halo.get_col_density("Z",-1)
- halo.get_col_density("H",-1)
+ #halo.get_col_density("H",-1)
halo.save_file()
Implement saving and loading the observer tau
## Code Before:
import halospectra as hs
import randspectra as rs
import sys

snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base="/home/spb/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n256"
savedir="/home/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6/snapdir_"+str(snapnum).rjust(3,'0')

#halo = hs.HaloSpectra(snapnum, base,3, savefile="halo_spectra_DLA.hdf5", savedir=savedir)
halo = rs.RandSpectra(snapnum, base,numlos=3000,savedir=savedir, savefile="rand_spectra_DLA.hdf5")
halo.get_tau("Si",2,2)
halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
## Instruction:
Implement saving and loading the observer tau
## Code After:
import halospectra as hs
import randspectra as rs
import sys

snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base="/home/spb/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n256"
savedir="/home/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6/snapdir_"+str(snapnum).rjust(3,'0')

#halo = hs.HaloSpectra(snapnum, base,3, savefile="halo_spectra_DLA.hdf5", savedir=savedir)
halo = rs.RandSpectra(snapnum, base,numlos=10000,savedir=savedir, savefile="rand_spectra.hdf5")
#halo.get_observer_tau("Si",2)
halo.get_tau("H",1,1)
#halo.get_col_density("Z",-1)
#halo.get_col_density("H",-1)
halo.save_file()
8316a60ba2887a511579e8cedb90b3a02fc1889a
dope/util.py
dope/util.py
from uuid import UUID

from werkzeug.routing import BaseConverter


class UUIDConverter(BaseConverter):
    to_python = UUID
    to_url = str
from uuid import UUID

from werkzeug.routing import BaseConverter


class UUIDConverter(BaseConverter):
    to_python = UUID

    def to_url(self, obj):
        return str(obj).replace('-', '')
Drop dashes from download urls.
Drop dashes from download urls.
Python
mit
mbr/dope,mbr/dope
from uuid import UUID

from werkzeug.routing import BaseConverter


class UUIDConverter(BaseConverter):
    to_python = UUID
-     to_url = str
+     def to_url(self, obj):
+         return str(obj).replace('-', '')
+
Drop dashes from download urls.
## Code Before:
from uuid import UUID

from werkzeug.routing import BaseConverter


class UUIDConverter(BaseConverter):
    to_python = UUID
    to_url = str
## Instruction:
Drop dashes from download urls.
## Code After:
from uuid import UUID

from werkzeug.routing import BaseConverter


class UUIDConverter(BaseConverter):
    to_python = UUID

    def to_url(self, obj):
        return str(obj).replace('-', '')
9d46df1680e3d799971e73ec73043c2a6c0590ce
scripts/build_tar.py
scripts/build_tar.py
import os
import subprocess

root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")

def _is_dir_newer(directory, filename):
    file_mtime = os.stat(filename).st_mtime

    for dirname, _, filenames in os.walk(directory):
        for filename in filenames:
            if filename.endswith(".pyc"):
                continue
            if _is_file_newer(os.path.join(dirname, filename), file_mtime):
                return True

    return False

def _is_file_newer(filename, file_mtime):
    return os.stat(filename).st_mtime > file_mtime

def _tar():
    if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
        raise Exception("Tar failed")

if __name__ == '__main__':
    if not os.path.exists(tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
       _is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
        _tar()
import os
import subprocess

root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")

def _is_dir_newer(directory, filename):
    file_mtime = os.stat(filename).st_mtime

    for dirname, _, filenames in os.walk(directory):
        if _is_file_newer(dirname, file_mtime):
            return True
        for filename in filenames:
            if filename.endswith(".pyc"):
                continue
            if _is_file_newer(os.path.join(dirname, filename), file_mtime):
                return True

    return False

def _is_file_newer(filename, file_mtime):
    returned = os.stat(filename).st_mtime > file_mtime
    return returned

def _tar():
    if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
        raise Exception("Tar failed")

if __name__ == '__main__':
    if not os.path.exists(tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
       _is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
        _tar()
Fix building tar in deployment
Fix building tar in deployment
Python
bsd-3-clause
vmalloc/mailboxer,Infinidat/lanister,vmalloc/mailboxer,Infinidat/lanister,getslash/mailboxer,vmalloc/mailboxer,getslash/mailboxer,getslash/mailboxer
import os
import subprocess

root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")

def _is_dir_newer(directory, filename):
    file_mtime = os.stat(filename).st_mtime

    for dirname, _, filenames in os.walk(directory):
+         if _is_file_newer(dirname, file_mtime):
+             return True
        for filename in filenames:
            if filename.endswith(".pyc"):
                continue
            if _is_file_newer(os.path.join(dirname, filename), file_mtime):
                return True

    return False

def _is_file_newer(filename, file_mtime):
-     return os.stat(filename).st_mtime > file_mtime
+     returned = os.stat(filename).st_mtime > file_mtime
+     return returned

def _tar():
    if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
        raise Exception("Tar failed")

if __name__ == '__main__':
    if not os.path.exists(tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
       _is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
        _tar()
Fix building tar in deployment
## Code Before:
import os
import subprocess

root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")

def _is_dir_newer(directory, filename):
    file_mtime = os.stat(filename).st_mtime

    for dirname, _, filenames in os.walk(directory):
        for filename in filenames:
            if filename.endswith(".pyc"):
                continue
            if _is_file_newer(os.path.join(dirname, filename), file_mtime):
                return True

    return False

def _is_file_newer(filename, file_mtime):
    return os.stat(filename).st_mtime > file_mtime

def _tar():
    if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
        raise Exception("Tar failed")

if __name__ == '__main__':
    if not os.path.exists(tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
       _is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
        _tar()
## Instruction:
Fix building tar in deployment
## Code After:
import os
import subprocess

root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")

def _is_dir_newer(directory, filename):
    file_mtime = os.stat(filename).st_mtime

    for dirname, _, filenames in os.walk(directory):
        if _is_file_newer(dirname, file_mtime):
            return True
        for filename in filenames:
            if filename.endswith(".pyc"):
                continue
            if _is_file_newer(os.path.join(dirname, filename), file_mtime):
                return True

    return False

def _is_file_newer(filename, file_mtime):
    returned = os.stat(filename).st_mtime > file_mtime
    return returned

def _tar():
    if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
        raise Exception("Tar failed")

if __name__ == '__main__':
    if not os.path.exists(tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
       _is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
       _is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
        _tar()
166bff52496bfb47c5a3a03585bd10fb449b8d77
Lib/curses/__init__.py
Lib/curses/__init__.py
__revision__ = "$Id$"

from _curses import *
from curses.wrapper import wrapper
__revision__ = "$Id$"

from _curses import *
from curses.wrapper import wrapper

# Some constants, most notably the ACS_* ones, are only added to the C
# _curses module's dictionary after initscr() is called. (Some
# versions of SGI's curses don't define values for those constants
# until initscr() has been called.) This wrapper function calls the
# underlying C initscr(), and then copies the constants from the
# _curses module to the curses package's dictionary. Don't do 'from
# curses import *' if you'll be needing the ACS_* constants.

def initscr():
    import _curses, curses
    stdscr = _curses.initscr()
    for key, value in _curses.__dict__.items():
        if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'):
            setattr(curses, key, value)

    return stdscr
Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings
Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
__revision__ = "$Id$"

from _curses import *
from curses.wrapper import wrapper

+ # Some constants, most notably the ACS_* ones, are only added to the C
+ # _curses module's dictionary after initscr() is called. (Some
+ # versions of SGI's curses don't define values for those constants
+ # until initscr() has been called.) This wrapper function calls the
+ # underlying C initscr(), and then copies the constants from the
+ # _curses module to the curses package's dictionary. Don't do 'from
+ # curses import *' if you'll be needing the ACS_* constants.
+
+ def initscr():
+     import _curses, curses
+     stdscr = _curses.initscr()
+     for key, value in _curses.__dict__.items():
+         if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'):
+             setattr(curses, key, value)
+
+     return stdscr
Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings
## Code Before:
__revision__ = "$Id$"

from _curses import *
from curses.wrapper import wrapper
## Instruction:
Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings
## Code After:
__revision__ = "$Id$"

from _curses import *
from curses.wrapper import wrapper

# Some constants, most notably the ACS_* ones, are only added to the C
# _curses module's dictionary after initscr() is called. (Some
# versions of SGI's curses don't define values for those constants
# until initscr() has been called.) This wrapper function calls the
# underlying C initscr(), and then copies the constants from the
# _curses module to the curses package's dictionary. Don't do 'from
# curses import *' if you'll be needing the ACS_* constants.

def initscr():
    import _curses, curses
    stdscr = _curses.initscr()
    for key, value in _curses.__dict__.items():
        if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'):
            setattr(curses, key, value)

    return stdscr
17faea99343e37036b7ee35e5d3273f98a52dba9
Python/tomviz/utils.py
Python/tomviz/utils.py
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa

def get_scalars(dataobject):
    do = dsa.WrapDataObject(dataobject)
    # get the first
    rawarray = do.PointData.GetScalars()
    vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    return vtkarray

def set_scalars(dataobject, newscalars):
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars
    do.PointData.append(newscalars, name)
    do.PointData.SetActiveScalars(name)
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
import vtk.util.numpy_support as np_s

def get_scalars(dataobject):
    do = dsa.WrapDataObject(dataobject)
    # get the first
    rawarray = do.PointData.GetScalars()
    vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    return vtkarray

def set_scalars(dataobject, newscalars):
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars

    # handle the case if the newscalars array has a type that
    # cannot be passed on to VTK. In which case, we convert to
    # convert to float64
    vtk_typecode = np_s.get_vtk_array_type(newscalars.dtype)
    if vtk_typecode is None:
        newscalars = newscalars.astype(np.float64)
    do.PointData.append(newscalars, name)
    do.PointData.SetActiveScalars(name)
Fix numpy related errors on Mavericks.
Fix numpy related errors on Mavericks. The problem was due to the fact that operations (like sqrt) can return a float16 arrays which cannot be passed back to VTK directly. Added a temporary conversion to float64. We should potentially handle this in VTK.
Python
bsd-3-clause
cryos/tomviz,thewtex/tomviz,cjh1/tomviz,cryos/tomviz,cryos/tomviz,Hovden/tomviz,Hovden/tomviz,yijiang1/tomviz,cjh1/tomviz,thewtex/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,yijiang1/tomviz,cjh1/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,thewtex/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,OpenChemistry/tomviz
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
+ import vtk.util.numpy_support as np_s

def get_scalars(dataobject):
    do = dsa.WrapDataObject(dataobject)
    # get the first
    rawarray = do.PointData.GetScalars()
    vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    return vtkarray

def set_scalars(dataobject, newscalars):
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars
+
+     # handle the case if the newscalars array has a type that
+     # cannot be passed on to VTK. In which case, we convert to
+     # convert to float64
+     vtk_typecode = np_s.get_vtk_array_type(newscalars.dtype)
+     if vtk_typecode is None:
+         newscalars = newscalars.astype(np.float64)
    do.PointData.append(newscalars, name)
    do.PointData.SetActiveScalars(name)
Fix numpy related errors on Mavericks.
## Code Before:
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa

def get_scalars(dataobject):
    do = dsa.WrapDataObject(dataobject)
    # get the first
    rawarray = do.PointData.GetScalars()
    vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    return vtkarray

def set_scalars(dataobject, newscalars):
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars
    do.PointData.append(newscalars, name)
    do.PointData.SetActiveScalars(name)
## Instruction:
Fix numpy related errors on Mavericks.
## Code After:
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
import vtk.util.numpy_support as np_s

def get_scalars(dataobject):
    do = dsa.WrapDataObject(dataobject)
    # get the first
    rawarray = do.PointData.GetScalars()
    vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    return vtkarray

def set_scalars(dataobject, newscalars):
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars

    # handle the case if the newscalars array has a type that
    # cannot be passed on to VTK. In which case, we convert to
    # convert to float64
    vtk_typecode = np_s.get_vtk_array_type(newscalars.dtype)
    if vtk_typecode is None:
        newscalars = newscalars.astype(np.float64)
    do.PointData.append(newscalars, name)
    do.PointData.SetActiveScalars(name)
e753038de039fd23f0d59bb0094f59fc73efe22b
flask_apscheduler/json.py
flask_apscheduler/json.py
import flask
import json

from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict


class JSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()

        if isinstance(obj, Job):
            return job_to_dict(obj)

        return super(JSONEncoder, self).default(obj)


def dumps(obj, indent=None):
    return json.dumps(obj, indent=indent, cls=JSONEncoder)


def jsonify(data, status=None):
    indent = None

    if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
        indent = 2

    return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
import datetime
import flask
import json

from apscheduler.job import Job
from .utils import job_to_dict

loads = json.loads


def dumps(obj, indent=None):
    return json.dumps(obj, indent=indent, cls=JSONEncoder)


def jsonify(data, status=None):
    indent = None

    if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
        indent = 2

    return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')


class JSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()

        if isinstance(obj, datetime.date):
            return obj.isoformat()

        if isinstance(obj, Job):
            return job_to_dict(obj)

        return super(JSONEncoder, self).default(obj)
Set a custom JSON Encoder to serialize date class.
Set a custom JSON Encoder to serialize date class.
Python
apache-2.0
viniciuschiele/flask-apscheduler
+ import datetime
import flask
import json

- from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict

+ loads = json.loads
-
- class JSONEncoder(json.JSONEncoder):
-     def default(self, obj):
-         if isinstance(obj, datetime):
-             return obj.isoformat()
-
-         if isinstance(obj, Job):
-             return job_to_dict(obj)
-
-         return super(JSONEncoder, self).default(obj)

def dumps(obj, indent=None):
    return json.dumps(obj, indent=indent, cls=JSONEncoder)


def jsonify(data, status=None):
    indent = None

    if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
        indent = 2

    return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
+
+ class JSONEncoder(json.JSONEncoder):
+     def default(self, obj):
+         if isinstance(obj, datetime.datetime):
+             return obj.isoformat()
+
+         if isinstance(obj, datetime.date):
+             return obj.isoformat()
+
+         if isinstance(obj, Job):
+             return job_to_dict(obj)
+
+         return super(JSONEncoder, self).default(obj)
+
Set a custom JSON Encoder to serialize date class.
## Code Before:
import flask
import json

from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict


class JSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()

        if isinstance(obj, Job):
            return job_to_dict(obj)

        return super(JSONEncoder, self).default(obj)


def dumps(obj, indent=None):
    return json.dumps(obj, indent=indent, cls=JSONEncoder)


def jsonify(data, status=None):
    indent = None

    if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
        indent = 2

    return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
## Instruction:
Set a custom JSON Encoder to serialize date class.
## Code After:
import datetime
import flask
import json

from apscheduler.job import Job
from .utils import job_to_dict

loads = json.loads


def dumps(obj, indent=None):
    return json.dumps(obj, indent=indent, cls=JSONEncoder)


def jsonify(data, status=None):
    indent = None

    if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
        indent = 2

    return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')


class JSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()

        if isinstance(obj, datetime.date):
            return obj.isoformat()

        if isinstance(obj, Job):
            return job_to_dict(obj)

        return super(JSONEncoder, self).default(obj)
edcfe2b156af23943478bc86592b4c8d5dc07e10
flask_mongoengine/json.py
flask_mongoengine/json.py
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
from mongoengine import QuerySet


def _make_encoder(superclass):
    class MongoEngineJSONEncoder(superclass):
        '''
        A JSONEncoder which provides serialization of MongoEngine
        documents and querysets.
        '''
        def default(self, obj):
            if isinstance(obj, BaseDocument):
                return json_util._json_convert(obj.to_mongo())
            elif isinstance(obj, QuerySet):
                return json_util._json_convert(obj.as_pymongo())
            return superclass.default(self, obj)
    return MongoEngineJSONEncoder


MongoEngineJSONEncoder = _make_encoder(JSONEncoder)


def overide_json_encoder(app):
    '''
    A function to dynamically create a new MongoEngineJSONEncoder class
    based upon a custom base class.
    This function allows us to combine MongoEngine serialization with
    any changes to Flask's JSONEncoder which a user may have made
    prior to calling init_app.

    NOTE: This does not cover situations where users override
    an instance's json_encoder after calling init_app.
    '''
    app.json_encoder = _make_encoder(app.json_encoder)
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
try:
    from mongoengine.base import BaseQuerySet
except ImportError as ie: # support mongoengine < 0.7
    from mongoengine.queryset import QuerySet as BaseQuerySet


def _make_encoder(superclass):
    class MongoEngineJSONEncoder(superclass):
        '''
        A JSONEncoder which provides serialization of MongoEngine
        documents and queryset objects.
        '''
        def default(self, obj):
            if isinstance(obj, BaseDocument):
                return json_util._json_convert(obj.to_mongo())
            elif isinstance(obj, BaseQuerySet):
                return json_util._json_convert(obj.as_pymongo())
            return superclass.default(self, obj)
    return MongoEngineJSONEncoder


MongoEngineJSONEncoder = _make_encoder(JSONEncoder)


def overide_json_encoder(app):
    '''
    A function to dynamically create a new MongoEngineJSONEncoder class
    based upon a custom base class.
    This function allows us to combine MongoEngine serialization with
    any changes to Flask's JSONEncoder which a user may have made
    prior to calling init_app.

    NOTE: This does not cover situations where users override
    an instance's json_encoder after calling init_app.
    '''
    app.json_encoder = _make_encoder(app.json_encoder)
Support older versions of MongoEngine
Support older versions of MongoEngine
Python
bsd-3-clause
gerasim13/flask-mongoengine-1,rochacbruno/flask-mongoengine,quokkaproject/flask-mongoengine,quokkaproject/flask-mongoengine,gerasim13/flask-mongoengine-1,losintikfos/flask-mongoengine,rochacbruno/flask-mongoengine,losintikfos/flask-mongoengine
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
+ try:
-     from mongoengine import QuerySet
+     from mongoengine.base import BaseQuerySet
-
+ except ImportError as ie: # support mongoengine < 0.7
+     from mongoengine.queryset import QuerySet as BaseQuerySet

def _make_encoder(superclass):
    class MongoEngineJSONEncoder(superclass):
        '''
        A JSONEncoder which provides serialization of MongoEngine
-         documents and querysets.
+         documents and queryset objects.
        '''
        def default(self, obj):
            if isinstance(obj, BaseDocument):
                return json_util._json_convert(obj.to_mongo())
-             elif isinstance(obj, QuerySet):
+             elif isinstance(obj, BaseQuerySet):
                return json_util._json_convert(obj.as_pymongo())
            return superclass.default(self, obj)
    return MongoEngineJSONEncoder


MongoEngineJSONEncoder = _make_encoder(JSONEncoder)


def overide_json_encoder(app):
    '''
    A function to dynamically create a new MongoEngineJSONEncoder class
    based upon a custom base class.
    This function allows us to combine MongoEngine serialization with
    any changes to Flask's JSONEncoder which a user may have made
    prior to calling init_app.

    NOTE: This does not cover situations where users override
    an instance's json_encoder after calling init_app.
    '''
    app.json_encoder = _make_encoder(app.json_encoder)
Support older versions of MongoEngine
## Code Before:
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
from mongoengine import QuerySet


def _make_encoder(superclass):
    class MongoEngineJSONEncoder(superclass):
        '''
        A JSONEncoder which provides serialization of MongoEngine
        documents and querysets.
        '''
        def default(self, obj):
            if isinstance(obj, BaseDocument):
                return json_util._json_convert(obj.to_mongo())
            elif isinstance(obj, QuerySet):
                return json_util._json_convert(obj.as_pymongo())
            return superclass.default(self, obj)
    return MongoEngineJSONEncoder


MongoEngineJSONEncoder = _make_encoder(JSONEncoder)


def overide_json_encoder(app):
    '''
    A function to dynamically create a new MongoEngineJSONEncoder class
    based upon a custom base class.
    This function allows us to combine MongoEngine serialization with
    any changes to Flask's JSONEncoder which a user may have made
    prior to calling init_app.

    NOTE: This does not cover situations where users override
    an instance's json_encoder after calling init_app.
    '''
    app.json_encoder = _make_encoder(app.json_encoder)
## Instruction:
Support older versions of MongoEngine
## Code After:
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
try:
    from mongoengine.base import BaseQuerySet
except ImportError as ie: # support mongoengine < 0.7
    from mongoengine.queryset import QuerySet as BaseQuerySet


def _make_encoder(superclass):
    class MongoEngineJSONEncoder(superclass):
        '''
        A JSONEncoder which provides serialization of MongoEngine
        documents and queryset objects.
        '''
        def default(self, obj):
            if isinstance(obj, BaseDocument):
                return json_util._json_convert(obj.to_mongo())
            elif isinstance(obj, BaseQuerySet):
                return json_util._json_convert(obj.as_pymongo())
            return superclass.default(self, obj)
    return MongoEngineJSONEncoder


MongoEngineJSONEncoder = _make_encoder(JSONEncoder)


def overide_json_encoder(app):
    '''
    A function to dynamically create a new MongoEngineJSONEncoder class
    based upon a custom base class.
    This function allows us to combine MongoEngine serialization with
    any changes to Flask's JSONEncoder which a user may have made
    prior to calling init_app.

    NOTE: This does not cover situations where users override
    an instance's json_encoder after calling init_app.
    '''
    app.json_encoder = _make_encoder(app.json_encoder)
3d7b5d61b7e985d409cd50c98d4bcbdc8ab9c723
mailer.py
mailer.py
from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys


class Mailer:
    MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))

    @staticmethod
    def send(message):
        Mailer.MAILER.send(message)

    @staticmethod
    def start():
        Mailer.MAILER.start()

    @staticmethod
    def stop():
        Mailer.MAILER.stop()

    @staticmethod
    def send_transactions(transactions, to_addr):
        Mailer.start()
        message = Message(
            to=to_addr,
            subject='New transactions',
            plain=repr(transactions)
        )
        Mailer.send(message)
        Mailer.stop()

    @staticmethod
    def get_cli_email_addr():
        try:
            return sys.argv[1]
        except IndexError:
            return None
from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys
import os
import pwd
import socket


class Mailer:
    MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))
    DEFAULT_AUTHOR = pwd.getpwuid(os.getuid()).pw_name + '@' + socket.getfqdn()

    @staticmethod
    def send(message):
        Mailer.MAILER.send(message)

    @staticmethod
    def start():
        Mailer.MAILER.start()

    @staticmethod
    def stop():
        Mailer.MAILER.stop()

    @staticmethod
    def send_transactions(transactions, to_addr):
        Mailer.start()
        message = Message(
            author=Mailer.DEFAULT_AUTHOR,
            to=to_addr,
            subject='New transactions',
            plain=repr(transactions)
        )
        Mailer.send(message)
        Mailer.stop()

    @staticmethod
    def get_cli_email_addr():
        try:
            return sys.argv[1]
        except IndexError:
            return None
Use current user as email author
Use current user as email author
Python
isc
2mv/raapija
from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys
+ import os
+ import pwd
+ import socket


class Mailer:
    MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))
+     DEFAULT_AUTHOR = pwd.getpwuid(os.getuid()).pw_name + '@' + socket.getfqdn()

    @staticmethod
    def send(message):
        Mailer.MAILER.send(message)

    @staticmethod
    def start():
        Mailer.MAILER.start()

    @staticmethod
    def stop():
        Mailer.MAILER.stop()

    @staticmethod
    def send_transactions(transactions, to_addr):
        Mailer.start()
        message = Message(
+             author=Mailer.DEFAULT_AUTHOR,
            to=to_addr,
            subject='New transactions',
            plain=repr(transactions)
        )
        Mailer.send(message)
        Mailer.stop()

    @staticmethod
    def get_cli_email_addr():
        try:
            return sys.argv[1]
        except IndexError:
            return None
Use current user as email author
## Code Before:
from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys


class Mailer:
    MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))

    @staticmethod
    def send(message):
        Mailer.MAILER.send(message)

    @staticmethod
    def start():
        Mailer.MAILER.start()

    @staticmethod
    def stop():
        Mailer.MAILER.stop()

    @staticmethod
    def send_transactions(transactions, to_addr):
        Mailer.start()
        message = Message(
            to=to_addr,
            subject='New transactions',
            plain=repr(transactions)
        )
        Mailer.send(message)
        Mailer.stop()

    @staticmethod
    def get_cli_email_addr():
        try:
            return sys.argv[1]
        except IndexError:
            return None
## Instruction:
Use current user as email author
## Code After:
from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys
import os
import pwd
import socket


class Mailer:
    MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))
    DEFAULT_AUTHOR = pwd.getpwuid(os.getuid()).pw_name + '@' + socket.getfqdn()

    @staticmethod
    def send(message):
        Mailer.MAILER.send(message)

    @staticmethod
    def start():
        Mailer.MAILER.start()

    @staticmethod
    def stop():
        Mailer.MAILER.stop()

    @staticmethod
    def send_transactions(transactions, to_addr):
        Mailer.start()
        message = Message(
            author=Mailer.DEFAULT_AUTHOR,
            to=to_addr,
            subject='New transactions',
            plain=repr(transactions)
        )
        Mailer.send(message)
        Mailer.stop()

    @staticmethod
    def get_cli_email_addr():
        try:
            return sys.argv[1]
        except IndexError:
            return None
445f244ddac6001b65f03d058a14178a19919eed
diamondash/config.py
diamondash/config.py
import yaml from diamondash import utils class ConfigError(Exception): """Raised when there is an error parsing a configuration""" class ConfigMetaClass(type): def __new__(mcs, name, bases, dict): cls = type.__new__(mcs, name, bases, dict) defaults = {} for base in bases: if hasattr(base, 'DEFAULTS'): defaults.update(base.DEFAULTS) defaults.update(cls.DEFAULTS) cls.DEFAULTS = defaults return cls class Config(dict): __metaclass__ = ConfigMetaClass DEFAULTS = {} def __init__(self, items): super(Config, self).__init__(self._parse(items)) @classmethod def parse(cls, items): return items @classmethod def _parse(cls, items): items = utils.add_dicts(cls.DEFAULTS, items) return cls.parse(items) @classmethod def from_file(cls, filename, **defaults): items = utils.add_dicts(defaults, yaml.safe_load(open(filename))) return cls(items) @classmethod def for_type(cls, type_name): type_cls = utils.load_class_by_string(type_name) return type_cls.CONFIG_CLS
import yaml from diamondash import utils class ConfigError(Exception): """Raised when there is an error parsing a configuration""" class ConfigMetaClass(type): def __new__(mcs, name, bases, dict): cls = type.__new__(mcs, name, bases, dict) defaults = {} for base in bases: if hasattr(base, 'DEFAULTS'): defaults.update(base.DEFAULTS) defaults.update(cls.DEFAULTS) cls.DEFAULTS = defaults return cls class Config(dict): __metaclass__ = ConfigMetaClass DEFAULTS = {} def __init__(self, items=None): super(Config, self).__init__(self._parse(items or {})) @classmethod def parse(cls, items): return items @classmethod def _parse(cls, items): items = utils.add_dicts(cls.DEFAULTS, items) return cls.parse(items) @classmethod def from_file(cls, filename, **defaults): items = utils.add_dicts(defaults, yaml.safe_load(open(filename))) return cls(items) @classmethod def for_type(cls, type_name): type_cls = utils.load_class_by_string(type_name) return type_cls.CONFIG_CLS
Allow Config to be initialised without any args
Allow Config to be initialised without any args
Python
bsd-3-clause
praekelt/diamondash,praekelt/diamondash,praekelt/diamondash
import yaml from diamondash import utils class ConfigError(Exception): """Raised when there is an error parsing a configuration""" class ConfigMetaClass(type): def __new__(mcs, name, bases, dict): cls = type.__new__(mcs, name, bases, dict) defaults = {} for base in bases: if hasattr(base, 'DEFAULTS'): defaults.update(base.DEFAULTS) defaults.update(cls.DEFAULTS) cls.DEFAULTS = defaults return cls class Config(dict): __metaclass__ = ConfigMetaClass DEFAULTS = {} - def __init__(self, items): + def __init__(self, items=None): - super(Config, self).__init__(self._parse(items)) + super(Config, self).__init__(self._parse(items or {})) @classmethod def parse(cls, items): return items @classmethod def _parse(cls, items): items = utils.add_dicts(cls.DEFAULTS, items) return cls.parse(items) @classmethod def from_file(cls, filename, **defaults): items = utils.add_dicts(defaults, yaml.safe_load(open(filename))) return cls(items) @classmethod def for_type(cls, type_name): type_cls = utils.load_class_by_string(type_name) return type_cls.CONFIG_CLS
Allow Config to be initialised without any args
## Code Before: import yaml from diamondash import utils class ConfigError(Exception): """Raised when there is an error parsing a configuration""" class ConfigMetaClass(type): def __new__(mcs, name, bases, dict): cls = type.__new__(mcs, name, bases, dict) defaults = {} for base in bases: if hasattr(base, 'DEFAULTS'): defaults.update(base.DEFAULTS) defaults.update(cls.DEFAULTS) cls.DEFAULTS = defaults return cls class Config(dict): __metaclass__ = ConfigMetaClass DEFAULTS = {} def __init__(self, items): super(Config, self).__init__(self._parse(items)) @classmethod def parse(cls, items): return items @classmethod def _parse(cls, items): items = utils.add_dicts(cls.DEFAULTS, items) return cls.parse(items) @classmethod def from_file(cls, filename, **defaults): items = utils.add_dicts(defaults, yaml.safe_load(open(filename))) return cls(items) @classmethod def for_type(cls, type_name): type_cls = utils.load_class_by_string(type_name) return type_cls.CONFIG_CLS ## Instruction: Allow Config to be initialised without any args ## Code After: import yaml from diamondash import utils class ConfigError(Exception): """Raised when there is an error parsing a configuration""" class ConfigMetaClass(type): def __new__(mcs, name, bases, dict): cls = type.__new__(mcs, name, bases, dict) defaults = {} for base in bases: if hasattr(base, 'DEFAULTS'): defaults.update(base.DEFAULTS) defaults.update(cls.DEFAULTS) cls.DEFAULTS = defaults return cls class Config(dict): __metaclass__ = ConfigMetaClass DEFAULTS = {} def __init__(self, items=None): super(Config, self).__init__(self._parse(items or {})) @classmethod def parse(cls, items): return items @classmethod def _parse(cls, items): items = utils.add_dicts(cls.DEFAULTS, items) return cls.parse(items) @classmethod def from_file(cls, filename, **defaults): items = utils.add_dicts(defaults, yaml.safe_load(open(filename))) return cls(items) @classmethod def for_type(cls, type_name): type_cls = utils.load_class_by_string(type_name) return type_cls.CONFIG_CLS
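To see why `items=None` plus `items or {}` is enough to make `Config()` legal, here is a self-contained miniature of the same pattern (MiniConfig is an illustrative stand-in, not diamondash's class):

class MiniConfig(dict):
    DEFAULTS = {'debug': False}

    def __init__(self, items=None):
        # `items or {}` substitutes an empty dict when no argument
        # (or an empty one) is given, mirroring the patch above.
        merged = dict(self.DEFAULTS)
        merged.update(items or {})
        super(MiniConfig, self).__init__(merged)

print(MiniConfig())                 # {'debug': False}
print(MiniConfig({'debug': True}))  # {'debug': True}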
bfcec696308ee8bfd226a54c17a7e15d49e2aed7
var/spack/repos/builtin/packages/nextflow/package.py
var/spack/repos/builtin/packages/nextflow/package.py
from spack import * from glob import glob import os class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): chmod = which('chmod') mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) chmod('+x', join_path(prefix.bin, "nextflow"))
from spack import * class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) set_executable( join_path(prefix.bin, "nextflow"))
Add standard header, use spack helpers
Add standard header, use spack helpers Added the standard header (stolen from R). Touched up the install to use set_executable rather than doing it myself.
Python
lgpl-2.1
matthiasdiener/spack,mfherbst/spack,lgarren/spack,tmerrick1/spack,TheTimmy/spack,LLNL/spack,tmerrick1/spack,TheTimmy/spack,TheTimmy/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,TheTimmy/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,mfherbst/spack,iulian787/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,lgarren/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,lgarren/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,krafczyk/spack
from spack import * + - from glob import glob - import os class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): - chmod = which('chmod') - mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) - chmod('+x', join_path(prefix.bin, "nextflow")) + set_executable( join_path(prefix.bin, "nextflow"))
Add standard header, use spack helpers
## Code Before: from spack import * from glob import glob import os class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): chmod = which('chmod') mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) chmod('+x', join_path(prefix.bin, "nextflow")) ## Instruction: Add standard header, use spack helpers ## Code After: from spack import * class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) set_executable( join_path(prefix.bin, "nextflow"))
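Spack's `set_executable` helper replaces the hand-rolled `chmod('+x', ...)` call above. A rough pure-stdlib equivalent of what such a helper does (a sketch under that assumption, not Spack's actual implementation):

import os
import stat
import tempfile

def set_executable_sketch(path):
    # Add the user/group/other execute bits to the current mode,
    # the moral equivalent of `chmod +x path`.
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

with tempfile.NamedTemporaryFile(delete=False) as f:
    path = f.name
set_executable_sketch(path)
print(oct(os.stat(path).st_mode))
os.remove(path)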
0324d220872ef063cb39ce62264bd4835f260920
test_project/urls.py
test_project/urls.py
from django.conf.urls import include, url from django.contrib import admin from django.views.generic import RedirectView from test_app.models import DummyModel, MushroomSpot from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint from mapentity.registry import registry handler403 = 'mapentity.views.handler403' admin.autodiscover() models_urls = registry.register(DummyModel) + registry.register(MushroomSpot) urlpatterns = [ url(r'', include(models_urls, namespace='test_app')), url(r'', include('mapentity.urls', namespace='mapentity', app_name='mapentity')), url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'), url(r'^login/$', 'django.contrib.auth.views.login', name='login'), url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',), url(r'^paperclip/', include('paperclip.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"), url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"), ]
from django.conf.urls import include, url from django.contrib import admin from django.views.generic import RedirectView from test_app.models import DummyModel, MushroomSpot from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint from mapentity.registry import registry from django.contrib.auth import views as auth_views handler403 = 'mapentity.views.handler403' admin.autodiscover() models_urls = registry.register(DummyModel) + registry.register(MushroomSpot) urlpatterns = [ url(r'', include(models_urls, namespace='test_app')), url(r'', include('mapentity.urls', namespace='mapentity', app_name='mapentity')), url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'), url(r'^login/$', auth_views.login, name='login'), url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',), url(r'^paperclip/', include('paperclip.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"), url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"), ]
Replace string view references with callables in urls
Replace string view references with callables in urls
Python
bsd-3-clause
makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity
from django.conf.urls import include, url from django.contrib import admin from django.views.generic import RedirectView from test_app.models import DummyModel, MushroomSpot from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint from mapentity.registry import registry + from django.contrib.auth import views as auth_views handler403 = 'mapentity.views.handler403' admin.autodiscover() models_urls = registry.register(DummyModel) + registry.register(MushroomSpot) urlpatterns = [ url(r'', include(models_urls, namespace='test_app')), url(r'', include('mapentity.urls', namespace='mapentity', app_name='mapentity')), url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'), - url(r'^login/$', 'django.contrib.auth.views.login', name='login'), + url(r'^login/$', auth_views.login, name='login'), - url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',), + url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',), url(r'^paperclip/', include('paperclip.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"), url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"), ]
Replace string view references with callables in urls
## Code Before: from django.conf.urls import include, url from django.contrib import admin from django.views.generic import RedirectView from test_app.models import DummyModel, MushroomSpot from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint from mapentity.registry import registry handler403 = 'mapentity.views.handler403' admin.autodiscover() models_urls = registry.register(DummyModel) + registry.register(MushroomSpot) urlpatterns = [ url(r'', include(models_urls, namespace='test_app')), url(r'', include('mapentity.urls', namespace='mapentity', app_name='mapentity')), url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'), url(r'^login/$', 'django.contrib.auth.views.login', name='login'), url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',), url(r'^paperclip/', include('paperclip.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"), url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"), ] ## Instruction: Replace str into call in url ## Code After: from django.conf.urls import include, url from django.contrib import admin from django.views.generic import RedirectView from test_app.models import DummyModel, MushroomSpot from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint from mapentity.registry import registry from django.contrib.auth import views as auth_views handler403 = 'mapentity.views.handler403' admin.autodiscover() models_urls = registry.register(DummyModel) + registry.register(MushroomSpot) urlpatterns = [ url(r'', include(models_urls, namespace='test_app')), url(r'', include('mapentity.urls', namespace='mapentity', app_name='mapentity')), url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'), url(r'^login/$', auth_views.login, name='login'), url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',), url(r'^paperclip/', include('paperclip.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"), url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"), ]
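The point of passing `auth_views.login` instead of the dotted string `'django.contrib.auth.views.login'` is that the import happens, and fails loudly, when the URLconf is loaded rather than being resolved lazily by name. A Django-free sketch of what string resolution used to involve (resolve_dotted is invented for illustration, not Django's code):

from importlib import import_module

def resolve_dotted(path):
    # What a string view forced the framework to do at runtime:
    # split the dotted path and look the callable up by name.
    module_name, _, attr = path.rpartition('.')
    return getattr(import_module(module_name), attr)

# Demonstrated with a stdlib path; a typo here surfaces only when
# the lookup runs, whereas a direct callable fails at import time.
print(resolve_dotted('os.path.join') is __import__('os').path.join)  # True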
d1ea64d6645f60df38221cbd194c26dff9686dcd
scripts/utils.py
scripts/utils.py
import sys import hashlib def e(s): if type(s) == str: return str return s.encode('utf-8') def d(s): if type(s) == unicode: return s return unicode(s, 'utf-8') def mkid(s): return hashlib.sha1(e(s)).hexdigest()[:2*4] class Logger(object): def __init__(self): self._mode = 'INFO' def progress(self, message): if not sys.stderr.isatty(): return if self._mode == 'PROGRESS': print >>sys.stderr, '\r', print >>sys.stderr, message, self._mode = 'PROGRESS' def info(self, message): if self._mode == 'PROGRESS': print >>sys.stderr print >>sys.stderr, message self._mode = 'INFO'
import sys import hashlib def e(s): if type(s) == str: return str return s.encode('utf-8') def d(s): if type(s) == unicode: return s return unicode(s, 'utf-8') def mkid(s): return hashlib.sha1(e(s)).hexdigest()[:2*4] class Logger(object): def __init__(self): self._mode = 'INFO' def progress(self, message): message = e(message) if not sys.stderr.isatty(): return if self._mode == 'PROGRESS': print >>sys.stderr, '\r', print >>sys.stderr, message, self._mode = 'PROGRESS' def info(self, message): message = e(message) if self._mode == 'PROGRESS': print >>sys.stderr print >>sys.stderr, message self._mode = 'INFO'
Handle logging unicode messages in python2.
Handle logging unicode messages in python2. Former-commit-id: 257d94eb71d5597ff52a18ec1530d73496901ef4
Python
mit
guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt
import sys import hashlib def e(s): if type(s) == str: return str return s.encode('utf-8') def d(s): if type(s) == unicode: return s return unicode(s, 'utf-8') def mkid(s): return hashlib.sha1(e(s)).hexdigest()[:2*4] class Logger(object): def __init__(self): self._mode = 'INFO' def progress(self, message): + message = e(message) if not sys.stderr.isatty(): return if self._mode == 'PROGRESS': print >>sys.stderr, '\r', print >>sys.stderr, message, self._mode = 'PROGRESS' def info(self, message): + message = e(message) if self._mode == 'PROGRESS': print >>sys.stderr print >>sys.stderr, message self._mode = 'INFO'
Handle logging unicode messages in python2.
## Code Before: import sys import hashlib def e(s): if type(s) == str: return str return s.encode('utf-8') def d(s): if type(s) == unicode: return s return unicode(s, 'utf-8') def mkid(s): return hashlib.sha1(e(s)).hexdigest()[:2*4] class Logger(object): def __init__(self): self._mode = 'INFO' def progress(self, message): if not sys.stderr.isatty(): return if self._mode == 'PROGRESS': print >>sys.stderr, '\r', print >>sys.stderr, message, self._mode = 'PROGRESS' def info(self, message): if self._mode == 'PROGRESS': print >>sys.stderr print >>sys.stderr, message self._mode = 'INFO' ## Instruction: Handle logging unicode messages in python2. ## Code After: import sys import hashlib def e(s): if type(s) == str: return str return s.encode('utf-8') def d(s): if type(s) == unicode: return s return unicode(s, 'utf-8') def mkid(s): return hashlib.sha1(e(s)).hexdigest()[:2*4] class Logger(object): def __init__(self): self._mode = 'INFO' def progress(self, message): message = e(message) if not sys.stderr.isatty(): return if self._mode == 'PROGRESS': print >>sys.stderr, '\r', print >>sys.stderr, message, self._mode = 'PROGRESS' def info(self, message): message = e(message) if self._mode == 'PROGRESS': print >>sys.stderr print >>sys.stderr, message self._mode = 'INFO'
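Two notes on this record. First, the fix is the standard Python 2 idiom of encoding unicode to UTF-8 bytes before printing, so writes to non-UTF-8 streams (pipes, redirected stderr) cannot raise UnicodeEncodeError. Second, the repository's `e()` helper as committed returns the built-in type `str`, not the value `s`, when handed a byte string. A corrected, Python 2-only sketch:

# -*- coding: utf-8 -*-
# Python 2 only.
def encode_utf8(s):
    if isinstance(s, str):   # already bytes: return the value, not the type
        return s
    return s.encode('utf-8')

print encode_utf8(u'r\xe9sum\xe9')   # prints UTF-8 bytes safely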
9a698d1428fbe0744c9dba3532b778569dbe1dd4
server.py
server.py
import socket import sys class SimpleServer(object): """Simple server using the socket library""" def __init__(self, blocking=False, connection_oriented=True): """ The constructor initializes socket specifying the blocking status and if it must be a connection oriented socket. :param blocking: A flag that specifies if the socket must be blocking :ptype: Boolean :param connection_oriented: A flag that specifies if the socket must be connection oriented or not :ptype: Boolean """ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not blocking: self.sock.setblocking(0) def connect(self, host, port): """ Connects the server to the "host", and prepares it to listen on "port" :param host: The network layer identifier of an interface :ptype: String or Integer (see help(socket)) :param port: The transport layer identifier of an application :ptype: Integer """ self.sock.connect((host, port))
import socket import sys __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "facundovt@gmail.com" class SimpleServer(object): """Simple server using the socket library""" def __init__(self, blocking=False, connection_oriented=True): """ The constructor initializes socket specifying the blocking status and if it must be a connection oriented socket. :param blocking: A flag that specifies if the socket must be blocking :ptype: Boolean :param connection_oriented: A flag that specifies if the socket must be connection oriented or not :ptype: Boolean """ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not blocking: self.sock.setblocking(0) def connect(self, host, port): """ Connects the server to the "host", and prepares it to listen on "port" :param host: The network layer identifier of an interface :ptype: String or Integer (see help(socket)) :param port: The transport layer identifier of an application :ptype: Integer """ server_address = (host, port) self.sock.connect(server_address) print('starting up on %s port %s' % server_address)
Add docstrings and author reference
Add docstrings and author reference
Python
mit
facundovictor/non-blocking-socket-samples
+ import socket import sys + + + __author__ = "Facundo Victor" + __license__ = "MIT" + __email__ = "facundovt@gmail.com" class SimpleServer(object): """Simple server using the socket library""" def __init__(self, blocking=False, connection_oriented=True): """ The constructor initializes socket specifying the blocking status and if it must be a connection oriented socket. :param blocking: A flag that specifies if the socket must be blocking :ptype: Boolean :param connection_oriented: A flag that specifies if the socket must be connection oriented or not :ptype: Boolean """ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not blocking: self.sock.setblocking(0) def connect(self, host, port): """ Connects the server to the "host", and prepares it to listen on "port" :param host: The network layer identifier of an interface :ptype: String or Integer (see help(socket)) :param port: The transport layer identifier of an application :ptype: Integer """ - self.sock.connect((host, port)) + server_address = (host, port) + self.sock.connect(server_address) + print('starting up on %s port %s' % server_address)
Add docstrings and author reference
## Code Before: import socket import sys class SimpleServer(object): """Simple server using the socket library""" def __init__(self, blocking=False, connection_oriented=True): """ The constructor initializes socket specifying the blocking status and if it must be a connection oriented socket. :param blocking: A flag that specifies if the socket must be blocking :ptype: Boolean :param connection_oriented: A flag that specifies if the socket must be connection oriented or not :ptype: Boolean """ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not blocking: self.sock.setblocking(0) def connect(self, host, port): """ Connects the server to the "host", and prepares it to listen on "port" :param host: The network layer identifier of an interface :ptype: String or Integer (see help(socket)) :param port: The transport layer identifier of an application :ptype: Integer """ self.sock.connect((host, port)) ## Instruction: Add docstrings and author reference ## Code After: import socket import sys __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "facundovt@gmail.com" class SimpleServer(object): """Simple server using the socket library""" def __init__(self, blocking=False, connection_oriented=True): """ The constructor initializes socket specifying the blocking status and if it must be a connection oriented socket. :param blocking: A flag that specifies if the socket must be blocking :ptype: Boolean :param connection_oriented: A flag that specifies if the socket must be connection oriented or not :ptype: Boolean """ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not blocking: self.sock.setblocking(0) def connect(self, host, port): """ Connects the server to the "host", and prepares it to listen on "port" :param host: The network layer identifier of an interface :ptype: String or Integer (see help(socket)) :param port: The transport layer identifier of an application :ptype: Integer """ server_address = (host, port) self.sock.connect(server_address) print('starting up on %s port %s' % server_address)
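A small detail that makes the new `print('starting up on %s port %s' % server_address)` line work: a tuple on the right-hand side of `%` supplies one value per placeholder, so the address pair needs no unpacking. Standalone illustration:

server_address = ('localhost', 8080)
# The 2-tuple feeds both placeholders; wrapping it as (server_address,)
# would instead offer one value for two slots and raise TypeError.
print('starting up on %s port %s' % server_address)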
5f501af61b416dae0e46236a8e1f9684dcc66e21
python/decoder_test.py
python/decoder_test.py
import argparse import scanner import numpy as np import cv2 from decode import db @db.loader('frame') def load_frames(buf, metadata): return np.frombuffer(buf, dtype=np.uint8) \ .reshape((metadata.height,metadata.width,3)) def extract_frames(args): job = load_frames(args['dataset'], 'edr') video_paths = job._dataset.video_data.original_video_paths for (vid, frames) in job.as_frame_list(): video_path = video_paths[int(vid)] inp = cv2.VideoCapture(video_path) assert(inp.isOpened()) video_frame_num = -1 for (frame_num, buf) in frames: while video_frame_num != frame_num: _, video_frame = inp.read() video_frame_num += 1 scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR) frame_diff = (scanner_frame - video_frame).sum() if frame_diff != 0: print('Frame {} does not match!'.format(frame_num)) if __name__ == "__main__": p = argparse.ArgumentParser(description='Extract JPEG frames from videos') p.add_argument('dataset', type=str) extract_frames(p.parse_args().__dict__)
import argparse import scanner import numpy as np import cv2 from decode import db @db.loader('frame') def load_frames(buf, metadata): return np.frombuffer(buf, dtype=np.uint8) \ .reshape((metadata.height,metadata.width,3)) def extract_frames(args): job = load_frames(args['dataset'], 'edr') video_paths = job._dataset.video_data.original_video_paths for (vid, frames) in job.as_frame_list(): video_path = video_paths[int(vid)] inp = cv2.VideoCapture(video_path) assert(inp.isOpened()) video_frame_num = -1 for (frame_num, buf) in frames: while video_frame_num != frame_num: _, video_frame = inp.read() video_frame_num += 1 scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR) frame_diff = np.abs(scanner_frame - video_frame) if frame_diff.sum() != 0: print('Frame {} does not match!'.format(frame_num)) cv2.imwrite('decode_frames_' + str(frame_num) + '.jpg', np.concatenate( (scanner_frame, video_frame, frame_diff), 1)) if __name__ == "__main__": p = argparse.ArgumentParser(description='Extract JPEG frames from videos') p.add_argument('dataset', type=str) extract_frames(p.parse_args().__dict__)
Write out concatenated frame on decode test failure
Write out concatenated frame on decode test failure
Python
apache-2.0
scanner-research/scanner,scanner-research/scanner,scanner-research/scanner,scanner-research/scanner
import argparse import scanner import numpy as np import cv2 from decode import db @db.loader('frame') def load_frames(buf, metadata): return np.frombuffer(buf, dtype=np.uint8) \ .reshape((metadata.height,metadata.width,3)) def extract_frames(args): job = load_frames(args['dataset'], 'edr') video_paths = job._dataset.video_data.original_video_paths for (vid, frames) in job.as_frame_list(): video_path = video_paths[int(vid)] inp = cv2.VideoCapture(video_path) assert(inp.isOpened()) video_frame_num = -1 for (frame_num, buf) in frames: while video_frame_num != frame_num: _, video_frame = inp.read() video_frame_num += 1 scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR) - frame_diff = (scanner_frame - video_frame).sum() + frame_diff = np.abs(scanner_frame - video_frame) - if frame_diff != 0: + if frame_diff.sum() != 0: print('Frame {} does not match!'.format(frame_num)) - + cv2.imwrite('decode_frames_' + str(frame_num) + '.jpg', + np.concatenate( + (scanner_frame, video_frame, frame_diff), 1)) if __name__ == "__main__": p = argparse.ArgumentParser(description='Extract JPEG frames from videos') p.add_argument('dataset', type=str) extract_frames(p.parse_args().__dict__)
Write out concatenated frame on decode test failure
## Code Before: import argparse import scanner import numpy as np import cv2 from decode import db @db.loader('frame') def load_frames(buf, metadata): return np.frombuffer(buf, dtype=np.uint8) \ .reshape((metadata.height,metadata.width,3)) def extract_frames(args): job = load_frames(args['dataset'], 'edr') video_paths = job._dataset.video_data.original_video_paths for (vid, frames) in job.as_frame_list(): video_path = video_paths[int(vid)] inp = cv2.VideoCapture(video_path) assert(inp.isOpened()) video_frame_num = -1 for (frame_num, buf) in frames: while video_frame_num != frame_num: _, video_frame = inp.read() video_frame_num += 1 scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR) frame_diff = (scanner_frame - video_frame).sum() if frame_diff != 0: print('Frame {} does not match!'.format(frame_num)) if __name__ == "__main__": p = argparse.ArgumentParser(description='Extract JPEG frames from videos') p.add_argument('dataset', type=str) extract_frames(p.parse_args().__dict__) ## Instruction: Write out concatenated frame on decode test failure ## Code After: import argparse import scanner import numpy as np import cv2 from decode import db @db.loader('frame') def load_frames(buf, metadata): return np.frombuffer(buf, dtype=np.uint8) \ .reshape((metadata.height,metadata.width,3)) def extract_frames(args): job = load_frames(args['dataset'], 'edr') video_paths = job._dataset.video_data.original_video_paths for (vid, frames) in job.as_frame_list(): video_path = video_paths[int(vid)] inp = cv2.VideoCapture(video_path) assert(inp.isOpened()) video_frame_num = -1 for (frame_num, buf) in frames: while video_frame_num != frame_num: _, video_frame = inp.read() video_frame_num += 1 scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR) frame_diff = np.abs(scanner_frame - video_frame) if frame_diff.sum() != 0: print('Frame {} does not match!'.format(frame_num)) cv2.imwrite('decode_frames_' + str(frame_num) + '.jpg', np.concatenate( (scanner_frame, video_frame, frame_diff), 1)) if __name__ == "__main__": p = argparse.ArgumentParser(description='Extract JPEG frames from videos') p.add_argument('dataset', type=str) extract_frames(p.parse_args().__dict__)
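One caveat worth keeping in mind with the new `np.abs(scanner_frame - video_frame)`: both frames are uint8 arrays, and unsigned subtraction wraps modulo 256 before `abs` ever runs. Widening first gives the true per-pixel distance. A runnable illustration (an aside about NumPy semantics, not a claim about the repository's behaviour):

import numpy as np

a = np.array([10], dtype=np.uint8)
b = np.array([20], dtype=np.uint8)
print(np.abs(a - b))                          # [246] -- uint8 wraps around
print(np.abs(a.astype(int) - b.astype(int)))  # [10]  -- widen, then subtract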
e2cba02550dfbe8628daf024a2a35c0dffb234e9
python/cli/request.py
python/cli/request.py
import requests import os aport = os.environ.get('MYAPORT') if aport is None: aport = "80" aport = "23456" url1 = 'http://localhost:' + aport + '/' url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest' url3 = 'http://localhost:' + aport + '/action/autosimulateinvest' url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest' #headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} #headers={'Content-type':'application/json', 'Accept':'application/json'} headers={'Content-Type' : 'application/json;charset=utf-8'} def request1(param, webpath): return requests.post(url1 + webpath, json=param, headers=headers) def request2(market, data): return requests.post(url2 + '/market/' + str(market), json=data, headers=headers) def request3(market, data): return requests.post(url3 + '/market/' + str(market), json=data, headers=headers) def request4(market, data): return requests.post(url4 + '/market/' + str(market), json=data, headers=headers) def request0(data): return requests.post(url, data='', headers=headers) #return requests.post(url, data=json.dumps(data), headers=headers)
import requests import os aport = os.environ.get('MYAPORT') if aport is None: aport = "80" aport = "23456" ahost = os.environ.get('MYAHOST') if ahost is None: ahost = "localhost" url1 = 'http://' + ahost + ':' + aport + '/' #headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} #headers={'Content-type':'application/json', 'Accept':'application/json'} headers={'Content-Type' : 'application/json;charset=utf-8'} def request1(param, webpath): return requests.post(url1 + webpath, json=param, headers=headers) def request0(data): return requests.post(url, data='', headers=headers) #return requests.post(url, data=json.dumps(data), headers=headers)
Handle different environments, for automation (I4).
Handle different environments, for automation (I4).
Python
agpl-3.0
rroart/aether,rroart/aether,rroart/aether,rroart/aether,rroart/aether
import requests import os aport = os.environ.get('MYAPORT') if aport is None: aport = "80" + aport = "23456" - aport = "23456" - + ahost = os.environ.get('MYAHOST') + if ahost is None: + ahost = "localhost" + - url1 = 'http://localhost:' + aport + '/' + url1 = 'http://' + ahost + ':' + aport + '/' - url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest' - url3 = 'http://localhost:' + aport + '/action/autosimulateinvest' - url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest' #headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} #headers={'Content-type':'application/json', 'Accept':'application/json'} headers={'Content-Type' : 'application/json;charset=utf-8'} def request1(param, webpath): return requests.post(url1 + webpath, json=param, headers=headers) - def request2(market, data): - return requests.post(url2 + '/market/' + str(market), json=data, headers=headers) - - def request3(market, data): - return requests.post(url3 + '/market/' + str(market), json=data, headers=headers) - - def request4(market, data): - return requests.post(url4 + '/market/' + str(market), json=data, headers=headers) - def request0(data): return requests.post(url, data='', headers=headers) #return requests.post(url, data=json.dumps(data), headers=headers)
Handle different environments, for automation (I4).
## Code Before: import requests import os aport = os.environ.get('MYAPORT') if aport is None: aport = "80" aport = "23456" url1 = 'http://localhost:' + aport + '/' url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest' url3 = 'http://localhost:' + aport + '/action/autosimulateinvest' url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest' #headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} #headers={'Content-type':'application/json', 'Accept':'application/json'} headers={'Content-Type' : 'application/json;charset=utf-8'} def request1(param, webpath): return requests.post(url1 + webpath, json=param, headers=headers) def request2(market, data): return requests.post(url2 + '/market/' + str(market), json=data, headers=headers) def request3(market, data): return requests.post(url3 + '/market/' + str(market), json=data, headers=headers) def request4(market, data): return requests.post(url4 + '/market/' + str(market), json=data, headers=headers) def request0(data): return requests.post(url, data='', headers=headers) #return requests.post(url, data=json.dumps(data), headers=headers) ## Instruction: Handle different environments, for automation (I4). ## Code After: import requests import os aport = os.environ.get('MYAPORT') if aport is None: aport = "80" aport = "23456" ahost = os.environ.get('MYAHOST') if ahost is None: ahost = "localhost" url1 = 'http://' + ahost + ':' + aport + '/' #headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} #headers={'Content-type':'application/json', 'Accept':'application/json'} headers={'Content-Type' : 'application/json;charset=utf-8'} def request1(param, webpath): return requests.post(url1 + webpath, json=param, headers=headers) def request0(data): return requests.post(url, data='', headers=headers) #return requests.post(url, data=json.dumps(data), headers=headers)
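The host/port plumbing here is the usual environment-with-fallback pattern. `dict.get` accepts the fallback directly, which collapses each None check to one line (equivalent behaviour shown for comparison; MYAHOST and MYAPORT are the record's own variable names):

import os

# The fallback applies only when the variable is absent; an explicitly
# empty MYAHOST="" would still win, unlike an `x or 'localhost'` chain.
ahost = os.environ.get('MYAHOST', 'localhost')
aport = os.environ.get('MYAPORT', '80')
print('http://' + ahost + ':' + aport + '/')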
0adadcb3f04e2ecb98b5ca5de1afba2ba7208d23
spacy/tests/parser/test_beam_parse.py
spacy/tests/parser/test_beam_parse.py
import spacy import pytest @pytest.mark.models def test_beam_parse(): nlp = spacy.load('en_core_web_sm') doc = nlp(u'Australia is a country', disable=['ner']) ents = nlp.entity(doc, beam_width=2) print(ents)
from __future__ import unicode_literals import pytest @pytest.mark.models('en') def test_beam_parse(EN): doc = EN(u'Australia is a country', disable=['ner']) ents = EN.entity(doc, beam_width=2) print(ents)
Fix beam parse model test
Fix beam parse model test
Python
mit
aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy
- import spacy + from __future__ import unicode_literals + import pytest + - @pytest.mark.models + @pytest.mark.models('en') - def test_beam_parse(): + def test_beam_parse(EN): - nlp = spacy.load('en_core_web_sm') - doc = nlp(u'Australia is a country', disable=['ner']) + doc = EN(u'Australia is a country', disable=['ner']) - ents = nlp.entity(doc, beam_width=2) + ents = EN.entity(doc, beam_width=2) print(ents) -
Fix beam parse model test
## Code Before: import spacy import pytest @pytest.mark.models def test_beam_parse(): nlp = spacy.load('en_core_web_sm') doc = nlp(u'Australia is a country', disable=['ner']) ents = nlp.entity(doc, beam_width=2) print(ents) ## Instruction: Fix beam parse model test ## Code After: from __future__ import unicode_literals import pytest @pytest.mark.models('en') def test_beam_parse(EN): doc = EN(u'Australia is a country', disable=['ner']) ents = EN.entity(doc, beam_width=2) print(ents)
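The rewritten test leans on two project conventions: a parametrized `models('en')` marker and a shared `EN` fixture injected by pytest. Both would live in the test suite's conftest; a hypothetical sketch of how such a fixture is typically provided (assumes spaCy and its small English model are installed):

import pytest

@pytest.fixture(scope='session')
def EN():
    spacy = pytest.importorskip('spacy')   # skip cleanly when spaCy is absent
    return spacy.load('en_core_web_sm')    # assumed model name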
8d3931fd5effabf9c5d56cb03ae15630ae984963
postalcodes_mexico/cli.py
postalcodes_mexico/cli.py
"""Console script for postalcodes_mexico.""" import sys import click @click.command() def main(args=None): """Console script for postalcodes_mexico.""" click.echo("Replace this message by putting your code into " "postalcodes_mexico.cli.main") click.echo("See click documentation at http://click.pocoo.org/") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
"""Console script for postalcodes_mexico.""" import sys import click from postalcodes_mexico import postalcodes_mexico @click.command() @click.argument('postalcode', type=str) def main(postalcode): """Console script for postalcodes_mexico.""" places = postalcodes_mexico.places(postalcode) click.echo(places) return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
Create simple CLI for the `places` function
Create simple CLI for the `places` function
Python
mit
FlowFX/postalcodes_mexico
"""Console script for postalcodes_mexico.""" import sys import click + from postalcodes_mexico import postalcodes_mexico + @click.command() - def main(args=None): + @click.argument('postalcode', type=str) + def main(postalcode): """Console script for postalcodes_mexico.""" + places = postalcodes_mexico.places(postalcode) + click.echo(places) - click.echo("Replace this message by putting your code into " - "postalcodes_mexico.cli.main") - click.echo("See click documentation at http://click.pocoo.org/") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
Create simple CLI for the `places` function
## Code Before: """Console script for postalcodes_mexico.""" import sys import click @click.command() def main(args=None): """Console script for postalcodes_mexico.""" click.echo("Replace this message by putting your code into " "postalcodes_mexico.cli.main") click.echo("See click documentation at http://click.pocoo.org/") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover ## Instruction: Create simple CLI for the `places` function ## Code After: """Console script for postalcodes_mexico.""" import sys import click from postalcodes_mexico import postalcodes_mexico @click.command() @click.argument('postalcode', type=str) def main(postalcode): """Console script for postalcodes_mexico.""" places = postalcodes_mexico.places(postalcode) click.echo(places) return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
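Click commands like this one are easiest to exercise through `click.testing.CliRunner`, which invokes the command in-process. A usage sketch (assumes the package is installed; '01000' is just an example postal code):

from click.testing import CliRunner
from postalcodes_mexico.cli import main

runner = CliRunner()
result = runner.invoke(main, ['01000'])
print(result.exit_code)  # 0 on success
print(result.output)     # whatever postalcodes_mexico.places returned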
8be6b576007f89fad50ea1dfacad46614c0a97c5
apps/domain/src/main/core/exceptions.py
apps/domain/src/main/core/exceptions.py
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class EnvironmentNotFoundError(PyGridError): def __init__(self): message = "Environment not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
ADD new exception -> EnvironmentNotFound!
ADD new exception -> EnvironmentNotFound!
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) + class EnvironmentNotFoundError(PyGridError): + def __init__(self): + message = "Environment not found!" + super().__init__(message) + + class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
ADD new exception -> EnvironmentNotFound!
## Code Before: """Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message) ## Instruction: ADD new exception -> EnvironmentNotFound! ## Code After: """Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class EnvironmentNotFoundError(PyGridError): def __init__(self): message = "Environment not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
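Because every error in this module subclasses `PyGridError`, callers can catch the new `EnvironmentNotFoundError` either specifically or via the shared base. A self-contained miniature of the pattern:

class PyGridError(Exception):
    pass

class EnvironmentNotFoundError(PyGridError):
    def __init__(self, message="Environment not found!"):
        super().__init__(message)

try:
    raise EnvironmentNotFoundError()
except PyGridError as err:   # one handler covers the whole family
    print(err)               # Environment not found!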
e0b82cf9ed24870cb313328e5539acc5fe7f6508
stock_awesome/levels/chock_a_block.py
stock_awesome/levels/chock_a_block.py
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM') #collection of orders placed orders = {} filled = 0 upper_limit = 3300 #try to buy 100000 to_send = 1000 while to_send > 0: quote = m.quote() ask = quote.get('ask') if ask and ask < upper_limit: r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill') to_send -= 1 orders[r['id']] = r orders = update_orders(m, orders) filled += update_filled(orders) else: time.sleep(1) def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') #collection of orders placed orders = {} filled = 0 upper_limit = 2450 #try to buy 100000 to_buy = 100000 while to_buy > 0: quote = m.quote() ask = quote.get('ask', 0) bid = quote.get('bid') if ask < upper_limit: r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') to_buy -= r['totalFilled'] print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) else: time.sleep(1) print('done') def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
Add some (ineffective) score-maximizing attempts
Add some (ineffective) score-maximizing attempts
Python
mit
ForeverWintr/stock_awesome
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ - m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM') + m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') #collection of orders placed orders = {} filled = 0 - upper_limit = 3300 + upper_limit = 2450 #try to buy 100000 - to_send = 1000 + to_buy = 100000 - while to_send > 0: + while to_buy > 0: quote = m.quote() - ask = quote.get('ask') + ask = quote.get('ask', 0) + bid = quote.get('bid') - if ask and ask < upper_limit: + if ask < upper_limit: - r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill') + r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') - to_send -= 1 + to_buy -= r['totalFilled'] + print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) - orders[r['id']] = r - - orders = update_orders(m, orders) - filled += update_filled(orders) else: time.sleep(1) - + print('done') def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
Add some (ineffective) score-maximizing attempts
## Code Before: import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM') #collection of orders placed orders = {} filled = 0 upper_limit = 3300 #try to buy 100000 to_send = 1000 while to_send > 0: quote = m.quote() ask = quote.get('ask') if ask and ask < upper_limit: r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill') to_send -= 1 orders[r['id']] = r orders = update_orders(m, orders) filled += update_filled(orders) else: time.sleep(1) def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main() ## Instruction: Add some (inefective) score maximizing attempts ## Code After: import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') #collection of orders placed orders = {} filled = 0 upper_limit = 2450 #try to buy 100000 to_buy = 100000 while to_buy > 0: quote = m.quote() ask = quote.get('ask', 0) bid = quote.get('bid') if ask < upper_limit: r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') to_buy -= r['totalFilled'] print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) else: time.sleep(1) print('done') def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
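The interesting accounting change here is decrementing `to_buy` by each order's reported `totalFilled` instead of by a fixed count, which is what makes fill-or-kill orders (all-or-nothing) safe to loop on. A stubbed-exchange sketch of that loop (fake_fill_or_kill is invented for illustration):

import random

def fake_fill_or_kill(qty):
    # Fill-or-kill: either the whole quantity fills or none of it does.
    return {'totalFilled': random.choice([0, qty])}

to_buy = 1000
while to_buy > 0:
    r = fake_fill_or_kill(min(to_buy, 100))
    to_buy -= r['totalFilled']   # only count what actually filled
print('done')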
6bfb23294c2cc445479f4c8098b8e62647cf01bd
test/test_notification_integration.py
test/test_notification_integration.py
import os import select import groundstation.fs_watcher as fs_watcher from groundstation.peer_socket import PeerSocket from integration_fixture import StationIntegrationFixture, \ TestListener, \ TestClient class StationFSWatcherIntegration(StationIntegrationFixture): def test_notifies_peer(self): read_sockets = [] write_sockets = [] def tick(): return select.select(read_sockets, write_sockets, [], 1) addr = os.path.join(self.dir, "listener") listener = TestListener(addr) client = TestClient(addr) peer = listener.accept(PeerSocket) watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root) read_sockets.append(client) read_sockets.append(watcher) self.stations[0].write("trolololol") (sread, _, _) = tick() self.assertIn(watcher, sread) obj_name = watcher.read() client.notify_new_object(self.stations[0], obj_name) client.send() peer.recv() data = peer.packet_queue.pop() gizmo = self.stations[1].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" gizmo.process() watcher.kill()
import os import select import groundstation.fs_watcher as fs_watcher from groundstation.peer_socket import PeerSocket from groundstation.utils import path2id from integration_fixture import StationIntegrationFixture, \ TestListener, \ TestClient class StationFSWatcherIntegration(StationIntegrationFixture): def test_notifies_peer(self): read_sockets = [] write_sockets = [] def tick(): return select.select(read_sockets, write_sockets, [], 1) addr = os.path.join(self.dir, "listener") listener = TestListener(addr) client = TestClient(addr) peer = listener.accept(PeerSocket) watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root) read_sockets.append(client) read_sockets.append(watcher) self.stations[0].write("trolololol") (sread, _, _) = tick() self.assertIn(watcher, sread) obj_name = path2id(watcher.read()) client.notify_new_object(self.stations[0], obj_name) client.send() peer.recv() data = peer.packet_queue.pop() gizmo = self.stations[1].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" gizmo.process() peer.send() client.recv() data = client.packet_queue.pop() gizmo = self.stations[0].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" self.assertEqual(gizmo.verb, "FETCHOBJECT") self.assertEqual(gizmo.payload, obj_name) gizmo.process() watcher.kill()
Validate that we can translate a NEWOBJECT into a FETCHOBJECT
Validate that we can translate a NEWOBJECT into a FETCHOBJECT
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
import os import select import groundstation.fs_watcher as fs_watcher from groundstation.peer_socket import PeerSocket + + from groundstation.utils import path2id from integration_fixture import StationIntegrationFixture, \ TestListener, \ TestClient class StationFSWatcherIntegration(StationIntegrationFixture): def test_notifies_peer(self): read_sockets = [] write_sockets = [] def tick(): return select.select(read_sockets, write_sockets, [], 1) addr = os.path.join(self.dir, "listener") listener = TestListener(addr) client = TestClient(addr) peer = listener.accept(PeerSocket) watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root) read_sockets.append(client) read_sockets.append(watcher) self.stations[0].write("trolololol") (sread, _, _) = tick() self.assertIn(watcher, sread) - obj_name = watcher.read() + obj_name = path2id(watcher.read()) client.notify_new_object(self.stations[0], obj_name) client.send() peer.recv() data = peer.packet_queue.pop() gizmo = self.stations[1].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" gizmo.process() + peer.send() + + client.recv() + data = client.packet_queue.pop() + gizmo = self.stations[0].gizmo_factory.hydrate(data, peer) + assert gizmo is not None, "gizmo_factory returned None" + self.assertEqual(gizmo.verb, "FETCHOBJECT") + self.assertEqual(gizmo.payload, obj_name) + gizmo.process() watcher.kill()
Validate that we can translate a NEWOBJECT into a FETCHOBJECT
## Code Before: import os import select import groundstation.fs_watcher as fs_watcher from groundstation.peer_socket import PeerSocket from integration_fixture import StationIntegrationFixture, \ TestListener, \ TestClient class StationFSWatcherIntegration(StationIntegrationFixture): def test_notifies_peer(self): read_sockets = [] write_sockets = [] def tick(): return select.select(read_sockets, write_sockets, [], 1) addr = os.path.join(self.dir, "listener") listener = TestListener(addr) client = TestClient(addr) peer = listener.accept(PeerSocket) watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root) read_sockets.append(client) read_sockets.append(watcher) self.stations[0].write("trolololol") (sread, _, _) = tick() self.assertIn(watcher, sread) obj_name = watcher.read() client.notify_new_object(self.stations[0], obj_name) client.send() peer.recv() data = peer.packet_queue.pop() gizmo = self.stations[1].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" gizmo.process() watcher.kill() ## Instruction: Validate that we can translate a NEWOBJECT into a FETCHOBJECT ## Code After: import os import select import groundstation.fs_watcher as fs_watcher from groundstation.peer_socket import PeerSocket from groundstation.utils import path2id from integration_fixture import StationIntegrationFixture, \ TestListener, \ TestClient class StationFSWatcherIntegration(StationIntegrationFixture): def test_notifies_peer(self): read_sockets = [] write_sockets = [] def tick(): return select.select(read_sockets, write_sockets, [], 1) addr = os.path.join(self.dir, "listener") listener = TestListener(addr) client = TestClient(addr) peer = listener.accept(PeerSocket) watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root) read_sockets.append(client) read_sockets.append(watcher) self.stations[0].write("trolololol") (sread, _, _) = tick() self.assertIn(watcher, sread) obj_name = path2id(watcher.read()) client.notify_new_object(self.stations[0], obj_name) client.send() peer.recv() data = peer.packet_queue.pop() gizmo = self.stations[1].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" gizmo.process() peer.send() client.recv() data = client.packet_queue.pop() gizmo = self.stations[0].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" self.assertEqual(gizmo.verb, "FETCHOBJECT") self.assertEqual(gizmo.payload, obj_name) gizmo.process() watcher.kill()
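The test's `tick()` helper is a thin wrapper over `select.select`, which is also what makes the request/response round trip deterministic: a socket only shows up as readable once its peer has actually written. A minimal standalone version of that readiness check:

import select
import socket

a, b = socket.socketpair()
b.send(b'ping')
readable, _, _ = select.select([a], [], [], 1)  # 1s timeout, like tick()
assert a in readable
print(a.recv(4))  # b'ping'
a.close()
b.close()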
6fe5a416ed229e7ec8efab9d6b3dac43f16515b6
corehq/apps/domain/__init__.py
corehq/apps/domain/__init__.py
from corehq.preindex import ExtraPreindexPlugin from django.conf import settings ExtraPreindexPlugin.register('domain', __file__, ( settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta'))
from corehq.preindex import ExtraPreindexPlugin from django.conf import settings ExtraPreindexPlugin.register('domain', __file__, ( settings.NEW_DOMAINS_DB, settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta', ))
Add the new domains db
Add the new domains db
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
from corehq.preindex import ExtraPreindexPlugin from django.conf import settings ExtraPreindexPlugin.register('domain', __file__, ( - settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta')) + settings.NEW_DOMAINS_DB, + settings.NEW_USERS_GROUPS_DB, + settings.NEW_FIXTURES_DB, + 'meta', + ))
Add the new domains db
## Code Before: from corehq.preindex import ExtraPreindexPlugin from django.conf import settings ExtraPreindexPlugin.register('domain', __file__, ( settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta')) ## Instruction: Add the new domains db ## Code After: from corehq.preindex import ExtraPreindexPlugin from django.conf import settings ExtraPreindexPlugin.register('domain', __file__, ( settings.NEW_DOMAINS_DB, settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta', ))
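The change itself is one more database name in the tuple handed to `ExtraPreindexPlugin.register`. As a hypothetical sketch of what such a registration hook boils down to (REGISTRY and register here are invented for illustration, not CommCare HQ's code):

REGISTRY = {}

def register(app_label, path, databases):
    # Each app declares the databases whose design docs it wants
    # preindexed; the trailing-comma tuple style keeps future diffs one line.
    REGISTRY[app_label] = {'path': path, 'databases': tuple(databases)}

register('domain', __file__, ('domains', 'users_groups', 'fixtures', 'meta'))
print(REGISTRY['domain']['databases'])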