commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 51
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
ee49f4f592cf04199f9d82c2da2af9e34dd1d9d4 | avwx_api/views.py | avwx_api/views.py |
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
data = avwx.Station.from_icao(station)
if data:
return jsonify(asdict(data))
return jsonify({'error': f'Station ident "{station}" not found'})
|
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
try:
return jsonify(asdict(avwx.Station.from_icao(station)))
except avwx.exceptions.BadStation:
return jsonify({
'error': f'Station ident "{station}" not found. Email me if data is missing :)'
})
| Add error handling to station endpoint | Add error handling to station endpoint
| Python | mit | flyinactor91/AVWX-API,flyinactor91/AVWX-API,flyinactor91/AVWX-API |
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
- data = avwx.Station.from_icao(station)
- if data:
+ try:
+ return jsonify(asdict(avwx.Station.from_icao(station)))
+ except avwx.exceptions.BadStation:
- return jsonify(asdict(data))
+ return jsonify({
- return jsonify({'error': f'Station ident "{station}" not found'})
+ 'error': f'Station ident "{station}" not found. Email me if data is missing :)'
+ })
| Add error handling to station endpoint | ## Code Before:
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
data = avwx.Station.from_icao(station)
if data:
return jsonify(asdict(data))
return jsonify({'error': f'Station ident "{station}" not found'})
## Instruction:
Add error handling to station endpoint
## Code After:
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
try:
return jsonify(asdict(avwx.Station.from_icao(station)))
except avwx.exceptions.BadStation:
return jsonify({
'error': f'Station ident "{station}" not found. Email me if data is missing :)'
})
|
c5c12e1f5aaeb56921b69cbb64a7d6a1b7585936 | languages_plus/admin.py | languages_plus/admin.py | from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
| from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
| Define `search_fields` for Admin classes | Define `search_fields` for Admin classes
This enables the search box on the admin change list page [1], and can
be used by other apps like django-autocomplete-light [2].
1: https://docs.djangoproject.com/en/1.7/ref/contrib/admin/#django.contrib.admin.ModelAdmin.search_fields
2: https://github.com/yourlabs/django-autocomplete-light/pull/361
| Python | mit | cordery/django-languages-plus | from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
+ search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
+ search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
| Define `search_fields` for Admin classes | ## Code Before:
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
## Instruction:
Define `search_fields` for Admin classes
## Code After:
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
|
2b08ce1d980ff01c2f0ac258aaba52f2ca758427 | beethoven/urls.py | beethoven/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if not settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
| Fix static file 404 error | Fix static file 404 error
| Python | mit | lockhawksp/beethoven,lockhawksp/beethoven | from django.conf.urls import patterns, include, url
from django.contrib import admin
+
+ from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
+ if not settings.PRODUCTION:
+ urlpatterns += patterns(
+ '',
+ (r'^static/(?P<path>.*)$',
+ 'django.views.static.serve',
+ {'document_root': settings.STATIC_ROOT})
+ )
+ | Fix static file 404 error | ## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
## Instruction:
Fix static file 404 error
## Code After:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if not settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
|
6d0fa6dda7613e734ce958f88bc0eaf55cfddf3c | st2common/st2common/persistence/pack.py | st2common/st2common/persistence/pack.py |
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack',
'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
class ConfigSchema(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
| Add persistance class for ConfigSchema. | Add persistance class for ConfigSchema.
| Python | apache-2.0 | pixelrebel/st2,Plexxi/st2,emedvedev/st2,lakshmi-kannan/st2,StackStorm/st2,punalpatel/st2,Plexxi/st2,peak6/st2,StackStorm/st2,Plexxi/st2,pixelrebel/st2,StackStorm/st2,Plexxi/st2,punalpatel/st2,nzlosh/st2,emedvedev/st2,peak6/st2,emedvedev/st2,punalpatel/st2,peak6/st2,lakshmi-kannan/st2,tonybaloney/st2,nzlosh/st2,tonybaloney/st2,pixelrebel/st2,StackStorm/st2,tonybaloney/st2,lakshmi-kannan/st2,nzlosh/st2,nzlosh/st2 |
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
- 'Pack'
+ 'Pack',
+ 'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
+
+ class ConfigSchema(base.Access):
+ impl = pack_access
+
+ @classmethod
+ def _get_impl(cls):
+ return cls.impl
+ | Add persistance class for ConfigSchema. | ## Code Before:
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
## Instruction:
Add persistance class for ConfigSchema.
## Code After:
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack',
'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
class ConfigSchema(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
f7e218b72a09615259b4d77e9169f5237a4cae32 | mopidy/core/mixer.py | mopidy/core/mixer.py | from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
| from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
| Remove test-only code paths in MixerController | core: Remove test-only code paths in MixerController
| Python | apache-2.0 | jmarsik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,pacificIT/mopidy,vrs01/mopidy,diandiankan/mopidy,jcass77/mopidy,tkem/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,bencevans/mopidy,bencevans/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,swak/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,vrs01/mopidy,mokieyue/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,hkariti/mopidy,ZenithDK/mopidy,rawdlite/mopidy,mopidy/mopidy,jmarsik/mopidy,mokieyue/mopidy,swak/mopidy,mopidy/mopidy,bencevans/mopidy,bacontext/mopidy,dbrgn/mopidy,hkariti/mopidy,bacontext/mopidy,quartz55/mopidy,dbrgn/mopidy,adamcik/mopidy,kingosticks/mopidy,hkariti/mopidy,jmarsik/mopidy,ali/mopidy,kingosticks/mopidy,jcass77/mopidy,mokieyue/mopidy,ali/mopidy,bacontext/mopidy,mopidy/mopidy,ZenithDK/mopidy,rawdlite/mopidy,jodal/mopidy,adamcik/mopidy,rawdlite/mopidy,bacontext/mopidy,ali/mopidy,ali/mopidy,adamcik/mopidy,dbrgn/mopidy,quartz55/mopidy,tkem/mopidy,rawdlite/mopidy,jcass77/mopidy,bencevans/mopidy,ZenithDK/mopidy,vrs01/mopidy,jodal/mopidy,swak/mopidy,tkem/mopidy,glogiotatidis/mopidy,swak/mopidy,jodal/mopidy,ZenithDK/mopidy,tkem/mopidy,quartz55/mopidy,diandiankan/mopidy,hkariti/mopidy,jmarsik/mopidy,diandiankan/mopidy,pacificIT/mopidy,quartz55/mopidy | from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
- if self._mixer:
+ if self._mixer is not None:
return self._mixer.get_volume().get()
- else:
- # For testing
- return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
- if self._mixer:
+ if self._mixer is not None:
self._mixer.set_volume(volume)
- else:
- # For testing
- self._volume = volume
def get_mute(self):
"""Get mute state.
- :class:`True` if muted, :class:`False` otherwise.
+ :class:`True` if muted, :class:`False` unmuted, :class:`None` if
+ unknown.
"""
- if self._mixer:
+ if self._mixer is not None:
return self._mixer.get_mute().get()
- else:
- # For testing
- return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
- mute = bool(mute)
- if self._mixer:
+ if self._mixer is not None:
- self._mixer.set_mute(mute)
+ self._mixer.set_mute(bool(mute))
- else:
- # For testing
- self._mute = mute
| Remove test-only code paths in MixerController | ## Code Before:
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
## Instruction:
Remove test-only code paths in MixerController
## Code After:
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
|
47273357ac7bd646e8a9326c87688191eb8a1a89 | airesources/Python/MyBot.py | airesources/Python/MyBot.py | from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
| from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
| Revert python mybot to random bot | Revert python mybot to random bot
Former-commit-id: b08897ea13c57ce3700439954b432a6453fcfb3f
Former-commit-id: 28471a6712bd57db5dc7fd6d42d614d2f7ae7069
Former-commit-id: 871c6ab61f365689493b0663b761317cfb786507 | Python | mit | yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II | from hlt import *
from networking import *
playerTag, gameMap = getInit()
- sendInit("BasicBot"+str(playerTag))
+ sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
- direction = random.randint(0, 5)
- if site.strength < 5*site.production:
- direction = STILL
- else:
- for d in CARDINALS:
- if gameMap.getSite(Location(x, y), d).owner != playerTag:
- direction = d
- break
- moves.append(Move(Location(x, y), direction))
+ moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
| Revert python mybot to random bot | ## Code Before:
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
## Instruction:
Revert python mybot to random bot
## Code After:
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
|
6a9524502ebf3c04dede24fb937baec5c48342ef | widgy/contrib/widgy_mezzanine/search_indexes.py | widgy/contrib/widgy_mezzanine/search_indexes.py | from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
| from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
| Use a more realistic context to render pages for search | Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.
| Python | apache-2.0 | j00bar/django-widgy,j00bar/django-widgy,j00bar/django-widgy | from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
+ context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
- html = render_root({}, obj, 'root_node')
+ html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
| Use a more realistic context to render pages for search | ## Code Before:
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
## Instruction:
Use a more realistic context to render pages for search
## Code After:
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
edf08b9928558688c2402d1c144f04777f4b4bc5 | gb/helpers.py | gb/helpers.py | """Helpers to facilitate API interaction."""
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase | """Helpers to facilitate API interaction."""
from functools import wraps
from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
CACHE = {}
MAX_AGE = 60 * 60 * 24 # a day
def memoize_class(func):
"""Decorator to assist with the memoization of class methods."""
@wraps(func)
def wrapper(*args):
expired = False
sig = (func, args)
cached, timestamp = CACHE.get(sig, (None, None,))
if timestamp:
age = datetime.utcnow() - timestamp
if age.total_seconds() > MAX_AGE:
expired = True
if cached and not expired:
return cached
value = func(*args)
CACHE[sig] = value, datetime.utcnow()
return value
return wrapper
| Add caching feature to API lookup requests | Add caching feature to API lookup requests
| Python | mit | jaykwon/giantanswers | """Helpers to facilitate API interaction."""
+ from functools import wraps
+ from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
-
+
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
-
+
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
+
+
+ CACHE = {}
+ MAX_AGE = 60 * 60 * 24 # a day
+
+
+ def memoize_class(func):
+ """Decorator to assist with the memoization of class methods."""
+ @wraps(func)
+ def wrapper(*args):
+ expired = False
+ sig = (func, args)
+ cached, timestamp = CACHE.get(sig, (None, None,))
+ if timestamp:
+ age = datetime.utcnow() - timestamp
+ if age.total_seconds() > MAX_AGE:
+ expired = True
+ if cached and not expired:
+ return cached
+ value = func(*args)
+ CACHE[sig] = value, datetime.utcnow()
+ return value
+ return wrapper
+ | Add caching feature to API lookup requests | ## Code Before:
"""Helpers to facilitate API interaction."""
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
## Instruction:
Add caching feature to API lookup requests
## Code After:
"""Helpers to facilitate API interaction."""
from functools import wraps
from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
CACHE = {}
MAX_AGE = 60 * 60 * 24 # a day
def memoize_class(func):
"""Decorator to assist with the memoization of class methods."""
@wraps(func)
def wrapper(*args):
expired = False
sig = (func, args)
cached, timestamp = CACHE.get(sig, (None, None,))
if timestamp:
age = datetime.utcnow() - timestamp
if age.total_seconds() > MAX_AGE:
expired = True
if cached and not expired:
return cached
value = func(*args)
CACHE[sig] = value, datetime.utcnow()
return value
return wrapper
|
750c7bef1483c914e195e26a179a3b362fa3f059 | pmg/admin/validators.py | pmg/admin/validators.py | from wtforms.validators import AnyOf
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message)
return any_of(form, field)
| from wtforms.validators import AnyOf
from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message,
values_formatter=self.values_formatter)
return any_of(form, field)
@classmethod
def values_formatter(cls, values):
return ', '.join(cls.quoted(text_type(x)) for x in values)
@classmethod
def quoted(cls, value):
return '"%s"' % value
| Format event title error message titles in quotation marks | Format event title error message titles in quotation marks
| Python | apache-2.0 | Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2 | from wtforms.validators import AnyOf
+ from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
- any_of = AnyOf(self.ALLOWED_TITLES, message=message)
+ any_of = AnyOf(self.ALLOWED_TITLES, message=message,
+ values_formatter=self.values_formatter)
return any_of(form, field)
+ @classmethod
+ def values_formatter(cls, values):
+ return ', '.join(cls.quoted(text_type(x)) for x in values)
+
+ @classmethod
+ def quoted(cls, value):
+ return '"%s"' % value
+ | Format event title error message titles in quotation marks | ## Code Before:
from wtforms.validators import AnyOf
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message)
return any_of(form, field)
## Instruction:
Format event title error message titles in quotation marks
## Code After:
from wtforms.validators import AnyOf
from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message,
values_formatter=self.values_formatter)
return any_of(form, field)
@classmethod
def values_formatter(cls, values):
return ', '.join(cls.quoted(text_type(x)) for x in values)
@classmethod
def quoted(cls, value):
return '"%s"' % value
|
fd176b8eae33cac5fa7b2ba4f7a7586d9e6ebf14 | mlat/connection.py | mlat/connection.py |
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
pass
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
pass
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
pass
|
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
raise NotImplementedError
| Raise NotImplemented if methods aren't overridden | Raise NotImplemented if methods aren't overridden
| Python | agpl-3.0 | tmuic/mlat-server,mutability/mlat-server,mutability/mlat-server,tmuic/mlat-server |
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
- pass
+ raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
- pass
+ raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
- pass
+ raise NotImplementedError
| Raise NotImplemented if methods aren't overridden | ## Code Before:
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
pass
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
pass
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
pass
## Instruction:
Raise NotImplemented if methods aren't overridden
## Code After:
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
raise NotImplementedError
|
8f2d6d2714aa1b60950a2fc355d39297b7f2cdfb | keras/activations.py | keras/activations.py | from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
| from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
| Add support for time-distributed softmax. | Add support for time-distributed softmax. | Python | mit | daviddiazvico/keras,DeepGnosis/keras,kemaswill/keras,keras-team/keras,relh/keras,keras-team/keras,dolaameng/keras,kuza55/keras,nebw/keras | from __future__ import absolute_import
from . import backend as K
def softmax(x):
+ ndim = K.ndim(x)
+ if ndim == 2:
- return K.softmax(x)
+ return K.softmax(x)
+ elif ndim == 3:
+ # apply softmax to each timestep
+ def step(x, states):
+ return K.softmax(x), []
+ last_output, outputs, states = K.rnn(step, x, [], masking=False)
+ return outputs
+ else:
+ raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
+ 'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
| Add support for time-distributed softmax. | ## Code Before:
from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
## Instruction:
Add support for time-distributed softmax.
## Code After:
from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
0c833808e9c761a98e11ffb4834b8344221db1d5 | matador/commands/deployment/deploy_sql_script.py | matador/commands/deployment/deploy_sql_script.py | import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
os.remove(scriptPath)
subprocess.run(
['git', '-C', repo_folder, 'checkout', scriptPath],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
| import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
| Remove lines which deleted and checked out file for substitution | Remove lines which deleted and checked out file for substitution
| Python | mit | Empiria/matador | import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
- os.remove(scriptPath)
-
- subprocess.run(
- ['git', '-C', repo_folder, 'checkout', scriptPath],
- stderr=subprocess.STDOUT,
- stdout=open(os.devnull, 'w'),
- check=True)
-
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
| Remove lines which deleted and checked out file for substitution | ## Code Before:
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
os.remove(scriptPath)
subprocess.run(
['git', '-C', repo_folder, 'checkout', scriptPath],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
## Instruction:
Remove lines which deleted and checked out file for substitution
## Code After:
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
|
242f27f943a107bf7dd2a472f08a71a8382f6467 | mopidy/__init__.py | mopidy/__init__.py | import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
| import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
raise Exception|('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
| Use subprocess instead of os.popen | Use subprocess instead of os.popen
| Python | apache-2.0 | ZenithDK/mopidy,bacontext/mopidy,adamcik/mopidy,kingosticks/mopidy,jcass77/mopidy,jmarsik/mopidy,bacontext/mopidy,mopidy/mopidy,hkariti/mopidy,bencevans/mopidy,ZenithDK/mopidy,dbrgn/mopidy,hkariti/mopidy,SuperStarPL/mopidy,jodal/mopidy,adamcik/mopidy,jcass77/mopidy,jodal/mopidy,vrs01/mopidy,ali/mopidy,diandiankan/mopidy,swak/mopidy,rawdlite/mopidy,hkariti/mopidy,ali/mopidy,bencevans/mopidy,diandiankan/mopidy,liamw9534/mopidy,rawdlite/mopidy,tkem/mopidy,swak/mopidy,glogiotatidis/mopidy,quartz55/mopidy,bacontext/mopidy,bencevans/mopidy,dbrgn/mopidy,mokieyue/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,priestd09/mopidy,pacificIT/mopidy,mopidy/mopidy,jodal/mopidy,swak/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,ali/mopidy,tkem/mopidy,abarisain/mopidy,woutervanwijk/mopidy,adamcik/mopidy,ZenithDK/mopidy,vrs01/mopidy,tkem/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,jcass77/mopidy,abarisain/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,tkem/mopidy,dbrgn/mopidy,priestd09/mopidy,swak/mopidy,kingosticks/mopidy,bencevans/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,liamw9534/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,hkariti/mopidy,rawdlite/mopidy,quartz55/mopidy,ali/mopidy,rawdlite/mopidy,kingosticks/mopidy,bacontext/mopidy,mokieyue/mopidy,mopidy/mopidy,jmarsik/mopidy,vrs01/mopidy,pacificIT/mopidy,dbrgn/mopidy,jmarsik/mopidy,priestd09/mopidy,quartz55/mopidy,quartz55/mopidy,vrs01/mopidy | - import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
+ from subprocess import PIPE, Popen
+
VERSION = (0, 4, 0)
- def is_in_git_repo():
- git_dir = os.path.abspath(os.path.join(
- os.path.dirname(__file__), '../.git'))
- return os.path.exists(git_dir)
-
def get_git_version():
- if not is_in_git_repo():
- return None
- git_version = os.popen('git describe').read().strip()
+ process = Popen(['git', 'describe'], stdout=PIPE)
+ if process.wait() != 0:
+ raise Exception('Execution of "git describe" failed')
+ version = process.stdout.read().strip()
- if git_version.startswith('v'):
+ if version.startswith('v'):
- git_version = git_version[1:]
+ version = version[1:]
- return git_version
+ return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
- if is_in_git_repo():
+ try:
return get_git_version()
- else:
+ except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
| Use subprocess instead of os.popen | ## Code Before:
import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
## Instruction:
Use subprocess instead of os.popen
## Code After:
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
raise Exception('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
5ac7c07277ef1c7e714336e1b96571cdfea15a13 | ktbs_bench_manager/benchable_graph.py | ktbs_bench_manager/benchable_graph.py | import logging
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
| from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
| Remove unnecessary import of logging | Remove unnecessary import of logging
| Python | mit | vincent-octo/ktbs_bench_manager,vincent-octo/ktbs_bench_manager | - import logging
-
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
| Remove unnecessary import of logging | ## Code Before:
import logging
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
## Instruction:
Remove unnecessary import of logging
## Code After:
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
|
81f4f4b1318ff800e3febbc1bd7bbd9ff8e868b1 | node/dictionary.py | node/dictionary.py |
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
| Add some exception handling for dict | Add some exception handling for dict
| Python | mit | muddyfish/PYKE,muddyfish/PYKE |
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
- assert(word in words)
+ if word not in words:
+ rtn += "Word %s not in wordlist" % word
+ else:
- rtn += chr(words.index(word))
+ rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
| Add some exception handling for dict | ## Code Before:
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
## Instruction:
Add some exception handling for dict
## Code After:
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
bb3605bd99892bed37ecb2b6371d2bc88d599e1a | caso/__init__.py | caso/__init__.py |
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
|
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
| Include "OpenStack" string in the user agent | Include "OpenStack" string in the user agent
EGI's accounting team requires that we put "OpenStack" in the UA string.
closes IFCA/caso#38
| Python | apache-2.0 | alvarolopez/caso,IFCA/caso,IFCA/caso |
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
- user_agent = "caso/%s" % __version__
+ user_agent = "caso/%s (OpenStack)" % __version__
| Include "OpenStack" string in the user agent | ## Code Before:
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
## Instruction:
Include "OpenStack" string in the user agent
## Code After:
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
|
ebf5e05acfb7f1edce0c0987576ee712f3fdea54 | test/scripts/test_sequana_coverage.py | test/scripts/test_sequana_coverage.py | from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
| from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
| Fix tests to use pytest | Fix tests to use pytest
| Python | bsd-3-clause | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | from sequana.scripts import coverage
- from nose.plugins.attrib import attr
from sequana import sequana_data
+ import pytest
+ prog = "sequana_coverage"
- #@attr("skip")
- class TestPipeline(object):
+ @pytest.fixture
+ def coveragefix():
- @classmethod
- def setup_class(klass):
- """This method is run once for each class before any tests are run"""
- klass.prog = "sequana_coverage"
- klass.params = {'prog': klass.prog}
-
- @classmethod
- def teardown_class(klass):
- """This method is run once for each class _after_ all tests are run"""
- import os
+ import os
- # local nosetests execution
+ # local nosetests execution
- try:os.remove('README')
+ try:os.remove('README')
- except:pass
+ except:pass
- try:os.remove('quality.rules')
+ try:os.remove('quality.rules')
- except:pass
+ except:pass
- try:os.remove('config.yaml')
+ try:os.remove('config.yaml')
- except:pass
+ except:pass
-
- def _test_version(self):
- coverage.main([self.prog, '--version'])
-
- def test_help(self):
- try:
- coverage.main([self.prog, '--help'])
- assert False
- except SystemExit:
- pass
- else:
- raise Exception
-
- def test_input(self):
- filename = sequana_data('virus.bed', 'data')
- reference = sequana_data('tofill.fa', 'data')
- coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
+ def test_version():
+ try:
+ coverage.main([prog, '--version'])
+ assert False
+ except SystemExit:
+ pass
+ else:
+ raise Exception
+
+ def test_help():
+ try:
+ coverage.main([prog, '--help'])
+ assert False
+ except SystemExit:
+ pass
+ else:
+ raise Exception
+
+
+ def test_input(tmpdir):
+
+ import os
+ directory = tmpdir.mkdir("report")
+ name = directory.__str__()
+
+ filename = sequana_data('virus.bed', 'data')
+ reference = sequana_data('tofill.fa', 'data')
+ coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
+ assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
+ | Fix tests to use pytest | ## Code Before:
from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
## Instruction:
Fix tests to use pytest
## Code After:
from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
|
2b5c186337bcb396f630c0b86938e43eb06d3e5b | tests/test_i10knobs.py | tests/test_i10knobs.py | from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
| from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
| Add test checking only for imports | Add test checking only for imports
| Python | apache-2.0 | dls-controls/i10switching,dls-controls/i10switching | from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
+ def test_import(self):
+ pass
+
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
| Add test checking only for imports | ## Code Before:
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
## Instruction:
Add test checking only for imports
## Code After:
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
|
a55dd124d54955476411ee8ae830c9fd3c4f00dc | tests/test_pdfbuild.py | tests/test_pdfbuild.py | from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
| from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
| Test get_errors() method of LatexBuildError. | Test get_errors() method of LatexBuildError.
| Python | bsd-3-clause | mbr/latex | - from latex import build_pdf
- from latex.exc import LatexBuildError
+ from latex import build_pdf, LatexBuildError
+ from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
+
+ def test_finds_errors_correctly():
+ broken_latex = r"""
+ \documentclass{article}
+ \begin{document}
+ All good
+ \undefinedcontrolsequencehere
+ \end{document}
+ """
+
+ try:
+ build_pdf(broken_latex)
+ except LatexBuildError as e:
+ assert parse_log(e.log) == e.get_errors()
+ else:
+ assert False, 'no exception raised'
+ | Test get_errors() method of LatexBuildError. | ## Code Before:
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
## Instruction:
Test get_errors() method of LatexBuildError.
## Code After:
from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
|
8535c59c26e2c5badfd3637d41901f1bc987e200 | tests/test_requests.py | tests/test_requests.py | """Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
| """Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
| Add a test for the __call__ method of the APIRequest class. | Add a test for the __call__ method of the APIRequest class.
| Python | mit | openspending/gobble | """Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
+
+ def test_call():
+ request = APIRequest('google.com')
+ assert request().status_code == 200
+ | Add a test for the __call__ method of the APIRequest class. | ## Code Before:
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
## Instruction:
Add a test for the __call__ method of the APIRequest class.
## Code After:
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
|
ca7403462588f374cf1af39d537765c02fc7726c | mctrl/rest.py | mctrl/rest.py | from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success):
if success:
return Response("OK\n" + data, status=status)
else:
return Response("BAD REQUEST\n" + data, status=status)
| from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success, data):
json_data = json.dumps(data)
if success:
return Response("OK\n" + json_data, status=200)
else:
return Response("BAD REQUEST\n" + json_data, status=400)
| Fix status codes of handled responses | Fix status codes of handled responses
| Python | apache-2.0 | h2020-endeavour/endeavour,h2020-endeavour/endeavour | from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
- return handle_response(success)
+ return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
- return handle_response(success)
+ return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
- def handle_response(self, success):
+ def handle_response(self, success, data):
+ json_data = json.dumps(data)
if success:
- return Response("OK\n" + data, status=status)
+ return Response("OK\n" + json_data, status=200)
else:
- return Response("BAD REQUEST\n" + data, status=status)
+ return Response("BAD REQUEST\n" + json_data, status=400)
| Fix status codes of handled responses | ## Code Before:
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success):
if success:
return Response("OK\n" + data, status=status)
else:
return Response("BAD REQUEST\n" + data, status=status)
## Instruction:
Fix status codes of handled responses
## Code After:
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success, data):
json_data = json.dumps(data)
if success:
return Response("OK\n" + json_data, status=200)
else:
return Response("BAD REQUEST\n" + json_data, status=400)
|
87bf261345919e90cb88853165fb1556046c80ef | tests/mpd/protocol/test_connection.py | tests/mpd/protocol/test_connection.py | from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
| from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
| Fix typo in mock usage | tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.
| Python | apache-2.0 | hkariti/mopidy,bencevans/mopidy,diandiankan/mopidy,dbrgn/mopidy,kingosticks/mopidy,mopidy/mopidy,ali/mopidy,jmarsik/mopidy,quartz55/mopidy,mopidy/mopidy,vrs01/mopidy,diandiankan/mopidy,ali/mopidy,adamcik/mopidy,pacificIT/mopidy,tkem/mopidy,pacificIT/mopidy,dbrgn/mopidy,adamcik/mopidy,hkariti/mopidy,jmarsik/mopidy,vrs01/mopidy,ZenithDK/mopidy,jmarsik/mopidy,bacontext/mopidy,jcass77/mopidy,pacificIT/mopidy,jodal/mopidy,ZenithDK/mopidy,swak/mopidy,kingosticks/mopidy,bacontext/mopidy,jcass77/mopidy,diandiankan/mopidy,quartz55/mopidy,swak/mopidy,kingosticks/mopidy,mopidy/mopidy,quartz55/mopidy,jodal/mopidy,dbrgn/mopidy,dbrgn/mopidy,mokieyue/mopidy,ali/mopidy,tkem/mopidy,swak/mopidy,jcass77/mopidy,SuperStarPL/mopidy,bacontext/mopidy,adamcik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,ZenithDK/mopidy,jmarsik/mopidy,bencevans/mopidy,SuperStarPL/mopidy,bencevans/mopidy,ZenithDK/mopidy,tkem/mopidy,bencevans/mopidy,ali/mopidy,tkem/mopidy,diandiankan/mopidy,jodal/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,vrs01/mopidy,hkariti/mopidy,swak/mopidy,bacontext/mopidy,mokieyue/mopidy,mokieyue/mopidy,quartz55/mopidy,hkariti/mopidy | from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
- close_mock.assertEqualResponsecalled_once_with()
+ close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
| Fix typo in mock usage | ## Code Before:
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
## Instruction:
Fix typo in mock usage
## Code After:
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
e34bcec834bf4d84168d04a1ea0a98613ad0df4e | corehq/apps/locations/management/commands/migrate_new_location_fixture.py | corehq/apps/locations/management/commands/migrate_new_location_fixture.py | from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
| import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
| Update migration to fetch domains with applications using old location fixture | Update migration to fetch domains with applications using old location fixture | Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | + import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
- from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
+ from corehq.apps.locations.models import SQLLocation
- from corehq.toggles import FLAT_LOCATION_FIXTURE
+ from corehq.apps.domain.models import Domain
+ from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
+ To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
+ apps with locations and having commtrack:enabled in app files
- To migrate to new flat fixture for locations. Update apps with locations and not having
- FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
- sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
- The Feature Flag should be removed after this
+ The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
- domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
+ domains_having_locations = (
+ SQLLocation.objects.order_by('domain').distinct('domain')
+ .values_list('domain', flat=True)
+ )
+ domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
+ domains_having_locations
+ )
- toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
+ toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
+ for domain in domains_with_hierarchical_fixture:
+ toggle.add(domain, True, NAMESPACE_DOMAIN)
- enabled_users = toggle.enabled_users
- enabled_domains = [user.split('domain:')[1] for user in enabled_users]
- for domain_name in domains_having_locations:
- if domain_name not in enabled_domains:
- domain_config = LocationFixtureConfiguration.for_domain(domain_name)
- # update configs that had not been changed which means both values are at default True
- if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
- # update them to use hierarchical fixture
- domain_config.sync_flat_fixture = False
- domain_config.sync_hierarchical_fixture = True
- domain_config.save()
+
+ def find_applications_with_hierarchical_fixture(domains):
+ search_string = 'commtrack:enabled'
+ domain_with_application = {}
+ for domain in domains:
+ domain_obj = Domain.get_by_name(domain)
+ for application in domain_obj.applications():
+ raw_doc = json.dumps(application.get_db().get(application.id))
+ if search_string in raw_doc:
+ search_string[domain] = application.id
+ continue
+ return domain_with_application
+ | Update migration to fetch domains with applications using old location fixture | ## Code Before:
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
## Instruction:
Update migration to fetch domains with applications using old location fixture
## Code After:
import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
|
a17ed4f65b7fa5a035efb7c6ff19fcf477a65429 | categories_i18n/managers.py | categories_i18n/managers.py | import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
| import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
| Remove remaining django-mptt 0.7 compatibility code | Remove remaining django-mptt 0.7 compatibility code
| Python | apache-2.0 | edoburu/django-categories-i18n,edoburu/django-categories-i18n | import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
- def get_queryset(self):
- # Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
- # Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
- return self._queryset_class(self.model, using=self._db).order_by(
- self.tree_id_attr, self.left_attr
- )
- | Remove remaining django-mptt 0.7 compatibility code | ## Code Before:
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
## Instruction:
Remove remaining django-mptt 0.7 compatibility code
## Code After:
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
|
e775613d43dac702565cf266d9995c9cd706d7c8 | pwndbg/commands/cpsr.py | pwndbg/commands/cpsr.py | from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
| from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
| Add documentation for the CPSR command | Add documentation for the CPSR command
| Python | mit | 0xddaa/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,zachriggle/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,anthraxx/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,chubbymaggie/pwndbg,chubbymaggie/pwndbg,disconnect3d/pwndbg,zachriggle/pwndbg,0xddaa/pwndbg,cebrusfs/217gdb,cebrusfs/217gdb,0xddaa/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg | from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
+ 'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
| Add documentation for the CPSR command | ## Code Before:
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
## Instruction:
Add documentation for the CPSR command
## Code After:
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
|
91eca37144d0c378761e47c143e66a79af37c226 | repo_manage/forms.py | repo_manage/forms.py |
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
| Fix IntegrityError and DoesNotExist 500s | Fix IntegrityError and DoesNotExist 500s
| Python | mit | vault/bugit,vault/bugit,vault/bugit |
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
- print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
- def save(self, **kwargs):
+ def clean(self):
+ cleaned_data = super(CollaborationForm, self).clean()
+ self.instance.full_clean()
+ return cleaned_data
+
+ def clean_user(self):
username = self.cleaned_data['user']
+ user = None
+ try:
- user = User.objects.get(username=username)
+ user = User.objects.get(username=username)
- self.instance.user = user
+ self.instance.user = user
-
- return super(CollaborationForm, self).save(**kwargs)
-
+ except User.DoesNotExist:
+ raise forms.ValidationError("User %(username_s does not exist",
+ params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
| Fix IntegrityError and DoesNotExist 500s | ## Code Before:
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
## Instruction:
Fix IntegrityError and DoesNotExist 500s
## Code After:
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
f6ddd5c4d79ada59d9db4b467849d9b52c5fef75 | landlab/field/__init__.py | landlab/field/__init__.py | from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
| from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
| Add GraphFields to package import. | Add GraphFields to package import.
| Python | mit | cmshobe/landlab,cmshobe/landlab,cmshobe/landlab,RondaStrauch/landlab,amandersillinois/landlab,RondaStrauch/landlab,landlab/landlab,Carralex/landlab,RondaStrauch/landlab,landlab/landlab,amandersillinois/landlab,csherwood-usgs/landlab,Carralex/landlab,Carralex/landlab,csherwood-usgs/landlab,landlab/landlab | from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
+ from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
- 'FieldError', 'GroupError', 'GroupSizeError']
+ 'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
| Add GraphFields to package import. | ## Code Before:
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
## Instruction:
Add GraphFields to package import.
## Code After:
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
75f236f8fd0ba368197da3070002b60233a01d49 | tests/test_track_bed.py | tests/test_track_bed.py |
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
unittest.TextTestRunner(verbosity=2).run(suite)
|
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
def setUp(self):
self.__input_file = os.path.join(
'data', 'bed', 'correct.bed'
)
self.__output_file = os.path.join(
'data', 'bed', 'test.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def tearDown(self):
os.unlink(self.__output_file)
def test_write(self):
"""
Check if BED records are written in the correct way.
"""
bed_input = Reader(self.__input_file)
with Writer(self.__output_file) as bed_output:
for record in bed_input.records():
bed_output.write(record)
# check if the lines are identical
with open(self.__input_file) as original_file, \
open(self.__output_file) as written_file:
for x, y in izip(original_file, written_file):
self.assertEqual(x, y)
| Test routines to the BED writer added | Test routines to the BED writer added
| Python | mit | gtamazian/Chromosomer |
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
-
+ from chromosomer.track.bed import Writer
+ from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
- suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
- unittest.TextTestRunner(verbosity=2).run(suite)
+ class TestBedWriter(unittest.TestCase):
+ def setUp(self):
+ self.__input_file = os.path.join(
+ 'data', 'bed', 'correct.bed'
+ )
+ self.__output_file = os.path.join(
+ 'data', 'bed', 'test.bed'
+ )
+ # silence the logging messages
+ logging.disable(logging.ERROR)
+
+ def tearDown(self):
+ os.unlink(self.__output_file)
+
+ def test_write(self):
+ """
+ Check if BED records are written in the correct way.
+ """
+ bed_input = Reader(self.__input_file)
+ with Writer(self.__output_file) as bed_output:
+ for record in bed_input.records():
+ bed_output.write(record)
+
+ # check if the lines are identical
+ with open(self.__input_file) as original_file, \
+ open(self.__output_file) as written_file:
+ for x, y in izip(original_file, written_file):
+ self.assertEqual(x, y)
+ | Test routines to the BED writer added | ## Code Before:
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
unittest.TextTestRunner(verbosity=2).run(suite)
## Instruction:
Test routines to the BED writer added
## Code After:
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
def setUp(self):
self.__input_file = os.path.join(
'data', 'bed', 'correct.bed'
)
self.__output_file = os.path.join(
'data', 'bed', 'test.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def tearDown(self):
os.unlink(self.__output_file)
def test_write(self):
"""
Check if BED records are written in the correct way.
"""
bed_input = Reader(self.__input_file)
with Writer(self.__output_file) as bed_output:
for record in bed_input.records():
bed_output.write(record)
# check if the lines are identical
with open(self.__input_file) as original_file, \
open(self.__output_file) as written_file:
for x, y in izip(original_file, written_file):
self.assertEqual(x, y)
|
9e666e97b07d7c08e434791a061086010da6e6eb | main.py | main.py |
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
|
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
| Add ability to get the latest TwoHeadlines tweet | Add ability to get the latest TwoHeadlines tweet
| Python | mit | underyx/TheMajorNews |
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
+ def get_latest_tweet(token):
+ parameters = {'screen_name': 'TwoHeadlines',
+ 'count': 1,
+ 'trim_user': True}
+
+ headers = {'Authorization': 'Bearer ' + token}
+
+ r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
+ params=parameters, headers=headers)
+
+ return r.json(encoding='utf8')[0]['text']
+
+
def main():
bearer_token = get_access_token()
+ latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
| Add ability to get the latest TwoHeadlines tweet | ## Code Before:
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
## Instruction:
Add ability to get the latest TwoHeadlines tweet
## Code After:
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
789ac1de1e94eda1224fb314ccad14c061c58ad4 | pact/group.py | pact/group.py | from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts=None):
self._pacts = [] if pacts is None else list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| Create empty PactGroup if no arguments given | Create empty PactGroup if no arguments given | Python | bsd-3-clause | vmalloc/pact | from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
- def __init__(self, pacts):
+ def __init__(self, pacts=None):
- self._pacts = list(pacts)
+ self._pacts = [] if pacts is None else list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| Create empty PactGroup if no arguments given | ## Code Before:
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
## Instruction:
Create empty PactGroup if no arguments given
## Code After:
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts=None):
self._pacts = [] if pacts is None else list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
|
d1ec190f1a4dc84db0540481f2489f1db8421799 | oemof_pg/db.py | oemof_pg/db.py | from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=keyring.get_password(
cfg.get("postGIS", "database"),
cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
| from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
pw = keyring.get_password(cfg.get("postGIS", "database"),
cfg.get("postGIS", "username"))
if pw is None:
try: pw = cfg.get("postGIS", "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=pw,
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
| Enable specifying the password in `config.ini` | Enable specifying the password in `config.ini`
| Python | mit | oemof/oemof.db | + from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
+ pw = keyring.get_password(cfg.get("postGIS", "database"),
+ cfg.get("postGIS", "username"))
+
+ if pw is None:
+ try: pw = cfg.get("postGIS", "pw")
+ except option:
+ print("Unable to find the database password in " +
+ "the oemof config or keyring." +
+ "\nExiting.")
+ exit(-1)
+ except section:
+ print("Unable to find the 'postGIS' section in oemof's config." +
+ "\nExiting.")
+ exit(-1)
+
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
+ passwd=pw,
- passwd=keyring.get_password(
- cfg.get("postGIS", "database"),
- cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
+
return engine.connect()
| Enable specifying the password in `config.ini` | ## Code Before:
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=keyring.get_password(
cfg.get("postGIS", "database"),
cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
## Instruction:
Enable specifying the password in `config.ini`
## Code After:
from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
pw = keyring.get_password(cfg.get("postGIS", "database"),
cfg.get("postGIS", "username"))
if pw is None:
try: pw = cfg.get("postGIS", "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=pw,
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
|
901a47adf6726d50c01ac743e9661c0caac2b555 | test_openfolder.py | test_openfolder.py | import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
| import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
| Check to ensure the excpetions return the text we expect. | Check to ensure the excpetions return the text we expect.
| Python | mit | golliher/dg-tickler-file | import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
- with pytest.raises(Exception):
+ with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
+ assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
+
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
- with pytest.raises(Exception):
+ with pytest.raises(Exception) as excinfo:
- result = open_folder("/")
+ open_folder("/")
+ assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
| Check to ensure the excpetions return the text we expect. | ## Code Before:
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
## Instruction:
Check to ensure the excpetions return the text we expect.
## Code After:
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
|
ea972c89cd7abe4fdb772ce359dd9acd83817242 | tests/test.py | tests/test.py | from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| Add http_transport and websocket_transport methods | Add http_transport and websocket_transport methods
| Python | apache-2.0 | devicehive/devicehive-python | from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
+ def http_transport(self):
+ return self._transport_name == 'http'
+
+ def websocket_transport(self):
+ return self._transport_name == 'websocket'
+
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| Add http_transport and websocket_transport methods | ## Code Before:
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
## Instruction:
Add http_transport and websocket_transport methods
## Code After:
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
42a4a8b4480bc481e0467ae7ee46c60400d63f77 | theme-installer.py | theme-installer.py | import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_()) | import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_()) | Create tmp directory if it doesn't exist | Create tmp directory if it doesn't exist
| Python | lgpl-2.1 | kmklr72/LMMS-Theme-Installer | import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
+
+ # Create tmp directory if it doesn't exist
+ if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
+ os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_()) | Create tmp directory if it doesn't exist | ## Code Before:
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())
## Instruction:
Create tmp directory if it doesn't exist
## Code After:
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_()) |
dd40b392b73ddc1bcf88d932418b4f891bcc6a89 | twine/__init__.py | twine/__init__.py | from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
| from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
| Allow star imports from twine | Allow star imports from twine
Unicode literals on Python 2 prevent people from being able to use
from twine import *
Closes gh-209
(cherry picked from commit c2cd72d0f4ff4d380845333fbfaaf2c92d6a5674)
| Python | apache-2.0 | pypa/twine | from __future__ import absolute_import, division, print_function
- from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
| Allow star imports from twine | ## Code Before:
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
## Instruction:
Allow star imports from twine
## Code After:
from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
0c84f6dd314ea62019356b09363f98118a4da776 | txircd/factory.py | txircd/factory.py | from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False) | from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False) | Use built-in IP address functionality to unmap IPv4 addresses | Use built-in IP address functionality to unmap IPv4 addresses
| Python | bsd-3-clause | Heufneutje/txircd | from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
- import re
+ from typing import Union
+ def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
- ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
- def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
- mapped = ipv4MappedAddr.match(ip)
- if mapped:
- return mapped.group(1)
- return ip
+ addr = ip_address(ip)
+ if addr.ipv4_mapped is None:
+ return addr
+ return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False) | Use built-in IP address functionality to unmap IPv4 addresses | ## Code Before:
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)
## Instruction:
Use built-in IP address functionality to unmap IPv4 addresses
## Code After:
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False) |
bee5ed1d9815a4c4291179d0de3ec54fe467b219 | project.py | project.py | import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
| import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
| Save sessions in JSON format instead of pickle. | Save sessions in JSON format instead of pickle.
| Python | mit | shaurz/devo | - import os, cPickle as pickle
+ import os
+ import json
+
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
- session = pickle.loads(f.read())
+ session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
- data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
+ data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
| Save sessions in JSON format instead of pickle. | ## Code Before:
import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
## Instruction:
Save sessions in JSON format instead of pickle.
## Code After:
import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
|
a587d48694690957934a159bad98cacd3f012a6a | cms/tests/test_externals.py | cms/tests/test_externals.py | from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
| from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
| Change contextlib import to handle the new location in Python 3. | Change contextlib import to handle the new location in Python 3.
| Python | bsd-3-clause | danielsamuels/cms,jamesfoley/cms,jamesfoley/cms,jamesfoley/cms,dan-gamble/cms,danielsamuels/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,danielsamuels/cms,lewiscollard/cms | from django.test import TestCase
from ..externals import External
+ try:
- from contextlib import GeneratorContextManager
+ from contextlib import GeneratorContextManager
+ except ImportError:
+ from contextlib import _GeneratorContextManager as GeneratorContextManager
+
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
| Change contextlib import to handle the new location in Python 3. | ## Code Before:
from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
## Instruction:
Change contextlib import to handle the new location in Python 3.
## Code After:
from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
|
278069a0637f7f329ceaff0975e3b95d609a7b9f | cosmoscope/cli.py | cosmoscope/cli.py |
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
| Improve the command line interface | Improve the command line interface
| Python | mit | cosmoscope/cosmoscope |
"""Console script for cosmoscope."""
import sys
import click
+ from .core.server import launch
+
@click.command()
+ @click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
+ @click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
+ def main(server_address=None, publisher_address=None):
+ """Console interface for the cosmoscope server."""
+ launch(server_address, publisher_address)
- def main(args=None):
- """Console script for cosmoscope."""
- click.echo("Replace this message by putting your code into "
- "cosmoscope.cli.main")
- click.echo("See click documentation at http://click.pocoo.org/")
- return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
| Improve the command line interface | ## Code Before:
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
## Instruction:
Improve the command line interface
## Code After:
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
4efa9c87264eabb6712f4fb787ab0de42be18de6 | places/urls.py | places/urls.py | from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
| from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
| Move places urlpatterns to Django 2.0 preferred method | Move places urlpatterns to Django 2.0 preferred method
| Python | mit | evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca | - from django.conf.urls import url
+ from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
- url(r'^$', views.IndexView.as_view(), name='index'),
+ path('', views.IndexView.as_view(), name='index'),
- url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
+ path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
| Move places urlpatterns to Django 2.0 preferred method | ## Code Before:
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
## Instruction:
Move places urlpatterns to Django 2.0 preferred method
## Code After:
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
a9dc245f99e5c29f3b11cadc77dcfa0f44274b74 | ctfbackend/backend/urls.py | ctfbackend/backend/urls.py | from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
| from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
| Add login_required decorator to protected sites | Add login_required decorator to protected sites
| Python | agpl-3.0 | c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend | from django.conf.urls import url, include
- from django.http import HttpResponseRedirect
- from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
+ from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
- url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
+ url(r'^accounts/logout/$',
+ auth_views.logout,
+ {'next_page': '/'},
+ name='auth_logout'),
+ url(r'^accounts/',
- url(r'^accounts/', include('registration.backends.hmac.urls')),
+ include('registration.backends.hmac.urls')),
# Backend urls
- url(r'^$', views.HomeView.as_view(), name='home'),
- url(r'^submit$', views.SubmitView.as_view(), name='submit'),
- url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
- url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
- url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
- url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
+ url(r'^$',
+ views.HomeView.as_view(),
+ name='home'),
+ url(r'^submit$',
+ login_required(views.SubmitView.as_view()),
+ name='submit'),
+ url(r'^scores$',
+ views.ScoreboardView.as_view(),
+ name='scores'),
+ url(r'^chals$',
+ login_required(views.ChallengesView.as_view()),
+ name='chals'),
+ url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
+ login_required(views.ChallengesView.as_view()),
+ name='buy_hint'),
+ url(r'^stats$',
+ views.StatisticsView.as_view(),
+ name='stats'),
]
| Add login_required decorator to protected sites | ## Code Before:
from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
## Instruction:
Add login_required decorator to protected sites
## Code After:
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
|
6ff6f7ecf75551dc49685c4bb0501e6f4b2de854 | packages/Python/lldbsuite/test/expression_command/vector_of_enums/TestVectorOfEnums.py | packages/Python/lldbsuite/test/expression_command/vector_of_enums/TestVectorOfEnums.py |
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
| Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category. | Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
See differential https://reviews.llvm.org/D59847 for initial change that this fixes
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@357210 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb |
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
+ @add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
| Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category. | ## Code Before:
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
## Instruction:
Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
## Code After:
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
df7e834b8418aeeeaee7fb90b953468c2490b93d | pypiup/cli.py | pypiup/cli.py | import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
| import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
| Add Ascii Art & Version Number | Add Ascii Art & Version Number
| Python | bsd-2-clause | ekonstantinidis/pypiup | + import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
+
+ print("\n ______ __ __ ______ __ __ __ ______ ")
+ print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
+ print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
+ print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
+ print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
+ print("\nhttps://github.com/ekonstantinidis/pypiup")
+ print("Version %s" % __init__.__version__)
+
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
| Add Ascii Art & Version Number | ## Code Before:
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
## Instruction:
Add Ascii Art & Version Number
## Code After:
import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
|
6c9a3e5133115a4724c8499380ee690a9cca0552 | pmagpy/__init__.py | pmagpy/__init__.py | from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
| from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
| Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….) | Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….) | Python | bsd-3-clause | lfairchild/PmagPy,lfairchild/PmagPy,Caoimhinmg/PmagPy,lfairchild/PmagPy,Caoimhinmg/PmagPy,Caoimhinmg/PmagPy | from __future__ import absolute_import
+ import sys
+ if sys.version_info <= (3,):
+ raise Exception("""
+ You are running Python {}.
+ This version of pmagpy is only compatible with Python 3.
+ Make sure you have pip >= 9.0 to avoid this kind of issue,
+ as well as setuptools >= 24.2:
+
+ $ pip install pip setuptools --upgrade
+
+ Then you should be able to download the correct version of pmagpy:
+
+ $ pip install pmagpy --upgrade
+
+ If this still gives you an error, please report the issue:
+ https://github.com/PmagPy/PmagPy/issues
+
+ Thanks!
+
+ """.format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
| Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….) | ## Code Before:
from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
## Instruction:
Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….)
## Code After:
from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
|
d3cbcfa3d134ef7ce158f229eff75a83418afc52 | tools/dmqmc/extract_n_k.py | tools/dmqmc/extract_n_k.py | '''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
| '''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
| Write the extraction script properly. | Write the extraction script properly.
| Python | lgpl-2.1 | hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,hande-qmc/hande | - '''Extract the momentum distribution from a analysed DMQMC simulation.'''
+ '''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
- # [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
+ def main(args):
- if (len(sys.argv) < 2):
- print ("Usage: extract_n_k.py file bval")
- sys.exit()
- bval = float(sys.argv[2])
+ if (len(sys.argv) < 2):
+ print ("Usage: extract_n_k.py file bval")
+ sys.exit()
- data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
+ bval = float(sys.argv[2])
+ data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
- mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
- mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
- vals = [float(c.split('_')[1]) for c in mom]
+ mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
+ mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
+ vals = [float(c.split('_')[1]) for c in mom]
- n_k = (data[mom].transpose()).values
- n_k_error = (data[mome].transpose()).values
- n_k_error[np.isnan(n_k_error)] = 0
- frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
- n_k_error.ravel()})
- print (frame.to_string(index=False))
+ n_k = (data[mom].transpose()).values
+ n_k_error = (data[mome].transpose()).values
+ n_k_error[np.isnan(n_k_error)] = 0
+ frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
+ n_k_error.ravel()})
+ print (frame.to_string(index=False))
+
+
+ if __name__ == '__main__':
+
+ main(sys.argv[1:])
+ | Write the extraction script properly. | ## Code Before:
'''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
## Instruction:
Write the extraction script properly.
## Code After:
'''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
|
bea80411c13ed72b1e7d5a5ac79fdba64b4b4661 | benchmarks/benchmarks/sparse_csgraph_djisktra.py | benchmarks/benchmarks/sparse_csgraph_djisktra.py | """benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
| """benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
| Add star graph for sparse.csgraph.dijkstra benchmark | ENH: Add star graph for sparse.csgraph.dijkstra benchmark
| Python | bsd-3-clause | scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy | """benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
- [True, False]
+ [True, False],
+ ['random', 'star']
]
- param_names = ['n', 'min_only']
+ param_names = ['n', 'min_only', 'format']
- def setup(self, n, min_only):
+ def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
+ if format == 'random':
- # make a random connectivity matrix
+ # make a random connectivity matrix
- data = scipy.sparse.rand(n, n, density=0.2, format='csc',
+ data = scipy.sparse.rand(n, n, density=0.2, format='csc',
- random_state=42, dtype=np.bool_)
+ random_state=42, dtype=np.bool_)
- data.setdiag(np.zeros(n, dtype=np.bool_))
+ data.setdiag(np.zeros(n, dtype=np.bool_))
- self.data = data
+ self.data = data
+ elif format == 'star':
+ rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
+ cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
+ weights = [i + 1 for i in range(n - 1)] * 2
+ self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
+ shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
- def time_dijkstra_multi(self, n, min_only):
+ def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
| Add star graph for sparse.csgraph.dijkstra benchmark | ## Code Before:
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
## Instruction:
Add star graph for sparse.csgraph.dijkstra benchmark
## Code After:
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
|
bca6ca83ce43f6d9b96ac590bda9c6253384ab69 | winthrop/people/viaf.py | winthrop/people/viaf.py | import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
| import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
| Refactor for other search options later (search -> suggest) | Refactor for other search options later (search -> suggest)
| Python | apache-2.0 | Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django | + import json
import requests
- from django.conf import settings
class ViafAPI(object):
- """Wrapper for ViafAPI"""
+ """Wrapper for Viaf API"""
def __init__(self):
+ self.base_url = "https://www.viaf.org/"
- default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
- self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
- def search(self, query):
+ def suggest(self, query):
"""Do a GET request to pull in JSON"""
+ url = self.base_url + "viaf/AutoSuggest?query="
- r = requests.get('%s%s' % (self.base_url, query))
+ r = requests.get("%s%s" % (url, query))
- # Check to make sure we have a sucesss (i.e. a 200 code)
- if 200 <= r.status_code < 300:
+ # If result is empty, return an empty list instead of None
+ if not (r.json())['result']:
+ return json.dumps({'result': []})
+
- return r.json()
+ return r.json()
- else:
- return None
@classmethod
def uri_from_id(cls, viaf_id):
- return 'https://viaf.org/viaf/%s/' % viaf_id
+ return "https://viaf.org/viaf/%s/" % viaf_id
| Refactor for other search options later (search -> suggest) | ## Code Before:
import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
## Instruction:
Refactor for other search options later (search -> suggest)
## Code After:
import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
|
e503ef58e801cfbc3ba72ba84bc2150c79a401d3 | girder/molecules/molecules/models/geometry.py | girder/molecules/molecules/models/geometry.py | from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
| from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
| Save creatorId as well for geometries | Save creatorId as well for geometries
This is to keep track of the creator, even when the provenance
is not the user.
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com>
| Python | bsd-3-clause | OpenChemistry/mongochemserver | from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
- 'cjson': cjson
+ 'cjson': cjson,
+ 'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
| Save creatorId as well for geometries | ## Code Before:
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
## Instruction:
Save creatorId as well for geometries
## Code After:
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
|
5006ba3124cd80a4529b9ed645aa8981d06a9886 | publishconf.py | publishconf.py | from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
| Stop generate feeds when publishing | Stop generate feeds when publishing
| Python | mit | andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org | - from __future__ import unicode_literals
-
- # This file is only used if you use `make publish` or
- # explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
- FEED_ALL_ATOM = 'feeds/all.atom.xml'
- CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
-
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
| Stop generate feeds when publishing | ## Code Before:
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
## Instruction:
Stop generate feeds when publishing
## Code After:
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
ddd3373ce078cf9bf40da7ebd8591995e819b750 | phell/utils.py | phell/utils.py | import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
| import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
| Add function to swap byte order | Add function to swap byte order
| Python | mit | bjoernricks/phell | import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
+ def swap_bytes(value):
+ if sys.version_info.major < 3:
+ return "".join([bytes(b) for b in reversed(value)])
+ return bytes(reversed(value))
+
# vim: set ts=4 sw=4 tw=80:
| Add function to swap byte order | ## Code Before:
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
## Instruction:
Add function to swap byte order
## Code After:
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
|
c8ffd1fc4c4e06cd71e86d1d48749a3fe527a54e | biosys/apps/main/tests/api/test_serializers.py | biosys/apps/main/tests/api/test_serializers.py | from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
| from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
| Fix test to accommodate change of error message. | Fix test to accommodate change of error message.
| Python | apache-2.0 | gaiaresources/biosys,parksandwildlife/biosys,gaiaresources/biosys,serge-gaia/biosys,ropable/biosys,parksandwildlife/biosys,serge-gaia/biosys,ropable/biosys,gaiaresources/biosys,ropable/biosys,serge-gaia/biosys,parksandwildlife/biosys | from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
- self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
+ self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
| Fix test to accommodate change of error message. | ## Code Before:
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
## Instruction:
Fix test to accommodate change of error message.
## Code After:
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
|
27c9da3129c6fbdd8d54276cf054c1f46e665aaf | flask_app.py | flask_app.py | from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
| import flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
| Remove trailing slashes, add origin url to responses | Remove trailing slashes, add origin url to responses
| Python | bsd-3-clause | talavis/kimenu | - from flask import Flask, abort, jsonify
- from flask_caching import Cache
- from flask_cors import CORS
+ import flask
+ import flask_caching
+ import flask_cors
import main
import slack
- app = Flask(__name__)
+ app = flask.Flask(__name__)
- cache = Cache(app, config={"CACHE_TYPE": "simple"})
+ cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
- cors = CORS(app, resources={r"/*": {"origins": "*"}})
+ cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
- @app.route("/api/")
+ @app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
- return jsonify({"entities": ["restaurant"]})
+ return flask.jsonify({"entities": ["restaurant"],
+ "url": flask.url_for("list_entities", _external=True)})
- @app.route("/api/restaurant/")
+ @app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
- return jsonify({"restaurants": main.list_restaurants()})
+ return flask.jsonify({"restaurants": main.list_restaurants(),
+ "url": flask.url_for("list_restaurants", _external=True)})
- @app.route("/api/restaurant/<name>/")
+ @app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
- return jsonify({"restaurant": data})
+ return flask.jsonify({"restaurant": data,
+ "url": flask.url_for("get_restaurant", name=name, _external=True)})
| Remove trailing slashes, add origin url to responses | ## Code Before:
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
## Instruction:
Remove trailing slashes, add origin url to responses
## Code After:
import flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
|
e2f83a6a5d43ebc52d03d4059a7526a579a425c1 | darkoob/social/models.py | darkoob/social/models.py | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User) | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User) | Set User Profile Unicode Function | Set User Profile Unicode Function
| Python | mit | s1na/darkoob,s1na/darkoob,s1na/darkoob | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
+ def __unicode__(self):
+ return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User) | Set User Profile Unicode Function | ## Code Before:
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
## Instruction:
Set User Profile Unicode Function
## Code After:
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User) |
9a8fd944fb78d582f06d7165f097c1e54cb870dc | project/asylum/mixins.py | project/asylum/mixins.py | from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
| from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
| Add a mixin for calling full_clean() on save() | Add a mixin for calling full_clean() on save()
| Python | mit | ojousima/asylum,rambo/asylum,HelsinkiHacklab/asylum,ojousima/asylum,hacklab-fi/asylum,hacklab-fi/asylum,jautero/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,rambo/asylum,jautero/asylum,rambo/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,ojousima/asylum,rambo/asylum,ojousima/asylum | from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
-
+ """Makes sure saves and deletes go via transactions and version control
+ even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
+
+ class CleanSaveMixin(object):
+ """Makes sure clean() is checked before object is saved"""
+ def save(self, *args, **kwargs):
+ if not kwargs.pop('skip_clean', False):
+ self.full_clean()
+ return super().save(*args, **kwargs)
+ | Add a mixin for calling full_clean() on save() | ## Code Before:
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
## Instruction:
Add a mixin for calling full_clean() on save()
## Code After:
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
|
31e4da5e782c29d7d0c893a3fc9af48260c50a3a | src/ansible/views.py | src/ansible/views.py | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
| from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
| Save form data to DB on each step | Save form data to DB on each step
| Python | bsd-3-clause | lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
- from ansible.models import Github
+ from ansible.models import Github, Playbook
+ import sys
+
def index(request):
return HttpResponse("200")
+
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
+ def get_form_step_data(self, form):
+ data = {}
+ if self.get_form_prefix() == '0':
+ github = Github()
+ github.repository = form.data.dict()['0-repository']
+ github.username = form.data.dict()['0-username']
+ github.save()
+
+ if self.get_form_prefix() == '1':
+ playbook = Playbook()
+ playbook.name = form.data.dict()['1-name']
+ playbook.inventory = form.data.dict()['1-inventory']
+ playbook.user = form.data.dict()['1-user']
+ playbook.save()
+
+ return form.data
+
+
def done(self, form_list, **kwargs):
- form_data = {}
- for form in form_list:
- form.save()
-
return HttpResponseRedirect('/ansible')
+ | Save form data to DB on each step | ## Code Before:
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
## Instruction:
Save form data to DB on each step
## Code After:
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
|
cd48c66406c39ca6dd6bdc6ba7c2be0df623e6ae | src/leap/mx/check_recipient_access.py | src/leap/mx/check_recipient_access.py |
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
| Fix return codes for check recipient | Fix return codes for check recipient
| Python | agpl-3.0 | meskio/leap_mx,meskio/leap_mx,leapcode/leap_mx,micah/leap_mx,leapcode/leap_mx,micah/leap_mx |
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
+ # For more info, see:
+ # http://www.postfix.org/tcp_table.5.html
+ # http://www.postfix.org/access.5.html
if value is None:
- self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
+ self.sendCode(500, postfix.quote("REJECT"))
else:
+ self.sendCode(200, postfix.quote("OK"))
- # We do not send the value in this case
- self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
| Fix return codes for check recipient | ## Code Before:
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
## Instruction:
Fix return codes for check recipient
## Code After:
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
aefd972c7fb423396f59da03a1d460cd3559d1e1 | duplicate_questions/data/tokenizers/word_tokenizers.py | duplicate_questions/data/tokenizers/word_tokenizers.py | class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
| class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
| Remove unnecesssary comments of old function signatures | Remove unnecesssary comments of old function signatures
| Python | mit | nelson-liu/paraphrase-id-tensorflow,nelson-liu/paraphrase-id-tensorflow | class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
- # def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
- # def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
- # def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
| Remove unnecesssary comments of old function signatures | ## Code Before:
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
## Instruction:
Remove unnecesssary comments of old function signatures
## Code After:
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
|
d9c677a35d18a878ef8d253a9453e93da3341e96 | runTwircBot.py | runTwircBot.py |
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
|
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
| Add extremely basic template for command modules | Add extremely basic template for command modules
| Python | mit | johnmarcampbell/twircBot |
from src.TwircBot import TwircBot
+ from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
+ module = CommandModule()
+
bot.print_config()
- bot.start()
+ # bot.start()
| Add extremely basic template for command modules | ## Code Before:
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
## Instruction:
Add extremely basic template for command modules
## Code After:
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
|
9f3356d06067dbcc77a79afee6bccf80600dab28 | server/systeminfo.py | server/systeminfo.py | import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
| import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
| Add a method to get the idle time. Also data are directly readed in /proc/uptime. | Add a method to get the idle time. Also data are directly readed in /proc/uptime.
| Python | mit | juliendelplanque/raspirestmonitor | import subprocess
+ from datetime import timedelta
def get_uptime():
- """ Return the uptime of the system as a str using the command: $ uptime
+ """ Return the uptime of the system as a timedelta object.
"""
- proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
+ proc = subprocess.Popen(["cat /proc/uptime"],
+ stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
- uptime = output.decode("utf-8").split(",")[0]
+ uptime = int(output.decode("utf-8").split()[0].split(".")[0])
- uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
- return uptime
+ s = uptime % 60
+ m = int((uptime/60) % 60)
+ h = int((uptime/(60*60) % 24))
+ return timedelta(hours=h, minutes=m, seconds=s)
+ def get_idletime():
+ """ Return the idle time of the system as a timedelta object.
+ """
+ proc = subprocess.Popen(["cat /proc/uptime"],
+ stdout=subprocess.PIPE, shell=True)
+ (output, error) = proc.communicate()
+ idletime = int(output.decode("utf-8").split()[1].split(".")[0])
+ s = idletime % 60
+ m = int((idletime/60) % 60)
+ h = int((idletime/(60*60) % 24))
+ return timedelta(hours=h, minutes=m, seconds=s)
+ | Add a method to get the idle time. Also data are directly readed in /proc/uptime. | ## Code Before:
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
## Instruction:
Add a method to get the idle time. Also data are directly readed in /proc/uptime.
## Code After:
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
329e74f280537aab41d5b810f8650bfd8d6d81f5 | tests/test_generate_files.py | tests/test_generate_files.py |
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
| Add teardown specific to the former TestCase class | Add teardown specific to the former TestCase class
| Python | bsd-3-clause | michaeljoseph/cookiecutter,christabor/cookiecutter,cguardia/cookiecutter,janusnic/cookiecutter,michaeljoseph/cookiecutter,cguardia/cookiecutter,vincentbernat/cookiecutter,drgarcia1986/cookiecutter,Vauxoo/cookiecutter,cichm/cookiecutter,benthomasson/cookiecutter,0k/cookiecutter,terryjbates/cookiecutter,atlassian/cookiecutter,lucius-feng/cookiecutter,Springerle/cookiecutter,hackebrot/cookiecutter,moi65/cookiecutter,0k/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,jhermann/cookiecutter,ramiroluz/cookiecutter,kkujawinski/cookiecutter,agconti/cookiecutter,sp1rs/cookiecutter,lgp171188/cookiecutter,kkujawinski/cookiecutter,jhermann/cookiecutter,venumech/cookiecutter,sp1rs/cookiecutter,luzfcb/cookiecutter,janusnic/cookiecutter,vintasoftware/cookiecutter,atlassian/cookiecutter,stevepiercy/cookiecutter,pjbull/cookiecutter,ionelmc/cookiecutter,takeflight/cookiecutter,letolab/cookiecutter,letolab/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,luzfcb/cookiecutter,audreyr/cookiecutter,takeflight/cookiecutter,lgp171188/cookiecutter,agconti/cookiecutter,vintasoftware/cookiecutter,Springerle/cookiecutter,cichm/cookiecutter,ionelmc/cookiecutter,benthomasson/cookiecutter,lucius-feng/cookiecutter,audreyr/cookiecutter,terryjbates/cookiecutter,foodszhang/cookiecutter,foodszhang/cookiecutter,vincentbernat/cookiecutter,ramiroluz/cookiecutter,tylerdave/cookiecutter,tylerdave/cookiecutter,nhomar/cookiecutter,dajose/cookiecutter,stevepiercy/cookiecutter,nhomar/cookiecutter,willingc/cookiecutter,Vauxoo/cookiecutter,drgarcia1986/cookiecutter,moi65/cookiecutter,christabor/cookiecutter,dajose/cookiecutter |
+ from __future__ import unicode_literals
+ import os
import pytest
+
from cookiecutter import generate
from cookiecutter import exceptions
+ from cookiecutter import utils
+ @pytest.fixture(scope="function")
+ def clean_system_remove_additional_folders(request, clean_system):
+ def remove_additional_folders():
+ if os.path.exists('inputpizzä'):
+ utils.rmtree('inputpizzä')
+ if os.path.exists('inputgreen'):
+ utils.rmtree('inputgreen')
+ if os.path.exists('inputbinary_files'):
+ utils.rmtree('inputbinary_files')
+ if os.path.exists('tests/custom_output_dir'):
+ utils.rmtree('tests/custom_output_dir')
+ if os.path.exists('inputpermissions'):
+ utils.rmtree('inputpermissions')
+ request.addfinalizer(remove_additional_folders)
+
+
- @pytest.mark.usefixtures("clean_system")
+ @pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
| Add teardown specific to the former TestCase class | ## Code Before:
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
## Instruction:
Add teardown specific to the former TestCase class
## Code After:
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
56bc9c79522fd534f2a756bd5a18193635e2adae | tests/test_default_security_groups.py | tests/test_default_security_groups.py | """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| Fix missing mock and rename variable | tests: Fix missing mock and rename variable
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
+ @mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
- def test_default_security_groups(mock_properties):
+ def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
+ test_sg = {
+ 'myapp': [
- test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
+ {'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
+ ]
+ }
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
- test_sg = SpinnakerSecurityGroup()
+ sg = SpinnakerSecurityGroup()
- ingress = test_sg.update_default_securitygroup_rules()
+ ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| Fix missing mock and rename variable | ## Code Before:
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
## Instruction:
Fix missing mock and rename variable
## Code After:
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
|
84929e01bfb9236fd0f51d82ee514d513d018408 | triangle/triangle.py | triangle/triangle.py | class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
| class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
| Sort dimensins to reduce code | Sort dimensins to reduce code
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
- self.dims = dims
+ self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
- if a == b and b == c:
+ if a == b and b == c: # implies a == c
return "equilateral"
- elif a == b or b == c or a == c:
+ elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
- return False
+ raise ValueError("Triangles have 3 sides")
- a, b, c = dims
+ a, b, c = sorted(dims)
- return (a > 0 and b > 0 and c > 0) \
+ return a > 0 and a + b > c
- and (a + b > c and a + c > b and b + c > a)
| Sort dimensins to reduce code | ## Code Before:
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
## Instruction:
Sort dimensins to reduce code
## Code After:
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
3dd23df07d7d1f84e361c87345aafcfefeff636a | jsk_2016_01_baxter_apc/node_scripts/control_vacuum_gripper.py | jsk_2016_01_baxter_apc/node_scripts/control_vacuum_gripper.py |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
| Order agonistic options to control vacuum gripper | Order agonistic options to control vacuum gripper
| Python | bsd-3-clause | pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
- parser.add_argument('action', type=str, choices=['start', 'stop'])
- limbs = ['left', 'right']
- parser.add_argument('limb', type=str, choices=limbs, nargs='?')
+ parser.add_argument('-l', '--left', action='store_true',
+ help='Control left gripper')
+ parser.add_argument('-r', '--right', action='store_true',
+ help='Control right gripper')
+ parser.add_argument('-t', '--start', action='store_true',
+ help='Start vacuum gripper')
+ parser.add_argument('-p', '--stop', action='store_true',
+ help='Stop vacuum gripper')
args = parser.parse_args()
- action = args.action
- limbs = ['left', 'right'] if args.limb is None else [args.limb]
+ if args.start and not args.stop:
+ action = 'start'
+ elif args.stop:
+ action = 'stop'
+ else:
+ print('Please specify one of start or stop action.')
+ parser.print_help()
+ quit(1)
+ if args.left and not args.right:
+ limbs = ['left']
+ elif args.right:
+ limbs = ['right']
+ else:
+ limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
| Order agonistic options to control vacuum gripper | ## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
## Instruction:
Order agonistic options to control vacuum gripper
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
27b0a5b95e188a5bd77ae662bbb43e06dfde4749 | slack/views.py | slack/views.py | from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200 | from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200 | Use the id of the channel and unquote all of the text first. | Use the id of the channel and unquote all of the text first.
| Python | mit | DuaneGarber/slack-meme,joeynebula/slack-meme,tezzutezzu/slack-meme,nicolewhite/slack-meme | from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
- channel = request.args["channel_name"]
+ channel = request.args["channel_id"]
+ text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
- params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
- url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
+ url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200 | Use the id of the channel and unquote all of the text first. | ## Code Before:
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
## Instruction:
Use the id of the channel and unquote all of the text first.
## Code After:
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200 |
ddd45afa0708682bb11d606e03e38aed111d7b9c | fireplace/cards/game/all.py | fireplace/cards/game/all.py | from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
| from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
| Implement Big Banana, Deviate Banana, Rotten Banana | Implement Big Banana, Deviate Banana, Rotten Banana
| Python | agpl-3.0 | liujimj/fireplace,Ragowit/fireplace,butozerca/fireplace,butozerca/fireplace,smallnamespace/fireplace,amw2104/fireplace,smallnamespace/fireplace,beheh/fireplace,NightKev/fireplace,Meerkov/fireplace,Meerkov/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,amw2104/fireplace,jleclanche/fireplace,oftc-ftw/fireplace | from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
+
+ # Big Banana
+ class TB_006:
+ play = Buff(TARGET, "TB_006e")
+
+
+ # Deviate Banana
+ class TB_007:
+ play = Buff(TARGET, "TB_007e")
+
+
+ # Rotten Banana
+ class TB_008:
+ play = Hit(TARGET, 1)
+ | Implement Big Banana, Deviate Banana, Rotten Banana | ## Code Before:
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
## Instruction:
Implement Big Banana, Deviate Banana, Rotten Banana
## Code After:
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
|
b0bde22e3ff0d2df2773f41aeaf8eb0ba6d0fa3f | tools/getapifield.py | tools/getapifield.py |
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0] and args.default:
print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
| Allow a default value to be specified when fetching a field value | Allow a default value to be specified when fetching a field value
| Python | apache-2.0 | jskeet/gcloud-dotnet,jskeet/google-cloud-dotnet,googleapis/google-cloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet |
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
+ parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
+ elif not query[0] and args.default:
+ print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
| Allow a default value to be specified when fetching a field value | ## Code Before:
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
## Instruction:
Allow a default value to be specified when fetching a field value
## Code After:
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0] and args.default:
print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
f51915a6c373de39785d8273b2a9f6e11ff67b9e | test_dimuon.py | test_dimuon.py | from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
| from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
particles = [None]
pairs = find_pairs(particles)
assert len(pairs) == 0
| Test for no pairs from one particle | Test for no pairs from one particle
| Python | mit | benwaugh/dimuon | from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
-
+
+ def test_one_particle():
+ particles = [None]
+ pairs = find_pairs(particles)
+ assert len(pairs) == 0
+ | Test for no pairs from one particle | ## Code Before:
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
## Instruction:
Test for no pairs from one particle
## Code After:
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
particles = [None]
pairs = find_pairs(particles)
assert len(pairs) == 0
|
7f06cb8ceff3f2515f01662622e3c5149bcb8646 | xm/main.py | xm/main.py |
from __future__ import print_function
from __future__ import unicode_literals
import argparse
DEFAULT_CONFIG_FILE = '~/.config/xmrc'
def _new_argument_parser():
parser = argparse.ArgumentParser(
description='Build the appropriate make command'
)
parser.add_argument(
'-u', '--unittest', help='run unittest',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-s', '--sync', help='sync local copy on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-f', '--file', help='specify the configuration file',
default=DEFAULT_CONFIG_FILE,
)
parser.add_argument(
'--setup', help='run commands that should be run before sync',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-d', '--dep', help='install missing dependencies on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'project', metavar='project', type=str, nargs='+',
help='The selected project',
)
return parser
def main():
parsed_args = _new_argument_parser().parse_args()
print(parsed_args)
if __name__ == '__main__':
main()
|
from __future__ import print_function
from __future__ import unicode_literals
import argparse
DEFAULT_CONFIG_FILE = '~/.config/xmrc'
def _new_argument_parser():
parser = argparse.ArgumentParser(
description='Build the appropriate make command'
)
parser.add_argument(
'-u', '--unittest', help='run unittest',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-s', '--sync', help='sync local copy on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-f', '--file', help='specify the configuration file',
default=DEFAULT_CONFIG_FILE,
)
parser.add_argument(
'--setup', help='run commands that should be run before sync',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-d', '--dep', help='install missing dependencies on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-t', '--target', help='the target on which to run'
)
parser.add_argument(
'extra', metavar='extra', type=str, nargs='*',
help='Extra variables, usage vary depending on the context',
)
return parser
def main():
parsed_args = _new_argument_parser().parse_args()
print(parsed_args)
if __name__ == '__main__':
main()
| Add a --target argument and make trailling arguments context dependant | Add a --target argument and make trailling arguments context dependant
| Python | bsd-2-clause | pcadottemichaud/xm,pc-m/xm,pcadottemichaud/xm,pc-m/xm |
from __future__ import print_function
from __future__ import unicode_literals
import argparse
DEFAULT_CONFIG_FILE = '~/.config/xmrc'
def _new_argument_parser():
parser = argparse.ArgumentParser(
description='Build the appropriate make command'
)
parser.add_argument(
'-u', '--unittest', help='run unittest',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-s', '--sync', help='sync local copy on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-f', '--file', help='specify the configuration file',
default=DEFAULT_CONFIG_FILE,
)
parser.add_argument(
'--setup', help='run commands that should be run before sync',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-d', '--dep', help='install missing dependencies on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
+ '-t', '--target', help='the target on which to run'
+ )
+ parser.add_argument(
- 'project', metavar='project', type=str, nargs='+',
+ 'extra', metavar='extra', type=str, nargs='*',
- help='The selected project',
+ help='Extra variables, usage vary depending on the context',
)
return parser
def main():
parsed_args = _new_argument_parser().parse_args()
print(parsed_args)
if __name__ == '__main__':
main()
| Add a --target argument and make trailling arguments context dependant | ## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
import argparse
DEFAULT_CONFIG_FILE = '~/.config/xmrc'
def _new_argument_parser():
parser = argparse.ArgumentParser(
description='Build the appropriate make command'
)
parser.add_argument(
'-u', '--unittest', help='run unittest',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-s', '--sync', help='sync local copy on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-f', '--file', help='specify the configuration file',
default=DEFAULT_CONFIG_FILE,
)
parser.add_argument(
'--setup', help='run commands that should be run before sync',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-d', '--dep', help='install missing dependencies on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'project', metavar='project', type=str, nargs='+',
help='The selected project',
)
return parser
def main():
parsed_args = _new_argument_parser().parse_args()
print(parsed_args)
if __name__ == '__main__':
main()
## Instruction:
Add a --target argument and make trailling arguments context dependant
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
import argparse
DEFAULT_CONFIG_FILE = '~/.config/xmrc'
def _new_argument_parser():
parser = argparse.ArgumentParser(
description='Build the appropriate make command'
)
parser.add_argument(
'-u', '--unittest', help='run unittest',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-s', '--sync', help='sync local copy on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-f', '--file', help='specify the configuration file',
default=DEFAULT_CONFIG_FILE,
)
parser.add_argument(
'--setup', help='run commands that should be run before sync',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-d', '--dep', help='install missing dependencies on the server',
action='store_const', const=True, default=False,
)
parser.add_argument(
'-t', '--target', help='the target on which to run'
)
parser.add_argument(
'extra', metavar='extra', type=str, nargs='*',
help='Extra variables, usage vary depending on the context',
)
return parser
def main():
parsed_args = _new_argument_parser().parse_args()
print(parsed_args)
if __name__ == '__main__':
main()
|
ab802204d84511765a701cad48e9e22dc4e84be1 | tests/rules/conftest.py | tests/rules/conftest.py | import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True, scope="session")
def configured_cache():
cache.configure()
| import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True)
def configured_cache():
if not cache.region.is_configured:
cache.configure()
yield
cache.region.invalidate()
| Fix intermittent failures of test_guard_http_exception | Fix intermittent failures of test_guard_http_exception
Signed-off-by: Ryan Lerch <e809e25f3c554b2b195ccd768cd9a485288f896f@redhat.com>
| Python | lgpl-2.1 | fedora-infra/fmn,fedora-infra/fmn,fedora-infra/fmn,fedora-infra/fmn,fedora-infra/fmn | import pytest
from fmn.rules.cache import cache
- @pytest.fixture(autouse=True, scope="session")
+ @pytest.fixture(autouse=True)
def configured_cache():
+ if not cache.region.is_configured:
- cache.configure()
+ cache.configure()
+ yield
+ cache.region.invalidate()
| Fix intermittent failures of test_guard_http_exception | ## Code Before:
import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True, scope="session")
def configured_cache():
cache.configure()
## Instruction:
Fix intermittent failures of test_guard_http_exception
## Code After:
import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True)
def configured_cache():
if not cache.region.is_configured:
cache.configure()
yield
cache.region.invalidate()
|
f5463ae38c4cd46af043f30d0e7d28cf5d1727db | flow/commands/update_software_command.py | flow/commands/update_software_command.py | import subprocess
from command import Command
from . import ListVersionsCommand
from ..git_tools import git_base_command
class UpdateSoftwareCommand(Command):
def __init__(self, flow, cmd_name, params):
Command.__init__(self, flow, cmd_name, params)
def exec_impl(self):
release = self.params['release']
list_cmd = ListVersionsCommand(None, None, {})
list_cmd.exec_cmd()
if list_cmd.get_response().success is False:
self.response = {
'success': False,
'message': 'Unable to list available versions.' }
return
if not release in list_cmd.get_response()['version_list']:
self.response = {
'success': False,
'message': 'Version %s is not available' % (release) }
return
self.shell_helper(git_base_command() + ['checkout', 'tags/'+release])
if self.flow is not None:
self.flow.set_operational_status(self.flow.OP_STATUS_UPDATING)
self.response = {
'success': True,
'message': 'Software version updating to %s' % (tag) }
def post_exec(self):
if self.flow is not None:
self.flow.send_status()
self.shell_helper(['sudo', 'reboot'])
| import subprocess
from command import Command
from list_versions_command import ListVersionsCommand
from ..git_tools import git_base_command
class UpdateSoftwareCommand(Command):
def __init__(self, flow, cmd_name, params):
Command.__init__(self, flow, cmd_name, params)
def exec_impl(self):
release = self.params['release']
list_cmd = ListVersionsCommand(None, None, {})
list_cmd.exec_cmd()
if list_cmd.get_response()['success'] is False:
self.response = {
'success': False,
'message': 'Unable to list available versions.' }
return
if not release in list_cmd.get_response()['version_list']:
self.response = {
'success': False,
'message': 'Version %s is not available' % (release) }
return
self.shell_helper(git_base_command() + ['checkout', 'tags/'+release])
if self.flow is not None:
self.flow.set_operational_status(self.flow.OP_STATUS_UPDATING)
self.response = {
'success': True,
'message': 'Software version updating to %s' % (tag) }
def post_exec(self):
if self.flow is not None:
self.flow.send_status()
self.shell_helper(['sudo', 'reboot'])
| Fix version list validation check. | Fix version list validation check.
[#152092418]
| Python | mit | manylabs/flow,manylabs/flow | import subprocess
- from command import Command
+ from command import Command
- from . import ListVersionsCommand
+ from list_versions_command import ListVersionsCommand
- from ..git_tools import git_base_command
+ from ..git_tools import git_base_command
class UpdateSoftwareCommand(Command):
def __init__(self, flow, cmd_name, params):
Command.__init__(self, flow, cmd_name, params)
def exec_impl(self):
release = self.params['release']
list_cmd = ListVersionsCommand(None, None, {})
list_cmd.exec_cmd()
- if list_cmd.get_response().success is False:
+ if list_cmd.get_response()['success'] is False:
self.response = {
'success': False,
'message': 'Unable to list available versions.' }
return
if not release in list_cmd.get_response()['version_list']:
self.response = {
'success': False,
'message': 'Version %s is not available' % (release) }
return
self.shell_helper(git_base_command() + ['checkout', 'tags/'+release])
if self.flow is not None:
self.flow.set_operational_status(self.flow.OP_STATUS_UPDATING)
self.response = {
'success': True,
'message': 'Software version updating to %s' % (tag) }
def post_exec(self):
if self.flow is not None:
self.flow.send_status()
self.shell_helper(['sudo', 'reboot'])
| Fix version list validation check. | ## Code Before:
import subprocess
from command import Command
from . import ListVersionsCommand
from ..git_tools import git_base_command
class UpdateSoftwareCommand(Command):
def __init__(self, flow, cmd_name, params):
Command.__init__(self, flow, cmd_name, params)
def exec_impl(self):
release = self.params['release']
list_cmd = ListVersionsCommand(None, None, {})
list_cmd.exec_cmd()
if list_cmd.get_response().success is False:
self.response = {
'success': False,
'message': 'Unable to list available versions.' }
return
if not release in list_cmd.get_response()['version_list']:
self.response = {
'success': False,
'message': 'Version %s is not available' % (release) }
return
self.shell_helper(git_base_command() + ['checkout', 'tags/'+release])
if self.flow is not None:
self.flow.set_operational_status(self.flow.OP_STATUS_UPDATING)
self.response = {
'success': True,
'message': 'Software version updating to %s' % (tag) }
def post_exec(self):
if self.flow is not None:
self.flow.send_status()
self.shell_helper(['sudo', 'reboot'])
## Instruction:
Fix version list validation check.
## Code After:
import subprocess
from command import Command
from list_versions_command import ListVersionsCommand
from ..git_tools import git_base_command
class UpdateSoftwareCommand(Command):
def __init__(self, flow, cmd_name, params):
Command.__init__(self, flow, cmd_name, params)
def exec_impl(self):
release = self.params['release']
list_cmd = ListVersionsCommand(None, None, {})
list_cmd.exec_cmd()
if list_cmd.get_response()['success'] is False:
self.response = {
'success': False,
'message': 'Unable to list available versions.' }
return
if not release in list_cmd.get_response()['version_list']:
self.response = {
'success': False,
'message': 'Version %s is not available' % (release) }
return
self.shell_helper(git_base_command() + ['checkout', 'tags/'+release])
if self.flow is not None:
self.flow.set_operational_status(self.flow.OP_STATUS_UPDATING)
self.response = {
'success': True,
'message': 'Software version updating to %s' % (tag) }
def post_exec(self):
if self.flow is not None:
self.flow.send_status()
self.shell_helper(['sudo', 'reboot'])
|
2fc23ca753ca68d3c0531cf9c58d5864adfc373f | tests/test_short_url.py | tests/test_short_url.py | import unittest
from random import randrange
import short_url
class TestShortUrl(unittest.TestCase):
def test_one(self):
url = short_url.encode_url(12)
self.assertEqual(url, 'jy7yj')
key = short_url.decode_url(url)
self.assertEqual(key, 12)
def test_1000_random(self):
for random_int in range(1000):
random_int = randrange(100000000)
url = short_url.encode_url(random_int)
int_ = short_url.decode_url(url)
self.assertEqual(random_int, int_)
def test_custom_alphabet(self):
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
self.assertEqual(url, 'bbaaaaaaaaaaaaaaaaaaaa')
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
self.assertEqual(key, 12)
def test_short_alphabet(self):
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
| Use simple test functions and remove too special tests | Use simple test functions and remove too special tests
| Python | mit | Alir3z4/python-short_url | - import unittest
+
from random import randrange
+
+ from pytest import raises
import short_url
- class TestShortUrl(unittest.TestCase):
- def test_one(self):
- url = short_url.encode_url(12)
- self.assertEqual(url, 'jy7yj')
- key = short_url.decode_url(url)
- self.assertEqual(key, 12)
- def test_1000_random(self):
- for random_int in range(1000):
- random_int = randrange(100000000)
- url = short_url.encode_url(random_int)
- int_ = short_url.decode_url(url)
- self.assertEqual(random_int, int_)
- def test_custom_alphabet(self):
+ def test_custom_alphabet():
- encoder = short_url.UrlEncoder(alphabet='ab')
+ encoder = short_url.UrlEncoder(alphabet='ab')
- url = encoder.encode_url(12)
+ url = encoder.encode_url(12)
- self.assertEqual(url, 'bbaaaaaaaaaaaaaaaaaaaa')
+ assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
- key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
+ key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
- self.assertEqual(key, 12)
+ assert key == 12
- def test_short_alphabet(self):
- with self.assertRaises(AttributeError):
- short_url.UrlEncoder(alphabet='aa')
- with self.assertRaises(AttributeError):
- short_url.UrlEncoder(alphabet='a')
+ def test_too_short_alphabet():
+ with raises(AttributeError):
+ short_url.UrlEncoder(alphabet='aa')
+ with raises(AttributeError):
+ short_url.UrlEncoder(alphabet='a')
+ | Use simple test functions and remove too special tests | ## Code Before:
import unittest
from random import randrange
import short_url
class TestShortUrl(unittest.TestCase):
def test_one(self):
url = short_url.encode_url(12)
self.assertEqual(url, 'jy7yj')
key = short_url.decode_url(url)
self.assertEqual(key, 12)
def test_1000_random(self):
for random_int in range(1000):
random_int = randrange(100000000)
url = short_url.encode_url(random_int)
int_ = short_url.decode_url(url)
self.assertEqual(random_int, int_)
def test_custom_alphabet(self):
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
self.assertEqual(url, 'bbaaaaaaaaaaaaaaaaaaaa')
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
self.assertEqual(key, 12)
def test_short_alphabet(self):
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with self.assertRaises(AttributeError):
short_url.UrlEncoder(alphabet='a')
## Instruction:
Use simple test functions and remove too special tests
## Code After:
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
8653159dcf6a078bc2193293b93457388e7799d3 | tests/tests.py | tests/tests.py | import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
| import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def output_is_not_truncated_when_not_ending_in_a_newline(shell):
result = shell.run(["echo", "-n", "hello"])
assert_equal("hello", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
| Add test for output that doesn't end in a newline | Add test for output that doesn't end in a newline
| Python | bsd-2-clause | mwilliamson/spur.py | import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
+
+ @test
+ def output_is_not_truncated_when_not_ending_in_a_newline(shell):
+ result = shell.run(["echo", "-n", "hello"])
+ assert_equal("hello", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
| Add test for output that doesn't end in a newline | ## Code Before:
import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
## Instruction:
Add test for output that doesn't end in a newline
## Code After:
import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def output_is_not_truncated_when_not_ending_in_a_newline(shell):
result = shell.run(["echo", "-n", "hello"])
assert_equal("hello", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
|
f4e07b93ab81fd0a0dc59ec77fca596a2fcca738 | froide/helper/form_utils.py | froide/helper/form_utils.py | import json
from django.db import models
class DjangoJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, models.Model) and hasattr(obj, 'as_data'):
return obj.as_data()
return json.JSONEncoder.default(self, obj)
class JSONMixin(object):
def as_json(self):
return json.dumps(self.as_data(), cls=DjangoJSONEncoder)
def as_data(self):
return {
'fields': {
str(name): self.field_to_dict(name, field) for name, field in self.fields.items()
},
'errors': {f: e.get_json_data() for f, e in self.errors.items()},
'nonFieldErrors': [e.get_json_data() for e in self.non_field_errors()]
}
def field_to_dict(self, name, field):
return {
"type": field.__class__.__name__,
"widget_type": field.widget.__class__.__name__,
"hidden": field.widget.is_hidden,
"required": field.widget.is_required,
"label": str(field.label),
"help_text": str(field.help_text),
"initial": self.get_initial_for_field(field, name),
"placeholder": str(field.widget.attrs.get('placeholder', '')),
"value": self[name].value() if self.is_bound else None
}
| import json
from django.db import models
class DjangoJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, models.Model) and hasattr(obj, 'as_data'):
return obj.as_data()
return json.JSONEncoder.default(self, obj)
def get_data(error):
if isinstance(error, (dict, str)):
return error
return error.get_json_data()
class JSONMixin(object):
def as_json(self):
return json.dumps(self.as_data(), cls=DjangoJSONEncoder)
def as_data(self):
return {
'fields': {
str(name): self.field_to_dict(name, field) for name, field in self.fields.items()
},
'errors': {f: get_data(e) for f, e in self.errors.items()},
'nonFieldErrors': [get_data(e) for e in self.non_field_errors()]
}
def field_to_dict(self, name, field):
return {
"type": field.__class__.__name__,
"widget_type": field.widget.__class__.__name__,
"hidden": field.widget.is_hidden,
"required": field.widget.is_required,
"label": str(field.label),
"help_text": str(field.help_text),
"initial": self.get_initial_for_field(field, name),
"placeholder": str(field.widget.attrs.get('placeholder', '')),
"value": self[name].value() if self.is_bound else None
}
| Fix serialization of form errors | Fix serialization of form errors | Python | mit | fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide | import json
from django.db import models
class DjangoJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, models.Model) and hasattr(obj, 'as_data'):
return obj.as_data()
return json.JSONEncoder.default(self, obj)
+ def get_data(error):
+ if isinstance(error, (dict, str)):
+ return error
+ return error.get_json_data()
+
+
class JSONMixin(object):
def as_json(self):
return json.dumps(self.as_data(), cls=DjangoJSONEncoder)
def as_data(self):
return {
'fields': {
str(name): self.field_to_dict(name, field) for name, field in self.fields.items()
},
- 'errors': {f: e.get_json_data() for f, e in self.errors.items()},
+ 'errors': {f: get_data(e) for f, e in self.errors.items()},
- 'nonFieldErrors': [e.get_json_data() for e in self.non_field_errors()]
+ 'nonFieldErrors': [get_data(e) for e in self.non_field_errors()]
}
def field_to_dict(self, name, field):
return {
"type": field.__class__.__name__,
"widget_type": field.widget.__class__.__name__,
"hidden": field.widget.is_hidden,
"required": field.widget.is_required,
"label": str(field.label),
"help_text": str(field.help_text),
"initial": self.get_initial_for_field(field, name),
"placeholder": str(field.widget.attrs.get('placeholder', '')),
"value": self[name].value() if self.is_bound else None
}
| Fix serialization of form errors | ## Code Before:
import json
from django.db import models
class DjangoJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, models.Model) and hasattr(obj, 'as_data'):
return obj.as_data()
return json.JSONEncoder.default(self, obj)
class JSONMixin(object):
def as_json(self):
return json.dumps(self.as_data(), cls=DjangoJSONEncoder)
def as_data(self):
return {
'fields': {
str(name): self.field_to_dict(name, field) for name, field in self.fields.items()
},
'errors': {f: e.get_json_data() for f, e in self.errors.items()},
'nonFieldErrors': [e.get_json_data() for e in self.non_field_errors()]
}
def field_to_dict(self, name, field):
return {
"type": field.__class__.__name__,
"widget_type": field.widget.__class__.__name__,
"hidden": field.widget.is_hidden,
"required": field.widget.is_required,
"label": str(field.label),
"help_text": str(field.help_text),
"initial": self.get_initial_for_field(field, name),
"placeholder": str(field.widget.attrs.get('placeholder', '')),
"value": self[name].value() if self.is_bound else None
}
## Instruction:
Fix serialization of form errors
## Code After:
import json
from django.db import models
class DjangoJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, models.Model) and hasattr(obj, 'as_data'):
return obj.as_data()
return json.JSONEncoder.default(self, obj)
def get_data(error):
if isinstance(error, (dict, str)):
return error
return error.get_json_data()
class JSONMixin(object):
def as_json(self):
return json.dumps(self.as_data(), cls=DjangoJSONEncoder)
def as_data(self):
return {
'fields': {
str(name): self.field_to_dict(name, field) for name, field in self.fields.items()
},
'errors': {f: get_data(e) for f, e in self.errors.items()},
'nonFieldErrors': [get_data(e) for e in self.non_field_errors()]
}
def field_to_dict(self, name, field):
return {
"type": field.__class__.__name__,
"widget_type": field.widget.__class__.__name__,
"hidden": field.widget.is_hidden,
"required": field.widget.is_required,
"label": str(field.label),
"help_text": str(field.help_text),
"initial": self.get_initial_for_field(field, name),
"placeholder": str(field.widget.attrs.get('placeholder', '')),
"value": self[name].value() if self.is_bound else None
}
|
e8092ec82ff8ee9c0104b507751e45555c08685b | tests/tests.py | tests/tests.py | from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].name, "tortilla chips")
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].name, " salsa")
self.assertEqual(tags[0].slug, "salsa")
| from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].slug, "salsa")
| Fix test on python 3.3 | Fix test on python 3.3
| Python | mit | avelino/django-tags | from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
- self.assertEqual(tags[1].name, "tortilla chips")
self.assertEqual(tags[1].slug, "tortilla-chips")
- self.assertEqual(tags[0].name, " salsa")
self.assertEqual(tags[0].slug, "salsa")
| Fix test on python 3.3 | ## Code Before:
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].name, "tortilla chips")
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].name, " salsa")
self.assertEqual(tags[0].slug, "salsa")
## Instruction:
Fix test on python 3.3
## Code After:
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].slug, "salsa")
|
eae8053398c26ede98c4e253caf7f29f930b2f97 | compile.py | compile.py | from compileall import compile_dir
from distutils.sysconfig import get_python_lib
import os
import os.path
import sys
EXCLUDES = [
'gunicorn/workers/_gaiohttp.py',
'pymysql/_socketio.py',
]
def compile_files(path):
return compile_dir(path, maxlevels=50, quiet=True)
def remove_python3_files(path):
for e in EXCLUDES:
fp = os.path.join(path, e)
for extension in ('', 'c', 'o'):
name = fp + extension
if os.path.exists(name):
print('Removing file %s containing Python 3 syntax.' % name)
os.remove(name)
def main():
sp = get_python_lib()
remove_python3_files(sp)
status = compile_files(sp)
sys.exit(not status)
if __name__ == '__main__':
main()
| from compileall import compile_dir
from distutils.sysconfig import get_python_lib
import os
import os.path
import sys
EXCLUDES_27 = [
'pymysql/_socketio.py',
]
EXCLUDES_34 = [
'gunicorn/workers/_gaiohttp.py',
]
def compile_files(path):
return compile_dir(path, maxlevels=50, quiet=True)
def remove_python3_files(path):
excludes = []
if sys.version_info < (2, 7):
excludes.extend(EXCLUDES_27)
if sys.version_info < (3, 4):
excludes.extend(EXCLUDES_34)
for e in excludes:
fp = os.path.join(path, e)
for extension in ('', 'c', 'o'):
name = fp + extension
if os.path.exists(name):
print('Removing file %s containing Python 3 syntax.' % name)
os.remove(name)
def main():
sp = get_python_lib()
remove_python3_files(sp)
status = compile_files(sp)
sys.exit(not status)
if __name__ == '__main__':
main()
| Split the Python specific version exludes between 2.7/3.4 specific syntax. | Split the Python specific version exludes between 2.7/3.4 specific syntax.
| Python | apache-2.0 | therewillbecode/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea | from compileall import compile_dir
from distutils.sysconfig import get_python_lib
import os
import os.path
import sys
- EXCLUDES = [
+ EXCLUDES_27 = [
+ 'pymysql/_socketio.py',
+ ]
+ EXCLUDES_34 = [
'gunicorn/workers/_gaiohttp.py',
- 'pymysql/_socketio.py',
]
def compile_files(path):
return compile_dir(path, maxlevels=50, quiet=True)
def remove_python3_files(path):
- for e in EXCLUDES:
+ excludes = []
+ if sys.version_info < (2, 7):
+ excludes.extend(EXCLUDES_27)
+ if sys.version_info < (3, 4):
+ excludes.extend(EXCLUDES_34)
+
+ for e in excludes:
fp = os.path.join(path, e)
for extension in ('', 'c', 'o'):
name = fp + extension
if os.path.exists(name):
print('Removing file %s containing Python 3 syntax.' % name)
os.remove(name)
def main():
sp = get_python_lib()
remove_python3_files(sp)
status = compile_files(sp)
sys.exit(not status)
if __name__ == '__main__':
main()
| Split the Python specific version exludes between 2.7/3.4 specific syntax. | ## Code Before:
from compileall import compile_dir
from distutils.sysconfig import get_python_lib
import os
import os.path
import sys
EXCLUDES = [
'gunicorn/workers/_gaiohttp.py',
'pymysql/_socketio.py',
]
def compile_files(path):
return compile_dir(path, maxlevels=50, quiet=True)
def remove_python3_files(path):
for e in EXCLUDES:
fp = os.path.join(path, e)
for extension in ('', 'c', 'o'):
name = fp + extension
if os.path.exists(name):
print('Removing file %s containing Python 3 syntax.' % name)
os.remove(name)
def main():
sp = get_python_lib()
remove_python3_files(sp)
status = compile_files(sp)
sys.exit(not status)
if __name__ == '__main__':
main()
## Instruction:
Split the Python specific version exludes between 2.7/3.4 specific syntax.
## Code After:
from compileall import compile_dir
from distutils.sysconfig import get_python_lib
import os
import os.path
import sys
EXCLUDES_27 = [
'pymysql/_socketio.py',
]
EXCLUDES_34 = [
'gunicorn/workers/_gaiohttp.py',
]
def compile_files(path):
return compile_dir(path, maxlevels=50, quiet=True)
def remove_python3_files(path):
excludes = []
if sys.version_info < (2, 7):
excludes.extend(EXCLUDES_27)
if sys.version_info < (3, 4):
excludes.extend(EXCLUDES_34)
for e in excludes:
fp = os.path.join(path, e)
for extension in ('', 'c', 'o'):
name = fp + extension
if os.path.exists(name):
print('Removing file %s containing Python 3 syntax.' % name)
os.remove(name)
def main():
sp = get_python_lib()
remove_python3_files(sp)
status = compile_files(sp)
sys.exit(not status)
if __name__ == '__main__':
main()
|
ca74738e9241230fd0cc843aa9b76f67494d02eb | python/intermediate/create_inter_python_data.py | python/intermediate/create_inter_python_data.py | """Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
years = np.arange(1960, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = 0.5 * temps + 0.7 * rainfalls + noise
data = zip(years, temps, rainfalls, mosquitos)
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv('mosquito_data_A2.csv', index=False, float_format='%.0f')
df_short = df[-10:]
df_short.to_csv('mosquito_data_A1.csv', index=False, float_format='%.0f')
| """Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
datasets = {'A1': [0, 0.5, 0.7, 10],
'A2': [0, 0.5, 0.7, 50],
'A3': [0, 0.5, 0.3, 50],
'B1': [3, 0.7, 0.2, 50],
'B2': [3, 0.7, 0.7, 50]}
def make_data(intercept, tempslope, rainfallslope, numyears):
years = np.arange(2010 - numyears, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = intercept + tempslope * temps + rainfallslope * rainfalls + noise
return zip(years, temps, rainfalls, mosquitos)
def export_data(data, filename):
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv(filename, index=False, float_format='%.0f')
for site in datasets:
data = make_data(*datasets[site])
if site == 'A1':
#create a shorter dataset for first example
data = data[-10:]
export_data(data, '%s_mosquito_data.csv' % site)
| Allow creation of multiple example data files for Inter Python | Allow creation of multiple example data files for Inter Python
Generalizes the script for creating data files to allow for the
easy generation of larger numbers of data files.
| Python | bsd-2-clause | selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest | """Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
- years = np.arange(1960, 2011)
- temps = np.random.uniform(70, 90, len(years))
- rainfalls = np.random.uniform(100, 300, len(years))
- noise = 2 * np.random.randn(len(years))
- mosquitos = 0.5 * temps + 0.7 * rainfalls + noise
+ datasets = {'A1': [0, 0.5, 0.7, 10],
+ 'A2': [0, 0.5, 0.7, 50],
+ 'A3': [0, 0.5, 0.3, 50],
+ 'B1': [3, 0.7, 0.2, 50],
+ 'B2': [3, 0.7, 0.7, 50]}
+ def make_data(intercept, tempslope, rainfallslope, numyears):
+ years = np.arange(2010 - numyears, 2011)
+ temps = np.random.uniform(70, 90, len(years))
+ rainfalls = np.random.uniform(100, 300, len(years))
+ noise = 2 * np.random.randn(len(years))
+ mosquitos = intercept + tempslope * temps + rainfallslope * rainfalls + noise
- data = zip(years, temps, rainfalls, mosquitos)
+ return zip(years, temps, rainfalls, mosquitos)
- df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
- df.to_csv('mosquito_data_A2.csv', index=False, float_format='%.0f')
- df_short = df[-10:]
- df_short.to_csv('mosquito_data_A1.csv', index=False, float_format='%.0f')
+ def export_data(data, filename):
+ df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
+ df.to_csv(filename, index=False, float_format='%.0f')
+
+ for site in datasets:
+ data = make_data(*datasets[site])
+ if site == 'A1':
+ #create a shorter dataset for first example
+ data = data[-10:]
+ export_data(data, '%s_mosquito_data.csv' % site)
+ | Allow creation of multiple example data files for Inter Python | ## Code Before:
"""Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
years = np.arange(1960, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = 0.5 * temps + 0.7 * rainfalls + noise
data = zip(years, temps, rainfalls, mosquitos)
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv('mosquito_data_A2.csv', index=False, float_format='%.0f')
df_short = df[-10:]
df_short.to_csv('mosquito_data_A1.csv', index=False, float_format='%.0f')
## Instruction:
Allow creation of multiple example data files for Inter Python
## Code After:
"""Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
datasets = {'A1': [0, 0.5, 0.7, 10],
'A2': [0, 0.5, 0.7, 50],
'A3': [0, 0.5, 0.3, 50],
'B1': [3, 0.7, 0.2, 50],
'B2': [3, 0.7, 0.7, 50]}
def make_data(intercept, tempslope, rainfallslope, numyears):
years = np.arange(2010 - numyears, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = intercept + tempslope * temps + rainfallslope * rainfalls + noise
return zip(years, temps, rainfalls, mosquitos)
def export_data(data, filename):
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv(filename, index=False, float_format='%.0f')
for site in datasets:
data = make_data(*datasets[site])
if site == 'A1':
#create a shorter dataset for first example
data = data[-10:]
export_data(data, '%s_mosquito_data.csv' % site)
|
9b6a22a9cb908d1fbfa5f9b5081f6c96644115b0 | tests/test_tags.py | tests/test_tags.py |
from unittest import TestCase
from django.test.utils import setup_test_template_loader, override_settings
from django.template import Context
from django.template.loader import get_template
TEMPLATES = {
'basetag': '''{% load damn %}{% assets %}''',
'test2': '''
<!doctype html>{% load damn %}
<html>
<head>
{% assets %}
</head>
<body>
{% asset 'js/jquery.js' %}
</body>
</html>
''',
}
DAMN_PROCESSORS = {
'js': {
'class': 'damn.processors.ScriptProcessor',
},
}
class TagTests(TestCase):
def setUp(self):
setup_test_template_loader(TEMPLATES)
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
)
def test_simple(self):
t = get_template('basetag')
t.render()
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
)
def test_one(self):
t = get_template('test2')
o = t.render(Context())
self.assertContains(o, '<script src="/static/js/jquery.js"></script>')
| from django.test import TestCase
from django.test.utils import setup_test_template_loader, override_settings
from django.template import Context
from django.template.loader import get_template
TEMPLATES = {
'basetag': '''{% load damn %}{% assets %}''',
'test2': '''
<!doctype html>{% load damn %}
<html>
<head>
{% assets %}
</head>
<body>
{% asset 'js/jquery.js' %}
</body>
</html>
''',
}
DAMN_PROCESSORS = {
'js': {
'processor': 'damn.processors.ScriptProcessor',
},
}
class TagTests(TestCase):
def setUp(self):
setup_test_template_loader(TEMPLATES)
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
STATIC_URL = '/',
)
def test_simple(self):
t = get_template('basetag')
t.render()
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
STATIC_URL = '/',
)
def test_one(self):
t = get_template('test2')
o = t.render(Context())
self.assertTrue('<script src="/static/js/jquery.js"></script>' in o)
| Use TestCase from Django Set STATIC_URL | Use TestCase from Django
Set STATIC_URL
| Python | bsd-2-clause | funkybob/django-amn | + from django.test import TestCase
- from unittest import TestCase
from django.test.utils import setup_test_template_loader, override_settings
from django.template import Context
from django.template.loader import get_template
TEMPLATES = {
'basetag': '''{% load damn %}{% assets %}''',
'test2': '''
<!doctype html>{% load damn %}
<html>
<head>
{% assets %}
</head>
<body>
{% asset 'js/jquery.js' %}
</body>
</html>
''',
}
DAMN_PROCESSORS = {
'js': {
- 'class': 'damn.processors.ScriptProcessor',
+ 'processor': 'damn.processors.ScriptProcessor',
},
}
class TagTests(TestCase):
def setUp(self):
setup_test_template_loader(TEMPLATES)
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
+ STATIC_URL = '/',
)
def test_simple(self):
t = get_template('basetag')
t.render()
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
+ STATIC_URL = '/',
)
def test_one(self):
t = get_template('test2')
o = t.render(Context())
- self.assertContains(o, '<script src="/static/js/jquery.js"></script>')
+ self.assertTrue('<script src="/static/js/jquery.js"></script>' in o)
| Use TestCase from Django Set STATIC_URL | ## Code Before:
from unittest import TestCase
from django.test.utils import setup_test_template_loader, override_settings
from django.template import Context
from django.template.loader import get_template
TEMPLATES = {
'basetag': '''{% load damn %}{% assets %}''',
'test2': '''
<!doctype html>{% load damn %}
<html>
<head>
{% assets %}
</head>
<body>
{% asset 'js/jquery.js' %}
</body>
</html>
''',
}
DAMN_PROCESSORS = {
'js': {
'class': 'damn.processors.ScriptProcessor',
},
}
class TagTests(TestCase):
def setUp(self):
setup_test_template_loader(TEMPLATES)
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
)
def test_simple(self):
t = get_template('basetag')
t.render()
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
)
def test_one(self):
t = get_template('test2')
o = t.render(Context())
self.assertContains(o, '<script src="/static/js/jquery.js"></script>')
## Instruction:
Use TestCase from Django Set STATIC_URL
## Code After:
from django.test import TestCase
from django.test.utils import setup_test_template_loader, override_settings
from django.template import Context
from django.template.loader import get_template
TEMPLATES = {
'basetag': '''{% load damn %}{% assets %}''',
'test2': '''
<!doctype html>{% load damn %}
<html>
<head>
{% assets %}
</head>
<body>
{% asset 'js/jquery.js' %}
</body>
</html>
''',
}
DAMN_PROCESSORS = {
'js': {
'processor': 'damn.processors.ScriptProcessor',
},
}
class TagTests(TestCase):
def setUp(self):
setup_test_template_loader(TEMPLATES)
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
STATIC_URL = '/',
)
def test_simple(self):
t = get_template('basetag')
t.render()
@override_settings(
DAMN_PROCESSORS=DAMN_PROCESSORS,
STATIC_URL = '/',
)
def test_one(self):
t = get_template('test2')
o = t.render(Context())
self.assertTrue('<script src="/static/js/jquery.js"></script>' in o)
|
6336e8e13c01b6a81b8586499e7a3e8fc8b532a8 | launch_control/commands/interface.py | launch_control/commands/interface.py | from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return cls.__doc__
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| Use inspect.getdoc() instead of plain __doc__ | Use inspect.getdoc() instead of plain __doc__
| Python | agpl-3.0 | Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server | + import inspect
+
from launch_control.utils.registry import RegistryBase
+
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
-
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
- return cls.__doc__
+ return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| Use inspect.getdoc() instead of plain __doc__ | ## Code Before:
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return cls.__doc__
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
## Instruction:
Use inspect.getdoc() instead of plain __doc__
## Code After:
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
612e253d0234e1852db61c589418edbb4add4b00 | gunicorn.conf.py | gunicorn.conf.py | preload_app = True
worker_class = "gunicorn.workers.gthread.ThreadWorker"
| forwarded_allow_ips = '*'
preload_app = True
worker_class = "gunicorn.workers.gthread.ThreadWorker"
| Disable checking of Front-end IPs | Disable checking of Front-end IPs
| Python | agpl-3.0 | City-of-Helsinki/kuulemma,fastmonkeys/kuulemma,City-of-Helsinki/kuulemma,fastmonkeys/kuulemma,City-of-Helsinki/kuulemma,fastmonkeys/kuulemma | + forwarded_allow_ips = '*'
preload_app = True
worker_class = "gunicorn.workers.gthread.ThreadWorker"
| Disable checking of Front-end IPs | ## Code Before:
preload_app = True
worker_class = "gunicorn.workers.gthread.ThreadWorker"
## Instruction:
Disable checking of Front-end IPs
## Code After:
forwarded_allow_ips = '*'
preload_app = True
worker_class = "gunicorn.workers.gthread.ThreadWorker"
|
37c1d6ae1345fbab7aea4404933d78d4b939bbc2 | hoomd/filters.py | hoomd/filters.py | import hoomd._hoomd as _hoomd
class ParticleFilterID:
def __init__(self, *args, **kwargs):
args_str = ''.join([str(arg) for arg in args])
kwargs_str = ''.join([str(value)for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
| import hoomd._hoomd as _hoomd
import numpy as np
class ParticleFilter:
def __init__(self, *args, **kwargs):
args_str = ''.join([repr(arg) if not isinstance(arg, np.ndarray)
else repr(list(arg)) for arg in args])
kwargs_str = ''.join([repr(value) if not isinstance(value, np.ndarray)
else repr(list(value))
for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
| Change hashing for ParticleFilter python class | Change hashing for ParticleFilter python class
| Python | bsd-3-clause | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | import hoomd._hoomd as _hoomd
+ import numpy as np
- class ParticleFilterID:
+ class ParticleFilter:
def __init__(self, *args, **kwargs):
- args_str = ''.join([str(arg) for arg in args])
- kwargs_str = ''.join([str(value)for value in kwargs.values()])
+ args_str = ''.join([repr(arg) if not isinstance(arg, np.ndarray)
+ else repr(list(arg)) for arg in args])
+ kwargs_str = ''.join([repr(value) if not isinstance(value, np.ndarray)
+ else repr(list(value))
+ for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
| Change hashing for ParticleFilter python class | ## Code Before:
import hoomd._hoomd as _hoomd
class ParticleFilterID:
def __init__(self, *args, **kwargs):
args_str = ''.join([str(arg) for arg in args])
kwargs_str = ''.join([str(value)for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
## Instruction:
Change hashing for ParticleFilter python class
## Code After:
import hoomd._hoomd as _hoomd
import numpy as np
class ParticleFilter:
def __init__(self, *args, **kwargs):
args_str = ''.join([repr(arg) if not isinstance(arg, np.ndarray)
else repr(list(arg)) for arg in args])
kwargs_str = ''.join([repr(value) if not isinstance(value, np.ndarray)
else repr(list(value))
for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
|
f5e36391c253a52fe2bd434caf59c0f5c389cc64 | tests/base.py | tests/base.py | import unittest
import os
os.environ['OGN_CONFIG_MODULE'] = 'config/test.py'
from ogn_python import db # noqa: E402
class TestBaseDB(unittest.TestCase):
@classmethod
def setUpClass(cls):
db.session.execute('CREATE EXTENSION IF NOT EXISTS postgis;')
db.session.commit()
db.create_all()
def setUp(self):
pass
def tearDown(self):
db.session.execute("""
DELETE FROM aircraft_beacons;
DELETE FROM receiver_beacons;
DELETE FROM takeoff_landings;
DELETE FROM logbook;
DELETE FROM receiver_coverages;
DELETE FROM device_stats;
DELETE FROM receiver_stats;
DELETE FROM receivers;
DELETE FROM devices;
""")
if __name__ == '__main__':
unittest.main()
| import unittest
import os
os.environ['OGN_CONFIG_MODULE'] = 'config/test.py'
from ogn_python import db # noqa: E402
class TestBaseDB(unittest.TestCase):
@classmethod
def setUpClass(cls):
db.drop_all()
db.session.execute('CREATE EXTENSION IF NOT EXISTS postgis;')
db.session.commit()
db.create_all()
def setUp(self):
pass
def tearDown(self):
db.session.execute("""
DELETE FROM aircraft_beacons;
DELETE FROM receiver_beacons;
DELETE FROM takeoff_landings;
DELETE FROM logbook;
DELETE FROM receiver_coverages;
DELETE FROM device_stats;
DELETE FROM receiver_stats;
DELETE FROM receivers;
DELETE FROM devices;
""")
if __name__ == '__main__':
unittest.main()
| Drop db before each test | Drop db before each test
| Python | agpl-3.0 | Meisterschueler/ogn-python,glidernet/ogn-python,glidernet/ogn-python,Meisterschueler/ogn-python,glidernet/ogn-python,glidernet/ogn-python,Meisterschueler/ogn-python,Meisterschueler/ogn-python | import unittest
import os
os.environ['OGN_CONFIG_MODULE'] = 'config/test.py'
from ogn_python import db # noqa: E402
class TestBaseDB(unittest.TestCase):
@classmethod
def setUpClass(cls):
+ db.drop_all()
db.session.execute('CREATE EXTENSION IF NOT EXISTS postgis;')
db.session.commit()
db.create_all()
def setUp(self):
pass
def tearDown(self):
db.session.execute("""
DELETE FROM aircraft_beacons;
DELETE FROM receiver_beacons;
DELETE FROM takeoff_landings;
DELETE FROM logbook;
DELETE FROM receiver_coverages;
DELETE FROM device_stats;
DELETE FROM receiver_stats;
DELETE FROM receivers;
DELETE FROM devices;
""")
if __name__ == '__main__':
unittest.main()
| Drop db before each test | ## Code Before:
import unittest
import os
os.environ['OGN_CONFIG_MODULE'] = 'config/test.py'
from ogn_python import db # noqa: E402
class TestBaseDB(unittest.TestCase):
@classmethod
def setUpClass(cls):
db.session.execute('CREATE EXTENSION IF NOT EXISTS postgis;')
db.session.commit()
db.create_all()
def setUp(self):
pass
def tearDown(self):
db.session.execute("""
DELETE FROM aircraft_beacons;
DELETE FROM receiver_beacons;
DELETE FROM takeoff_landings;
DELETE FROM logbook;
DELETE FROM receiver_coverages;
DELETE FROM device_stats;
DELETE FROM receiver_stats;
DELETE FROM receivers;
DELETE FROM devices;
""")
if __name__ == '__main__':
unittest.main()
## Instruction:
Drop db before each test
## Code After:
import unittest
import os
os.environ['OGN_CONFIG_MODULE'] = 'config/test.py'
from ogn_python import db # noqa: E402
class TestBaseDB(unittest.TestCase):
@classmethod
def setUpClass(cls):
db.drop_all()
db.session.execute('CREATE EXTENSION IF NOT EXISTS postgis;')
db.session.commit()
db.create_all()
def setUp(self):
pass
def tearDown(self):
db.session.execute("""
DELETE FROM aircraft_beacons;
DELETE FROM receiver_beacons;
DELETE FROM takeoff_landings;
DELETE FROM logbook;
DELETE FROM receiver_coverages;
DELETE FROM device_stats;
DELETE FROM receiver_stats;
DELETE FROM receivers;
DELETE FROM devices;
""")
if __name__ == '__main__':
unittest.main()
|
f1008dc6573661c41361cfe5f3c61a3ee719d6be | marketpulse/auth/models.py | marketpulse/auth/models.py | from django.contrib.auth.models import AbstractUser
from django.db.models import fields
class User(AbstractUser):
mozillians_url = fields.URLField()
mozillians_username = fields.CharField(max_length=30, blank=True)
| from django.contrib.auth.models import AbstractUser
from django.db.models import fields
class User(AbstractUser):
mozillians_url = fields.URLField()
mozillians_username = fields.CharField(max_length=30, blank=True)
def __unicode__(self):
username = self.mozillians_username or self.username
return unicode(username)
| Use mozillians_username for unicode representation. | Use mozillians_username for unicode representation.
| Python | mpl-2.0 | akatsoulas/marketpulse,mozilla/marketpulse,mozilla/marketpulse,akatsoulas/marketpulse,mozilla/marketpulse,mozilla/marketpulse,akatsoulas/marketpulse,akatsoulas/marketpulse | from django.contrib.auth.models import AbstractUser
from django.db.models import fields
class User(AbstractUser):
mozillians_url = fields.URLField()
mozillians_username = fields.CharField(max_length=30, blank=True)
+ def __unicode__(self):
+ username = self.mozillians_username or self.username
+ return unicode(username)
+ | Use mozillians_username for unicode representation. | ## Code Before:
from django.contrib.auth.models import AbstractUser
from django.db.models import fields
class User(AbstractUser):
mozillians_url = fields.URLField()
mozillians_username = fields.CharField(max_length=30, blank=True)
## Instruction:
Use mozillians_username for unicode representation.
## Code After:
from django.contrib.auth.models import AbstractUser
from django.db.models import fields
class User(AbstractUser):
mozillians_url = fields.URLField()
mozillians_username = fields.CharField(max_length=30, blank=True)
def __unicode__(self):
username = self.mozillians_username or self.username
return unicode(username)
|
46245254cdf9c3f2f6a9c27fe7e089867b4f394f | cloudbio/custom/versioncheck.py | cloudbio/custom/versioncheck.py | from distutils.version import LooseVersion
from fabric.api import quiet
from cloudbio.custom import shared
def _parse_from_stdoutflag(out, flag):
"""Extract version information from a flag in verbose stdout.
"""
for line in out.split("\n") + out.stderr.split("\n"):
if line.find(flag) >= 0:
parts = [x for x in line.split() if not x.startswith(flag)]
return parts[0]
return ""
def up_to_date(env, cmd, version, args=None, stdout_flag=None):
"""Check if the given command is up to date with the provided version.
"""
if shared._executable_not_on_path(cmd):
return False
if args:
cmd = cmd + " " + " ".join(args)
with quiet():
out = env.safe_run_output(cmd)
if stdout_flag:
iversion = _parse_from_stdoutflag(out, stdout_flag)
else:
iversion = out.strip()
return LooseVersion(iversion) >= LooseVersion(version)
| from distutils.version import LooseVersion
from fabric.api import quiet
from cloudbio.custom import shared
def _parse_from_stdoutflag(out, flag):
"""Extract version information from a flag in verbose stdout.
"""
for line in out.split("\n") + out.stderr.split("\n"):
if line.find(flag) >= 0:
parts = [x for x in line.split() if not x.startswith(flag)]
return parts[0]
return ""
def up_to_date(env, cmd, version, args=None, stdout_flag=None):
"""Check if the given command is up to date with the provided version.
"""
if shared._executable_not_on_path(cmd):
return False
if args:
cmd = cmd + " " + " ".join(args)
with quiet():
path_safe = "export PATH=$PATH:%s/bin && "
out = env.safe_run_output(path_safe + cmd)
if stdout_flag:
iversion = _parse_from_stdoutflag(out, stdout_flag)
else:
iversion = out.strip()
return LooseVersion(iversion) >= LooseVersion(version)
| Include env.system_install PATH as part of version checking to work with installed software not on the global PATH. Thanks to James Cuff | Include env.system_install PATH as part of version checking to work with installed software not on the global PATH. Thanks to James Cuff
| Python | mit | chapmanb/cloudbiolinux,elkingtonmcb/cloudbiolinux,kdaily/cloudbiolinux,elkingtonmcb/cloudbiolinux,kdaily/cloudbiolinux,averagehat/cloudbiolinux,kdaily/cloudbiolinux,chapmanb/cloudbiolinux,joemphilips/cloudbiolinux,AICIDNN/cloudbiolinux,joemphilips/cloudbiolinux,pjotrp/cloudbiolinux,pjotrp/cloudbiolinux,elkingtonmcb/cloudbiolinux,lpantano/cloudbiolinux,joemphilips/cloudbiolinux,kdaily/cloudbiolinux,heuermh/cloudbiolinux,rchekaluk/cloudbiolinux,heuermh/cloudbiolinux,averagehat/cloudbiolinux,AICIDNN/cloudbiolinux,pjotrp/cloudbiolinux,heuermh/cloudbiolinux,rchekaluk/cloudbiolinux,AICIDNN/cloudbiolinux,rchekaluk/cloudbiolinux,rchekaluk/cloudbiolinux,chapmanb/cloudbiolinux,averagehat/cloudbiolinux,chapmanb/cloudbiolinux,joemphilips/cloudbiolinux,pjotrp/cloudbiolinux,elkingtonmcb/cloudbiolinux,averagehat/cloudbiolinux,AICIDNN/cloudbiolinux,lpantano/cloudbiolinux,heuermh/cloudbiolinux,lpantano/cloudbiolinux | from distutils.version import LooseVersion
from fabric.api import quiet
from cloudbio.custom import shared
def _parse_from_stdoutflag(out, flag):
"""Extract version information from a flag in verbose stdout.
"""
for line in out.split("\n") + out.stderr.split("\n"):
if line.find(flag) >= 0:
parts = [x for x in line.split() if not x.startswith(flag)]
return parts[0]
return ""
def up_to_date(env, cmd, version, args=None, stdout_flag=None):
"""Check if the given command is up to date with the provided version.
"""
if shared._executable_not_on_path(cmd):
return False
if args:
cmd = cmd + " " + " ".join(args)
with quiet():
+ path_safe = "export PATH=$PATH:%s/bin && "
- out = env.safe_run_output(cmd)
+ out = env.safe_run_output(path_safe + cmd)
if stdout_flag:
iversion = _parse_from_stdoutflag(out, stdout_flag)
else:
iversion = out.strip()
return LooseVersion(iversion) >= LooseVersion(version)
| Include env.system_install PATH as part of version checking to work with installed software not on the global PATH. Thanks to James Cuff | ## Code Before:
from distutils.version import LooseVersion
from fabric.api import quiet
from cloudbio.custom import shared
def _parse_from_stdoutflag(out, flag):
"""Extract version information from a flag in verbose stdout.
"""
for line in out.split("\n") + out.stderr.split("\n"):
if line.find(flag) >= 0:
parts = [x for x in line.split() if not x.startswith(flag)]
return parts[0]
return ""
def up_to_date(env, cmd, version, args=None, stdout_flag=None):
"""Check if the given command is up to date with the provided version.
"""
if shared._executable_not_on_path(cmd):
return False
if args:
cmd = cmd + " " + " ".join(args)
with quiet():
out = env.safe_run_output(cmd)
if stdout_flag:
iversion = _parse_from_stdoutflag(out, stdout_flag)
else:
iversion = out.strip()
return LooseVersion(iversion) >= LooseVersion(version)
## Instruction:
Include env.system_install PATH as part of version checking to work with installed software not on the global PATH. Thanks to James Cuff
## Code After:
from distutils.version import LooseVersion
from fabric.api import quiet
from cloudbio.custom import shared
def _parse_from_stdoutflag(out, flag):
"""Extract version information from a flag in verbose stdout.
"""
for line in out.split("\n") + out.stderr.split("\n"):
if line.find(flag) >= 0:
parts = [x for x in line.split() if not x.startswith(flag)]
return parts[0]
return ""
def up_to_date(env, cmd, version, args=None, stdout_flag=None):
"""Check if the given command is up to date with the provided version.
"""
if shared._executable_not_on_path(cmd):
return False
if args:
cmd = cmd + " " + " ".join(args)
with quiet():
path_safe = "export PATH=$PATH:%s/bin && "
out = env.safe_run_output(path_safe + cmd)
if stdout_flag:
iversion = _parse_from_stdoutflag(out, stdout_flag)
else:
iversion = out.strip()
return LooseVersion(iversion) >= LooseVersion(version)
|
e728d6ebdd101b393f3d87fdfbade2c4c52c5ef1 | cdent/emitter/perl.py | cdent/emitter/perl.py |
from __future__ import absolute_import
from cdent.emitter import Emitter as Base
class Emitter(Base):
LANGUAGE_ID = 'pm'
def emit_includecdent(self, includecdent):
self.writeln('use CDent::Run;')
def emit_class(self, class_):
name = class_.name
self.writeln('package %s;' % name)
self.writeln('use CDent::Class;')
self.writeln()
self.emit(class_.has)
self.writeln()
self.writeln('1;')
def emit_method(self, method):
name = method.name
self.writeln('sub %s {' % name)
self.writeln(' my $self = shift;')
self.emit(method.has, indent=True)
self.writeln('}')
def emit_println(self, println):
self.write('print ', indent=True)
self.emit(println.args)
self.writeln(', "\\n";', indent=False)
def emit_return(self, ret):
self.writeln('return;')
|
from __future__ import absolute_import
from cdent.emitter import Emitter as Base
class Emitter(Base):
LANGUAGE_ID = 'pm'
def emit_includecdent(self, includecdent):
self.writeln('use CDent::Run;')
def emit_class(self, class_):
name = class_.name
self.writeln('package %s;' % name)
self.writeln('use Moose;')
self.writeln()
self.emit(class_.has)
self.writeln()
self.writeln('1;')
def emit_method(self, method):
name = method.name
self.writeln('sub %s {' % name)
self.writeln(' my $self = shift;')
self.emit(method.has, indent=True)
self.writeln('}')
def emit_println(self, println):
self.write('print ', indent=True)
self.emit(println.args)
self.writeln(', "\\n";', indent=False)
def emit_return(self, ret):
self.writeln('return;')
| Use Moose for Perl 5 | Use Moose for Perl 5
| Python | bsd-2-clause | ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py |
from __future__ import absolute_import
from cdent.emitter import Emitter as Base
class Emitter(Base):
LANGUAGE_ID = 'pm'
def emit_includecdent(self, includecdent):
self.writeln('use CDent::Run;')
def emit_class(self, class_):
name = class_.name
self.writeln('package %s;' % name)
- self.writeln('use CDent::Class;')
+ self.writeln('use Moose;')
self.writeln()
self.emit(class_.has)
self.writeln()
self.writeln('1;')
def emit_method(self, method):
name = method.name
self.writeln('sub %s {' % name)
self.writeln(' my $self = shift;')
self.emit(method.has, indent=True)
self.writeln('}')
def emit_println(self, println):
self.write('print ', indent=True)
self.emit(println.args)
self.writeln(', "\\n";', indent=False)
def emit_return(self, ret):
self.writeln('return;')
| Use Moose for Perl 5 | ## Code Before:
from __future__ import absolute_import
from cdent.emitter import Emitter as Base
class Emitter(Base):
LANGUAGE_ID = 'pm'
def emit_includecdent(self, includecdent):
self.writeln('use CDent::Run;')
def emit_class(self, class_):
name = class_.name
self.writeln('package %s;' % name)
self.writeln('use CDent::Class;')
self.writeln()
self.emit(class_.has)
self.writeln()
self.writeln('1;')
def emit_method(self, method):
name = method.name
self.writeln('sub %s {' % name)
self.writeln(' my $self = shift;')
self.emit(method.has, indent=True)
self.writeln('}')
def emit_println(self, println):
self.write('print ', indent=True)
self.emit(println.args)
self.writeln(', "\\n";', indent=False)
def emit_return(self, ret):
self.writeln('return;')
## Instruction:
Use Moose for Perl 5
## Code After:
from __future__ import absolute_import
from cdent.emitter import Emitter as Base
class Emitter(Base):
LANGUAGE_ID = 'pm'
def emit_includecdent(self, includecdent):
self.writeln('use CDent::Run;')
def emit_class(self, class_):
name = class_.name
self.writeln('package %s;' % name)
self.writeln('use Moose;')
self.writeln()
self.emit(class_.has)
self.writeln()
self.writeln('1;')
def emit_method(self, method):
name = method.name
self.writeln('sub %s {' % name)
self.writeln(' my $self = shift;')
self.emit(method.has, indent=True)
self.writeln('}')
def emit_println(self, println):
self.write('print ', indent=True)
self.emit(println.args)
self.writeln(', "\\n";', indent=False)
def emit_return(self, ret):
self.writeln('return;')
|
8e4fca866590b4f7aa308d2cc1948b999bb1de8c | filebrowser_safe/urls.py | filebrowser_safe/urls.py | from __future__ import unicode_literals
from django.conf.urls import *
urlpatterns = patterns('',
# filebrowser urls
url(r'^browse/$', 'filebrowser_safe.views.browse', name="fb_browse"),
url(r'^mkdir/', 'filebrowser_safe.views.mkdir', name="fb_mkdir"),
url(r'^upload/', 'filebrowser_safe.views.upload', name="fb_upload"),
url(r'^rename/$', 'filebrowser_safe.views.rename', name="fb_rename"),
url(r'^delete/$', 'filebrowser_safe.views.delete', name="fb_delete"),
url(r'^check_file/$', 'filebrowser_safe.views._check_file', name="fb_check"),
url(r'^upload_file/$', 'filebrowser_safe.views._upload_file', name="fb_do_upload"),
)
| from __future__ import unicode_literals
from django.conf.urls import url
from filebrowser_safe import views
urlpatterns = [
url(r'^browse/$', views.browse, name="fb_browse"),
url(r'^mkdir/', views.mkdir, name="fb_mkdir"),
url(r'^upload/', views.upload, name="fb_upload"),
url(r'^rename/$', views.rename, name="fb_rename"),
url(r'^delete/$', views.delete, name="fb_delete"),
url(r'^check_file/$', views._check_file, name="fb_check"),
url(r'^upload_file/$', views._upload_file, name="fb_do_upload"),
]
| Update from deprecated features of urlpatterns. | Update from deprecated features of urlpatterns.
| Python | bsd-3-clause | ryneeverett/filebrowser-safe,ryneeverett/filebrowser-safe,ryneeverett/filebrowser-safe,ryneeverett/filebrowser-safe | from __future__ import unicode_literals
- from django.conf.urls import *
+ from django.conf.urls import url
- urlpatterns = patterns('',
+ from filebrowser_safe import views
- # filebrowser urls
- url(r'^browse/$', 'filebrowser_safe.views.browse', name="fb_browse"),
- url(r'^mkdir/', 'filebrowser_safe.views.mkdir', name="fb_mkdir"),
- url(r'^upload/', 'filebrowser_safe.views.upload', name="fb_upload"),
- url(r'^rename/$', 'filebrowser_safe.views.rename', name="fb_rename"),
- url(r'^delete/$', 'filebrowser_safe.views.delete', name="fb_delete"),
- url(r'^check_file/$', 'filebrowser_safe.views._check_file', name="fb_check"),
- url(r'^upload_file/$', 'filebrowser_safe.views._upload_file', name="fb_do_upload"),
- )
+ urlpatterns = [
+ url(r'^browse/$', views.browse, name="fb_browse"),
+ url(r'^mkdir/', views.mkdir, name="fb_mkdir"),
+ url(r'^upload/', views.upload, name="fb_upload"),
+ url(r'^rename/$', views.rename, name="fb_rename"),
+ url(r'^delete/$', views.delete, name="fb_delete"),
+ url(r'^check_file/$', views._check_file, name="fb_check"),
+ url(r'^upload_file/$', views._upload_file, name="fb_do_upload"),
+ ]
| Update from deprecated features of urlpatterns. | ## Code Before:
from __future__ import unicode_literals
from django.conf.urls import *
urlpatterns = patterns('',
# filebrowser urls
url(r'^browse/$', 'filebrowser_safe.views.browse', name="fb_browse"),
url(r'^mkdir/', 'filebrowser_safe.views.mkdir', name="fb_mkdir"),
url(r'^upload/', 'filebrowser_safe.views.upload', name="fb_upload"),
url(r'^rename/$', 'filebrowser_safe.views.rename', name="fb_rename"),
url(r'^delete/$', 'filebrowser_safe.views.delete', name="fb_delete"),
url(r'^check_file/$', 'filebrowser_safe.views._check_file', name="fb_check"),
url(r'^upload_file/$', 'filebrowser_safe.views._upload_file', name="fb_do_upload"),
)
## Instruction:
Update from deprecated features of urlpatterns.
## Code After:
from __future__ import unicode_literals
from django.conf.urls import url
from filebrowser_safe import views
urlpatterns = [
url(r'^browse/$', views.browse, name="fb_browse"),
url(r'^mkdir/', views.mkdir, name="fb_mkdir"),
url(r'^upload/', views.upload, name="fb_upload"),
url(r'^rename/$', views.rename, name="fb_rename"),
url(r'^delete/$', views.delete, name="fb_delete"),
url(r'^check_file/$', views._check_file, name="fb_check"),
url(r'^upload_file/$', views._upload_file, name="fb_do_upload"),
]
|
5a09b88399b34ea8a5185fe1bcdff5f3f7ac7619 | invoke_pytest.py | invoke_pytest.py |
import sys
import py
if __name__ == "__main__":
sys.exit(py.test.cmdline.main())
|
import os
import sys
import py
if __name__ == "__main__":
os.environ["PYTEST_MD_REPORT_COLOR"] = "text"
sys.exit(py.test.cmdline.main())
| Add PYTEST_MD_REPORT_COLOR environment variable setting | Add PYTEST_MD_REPORT_COLOR environment variable setting
| Python | mit | thombashi/pingparsing,thombashi/pingparsing |
+ import os
import sys
import py
if __name__ == "__main__":
+ os.environ["PYTEST_MD_REPORT_COLOR"] = "text"
sys.exit(py.test.cmdline.main())
| Add PYTEST_MD_REPORT_COLOR environment variable setting | ## Code Before:
import sys
import py
if __name__ == "__main__":
sys.exit(py.test.cmdline.main())
## Instruction:
Add PYTEST_MD_REPORT_COLOR environment variable setting
## Code After:
import os
import sys
import py
if __name__ == "__main__":
os.environ["PYTEST_MD_REPORT_COLOR"] = "text"
sys.exit(py.test.cmdline.main())
|
4a2d59375a94c3863431cbf62638c83c2cc70cfb | spec/openpassword/keychain_spec.py | spec/openpassword/keychain_spec.py | from nose.tools import *
from openpassword import EncryptionKey
from openpassword import Keychain
from openpassword.exceptions import InvalidPasswordException
import fudge
import time
class KeychainSpec:
def it_unlocks_the_keychain_with_the_right_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
@raises(InvalidPasswordException)
def it_raises_invalidpasswordexception_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
keychain.unlock('wrongpassword')
def it_fails_to_unlock_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
try:
keychain.unlock('wrongpassword')
except:
pass
eq_(keychain.is_locked(), True)
def it_locks_when_lock_is_called(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
keychain.lock()
eq_(keychain.is_locked(), True)
class Spy:
def __init__(self):
self.called = False
def callback(self):
self.called = True
| from nose.tools import *
from openpassword import EncryptionKey
from openpassword import Keychain
from openpassword.exceptions import InvalidPasswordException
import fudge
class KeychainSpec:
def it_unlocks_the_keychain_with_the_right_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
@raises(InvalidPasswordException)
def it_raises_invalidpasswordexception_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
keychain.unlock('wrongpassword')
def it_fails_to_unlock_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
try:
keychain.unlock('wrongpassword')
except:
pass
eq_(keychain.is_locked(), True)
def it_locks_when_lock_is_called(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
keychain.lock()
eq_(keychain.is_locked(), True)
| Remove leftover from deleted examples | Remove leftover from deleted examples
| Python | mit | openpassword/blimey,openpassword/blimey | from nose.tools import *
from openpassword import EncryptionKey
from openpassword import Keychain
from openpassword.exceptions import InvalidPasswordException
import fudge
- import time
class KeychainSpec:
def it_unlocks_the_keychain_with_the_right_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
@raises(InvalidPasswordException)
def it_raises_invalidpasswordexception_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
keychain.unlock('wrongpassword')
def it_fails_to_unlock_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
try:
keychain.unlock('wrongpassword')
except:
pass
eq_(keychain.is_locked(), True)
def it_locks_when_lock_is_called(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
keychain.lock()
eq_(keychain.is_locked(), True)
-
- class Spy:
- def __init__(self):
- self.called = False
-
- def callback(self):
- self.called = True
- | Remove leftover from deleted examples | ## Code Before:
from nose.tools import *
from openpassword import EncryptionKey
from openpassword import Keychain
from openpassword.exceptions import InvalidPasswordException
import fudge
import time
class KeychainSpec:
def it_unlocks_the_keychain_with_the_right_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
@raises(InvalidPasswordException)
def it_raises_invalidpasswordexception_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
keychain.unlock('wrongpassword')
def it_fails_to_unlock_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
try:
keychain.unlock('wrongpassword')
except:
pass
eq_(keychain.is_locked(), True)
def it_locks_when_lock_is_called(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
keychain.lock()
eq_(keychain.is_locked(), True)
class Spy:
def __init__(self):
self.called = False
def callback(self):
self.called = True
## Instruction:
Remove leftover from deleted examples
## Code After:
from nose.tools import *
from openpassword import EncryptionKey
from openpassword import Keychain
from openpassword.exceptions import InvalidPasswordException
import fudge
class KeychainSpec:
def it_unlocks_the_keychain_with_the_right_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
@raises(InvalidPasswordException)
def it_raises_invalidpasswordexception_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
keychain.unlock('wrongpassword')
def it_fails_to_unlock_with_wrong_password(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt").raises(InvalidPasswordException)
keychain = Keychain(EncryptionKey)
try:
keychain.unlock('wrongpassword')
except:
pass
eq_(keychain.is_locked(), True)
def it_locks_when_lock_is_called(self):
EncryptionKey = fudge.Fake('encryption_key')
EncryptionKey.provides("decrypt")
keychain = Keychain(EncryptionKey)
keychain.unlock('rightpassword')
eq_(keychain.is_locked(), False)
keychain.lock()
eq_(keychain.is_locked(), True)
|
419e06b36c63e8c7fbfdd64dfb7ee5d5654ca3af | studentvoice/urls.py | studentvoice/urls.py | from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from studentvoice import views
urlpatterns = patterns('',
url(r'^$', views.home, name='home'),
url(r'^create/$', views.create, name='create'),
url(r'^search/', views.search, name='search'),
url(r'^(?P<voice_id>\d+)/$', views.show, name='show'),
url(r'^vote/$', views.vote, name='vote'),
url(r'^(?P<voice_id>\d+)/report/$', views.report, name='report'),
url(r'^(?P<voice_id>\d+)/create_comment/$', views.create_comment, name='create_comment'),
url(r'^delete/(?P<voice_id>\d+)/$', views.delete, name='delete'),
url(r'^(?P<voice_id>\d+)/edit/$', views.edit, name='edit'),
url(r'^(?P<voice_id>\d+)/respond/$', views.respond, name='respond'),
url(r'^(?P<voice_id>\d+)/respond/edit/$', views.edit_response, name='edit_response'),
)
| from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from studentvoice import views
urlpatterns = patterns('',
url(r'^$', views.home, name='home'),
url(r'^about/$', views.about, name='about'),
url(r'^create/$', views.create, name='create'),
url(r'^search/', views.search, name='search'),
url(r'^(?P<voice_id>\d+)/$', views.show, name='show'),
url(r'^vote/$', views.vote, name='vote'),
url(r'^(?P<voice_id>\d+)/report/$', views.report, name='report'),
url(r'^(?P<voice_id>\d+)/create_comment/$', views.create_comment, name='create_comment'),
url(r'^delete/(?P<voice_id>\d+)/$', views.delete, name='delete'),
url(r'^(?P<voice_id>\d+)/edit/$', views.edit, name='edit'),
url(r'^(?P<voice_id>\d+)/respond/$', views.respond, name='respond'),
url(r'^(?P<voice_id>\d+)/respond/edit/$', views.edit_response, name='edit_response'),
)
| Add the about page to url.py | Add the about page to url.py
| Python | agpl-3.0 | osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz | from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from studentvoice import views
urlpatterns = patterns('',
url(r'^$', views.home, name='home'),
+ url(r'^about/$', views.about, name='about'),
url(r'^create/$', views.create, name='create'),
url(r'^search/', views.search, name='search'),
url(r'^(?P<voice_id>\d+)/$', views.show, name='show'),
url(r'^vote/$', views.vote, name='vote'),
url(r'^(?P<voice_id>\d+)/report/$', views.report, name='report'),
url(r'^(?P<voice_id>\d+)/create_comment/$', views.create_comment, name='create_comment'),
url(r'^delete/(?P<voice_id>\d+)/$', views.delete, name='delete'),
url(r'^(?P<voice_id>\d+)/edit/$', views.edit, name='edit'),
url(r'^(?P<voice_id>\d+)/respond/$', views.respond, name='respond'),
url(r'^(?P<voice_id>\d+)/respond/edit/$', views.edit_response, name='edit_response'),
)
| Add the about page to url.py | ## Code Before:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from studentvoice import views
urlpatterns = patterns('',
url(r'^$', views.home, name='home'),
url(r'^create/$', views.create, name='create'),
url(r'^search/', views.search, name='search'),
url(r'^(?P<voice_id>\d+)/$', views.show, name='show'),
url(r'^vote/$', views.vote, name='vote'),
url(r'^(?P<voice_id>\d+)/report/$', views.report, name='report'),
url(r'^(?P<voice_id>\d+)/create_comment/$', views.create_comment, name='create_comment'),
url(r'^delete/(?P<voice_id>\d+)/$', views.delete, name='delete'),
url(r'^(?P<voice_id>\d+)/edit/$', views.edit, name='edit'),
url(r'^(?P<voice_id>\d+)/respond/$', views.respond, name='respond'),
url(r'^(?P<voice_id>\d+)/respond/edit/$', views.edit_response, name='edit_response'),
)
## Instruction:
Add the about page to url.py
## Code After:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from studentvoice import views
urlpatterns = patterns('',
url(r'^$', views.home, name='home'),
url(r'^about/$', views.about, name='about'),
url(r'^create/$', views.create, name='create'),
url(r'^search/', views.search, name='search'),
url(r'^(?P<voice_id>\d+)/$', views.show, name='show'),
url(r'^vote/$', views.vote, name='vote'),
url(r'^(?P<voice_id>\d+)/report/$', views.report, name='report'),
url(r'^(?P<voice_id>\d+)/create_comment/$', views.create_comment, name='create_comment'),
url(r'^delete/(?P<voice_id>\d+)/$', views.delete, name='delete'),
url(r'^(?P<voice_id>\d+)/edit/$', views.edit, name='edit'),
url(r'^(?P<voice_id>\d+)/respond/$', views.respond, name='respond'),
url(r'^(?P<voice_id>\d+)/respond/edit/$', views.edit_response, name='edit_response'),
)
|
fe41aabf073ce3a02b5af117120d62ffc0324655 | linked-list/linked-list.py | linked-list/linked-list.py | class Node(object):
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node # add to end of linked list
else:
self.head = new_node
| class Node(object):
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node # add to end of linked list
else:
self.head = new_node
def search(self, position):
counter = 1
current_node = self.head
if position < 1:
return None
while current_node and counter <= position: # go through linked list until you reach input position
if counter == position:
return current_node
current_node = current_node.next
counter += 1
return None # if position larger than length of linked list
| Add search method for python linked list implementation | Add search method for python linked list implementation
| Python | mit | derekmpham/interview-prep,derekmpham/interview-prep | class Node(object):
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node # add to end of linked list
else:
self.head = new_node
+ def search(self, position):
+ counter = 1
+ current_node = self.head
+ if position < 1:
+ return None
+ while current_node and counter <= position: # go through linked list until you reach input position
+ if counter == position:
+ return current_node
+ current_node = current_node.next
+ counter += 1
+ return None # if position larger than length of linked list
+
+
+
+
+
+
+
+
+
+
+
+ | Add search method for python linked list implementation | ## Code Before:
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node # add to end of linked list
else:
self.head = new_node
## Instruction:
Add search method for python linked list implementation
## Code After:
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node # add to end of linked list
else:
self.head = new_node
def search(self, position):
counter = 1
current_node = self.head
if position < 1:
return None
while current_node and counter <= position: # go through linked list until you reach input position
if counter == position:
return current_node
current_node = current_node.next
counter += 1
return None # if position larger than length of linked list
|
2f4ace9d1d1489cac1a8ace8b431eec376a02060 | corehq/apps/couch_sql_migration/management/commands/show_started_migrations.py | corehq/apps/couch_sql_migration/management/commands/show_started_migrations.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from operator import attrgetter
from django.core.management.base import BaseCommand
import six
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
from .migrate_multiple_domains_from_couch_to_sql import (
format_diff_stats,
get_diff_stats,
)
class Command(BaseCommand):
"""Show domains for which the migration has been strated and not completed"""
def handle(self, **options):
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
print("=" * len(status))
print("")
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
print("{}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
stats = get_diff_stats(item.domain)
print(format_diff_stats(stats))
print("")
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from operator import attrgetter
from django.core.management.base import BaseCommand
import six
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
from .migrate_multiple_domains_from_couch_to_sql import (
format_diff_stats,
get_diff_stats,
)
class Command(BaseCommand):
"""Show domains for which the migration has been strated and not completed"""
def handle(self, **options):
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
print("=" * len(status))
print("")
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
print("{}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
try:
stats = get_diff_stats(item.domain)
print(format_diff_stats(stats))
except Exception as err:
print("Cannot get diff stats: {}".format(err))
print("")
| Handle error in get diff stats | Handle error in get diff stats
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from operator import attrgetter
from django.core.management.base import BaseCommand
import six
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
from .migrate_multiple_domains_from_couch_to_sql import (
format_diff_stats,
get_diff_stats,
)
class Command(BaseCommand):
"""Show domains for which the migration has been strated and not completed"""
def handle(self, **options):
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
print("=" * len(status))
print("")
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
print("{}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
+ try:
- stats = get_diff_stats(item.domain)
+ stats = get_diff_stats(item.domain)
- print(format_diff_stats(stats))
+ print(format_diff_stats(stats))
+ except Exception as err:
+ print("Cannot get diff stats: {}".format(err))
print("")
| Handle error in get diff stats | ## Code Before:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from operator import attrgetter
from django.core.management.base import BaseCommand
import six
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
from .migrate_multiple_domains_from_couch_to_sql import (
format_diff_stats,
get_diff_stats,
)
class Command(BaseCommand):
"""Show domains for which the migration has been strated and not completed"""
def handle(self, **options):
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
print("=" * len(status))
print("")
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
print("{}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
stats = get_diff_stats(item.domain)
print(format_diff_stats(stats))
print("")
## Instruction:
Handle error in get diff stats
## Code After:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from operator import attrgetter
from django.core.management.base import BaseCommand
import six
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
from .migrate_multiple_domains_from_couch_to_sql import (
format_diff_stats,
get_diff_stats,
)
class Command(BaseCommand):
"""Show domains for which the migration has been strated and not completed"""
def handle(self, **options):
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
print("=" * len(status))
print("")
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
print("{}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
try:
stats = get_diff_stats(item.domain)
print(format_diff_stats(stats))
except Exception as err:
print("Cannot get diff stats: {}".format(err))
print("")
|
3a8d7ff5f047c7b3476b8dcffa0e6850e952a645 | docs/examples/http_proxy/set_http_proxy_method.py | docs/examples/http_proxy/set_http_proxy_method.py | from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
| from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.connection.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
| Fix a typo in the example. | Fix a typo in the example.
| Python | apache-2.0 | kater169/libcloud,DimensionDataCBUSydney/libcloud,t-tran/libcloud,Scalr/libcloud,MrBasset/libcloud,watermelo/libcloud,curoverse/libcloud,Kami/libcloud,SecurityCompass/libcloud,Kami/libcloud,pantheon-systems/libcloud,andrewsomething/libcloud,schaubl/libcloud,pantheon-systems/libcloud,jimbobhickville/libcloud,munkiat/libcloud,iPlantCollaborativeOpenSource/libcloud,schaubl/libcloud,Kami/libcloud,JamesGuthrie/libcloud,sahildua2305/libcloud,jimbobhickville/libcloud,iPlantCollaborativeOpenSource/libcloud,aleGpereira/libcloud,mgogoulos/libcloud,SecurityCompass/libcloud,curoverse/libcloud,munkiat/libcloud,sfriesel/libcloud,mbrukman/libcloud,smaffulli/libcloud,mistio/libcloud,niteoweb/libcloud,briancurtin/libcloud,supertom/libcloud,sergiorua/libcloud,cryptickp/libcloud,watermelo/libcloud,vongazman/libcloud,sergiorua/libcloud,samuelchong/libcloud,sfriesel/libcloud,StackPointCloud/libcloud,JamesGuthrie/libcloud,thesquelched/libcloud,cloudControl/libcloud,lochiiconnectivity/libcloud,DimensionDataCBUSydney/libcloud,aviweit/libcloud,t-tran/libcloud,thesquelched/libcloud,jerryblakley/libcloud,techhat/libcloud,cryptickp/libcloud,MrBasset/libcloud,ZuluPro/libcloud,ByteInternet/libcloud,Verizon/libcloud,mbrukman/libcloud,wrigri/libcloud,jimbobhickville/libcloud,Verizon/libcloud,cloudControl/libcloud,wuyuewen/libcloud,iPlantCollaborativeOpenSource/libcloud,mbrukman/libcloud,sahildua2305/libcloud,niteoweb/libcloud,kater169/libcloud,lochiiconnectivity/libcloud,atsaki/libcloud,curoverse/libcloud,smaffulli/libcloud,apache/libcloud,erjohnso/libcloud,mistio/libcloud,apache/libcloud,marcinzaremba/libcloud,ZuluPro/libcloud,ByteInternet/libcloud,mathspace/libcloud,dcorbacho/libcloud,marcinzaremba/libcloud,wido/libcloud,pantheon-systems/libcloud,Itxaka/libcloud,dcorbacho/libcloud,mtekel/libcloud,schaubl/libcloud,munkiat/libcloud,Itxaka/libcloud,mathspace/libcloud,Scalr/libcloud,DimensionDataCBUSydney/libcloud,jerryblakley/libcloud,Cloud-Elasticity-Services/as-libcloud,an
drewsomething/libcloud,Itxaka/libcloud,NexusIS/libcloud,atsaki/libcloud,Cloud-Elasticity-Services/as-libcloud,jerryblakley/libcloud,techhat/libcloud,aleGpereira/libcloud,techhat/libcloud,samuelchong/libcloud,supertom/libcloud,cloudControl/libcloud,niteoweb/libcloud,sfriesel/libcloud,mgogoulos/libcloud,t-tran/libcloud,Verizon/libcloud,marcinzaremba/libcloud,carletes/libcloud,wuyuewen/libcloud,samuelchong/libcloud,cryptickp/libcloud,mathspace/libcloud,thesquelched/libcloud,mtekel/libcloud,wrigri/libcloud,sergiorua/libcloud,vongazman/libcloud,carletes/libcloud,smaffulli/libcloud,vongazman/libcloud,mistio/libcloud,sahildua2305/libcloud,wuyuewen/libcloud,dcorbacho/libcloud,illfelder/libcloud,lochiiconnectivity/libcloud,atsaki/libcloud,aviweit/libcloud,pquentin/libcloud,carletes/libcloud,ZuluPro/libcloud,wido/libcloud,ByteInternet/libcloud,briancurtin/libcloud,pquentin/libcloud,mgogoulos/libcloud,illfelder/libcloud,apache/libcloud,aviweit/libcloud,erjohnso/libcloud,andrewsomething/libcloud,Scalr/libcloud,aleGpereira/libcloud,MrBasset/libcloud,mtekel/libcloud,pquentin/libcloud,watermelo/libcloud,supertom/libcloud,StackPointCloud/libcloud,Cloud-Elasticity-Services/as-libcloud,NexusIS/libcloud,briancurtin/libcloud,erjohnso/libcloud,JamesGuthrie/libcloud,SecurityCompass/libcloud,wrigri/libcloud,StackPointCloud/libcloud,kater169/libcloud,NexusIS/libcloud,wido/libcloud,illfelder/libcloud | from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
- driver.set_http_proxy(proxy_url=PROXY_URL)
+ driver.connection.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
| Fix a typo in the example. | ## Code Before:
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
## Instruction:
Fix a typo in the example.
## Code After:
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.connection.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
|
ba4eace22eb2379a5a0d8a79615892edd58b1f49 | mezzanine/core/sitemaps.py | mezzanine/core/sitemaps.py |
from django.contrib.sitemaps import Sitemap
from django.db.models import get_models
from mezzanine.core.models import Displayable
class DisplayableSitemap(Sitemap):
"""
Sitemap class for Django's sitemaps framework that returns
all published items for models that subclass ``Displayable``.
"""
def items(self):
"""
Return all published items for models that subclass
``Displayable``.
"""
items = []
item_urls = set()
for model in get_models():
if issubclass(model, Displayable):
for item in model.objects.published():
url = item.get_absolute_url()
# check if the url of that item was already seen
# (this might happen for Page items and subclasses of Page like RichTextPage)
if not url in item_urls:
items.append(item)
item_urls.add(url)
return items
|
from django.contrib.sitemaps import Sitemap
from django.db.models import get_models
from mezzanine.core.models import Displayable
class DisplayableSitemap(Sitemap):
"""
Sitemap class for Django's sitemaps framework that returns
all published items for models that subclass ``Displayable``.
"""
def items(self):
"""
Return all published items for models that subclass
``Displayable``.
"""
items = {}
for model in get_models():
if issubclass(model, Displayable):
for item in model.objects.published():
items[item.get_absolute_url()] = item
return items.values()
| Clean up sitemap URL handling. | Clean up sitemap URL handling.
| Python | bsd-2-clause | Cajoline/mezzanine,guibernardino/mezzanine,agepoly/mezzanine,sjuxax/mezzanine,vladir/mezzanine,Cicero-Zhao/mezzanine,stbarnabas/mezzanine,sjdines/mezzanine,viaregio/mezzanine,wbtuomela/mezzanine,biomassives/mezzanine,frankchin/mezzanine,orlenko/plei,dekomote/mezzanine-modeltranslation-backport,batpad/mezzanine,mush42/mezzanine,vladir/mezzanine,industrydive/mezzanine,scarcry/snm-mezzanine,nikolas/mezzanine,geodesign/mezzanine,PegasusWang/mezzanine,Cajoline/mezzanine,stephenmcd/mezzanine,saintbird/mezzanine,dovydas/mezzanine,theclanks/mezzanine,wrwrwr/mezzanine,jerivas/mezzanine,Skytorn86/mezzanine,theclanks/mezzanine,vladir/mezzanine,molokov/mezzanine,stbarnabas/mezzanine,adrian-the-git/mezzanine,ZeroXn/mezzanine,promil23/mezzanine,biomassives/mezzanine,spookylukey/mezzanine,saintbird/mezzanine,sjuxax/mezzanine,readevalprint/mezzanine,christianwgd/mezzanine,webounty/mezzanine,gbosh/mezzanine,industrydive/mezzanine,fusionbox/mezzanine,tuxinhang1989/mezzanine,readevalprint/mezzanine,wbtuomela/mezzanine,emile2016/mezzanine,ZeroXn/mezzanine,viaregio/mezzanine,wbtuomela/mezzanine,joshcartme/mezzanine,dustinrb/mezzanine,Kniyl/mezzanine,gradel/mezzanine,theclanks/mezzanine,frankier/mezzanine,wyzex/mezzanine,PegasusWang/mezzanine,eino-makitalo/mezzanine,orlenko/sfpirg,eino-makitalo/mezzanine,nikolas/mezzanine,PegasusWang/mezzanine,promil23/mezzanine,sjdines/mezzanine,guibernardino/mezzanine,sjdines/mezzanine,damnfine/mezzanine,Cicero-Zhao/mezzanine,saintbird/mezzanine,scarcry/snm-mezzanine,jjz/mezzanine,wyzex/mezzanine,spookylukey/mezzanine,AlexHill/mezzanine,webounty/mezzanine,webounty/mezzanine,ryneeverett/mezzanine,batpad/mezzanine,jerivas/mezzanine,frankier/mezzanine,jerivas/mezzanine,dekomote/mezzanine-modeltranslation-backport,agepoly/mezzanine,orlenko/plei,orlenko/sfpirg,frankchin/mezzanine,jjz/mezzanine,SoLoHiC/mezzanine,molokov/mezzanine,dsanders11/mezzanine,orlenko/sfpirg,cccs-web/mezzanine,douglaskastle/mezzanine,wrwrwr/mezzanine,Kniyl/me
zzanine,gradel/mezzanine,viaregio/mezzanine,adrian-the-git/mezzanine,dovydas/mezzanine,SoLoHiC/mezzanine,nikolas/mezzanine,geodesign/mezzanine,mush42/mezzanine,douglaskastle/mezzanine,gradel/mezzanine,eino-makitalo/mezzanine,douglaskastle/mezzanine,stephenmcd/mezzanine,ZeroXn/mezzanine,joshcartme/mezzanine,biomassives/mezzanine,dsanders11/mezzanine,tuxinhang1989/mezzanine,AlexHill/mezzanine,frankchin/mezzanine,damnfine/mezzanine,mush42/mezzanine,scarcry/snm-mezzanine,dsanders11/mezzanine,orlenko/plei,dustinrb/mezzanine,Skytorn86/mezzanine,frankier/mezzanine,molokov/mezzanine,adrian-the-git/mezzanine,joshcartme/mezzanine,gbosh/mezzanine,sjuxax/mezzanine,emile2016/mezzanine,gbosh/mezzanine,industrydive/mezzanine,Kniyl/mezzanine,ryneeverett/mezzanine,dekomote/mezzanine-modeltranslation-backport,fusionbox/mezzanine,readevalprint/mezzanine,geodesign/mezzanine,agepoly/mezzanine,SoLoHiC/mezzanine,stephenmcd/mezzanine,Skytorn86/mezzanine,dustinrb/mezzanine,wyzex/mezzanine,promil23/mezzanine,jjz/mezzanine,dovydas/mezzanine,Cajoline/mezzanine,cccs-web/mezzanine,spookylukey/mezzanine,tuxinhang1989/mezzanine,christianwgd/mezzanine,ryneeverett/mezzanine,christianwgd/mezzanine,damnfine/mezzanine,emile2016/mezzanine |
from django.contrib.sitemaps import Sitemap
from django.db.models import get_models
from mezzanine.core.models import Displayable
class DisplayableSitemap(Sitemap):
"""
Sitemap class for Django's sitemaps framework that returns
all published items for models that subclass ``Displayable``.
"""
def items(self):
"""
Return all published items for models that subclass
``Displayable``.
"""
- items = []
+ items = {}
- item_urls = set()
for model in get_models():
if issubclass(model, Displayable):
for item in model.objects.published():
- url = item.get_absolute_url()
+ items[item.get_absolute_url()] = item
- # check if the url of that item was already seen
- # (this might happen for Page items and subclasses of Page like RichTextPage)
- if not url in item_urls:
- items.append(item)
- item_urls.add(url)
- return items
+ return items.values()
| Clean up sitemap URL handling. | ## Code Before:
from django.contrib.sitemaps import Sitemap
from django.db.models import get_models
from mezzanine.core.models import Displayable
class DisplayableSitemap(Sitemap):
"""
Sitemap class for Django's sitemaps framework that returns
all published items for models that subclass ``Displayable``.
"""
def items(self):
"""
Return all published items for models that subclass
``Displayable``.
"""
items = []
item_urls = set()
for model in get_models():
if issubclass(model, Displayable):
for item in model.objects.published():
url = item.get_absolute_url()
# check if the url of that item was already seen
# (this might happen for Page items and subclasses of Page like RichTextPage)
if not url in item_urls:
items.append(item)
item_urls.add(url)
return items
## Instruction:
Clean up sitemap URL handling.
## Code After:
from django.contrib.sitemaps import Sitemap
from django.db.models import get_models
from mezzanine.core.models import Displayable
class DisplayableSitemap(Sitemap):
"""
Sitemap class for Django's sitemaps framework that returns
all published items for models that subclass ``Displayable``.
"""
def items(self):
"""
Return all published items for models that subclass
``Displayable``.
"""
items = {}
for model in get_models():
if issubclass(model, Displayable):
for item in model.objects.published():
items[item.get_absolute_url()] = item
return items.values()
|
d1da755f10d4287d1cfbec3a6d29d9961125bbce | plugins/tff_backend/plugin_consts.py | plugins/tff_backend/plugin_consts.py |
NAMESPACE = u'tff_backend'
KEY_ALGORITHM = u'ed25519'
KEY_NAME = u'threefold'
THREEFOLD_APP_ID = u'em-be-threefold-token'
FULL_CURRENCY_NAMES = {
'USD': 'dollar',
'EUR': 'euro',
'YEN': 'yen',
'UAE': 'dirham',
'GBP': 'pound',
}
CURRENCY_RATES = {
'USD': 5.0,
'EUR': 4.2,
'YEN': 543.6,
'UAE': 18.6,
'GBP': 3.83
}
|
NAMESPACE = u'tff_backend'
KEY_ALGORITHM = u'ed25519'
KEY_NAME = u'threefold'
THREEFOLD_APP_ID = u'em-be-threefold-token'
FULL_CURRENCY_NAMES = {
'USD': 'dollar',
'EUR': 'euro',
'YEN': 'yen',
'UAE': 'dirham',
'GBP': 'pound',
'BTC': 'bitcoin',
}
CURRENCY_RATES = {
'USD': 5.0,
'EUR': 4.2,
'YEN': 543.6,
'UAE': 18.6,
'GBP': 3.83,
'BTC': .0011,
}
| Add BTC to possible currencies | Add BTC to possible currencies
| Python | bsd-3-clause | threefoldfoundation/app_backend,threefoldfoundation/app_backend,threefoldfoundation/app_backend,threefoldfoundation/app_backend |
NAMESPACE = u'tff_backend'
KEY_ALGORITHM = u'ed25519'
KEY_NAME = u'threefold'
THREEFOLD_APP_ID = u'em-be-threefold-token'
FULL_CURRENCY_NAMES = {
'USD': 'dollar',
'EUR': 'euro',
'YEN': 'yen',
'UAE': 'dirham',
'GBP': 'pound',
+ 'BTC': 'bitcoin',
}
CURRENCY_RATES = {
'USD': 5.0,
'EUR': 4.2,
'YEN': 543.6,
'UAE': 18.6,
- 'GBP': 3.83
+ 'GBP': 3.83,
+ 'BTC': .0011,
}
| Add BTC to possible currencies | ## Code Before:
NAMESPACE = u'tff_backend'
KEY_ALGORITHM = u'ed25519'
KEY_NAME = u'threefold'
THREEFOLD_APP_ID = u'em-be-threefold-token'
FULL_CURRENCY_NAMES = {
'USD': 'dollar',
'EUR': 'euro',
'YEN': 'yen',
'UAE': 'dirham',
'GBP': 'pound',
}
CURRENCY_RATES = {
'USD': 5.0,
'EUR': 4.2,
'YEN': 543.6,
'UAE': 18.6,
'GBP': 3.83
}
## Instruction:
Add BTC to possible currencies
## Code After:
NAMESPACE = u'tff_backend'
KEY_ALGORITHM = u'ed25519'
KEY_NAME = u'threefold'
THREEFOLD_APP_ID = u'em-be-threefold-token'
FULL_CURRENCY_NAMES = {
'USD': 'dollar',
'EUR': 'euro',
'YEN': 'yen',
'UAE': 'dirham',
'GBP': 'pound',
'BTC': 'bitcoin',
}
CURRENCY_RATES = {
'USD': 5.0,
'EUR': 4.2,
'YEN': 543.6,
'UAE': 18.6,
'GBP': 3.83,
'BTC': .0011,
}
|
9c8dbde9b39f6fcd713a7d118dcd613cc48cf54e | astropy/tests/tests/test_run_tests.py | astropy/tests/tests/test_run_tests.py | from __future__ import (absolute_import, division, print_function,
unicode_literals)
# test helper.run_tests function
import sys
from .. import helper
from ... import _get_test_runner
from .. helper import pytest
# run_tests should raise ValueError when asked to run on a module it can't find
def test_module_not_found():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests('fake.module')
# run_tests should raise ValueError when passed an invalid pastebin= option
def test_pastebin_keyword():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests(pastebin='not_an_option')
# tests that tests are only run in Python 3 out of the 2to3'd build (otherwise
# a syntax error would occur)
try:
from .run_after_2to3 import test_run_after_2to3
except SyntaxError:
def test_run_after_2to3():
helper.pytest.fail("Not running the 2to3'd tests!")
def test_deprecation_warning():
if sys.version_info[:2] == (3, 3):
with pytest.raises(DeprecationWarning):
'{0:s}'.format(object())
| from __future__ import (absolute_import, division, print_function,
unicode_literals)
# test helper.run_tests function
import warnings
from .. import helper
from ... import _get_test_runner
from .. helper import pytest
# run_tests should raise ValueError when asked to run on a module it can't find
def test_module_not_found():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests('fake.module')
# run_tests should raise ValueError when passed an invalid pastebin= option
def test_pastebin_keyword():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests(pastebin='not_an_option')
# tests that tests are only run in Python 3 out of the 2to3'd build (otherwise
# a syntax error would occur)
try:
from .run_after_2to3 import test_run_after_2to3
except SyntaxError:
def test_run_after_2to3():
helper.pytest.fail("Not running the 2to3'd tests!")
def test_deprecation_warning():
with pytest.raises(DeprecationWarning):
warnings.warn('test warning', DeprecationWarning)
| Test that deprecation exceptions are working differently, after suggestion by @embray | Test that deprecation exceptions are working differently, after
suggestion by @embray
| Python | bsd-3-clause | larrybradley/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,stargaser/astropy,DougBurke/astropy,joergdietrich/astropy,kelle/astropy,mhvk/astropy,funbaker/astropy,saimn/astropy,lpsinger/astropy,pllim/astropy,dhomeier/astropy,StuartLittlefair/astropy,lpsinger/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,dhomeier/astropy,mhvk/astropy,kelle/astropy,saimn/astropy,larrybradley/astropy,StuartLittlefair/astropy,dhomeier/astropy,bsipocz/astropy,astropy/astropy,kelle/astropy,MSeifert04/astropy,funbaker/astropy,astropy/astropy,DougBurke/astropy,tbabej/astropy,tbabej/astropy,stargaser/astropy,AustereCuriosity/astropy,saimn/astropy,lpsinger/astropy,joergdietrich/astropy,joergdietrich/astropy,StuartLittlefair/astropy,astropy/astropy,AustereCuriosity/astropy,bsipocz/astropy,saimn/astropy,astropy/astropy,stargaser/astropy,funbaker/astropy,stargaser/astropy,DougBurke/astropy,mhvk/astropy,astropy/astropy,bsipocz/astropy,dhomeier/astropy,MSeifert04/astropy,joergdietrich/astropy,kelle/astropy,joergdietrich/astropy,kelle/astropy,larrybradley/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,lpsinger/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,tbabej/astropy,aleksandr-bakanov/astropy,pllim/astropy,pllim/astropy,larrybradley/astropy,pllim/astropy,AustereCuriosity/astropy,funbaker/astropy,mhvk/astropy,bsipocz/astropy,tbabej/astropy,pllim/astropy,saimn/astropy,MSeifert04/astropy,MSeifert04/astropy,tbabej/astropy,mhvk/astropy,AustereCuriosity/astropy,lpsinger/astropy | from __future__ import (absolute_import, division, print_function,
unicode_literals)
# test helper.run_tests function
- import sys
+ import warnings
from .. import helper
from ... import _get_test_runner
from .. helper import pytest
# run_tests should raise ValueError when asked to run on a module it can't find
def test_module_not_found():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests('fake.module')
# run_tests should raise ValueError when passed an invalid pastebin= option
def test_pastebin_keyword():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests(pastebin='not_an_option')
# tests that tests are only run in Python 3 out of the 2to3'd build (otherwise
# a syntax error would occur)
try:
from .run_after_2to3 import test_run_after_2to3
except SyntaxError:
def test_run_after_2to3():
helper.pytest.fail("Not running the 2to3'd tests!")
def test_deprecation_warning():
- if sys.version_info[:2] == (3, 3):
- with pytest.raises(DeprecationWarning):
+ with pytest.raises(DeprecationWarning):
- '{0:s}'.format(object())
+ warnings.warn('test warning', DeprecationWarning)
| Test that deprecation exceptions are working differently, after suggestion by @embray | ## Code Before:
from __future__ import (absolute_import, division, print_function,
unicode_literals)
# test helper.run_tests function
import sys
from .. import helper
from ... import _get_test_runner
from .. helper import pytest
# run_tests should raise ValueError when asked to run on a module it can't find
def test_module_not_found():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests('fake.module')
# run_tests should raise ValueError when passed an invalid pastebin= option
def test_pastebin_keyword():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests(pastebin='not_an_option')
# tests that tests are only run in Python 3 out of the 2to3'd build (otherwise
# a syntax error would occur)
try:
from .run_after_2to3 import test_run_after_2to3
except SyntaxError:
def test_run_after_2to3():
helper.pytest.fail("Not running the 2to3'd tests!")
def test_deprecation_warning():
if sys.version_info[:2] == (3, 3):
with pytest.raises(DeprecationWarning):
'{0:s}'.format(object())
## Instruction:
Test that deprecation exceptions are working differently, after suggestion by @embray
## Code After:
from __future__ import (absolute_import, division, print_function,
unicode_literals)
# test helper.run_tests function
import warnings
from .. import helper
from ... import _get_test_runner
from .. helper import pytest
# run_tests should raise ValueError when asked to run on a module it can't find
def test_module_not_found():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests('fake.module')
# run_tests should raise ValueError when passed an invalid pastebin= option
def test_pastebin_keyword():
with helper.pytest.raises(ValueError):
_get_test_runner().run_tests(pastebin='not_an_option')
# tests that tests are only run in Python 3 out of the 2to3'd build (otherwise
# a syntax error would occur)
try:
from .run_after_2to3 import test_run_after_2to3
except SyntaxError:
def test_run_after_2to3():
helper.pytest.fail("Not running the 2to3'd tests!")
def test_deprecation_warning():
with pytest.raises(DeprecationWarning):
warnings.warn('test warning', DeprecationWarning)
|
24093369bb1dbd2e9034db9425920ffdc14ee070 | abusehelper/bots/abusech/feodoccbot.py | abusehelper/bots/abusech/feodoccbot.py |
from abusehelper.core import bot
from . import host_or_ip, split_description, AbuseCHFeedBot
class FeodoCcBot(AbuseCHFeedBot):
feed_type = "c&c"
feed_name = "feodo c&c"
feeds = bot.ListParam(default=["https://feodotracker.abuse.ch/feodotracker.rss"])
# The timestamp in the title appears to be the firstseen timestamp,
# skip including it as the "source time".
parse_title = None
def parse_description(self, description):
got_version = False
for key, value in split_description(description):
if key == "version":
yield "malware family", "feodo." + value.strip().lower()
got_version = True
elif key == "host":
yield host_or_ip(value)
if not got_version:
yield "malware family", "feodo"
if __name__ == "__main__":
FeodoCcBot.from_command_line().execute()
|
from abusehelper.core import bot
from . import host_or_ip, split_description, AbuseCHFeedBot
class FeodoCcBot(AbuseCHFeedBot):
feed_type = "c&c"
feed_name = "feodo c&c"
feeds = bot.ListParam(default=["https://feodotracker.abuse.ch/feodotracker.rss"])
# The timestamp in the title appears to be the firstseen timestamp,
# skip including it as the "source time".
parse_title = None
def parse_description(self, description):
got_version = False
for key, value in split_description(description):
if key == "version":
yield "malware family", "feodo." + value.strip().lower()
got_version = True
elif key == "status":
yield "status", value
elif key == "host":
yield host_or_ip(value)
if not got_version:
yield "malware family", "feodo"
if __name__ == "__main__":
FeodoCcBot.from_command_line().execute()
| Include status information in abuse.ch's Feodo C&C feed | Include status information in abuse.ch's Feodo C&C feed
| Python | mit | abusesa/abusehelper |
from abusehelper.core import bot
from . import host_or_ip, split_description, AbuseCHFeedBot
class FeodoCcBot(AbuseCHFeedBot):
feed_type = "c&c"
feed_name = "feodo c&c"
feeds = bot.ListParam(default=["https://feodotracker.abuse.ch/feodotracker.rss"])
# The timestamp in the title appears to be the firstseen timestamp,
# skip including it as the "source time".
parse_title = None
def parse_description(self, description):
got_version = False
for key, value in split_description(description):
if key == "version":
yield "malware family", "feodo." + value.strip().lower()
got_version = True
+ elif key == "status":
+ yield "status", value
elif key == "host":
yield host_or_ip(value)
if not got_version:
yield "malware family", "feodo"
if __name__ == "__main__":
FeodoCcBot.from_command_line().execute()
| Include status information in abuse.ch's Feodo C&C feed | ## Code Before:
from abusehelper.core import bot
from . import host_or_ip, split_description, AbuseCHFeedBot
class FeodoCcBot(AbuseCHFeedBot):
feed_type = "c&c"
feed_name = "feodo c&c"
feeds = bot.ListParam(default=["https://feodotracker.abuse.ch/feodotracker.rss"])
# The timestamp in the title appears to be the firstseen timestamp,
# skip including it as the "source time".
parse_title = None
def parse_description(self, description):
got_version = False
for key, value in split_description(description):
if key == "version":
yield "malware family", "feodo." + value.strip().lower()
got_version = True
elif key == "host":
yield host_or_ip(value)
if not got_version:
yield "malware family", "feodo"
if __name__ == "__main__":
FeodoCcBot.from_command_line().execute()
## Instruction:
Include status information in abuse.ch's Feodo C&C feed
## Code After:
from abusehelper.core import bot
from . import host_or_ip, split_description, AbuseCHFeedBot
class FeodoCcBot(AbuseCHFeedBot):
feed_type = "c&c"
feed_name = "feodo c&c"
feeds = bot.ListParam(default=["https://feodotracker.abuse.ch/feodotracker.rss"])
# The timestamp in the title appears to be the firstseen timestamp,
# skip including it as the "source time".
parse_title = None
def parse_description(self, description):
got_version = False
for key, value in split_description(description):
if key == "version":
yield "malware family", "feodo." + value.strip().lower()
got_version = True
elif key == "status":
yield "status", value
elif key == "host":
yield host_or_ip(value)
if not got_version:
yield "malware family", "feodo"
if __name__ == "__main__":
FeodoCcBot.from_command_line().execute()
|
efd1841fb904e30ac0b87b7c7d019f2745452cb2 | test_output.py | test_output.py |
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
if __name__ == '__main__':
unittest.main()
|
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
def _assert_resolve_tco(self, url, expected):
result = self.run_safari_rs('resolve', url)
assert result.rc == 0
assert result.stderr == ''
assert result.stdout == expected
def test_resolve_single_redirect(self):
self._assert_resolve_tco('https://t.co/2pciHpqpwC', 'https://donmelton.com/2013/06/04/remembering-penny/')
def test_resolve_multiple_redirect(self):
self._assert_resolve_tco('https://t.co/oSJaiNlIP6', 'https://bitly.com/blog/backlinking-strategy/')
def test_resolve_no_redirect(self):
self._assert_resolve_tco('https://example.org/', 'https://example.org/')
if __name__ == '__main__':
unittest.main()
| Add some tests for the URL resolver | Add some tests for the URL resolver
| Python | mit | alexwlchan/safari.rs,alexwlchan/safari.rs |
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
+ def _assert_resolve_tco(self, url, expected):
+ result = self.run_safari_rs('resolve', url)
+ assert result.rc == 0
+ assert result.stderr == ''
+ assert result.stdout == expected
+
+ def test_resolve_single_redirect(self):
+ self._assert_resolve_tco('https://t.co/2pciHpqpwC', 'https://donmelton.com/2013/06/04/remembering-penny/')
+
+ def test_resolve_multiple_redirect(self):
+ self._assert_resolve_tco('https://t.co/oSJaiNlIP6', 'https://bitly.com/blog/backlinking-strategy/')
+
+ def test_resolve_no_redirect(self):
+ self._assert_resolve_tco('https://example.org/', 'https://example.org/')
+
if __name__ == '__main__':
unittest.main()
| Add some tests for the URL resolver | ## Code Before:
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
if __name__ == '__main__':
unittest.main()
## Instruction:
Add some tests for the URL resolver
## Code After:
import unittest
from conftest import BaseTest
class TestSafariRS(BaseTest):
def test_urls_all_flag_is_deprecated(self):
result = self.run_safari_rs('urls-all')
self.assertIn('deprecated', result.stderr)
def test_list_tabs_flag_is_not_deprecated(self):
result = self.run_safari_rs('list-tabs')
self.assertNotIn('deprecated', result.stderr)
def test_no_extra_whitespace_on_tidy_url(self):
result = self.run_safari_rs('tidy-url', 'https://github.com/alexwlchan/safari.rs/issues')
assert result.rc == 0
assert result.stderr == ''
assert result.stdout.strip() == result.stdout
def _assert_resolve_tco(self, url, expected):
result = self.run_safari_rs('resolve', url)
assert result.rc == 0
assert result.stderr == ''
assert result.stdout == expected
def test_resolve_single_redirect(self):
self._assert_resolve_tco('https://t.co/2pciHpqpwC', 'https://donmelton.com/2013/06/04/remembering-penny/')
def test_resolve_multiple_redirect(self):
self._assert_resolve_tco('https://t.co/oSJaiNlIP6', 'https://bitly.com/blog/backlinking-strategy/')
def test_resolve_no_redirect(self):
self._assert_resolve_tco('https://example.org/', 'https://example.org/')
if __name__ == '__main__':
unittest.main()
|
153c832f083e8ec801ecb8dbddd2f8e79b735eed | utilities.py | utilities.py |
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
def get_pv_names(mode):
''' Given a certain ring mode as a string, return all available pvs '''
ap.machines.load(mode)
result = set()
elements = ap.getElements('*')
for element in elements:
pvs = element.pv()
if(len(pvs) > 0):
pv_name = pvs[0].split(':')[0]
result.add(pv_name)
return result
def get_pvs_from_file(filepath):
''' Return a list of pvs from a given file '''
with open(filepath) as f:
contents = f.read().splitlines()
return contents
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
def get_pv_names(mode):
''' Given a certain ring mode as a string, return all available pvs '''
ap.machines.load(mode)
result = set()
elements = ap.getElements('*')
for element in elements:
pvs = element.pv()
if(len(pvs) > 0):
pv_name = pvs[0].split(':')[0]
result.add(pv_name)
return result
def get_pvs_from_file(filepath):
''' Return a list of pvs from a given file '''
with open(filepath) as f:
contents = f.read().splitlines()
return contents
def write_pvs_to_file(filename, data):
''' Write given pvs to file '''
f = open(filename, 'w')
for element in data:
f.write(element, '\n')
f.close()
| Add utility function to write pvs to file | Add utility function to write pvs to file
| Python | apache-2.0 | razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects |
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
+
def get_pv_names(mode):
''' Given a certain ring mode as a string, return all available pvs '''
ap.machines.load(mode)
result = set()
elements = ap.getElements('*')
for element in elements:
pvs = element.pv()
if(len(pvs) > 0):
pv_name = pvs[0].split(':')[0]
result.add(pv_name)
return result
def get_pvs_from_file(filepath):
''' Return a list of pvs from a given file '''
with open(filepath) as f:
contents = f.read().splitlines()
return contents
+ def write_pvs_to_file(filename, data):
+ ''' Write given pvs to file '''
+ f = open(filename, 'w')
+ for element in data:
+ f.write(element, '\n')
+ f.close()
+ | Add utility function to write pvs to file | ## Code Before:
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
def get_pv_names(mode):
''' Given a certain ring mode as a string, return all available pvs '''
ap.machines.load(mode)
result = set()
elements = ap.getElements('*')
for element in elements:
pvs = element.pv()
if(len(pvs) > 0):
pv_name = pvs[0].split(':')[0]
result.add(pv_name)
return result
def get_pvs_from_file(filepath):
''' Return a list of pvs from a given file '''
with open(filepath) as f:
contents = f.read().splitlines()
return contents
## Instruction:
Add utility function to write pvs to file
## Code After:
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
def get_pv_names(mode):
''' Given a certain ring mode as a string, return all available pvs '''
ap.machines.load(mode)
result = set()
elements = ap.getElements('*')
for element in elements:
pvs = element.pv()
if(len(pvs) > 0):
pv_name = pvs[0].split(':')[0]
result.add(pv_name)
return result
def get_pvs_from_file(filepath):
''' Return a list of pvs from a given file '''
with open(filepath) as f:
contents = f.read().splitlines()
return contents
def write_pvs_to_file(filename, data):
''' Write given pvs to file '''
f = open(filename, 'w')
for element in data:
f.write(element, '\n')
f.close()
|
69a94173a48d04bc9e409278574844ebbc43af8b | dadd/worker/__init__.py | dadd/worker/__init__.py | import os
from functools import partial
import click
from flask import Flask
from dadd import server
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if os.environ.get('DEBUG') or (ctx.obj and ctx.obj.get('DEBUG')):
app.debug = True
if ctx.obj:
app.config.update(ctx.obj)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
| from functools import partial
import click
from flask import Flask
from dadd import server
from dadd.master.utils import update_config
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if ctx.obj:
app.config.update(ctx.obj)
update_config(app)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
| Allow worker to use APP_SETTINGS_YAML correctly. | Allow worker to use APP_SETTINGS_YAML correctly.
| Python | bsd-3-clause | ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd | - import os
-
from functools import partial
import click
from flask import Flask
from dadd import server
+ from dadd.master.utils import update_config
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
- if os.environ.get('DEBUG') or (ctx.obj and ctx.obj.get('DEBUG')):
- app.debug = True
-
if ctx.obj:
app.config.update(ctx.obj)
+
+ update_config(app)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
| Allow worker to use APP_SETTINGS_YAML correctly. | ## Code Before:
import os
from functools import partial
import click
from flask import Flask
from dadd import server
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if os.environ.get('DEBUG') or (ctx.obj and ctx.obj.get('DEBUG')):
app.debug = True
if ctx.obj:
app.config.update(ctx.obj)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
## Instruction:
Allow worker to use APP_SETTINGS_YAML correctly.
## Code After:
from functools import partial
import click
from flask import Flask
from dadd import server
from dadd.master.utils import update_config
app = Flask(__name__)
app.config.from_object('dadd.worker.settings')
import dadd.worker.handlers # noqa
@click.command()
@click.pass_context
def run(ctx):
if ctx.obj:
app.config.update(ctx.obj)
update_config(app)
register = partial(dadd.worker.handlers.register,
app.config['HOST'],
app.config['PORT'])
server.monitor('Dadd_Heartbeat', register, 2)
server.mount(app, '/')
server.run(app.config)
|
a499f5fbe63f03a3c404a28e0c1286af74382e09 | tests/utils.py | tests/utils.py | import os
from django.core.files.base import ContentFile
from imagekit.lib import Image, StringIO
from .models import Photo
import pickle
def get_image_file():
"""
See also:
http://en.wikipedia.org/wiki/Lenna
http://sipi.usc.edu/database/database.php?volume=misc&image=12
"""
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets', 'lenna-800x600-white-border.jpg')
tmp = StringIO()
tmp.write(open(path, 'r+b').read())
tmp.seek(0)
return tmp
def create_image():
return Image.open(get_image_file())
def create_instance(model_class, image_name):
instance = model_class()
img = get_image_file()
file = ContentFile(img.read())
instance.original_image = file
instance.original_image.save(image_name, file)
instance.save()
img.close()
return instance
def create_photo(name):
return create_instance(Photo, name)
def pickleback(obj):
pickled = StringIO()
pickle.dump(obj, pickled)
pickled.seek(0)
return pickle.load(pickled)
| import os
from django.core.files.base import ContentFile
from imagekit.lib import Image, StringIO
from tempfile import NamedTemporaryFile
from .models import Photo
import pickle
def _get_image_file(file_factory):
"""
See also:
http://en.wikipedia.org/wiki/Lenna
http://sipi.usc.edu/database/database.php?volume=misc&image=12
"""
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets', 'lenna-800x600-white-border.jpg')
tmp = file_factory()
tmp.write(open(path, 'r+b').read())
tmp.seek(0)
return tmp
def get_image_file():
return _get_image_file(StringIO)
def get_named_image_file():
return _get_image_file(NamedTemporaryFile)
def create_image():
return Image.open(get_image_file())
def create_instance(model_class, image_name):
instance = model_class()
img = get_image_file()
file = ContentFile(img.read())
instance.original_image = file
instance.original_image.save(image_name, file)
instance.save()
img.close()
return instance
def create_photo(name):
return create_instance(Photo, name)
def pickleback(obj):
pickled = StringIO()
pickle.dump(obj, pickled)
pickled.seek(0)
return pickle.load(pickled)
| Add util for generating named image file | Add util for generating named image file
| Python | bsd-3-clause | FundedByMe/django-imagekit,tawanda/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit | import os
from django.core.files.base import ContentFile
from imagekit.lib import Image, StringIO
+ from tempfile import NamedTemporaryFile
from .models import Photo
import pickle
- def get_image_file():
+ def _get_image_file(file_factory):
"""
See also:
http://en.wikipedia.org/wiki/Lenna
http://sipi.usc.edu/database/database.php?volume=misc&image=12
"""
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets', 'lenna-800x600-white-border.jpg')
- tmp = StringIO()
+ tmp = file_factory()
tmp.write(open(path, 'r+b').read())
tmp.seek(0)
return tmp
+
+
+ def get_image_file():
+ return _get_image_file(StringIO)
+
+
+ def get_named_image_file():
+ return _get_image_file(NamedTemporaryFile)
def create_image():
return Image.open(get_image_file())
def create_instance(model_class, image_name):
instance = model_class()
img = get_image_file()
file = ContentFile(img.read())
instance.original_image = file
instance.original_image.save(image_name, file)
instance.save()
img.close()
return instance
def create_photo(name):
return create_instance(Photo, name)
def pickleback(obj):
pickled = StringIO()
pickle.dump(obj, pickled)
pickled.seek(0)
return pickle.load(pickled)
| Add util for generating named image file | ## Code Before:
import os
from django.core.files.base import ContentFile
from imagekit.lib import Image, StringIO
from .models import Photo
import pickle
def get_image_file():
"""
See also:
http://en.wikipedia.org/wiki/Lenna
http://sipi.usc.edu/database/database.php?volume=misc&image=12
"""
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets', 'lenna-800x600-white-border.jpg')
tmp = StringIO()
tmp.write(open(path, 'r+b').read())
tmp.seek(0)
return tmp
def create_image():
return Image.open(get_image_file())
def create_instance(model_class, image_name):
instance = model_class()
img = get_image_file()
file = ContentFile(img.read())
instance.original_image = file
instance.original_image.save(image_name, file)
instance.save()
img.close()
return instance
def create_photo(name):
return create_instance(Photo, name)
def pickleback(obj):
pickled = StringIO()
pickle.dump(obj, pickled)
pickled.seek(0)
return pickle.load(pickled)
## Instruction:
Add util for generating named image file
## Code After:
import os
from django.core.files.base import ContentFile
from imagekit.lib import Image, StringIO
from tempfile import NamedTemporaryFile
from .models import Photo
import pickle
def _get_image_file(file_factory):
"""
See also:
http://en.wikipedia.org/wiki/Lenna
http://sipi.usc.edu/database/database.php?volume=misc&image=12
"""
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets', 'lenna-800x600-white-border.jpg')
tmp = file_factory()
tmp.write(open(path, 'r+b').read())
tmp.seek(0)
return tmp
def get_image_file():
return _get_image_file(StringIO)
def get_named_image_file():
return _get_image_file(NamedTemporaryFile)
def create_image():
return Image.open(get_image_file())
def create_instance(model_class, image_name):
instance = model_class()
img = get_image_file()
file = ContentFile(img.read())
instance.original_image = file
instance.original_image.save(image_name, file)
instance.save()
img.close()
return instance
def create_photo(name):
return create_instance(Photo, name)
def pickleback(obj):
pickled = StringIO()
pickle.dump(obj, pickled)
pickled.seek(0)
return pickle.load(pickled)
|
8004590503914d9674a0b17f412c8d1836f5e1a1 | testScript.py | testScript.py | from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821')
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
| from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs
##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
| Add second author for testing purposes | Add second author for testing purposes
| Python | bsd-3-clause | ElsevierDev/elsapy | from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
- myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821')
+ myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs
+ ##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs
+
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
| Add second author for testing purposes | ## Code Before:
from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821')
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
## Instruction:
Add second author for testing purposes
## Code After:
from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs
##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
|