Dataset schema:

| Column | Type |
|---|---|
| commit | string (40 chars) |
| old_file | string (4-118 chars) |
| new_file | string (4-118 chars) |
| old_contents | string (10-2.94k chars) |
| new_contents | string (21-3.18k chars) |
| subject | string (16-444 chars) |
| message | string (17-2.63k chars) |
| lang | string (1 distinct value) |
| license | string (13 distinct values) |
| repos | string (5-43k chars) |
| ndiff | string (51-3.32k chars) |
| instruction | string (16-444 chars) |
| content | string (133-4.32k chars) |

| commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | ndiff | instruction | content |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
211b7b28e2d8c7ed0e0f67bea1a1a68b520a53b1 | pagerduty_events_api/pagerduty_service.py | pagerduty_events_api/pagerduty_service.py | from pagerduty_events_api.pagerduty_incident import PagerdutyIncident
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyService:
def __init__(self, key):
self.__service_key = key
def get_service_key(self):
return self.__service_key
def trigger(self, description, additional_params={}):
payload = {'service_key': self.__service_key,
'event_type': 'trigger',
'description': description}
incident_data = PagerdutyRestClient().post(
self.__append_additional_info_to_payload(payload, additional_params)
)
return PagerdutyIncident(self.__service_key, incident_data['incident_key'])
@staticmethod
def __append_additional_info_to_payload(mandatory_data, additional_data):
return {**additional_data, **mandatory_data}
| from pagerduty_events_api.pagerduty_incident import PagerdutyIncident
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyService:
def __init__(self, key):
self.__service_key = key
def get_service_key(self):
return self.__service_key
def trigger(self, description, additional_params={}):
incident = PagerdutyIncident(self.__service_key)
incident.trigger(description, additional_params)
return incident
| Use "blank" PD incident instance for triggering through PD service. | Use "blank" PD incident instance for triggering through PD service.
| Python | mit | BlasiusVonSzerencsi/pagerduty-events-api | from pagerduty_events_api.pagerduty_incident import PagerdutyIncident
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyService:
def __init__(self, key):
self.__service_key = key
def get_service_key(self):
return self.__service_key
def trigger(self, description, additional_params={}):
+ incident = PagerdutyIncident(self.__service_key)
+ incident.trigger(description, additional_params)
- payload = {'service_key': self.__service_key,
- 'event_type': 'trigger',
- 'description': description}
+ return incident
- incident_data = PagerdutyRestClient().post(
- self.__append_additional_info_to_payload(payload, additional_params)
- )
- return PagerdutyIncident(self.__service_key, incident_data['incident_key'])
-
- @staticmethod
- def __append_additional_info_to_payload(mandatory_data, additional_data):
- return {**additional_data, **mandatory_data}
- | Use "blank" PD incident instance for triggering through PD service. | ## Code Before:
from pagerduty_events_api.pagerduty_incident import PagerdutyIncident
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyService:
def __init__(self, key):
self.__service_key = key
def get_service_key(self):
return self.__service_key
def trigger(self, description, additional_params={}):
payload = {'service_key': self.__service_key,
'event_type': 'trigger',
'description': description}
incident_data = PagerdutyRestClient().post(
self.__append_additional_info_to_payload(payload, additional_params)
)
return PagerdutyIncident(self.__service_key, incident_data['incident_key'])
@staticmethod
def __append_additional_info_to_payload(mandatory_data, additional_data):
return {**additional_data, **mandatory_data}
## Instruction:
Use "blank" PD incident instance for triggering through PD service.
## Code After:
from pagerduty_events_api.pagerduty_incident import PagerdutyIncident
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyService:
def __init__(self, key):
self.__service_key = key
def get_service_key(self):
return self.__service_key
def trigger(self, description, additional_params={}):
incident = PagerdutyIncident(self.__service_key)
incident.trigger(description, additional_params)
return incident
|
524d5427d54342f26008a5b527140d4158f70edf | tests/test_extension.py | tests/test_extension.py | from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
| from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
| Clear websocket data to try and fix Travis | Clear websocket data to try and fix Travis
| Python | agpl-3.0 | palfrey/mopidy-tachikoma,palfrey/mopidy-tachikoma | from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
+ get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
| Clear websocket data to try and fix Travis | ## Code Before:
from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
## Instruction:
Clear websocket data to try and fix Travis
## Code After:
from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
|
87d2e511b0fedd2a09610c35337336d443a756a4 | tests/unit/cli/filewatch/test_stat.py | tests/unit/cli/filewatch/test_stat.py | import os
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
assert len(calls) == 1
| import os
import time
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
for _ in range(10):
if len(calls) == 1:
break
time.sleep(0.2)
else:
raise AssertionError("Expected callback to be invoked but was not.")
| Add polling loop to allow time for callback to be invoked | Add polling loop to allow time for callback to be invoked
| Python | apache-2.0 | awslabs/chalice | import os
+ import time
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
+ for _ in range(10):
- assert len(calls) == 1
+ if len(calls) == 1:
+ break
+ time.sleep(0.2)
+ else:
+ raise AssertionError("Expected callback to be invoked but was not.")
| Add polling loop to allow time for callback to be invoked | ## Code Before:
import os
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
assert len(calls) == 1
## Instruction:
Add polling loop to allow time for callback to be invoked
## Code After:
import os
import time
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
for _ in range(10):
if len(calls) == 1:
break
time.sleep(0.2)
else:
raise AssertionError("Expected callback to be invoked but was not.")
|
ce12cd0f56997dc6d33a9e4e7c13df27d05a133b | Python/Tests/TestData/DebuggerProject/ThreadJoin.py | Python/Tests/TestData/DebuggerProject/ThreadJoin.py | from threading import Thread
global exit_flag
exit_flag = False
def g():
i = 1
while not exit_flag:
i = (i + 1) % 100000000
if i % 100000 == 0: print("f making progress: {0}".format(i))
def f():
g()
from threading import Thread
def n():
t1 = Thread(target=f,name="F_thread")
t1.start()
t1.join()
def m():
n()
if __name__ == '__main__':
m()
| from threading import Thread
global exit_flag
exit_flag = False
def g():
i = 1
while not exit_flag:
i = (i + 1) % 100000000
if i % 100000 == 0: print("f making progress: {0}".format(i))
def f():
g()
def n():
t1 = Thread(target=f,name="F_thread")
t1.start()
t1.join()
def m():
n()
if __name__ == '__main__':
m()
| Remove redundant import from test script. | Remove redundant import from test script.
| Python | apache-2.0 | zooba/PTVS,zooba/PTVS,huguesv/PTVS,int19h/PTVS,huguesv/PTVS,huguesv/PTVS,Microsoft/PTVS,int19h/PTVS,zooba/PTVS,int19h/PTVS,int19h/PTVS,huguesv/PTVS,Microsoft/PTVS,int19h/PTVS,Microsoft/PTVS,zooba/PTVS,Microsoft/PTVS,int19h/PTVS,Microsoft/PTVS,zooba/PTVS,huguesv/PTVS,zooba/PTVS,Microsoft/PTVS,huguesv/PTVS | from threading import Thread
global exit_flag
exit_flag = False
def g():
i = 1
while not exit_flag:
i = (i + 1) % 100000000
if i % 100000 == 0: print("f making progress: {0}".format(i))
def f():
g()
- from threading import Thread
-
def n():
t1 = Thread(target=f,name="F_thread")
t1.start()
t1.join()
def m():
n()
if __name__ == '__main__':
m()
| Remove redundant import from test script. | ## Code Before:
from threading import Thread
global exit_flag
exit_flag = False
def g():
i = 1
while not exit_flag:
i = (i + 1) % 100000000
if i % 100000 == 0: print("f making progress: {0}".format(i))
def f():
g()
from threading import Thread
def n():
t1 = Thread(target=f,name="F_thread")
t1.start()
t1.join()
def m():
n()
if __name__ == '__main__':
m()
## Instruction:
Remove redundant import from test script.
## Code After:
from threading import Thread
global exit_flag
exit_flag = False
def g():
i = 1
while not exit_flag:
i = (i + 1) % 100000000
if i % 100000 == 0: print("f making progress: {0}".format(i))
def f():
g()
def n():
t1 = Thread(target=f,name="F_thread")
t1.start()
t1.join()
def m():
n()
if __name__ == '__main__':
m()
|
e7942afdc1e93aec57e4e02d862a91eab9b5c0cb | trackingtermites/termite.py | trackingtermites/termite.py | from collections import namedtuple
class Termite:
def __init__(self, label, color):
self.label = label
self.color = color
self.trail = []
self.tracker = None
def to_csv(self):
with open('data/{}-trail.csv'.format(self.label), mode='w') as trail_out:
trail_out.write('label,frame,time,x,y\n')
for record in self.trail:
trail_out.write('{},{},{},{},{},{},{}\n'.format(self.label,
record.frame, record.time, record.x, record.y,
record.xoffset, record.yoffset))
| from collections import namedtuple
class Termite:
def __init__(self, label, color):
self.label = label
self.color = color
self.trail = []
self.tracker = None
def to_csv(self):
with open('data/{}-trail.csv'.format(self.label), mode='w') as trail_out:
trail_out.write('label,frame,time,x,y,xoffset,yoffset\n')
for record in self.trail:
trail_out.write('{},{},{},{},{},{},{}\n'.format(self.label,
record.frame, record.time, record.x, record.y,
record.xoffset, record.yoffset))
| Include missing columns in output | Include missing columns in output
| Python | mit | dmrib/trackingtermites | from collections import namedtuple
class Termite:
def __init__(self, label, color):
self.label = label
self.color = color
self.trail = []
self.tracker = None
def to_csv(self):
with open('data/{}-trail.csv'.format(self.label), mode='w') as trail_out:
- trail_out.write('label,frame,time,x,y\n')
+ trail_out.write('label,frame,time,x,y,xoffset,yoffset\n')
for record in self.trail:
trail_out.write('{},{},{},{},{},{},{}\n'.format(self.label,
record.frame, record.time, record.x, record.y,
record.xoffset, record.yoffset))
| Include missing columns in output | ## Code Before:
from collections import namedtuple
class Termite:
def __init__(self, label, color):
self.label = label
self.color = color
self.trail = []
self.tracker = None
def to_csv(self):
with open('data/{}-trail.csv'.format(self.label), mode='w') as trail_out:
trail_out.write('label,frame,time,x,y\n')
for record in self.trail:
trail_out.write('{},{},{},{},{},{},{}\n'.format(self.label,
record.frame, record.time, record.x, record.y,
record.xoffset, record.yoffset))
## Instruction:
Include missing columns in output
## Code After:
from collections import namedtuple
class Termite:
def __init__(self, label, color):
self.label = label
self.color = color
self.trail = []
self.tracker = None
def to_csv(self):
with open('data/{}-trail.csv'.format(self.label), mode='w') as trail_out:
trail_out.write('label,frame,time,x,y,xoffset,yoffset\n')
for record in self.trail:
trail_out.write('{},{},{},{},{},{},{}\n'.format(self.label,
record.frame, record.time, record.x, record.y,
record.xoffset, record.yoffset))
|
b0814b95ea854f7b3f0b9db48ae9beee078c2a30 | versions/software/openjdk.py | versions/software/openjdk.py | import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def downloadable_version(url):
"""Strip the version out of the Zulu OpenJDK manual download link."""
# example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
filename = url[url.rfind('/') + 1:]
jdk_version = get_text_between(filename, '-jdk', '-')
version, update = jdk_version.rsplit('.', 1)
return f'1.{version}_{update}'
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
div = soup.find('div', class_='latest_area')
if div:
zip_filename = re.compile('\.zip$')
tag = div.find('a', class_='r-download', href=zip_filename)
if tag:
return downloadable_version(tag.attrs['href'])
return 'Unknown'
| import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
# "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
zip_filename = re.compile('\.zip$')
for tag in soup.find_all('a', class_='r-download', href=zip_filename):
filename = tag.attrs['href']
zulu = get_text_between(filename, 'bin/zulu', '-')
jdk = get_text_between(filename, 'jdk', '-')
if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
return zulu
elif installed[0] == '1' and jdk[0] == installed[2]:
version, update = jdk.rsplit('.', 1)
return f'1.{version}_{update}'
return 'Unknown'
| Update OpenJDK version to support both 8 and 9. | Update OpenJDK version to support both 8 and 9.
| Python | mit | mchung94/latest-versions | import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
+ # "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
- def downloadable_version(url):
- """Strip the version out of the Zulu OpenJDK manual download link."""
- # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
- filename = url[url.rfind('/') + 1:]
- jdk_version = get_text_between(filename, '-jdk', '-')
- version, update = jdk_version.rsplit('.', 1)
- return f'1.{version}_{update}'
-
-
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
+ installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
- div = soup.find('div', class_='latest_area')
- if div:
- zip_filename = re.compile('\.zip$')
+ zip_filename = re.compile('\.zip$')
- tag = div.find('a', class_='r-download', href=zip_filename)
+ for tag in soup.find_all('a', class_='r-download', href=zip_filename):
- if tag:
- return downloadable_version(tag.attrs['href'])
+ filename = tag.attrs['href']
+ zulu = get_text_between(filename, 'bin/zulu', '-')
+ jdk = get_text_between(filename, 'jdk', '-')
+ if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
+ return zulu
+ elif installed[0] == '1' and jdk[0] == installed[2]:
+ version, update = jdk.rsplit('.', 1)
+ return f'1.{version}_{update}'
return 'Unknown'
| Update OpenJDK version to support both 8 and 9. | ## Code Before:
import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def downloadable_version(url):
"""Strip the version out of the Zulu OpenJDK manual download link."""
# example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
filename = url[url.rfind('/') + 1:]
jdk_version = get_text_between(filename, '-jdk', '-')
version, update = jdk_version.rsplit('.', 1)
return f'1.{version}_{update}'
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
div = soup.find('div', class_='latest_area')
if div:
zip_filename = re.compile('\.zip$')
tag = div.find('a', class_='r-download', href=zip_filename)
if tag:
return downloadable_version(tag.attrs['href'])
return 'Unknown'
## Instruction:
Update OpenJDK version to support both 8 and 9.
## Code After:
import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
# "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
zip_filename = re.compile('\.zip$')
for tag in soup.find_all('a', class_='r-download', href=zip_filename):
filename = tag.attrs['href']
zulu = get_text_between(filename, 'bin/zulu', '-')
jdk = get_text_between(filename, 'jdk', '-')
if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
return zulu
elif installed[0] == '1' and jdk[0] == installed[2]:
version, update = jdk.rsplit('.', 1)
return f'1.{version}_{update}'
return 'Unknown'
|
3b4c645792c1a58cdce3dc25171723e7139d66da | workflows/api/permissions.py | workflows/api/permissions.py | from rest_framework import permissions
from workflows.models import *
class IsAdminOrSelf(permissions.BasePermission):
def has_permission(self, request, view):
if request.user and request.user.is_authenticated():
# Don't allow adding widgets to workflows not owned by the user
if view.model == Widget and 'workflow' in request.data:
serializer = view.serializer_class(data=request.data)
serializer.is_valid()
workflow = serializer.validated_data['workflow']
return workflow.user == request.user
if view.model == Workflow and 'staff_pick' in request.data:
return request.user.is_staff
else:
return True
def has_object_permission(self, request, view, obj):
if request.user and request.user.is_authenticated():
if request.user.is_superuser:
return True
# Allow only editing of the user's workflow objects
if isinstance(obj, Workflow):
return obj.user == request.user
if isinstance(obj, Widget):
return obj.workflow.user == request.user
if isinstance(obj, Connection):
return obj.workflow.user == request.user
if isinstance(obj, Input):
return obj.widget.workflow.user == request.user
if isinstance(obj, Output):
return obj.widget.workflow.user == request.user
return False
| from rest_framework import permissions
from workflows.models import *
class IsAdminOrSelf(permissions.BasePermission):
def has_permission(self, request, view):
if request.user and request.user.is_authenticated():
# Don't allow adding widgets to workflows not owned by the user
if view.model == Widget and 'workflow' in request.data:
serializer = view.serializer_class(data=request.data)
serializer.is_valid()
workflow = serializer.validated_data['workflow']
if request.GET.get('preview', '0') == '1':
if workflow.public:
return True
return workflow.user == request.user
if view.model == Workflow and 'staff_pick' in request.data:
return request.user.is_staff
else:
return True
def has_object_permission(self, request, view, obj):
if request.user and request.user.is_authenticated():
if request.user.is_superuser:
return True
# Allow only editing of the user's workflow objects
if isinstance(obj, Workflow):
return obj.user == request.user
if isinstance(obj, Widget):
return obj.workflow.user == request.user
if isinstance(obj, Connection):
return obj.workflow.user == request.user
if isinstance(obj, Input):
return obj.widget.workflow.user == request.user
if isinstance(obj, Output):
return obj.widget.workflow.user == request.user
return False
| Return True for preview if workflow public | Return True for preview if workflow public
| Python | mit | xflows/clowdflows-backend,xflows/clowdflows-backend,xflows/clowdflows-backend,xflows/clowdflows-backend | from rest_framework import permissions
from workflows.models import *
class IsAdminOrSelf(permissions.BasePermission):
def has_permission(self, request, view):
if request.user and request.user.is_authenticated():
# Don't allow adding widgets to workflows not owned by the user
if view.model == Widget and 'workflow' in request.data:
serializer = view.serializer_class(data=request.data)
serializer.is_valid()
workflow = serializer.validated_data['workflow']
+ if request.GET.get('preview', '0') == '1':
+ if workflow.public:
+ return True
return workflow.user == request.user
if view.model == Workflow and 'staff_pick' in request.data:
return request.user.is_staff
else:
return True
def has_object_permission(self, request, view, obj):
if request.user and request.user.is_authenticated():
if request.user.is_superuser:
return True
# Allow only editing of the user's workflow objects
if isinstance(obj, Workflow):
return obj.user == request.user
if isinstance(obj, Widget):
return obj.workflow.user == request.user
if isinstance(obj, Connection):
return obj.workflow.user == request.user
if isinstance(obj, Input):
return obj.widget.workflow.user == request.user
if isinstance(obj, Output):
return obj.widget.workflow.user == request.user
return False
| Return True for preview if workflow public | ## Code Before:
from rest_framework import permissions
from workflows.models import *
class IsAdminOrSelf(permissions.BasePermission):
def has_permission(self, request, view):
if request.user and request.user.is_authenticated():
# Don't allow adding widgets to workflows not owned by the user
if view.model == Widget and 'workflow' in request.data:
serializer = view.serializer_class(data=request.data)
serializer.is_valid()
workflow = serializer.validated_data['workflow']
return workflow.user == request.user
if view.model == Workflow and 'staff_pick' in request.data:
return request.user.is_staff
else:
return True
def has_object_permission(self, request, view, obj):
if request.user and request.user.is_authenticated():
if request.user.is_superuser:
return True
# Allow only editing of the user's workflow objects
if isinstance(obj, Workflow):
return obj.user == request.user
if isinstance(obj, Widget):
return obj.workflow.user == request.user
if isinstance(obj, Connection):
return obj.workflow.user == request.user
if isinstance(obj, Input):
return obj.widget.workflow.user == request.user
if isinstance(obj, Output):
return obj.widget.workflow.user == request.user
return False
## Instruction:
Return True for preview if workflow public
## Code After:
from rest_framework import permissions
from workflows.models import *
class IsAdminOrSelf(permissions.BasePermission):
def has_permission(self, request, view):
if request.user and request.user.is_authenticated():
# Don't allow adding widgets to workflows not owned by the user
if view.model == Widget and 'workflow' in request.data:
serializer = view.serializer_class(data=request.data)
serializer.is_valid()
workflow = serializer.validated_data['workflow']
if request.GET.get('preview', '0') == '1':
if workflow.public:
return True
return workflow.user == request.user
if view.model == Workflow and 'staff_pick' in request.data:
return request.user.is_staff
else:
return True
def has_object_permission(self, request, view, obj):
if request.user and request.user.is_authenticated():
if request.user.is_superuser:
return True
# Allow only editing of the user's workflow objects
if isinstance(obj, Workflow):
return obj.user == request.user
if isinstance(obj, Widget):
return obj.workflow.user == request.user
if isinstance(obj, Connection):
return obj.workflow.user == request.user
if isinstance(obj, Input):
return obj.widget.workflow.user == request.user
if isinstance(obj, Output):
return obj.widget.workflow.user == request.user
return False
|
452ad6f3de797285a50094a4a145714e75204d95 | bake/cmdline.py | bake/cmdline.py |
import api as bake
import sys
# This
def main(args=sys.argv[1:]):
# Set up command line argument options
optparser = bake.make_optparser()
options, arguments = optparser.parse_args()
bake.process_options(options)
## Configuration is stored in the bake.cfg file in the current directory
config = bake.load_config()
## End processing of command line parameters
## Prepare for big loop
# The overwrite command pushes lines onto the top of the bake parameter file
if options.overwrite:
lines = options.overwrite
else:
lines = []
# Load bake parameter file
hin = open(options.file,'r')
lines += hin.readlines()
hin.close()
# This mixIterator object is kind of the core of bake.
(label, tokens,
mixIterator) = bake.make_iterator(config['label']['label_tag'],
config['label']['pattern'],
lines, options.slice_start,
options.slice_end)
## This is the main loop, iterating over each set of values
bake.default_loop(label, tokens, mixIterator, config, options)
if __name__ == '__main__':
main()
|
import api as bake
import sys
def main(args=sys.argv[1:]):
# Set up command line argument options
optparser = bake.make_optparser()
options, arguments = optparser.parse_args()
bake.process_options(options)
## Configuration is stored in the bake.cfg file in the current directory
config = bake.load_config()
## End processing of command line parameters
## Prepare for big loop
# The overwrite command pushes lines onto the top of the bake parameter file
if options.overwrite:
lines = options.overwrite
else:
lines = []
# Load bake parameter file
hin = open(options.file, 'r')
lines += hin.readlines()
hin.close()
# This mixIterator object is kind of the core of bake.
(label, tokens,
mixIterator) = bake.make_iterator(config['label']['label_tag'],
config['label']['pattern'],
lines, options.slice_start,
options.slice_end)
## This is the main loop, iterating over each set of values
bake.default_loop(label, tokens, mixIterator, config, options)
if __name__ == '__main__':
main()
| Make pep8 run mostly cleanly | Make pep8 run mostly cleanly
| Python | mit | AlexSzatmary/bake |
import api as bake
import sys
- # This
+
def main(args=sys.argv[1:]):
# Set up command line argument options
optparser = bake.make_optparser()
options, arguments = optparser.parse_args()
bake.process_options(options)
## Configuration is stored in the bake.cfg file in the current directory
config = bake.load_config()
## End processing of command line parameters
## Prepare for big loop
# The overwrite command pushes lines onto the top of the bake parameter file
if options.overwrite:
lines = options.overwrite
else:
lines = []
# Load bake parameter file
- hin = open(options.file,'r')
+ hin = open(options.file, 'r')
lines += hin.readlines()
hin.close()
# This mixIterator object is kind of the core of bake.
- (label, tokens,
+ (label, tokens,
mixIterator) = bake.make_iterator(config['label']['label_tag'],
- config['label']['pattern'],
+ config['label']['pattern'],
- lines, options.slice_start,
+ lines, options.slice_start,
options.slice_end)
-
+
## This is the main loop, iterating over each set of values
bake.default_loop(label, tokens, mixIterator, config, options)
if __name__ == '__main__':
main()
| Make pep8 run mostly cleanly | ## Code Before:
import api as bake
import sys
# This
def main(args=sys.argv[1:]):
# Set up command line argument options
optparser = bake.make_optparser()
options, arguments = optparser.parse_args()
bake.process_options(options)
## Configuration is stored in the bake.cfg file in the current directory
config = bake.load_config()
## End processing of command line parameters
## Prepare for big loop
# The overwrite command pushes lines onto the top of the bake parameter file
if options.overwrite:
lines = options.overwrite
else:
lines = []
# Load bake parameter file
hin = open(options.file,'r')
lines += hin.readlines()
hin.close()
# This mixIterator object is kind of the core of bake.
(label, tokens,
mixIterator) = bake.make_iterator(config['label']['label_tag'],
config['label']['pattern'],
lines, options.slice_start,
options.slice_end)
## This is the main loop, iterating over each set of values
bake.default_loop(label, tokens, mixIterator, config, options)
if __name__ == '__main__':
main()
## Instruction:
Make pep8 run mostly cleanly
## Code After:
import api as bake
import sys
def main(args=sys.argv[1:]):
# Set up command line argument options
optparser = bake.make_optparser()
options, arguments = optparser.parse_args()
bake.process_options(options)
## Configuration is stored in the bake.cfg file in the current directory
config = bake.load_config()
## End processing of command line parameters
## Prepare for big loop
# The overwrite command pushes lines onto the top of the bake parameter file
if options.overwrite:
lines = options.overwrite
else:
lines = []
# Load bake parameter file
hin = open(options.file, 'r')
lines += hin.readlines()
hin.close()
# This mixIterator object is kind of the core of bake.
(label, tokens,
mixIterator) = bake.make_iterator(config['label']['label_tag'],
config['label']['pattern'],
lines, options.slice_start,
options.slice_end)
## This is the main loop, iterating over each set of values
bake.default_loop(label, tokens, mixIterator, config, options)
if __name__ == '__main__':
main()
|
d5cf661b2658d7f9a0f5436444373202e514bf37 | src/psd_tools2/__init__.py | src/psd_tools2/__init__.py | from __future__ import absolute_import, unicode_literals
from .api.psd_image import PSDImage
| from __future__ import absolute_import, unicode_literals
from .api.psd_image import PSDImage
from .api.composer import compose
| Include compose in the top level | Include compose in the top level
| Python | mit | kmike/psd-tools,psd-tools/psd-tools,kmike/psd-tools | from __future__ import absolute_import, unicode_literals
from .api.psd_image import PSDImage
+ from .api.composer import compose
| Include compose in the top level | ## Code Before:
from __future__ import absolute_import, unicode_literals
from .api.psd_image import PSDImage
## Instruction:
Include compose in the top level
## Code After:
from __future__ import absolute_import, unicode_literals
from .api.psd_image import PSDImage
from .api.composer import compose
|
2fea7b008336e1960efb375c63a4cc14053bc590 | src/wikicurses/__init__.py | src/wikicurses/__init__.py | import pkgutil
from enum import IntEnum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
| import pkgutil
from enum import Enum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
| Create BitEnum class for bitfields | Create BitEnum class for bitfields
| Python | mit | ids1024/wikicurses | import pkgutil
- from enum import IntEnum
+ from enum import Enum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
- class formats(IntEnum):
- i, b, blockquote = (1<<i for i in range(3))
+ class BitEnum(int, Enum):
+ def __new__(cls, *args):
+ value = 1 << len(cls.__members__)
+ return int.__new__(cls, value)
+ formats = BitEnum("formats", "i b blockquote")
+ | Create BitEnum class for bitfields | ## Code Before:
import pkgutil
from enum import IntEnum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
## Instruction:
Create BitEnum class for bitfields
## Code After:
import pkgutil
from enum import Enum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
|
e3a1d4998494143491b49312673ceb84ea98b7f8 | RatS/tmdb/tmdb_ratings_inserter.py | RatS/tmdb/tmdb_ratings_inserter.py | import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
return 'https://www.themoviedb.org/account/{username}/import'.format(
username=self.site.USERNAME
)
def pre_upload_action(self):
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
| import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
return 'https://www.themoviedb.org/settings/import-list'
def pre_upload_action(self):
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
| Adjust TMDB import page URL | Adjust TMDB import page URL
| Python | agpl-3.0 | StegSchreck/RatS,StegSchreck/RatS,StegSchreck/RatS | import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
- return 'https://www.themoviedb.org/account/{username}/import'.format(
+ return 'https://www.themoviedb.org/settings/import-list'
- username=self.site.USERNAME
- )
def pre_upload_action(self):
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
| Adjust TMDB import page URL | ## Code Before:
import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
return 'https://www.themoviedb.org/account/{username}/import'.format(
username=self.site.USERNAME
)
def pre_upload_action(self):
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
## Instruction:
Adjust TMDB import page URL
## Code After:
import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
return 'https://www.themoviedb.org/settings/import-list'
def pre_upload_action(self):
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
|
2afd2467c16969b10496ae96e17b9dce7911f232 | db.py | db.py | import sqlite3
connection = sqlite3.connect('data.db')
class SavedRoll:
@staticmethod
def save(user, name, args):
pass
@staticmethod
def get(user, name):
pass
@staticmethod
def delete(user, name):
pass
| class SavedRollManager:
"""
Class for managing saved rolls.
Attributes:
connection (sqlite3.Connection): Database connection used by manager
"""
def __init__(self, connection):
"""
Create a SavedRollManager instance.
Args:
connection (sqlite3.Connection): Database connection to use
"""
self.conn = connection
def save(self, user, chat, name, args):
"""
Save a roll to the database.
Args:
user (int): User ID to save roll for
chat (int): Chat ID to save roll for
name: Name of saved roll
args: Arguments to save for roll
"""
pass
def get(self, user, chat, name):
"""
Get a saved roll from the database.
Args:
user (int): User ID to get roll for
chat (int): Chat ID to get roll for
name: Name of saved roll
Returns:
list: List of arguments of saved roll
"""
pass
def delete(self, user, chat, name):
"""
Delete a saved roll from the database.
Args:
user (int): User ID to delete roll from
chat (int): Chat ID to delete roll from
name: Name of saved roll
"""
pass
| Make SavedRollManager less static, also docstrings | Make SavedRollManager less static, also docstrings
| Python | mit | foxscotch/foxrollbot | - import sqlite3
+ class SavedRollManager:
+ """
+ Class for managing saved rolls.
+ Attributes:
+ connection (sqlite3.Connection): Database connection used by manager
+ """
- connection = sqlite3.connect('data.db')
+ def __init__(self, connection):
+ """
+ Create a SavedRollManager instance.
+ Args:
+ connection (sqlite3.Connection): Database connection to use
+ """
+ self.conn = connection
- class SavedRoll:
- @staticmethod
- def save(user, name, args):
+ def save(self, user, chat, name, args):
+ """
+ Save a roll to the database.
+
+ Args:
+ user (int): User ID to save roll for
+ chat (int): Chat ID to save roll for
+ name: Name of saved roll
+ args: Arguments to save for roll
+ """
pass
- @staticmethod
- def get(user, name):
+ def get(self, user, chat, name):
+ """
+ Get a saved roll from the database.
+
+ Args:
+ user (int): User ID to get roll for
+ chat (int): Chat ID to get roll for
+ name: Name of saved roll
+
+ Returns:
+ list: List of arguments of saved roll
+ """
pass
- @staticmethod
- def delete(user, name):
+ def delete(self, user, chat, name):
+ """
+ Delete a saved roll from the database.
+
+ Args:
+ user (int): User ID to delete roll from
+ chat (int): Chat ID to delete roll from
+ name: Name of saved roll
+ """
pass
| Make SavedRollManager less static, also docstrings | ## Code Before:
import sqlite3
connection = sqlite3.connect('data.db')
class SavedRoll:
@staticmethod
def save(user, name, args):
pass
@staticmethod
def get(user, name):
pass
@staticmethod
def delete(user, name):
pass
## Instruction:
Make SavedRollManager less static, also docstrings
## Code After:
class SavedRollManager:
"""
Class for managing saved rolls.
Attributes:
connection (sqlite3.Connection): Database connection used by manager
"""
def __init__(self, connection):
"""
Create a SavedRollManager instance.
Args:
connection (sqlite3.Connection): Database connection to use
"""
self.conn = connection
def save(self, user, chat, name, args):
"""
Save a roll to the database.
Args:
user (int): User ID to save roll for
chat (int): Chat ID to save roll for
name: Name of saved roll
args: Arguments to save for roll
"""
pass
def get(self, user, chat, name):
"""
Get a saved roll from the database.
Args:
user (int): User ID to get roll for
chat (int): Chat ID to get roll for
name: Name of saved roll
Returns:
list: List of arguments of saved roll
"""
pass
def delete(self, user, chat, name):
"""
Delete a saved roll from the database.
Args:
user (int): User ID to delete roll from
chat (int): Chat ID to delete roll from
name: Name of saved roll
"""
pass
|
648c7fb94f92e8ef722af8c9462c9ff65bf643fc | intelmq/bots/collectors/mail/collector_mail_body.py | intelmq/bots/collectors/mail/collector_mail_body.py | from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
| from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
report["extra.email_received"] = message.date
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
| Insert date when email was received | Insert date when email was received
Sometimes we receive email reports like "this is happening right now" and there is no date/time included. So if we process emails once per hour - we don't have info about event time. Additional field `extra.email_received` in the mail body collector would help. | Python | agpl-3.0 | aaronkaplan/intelmq,aaronkaplan/intelmq,certtools/intelmq,certtools/intelmq,certtools/intelmq,aaronkaplan/intelmq | from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
+ report["extra.email_received"] = message.date
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
| Insert date when email was received | ## Code Before:
from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
## Instruction:
Insert date when email was received
## Code After:
from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
report["extra.email_received"] = message.date
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
|
8286aee8eca008e2e469d49e7a426828e4f6c2bf | bin/s3imageresize.py | bin/s3imageresize.py |
import argparse
from s3imageresize import resize_image_folder
parser = argparse.ArgumentParser(description='Upload a file to Amazon S3 and rotate old backups.')
parser.add_argument('bucket', help="Name of the Amazon S3 bucket to save the backup file to.")
parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
parser.add_argument('psize', help="Path to the file to upload.")
args = parser.parse_args()
resize_image_folder(args.bucket, args.prefix, args.psize)
|
import argparse
from s3imageresize import resize_image_folder
parser = argparse.ArgumentParser(description='Resize all images stored in a folder on Amazon S3.')
parser.add_argument('bucket', help="Name of the Amazon S3 bucket to save the backup file to.")
parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
parser.add_argument('width', help="Maximum width of the image.")
parser.add_argument('height', help="Maximum height of the image.")
args = parser.parse_args()
resize_image_folder(args.bucket, args.prefix, (args.width,args.height))
| Fix parameter descriptions and change size to individual width and height parameters | Fix parameter descriptions and change size to individual width and height parameters
| Python | mit | dirkcuys/s3imageresize |
import argparse
from s3imageresize import resize_image_folder
- parser = argparse.ArgumentParser(description='Upload a file to Amazon S3 and rotate old backups.')
+ parser = argparse.ArgumentParser(description='Resize all images stored in a folder on Amazon S3.')
parser.add_argument('bucket', help="Name of the Amazon S3 bucket to save the backup file to.")
parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
- parser.add_argument('psize', help="Path to the file to upload.")
+ parser.add_argument('width', help="Maximum width of the image.")
+ parser.add_argument('height', help="Maximum height of the image.")
args = parser.parse_args()
- resize_image_folder(args.bucket, args.prefix, args.psize)
+ resize_image_folder(args.bucket, args.prefix, (args.width,args.height))
| Fix parameter descriptions and change size to individual width and height parameters | ## Code Before:
import argparse
from s3imageresize import resize_image_folder
parser = argparse.ArgumentParser(description='Upload a file to Amazon S3 and rotate old backups.')
parser.add_argument('bucket', help="Name of the Amazon S3 bucket to save the backup file to.")
parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
parser.add_argument('psize', help="Path to the file to upload.")
args = parser.parse_args()
resize_image_folder(args.bucket, args.prefix, args.psize)
## Instruction:
Fix parameter descriptions and change size to individual width and height parameters
## Code After:
import argparse
from s3imageresize import resize_image_folder
parser = argparse.ArgumentParser(description='Resize all images stored in a folder on Amazon S3.')
parser.add_argument('bucket', help="Name of the Amazon S3 bucket to save the backup file to.")
parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
parser.add_argument('width', help="Maximum width of the image.")
parser.add_argument('height', help="Maximum height of the image.")
args = parser.parse_args()
resize_image_folder(args.bucket, args.prefix, (args.width,args.height))
|
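Aside: without an explicit `type`, argparse returns `width` and `height` as strings, so the call above passes a tuple of strings. If `resize_image_folder` expects integer pixel sizes (an assumption; its signature is not shown here), `type=int` converts at parse time:

import argparse

parser = argparse.ArgumentParser(description='Resize all images stored in a folder on Amazon S3.')
parser.add_argument('width', type=int, help="Maximum width of the image in pixels.")
parser.add_argument('height', type=int, help="Maximum height of the image in pixels.")
args = parser.parse_args(['800', '600'])        # example argv
assert (args.width, args.height) == (800, 600)  # ints, not the strings ('800', '600')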
945e2def0a106541583907101060a234e6846d27 | sources/bioformats/large_image_source_bioformats/girder_source.py | sources/bioformats/large_image_source_bioformats/girder_source.py |
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
|
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
def mayHaveAdjacentFiles(self, largeImageFile):
# bioformats uses extensions to determine how to open a file, so this
# needs to be set for all file formats.
return True
| Fix reading from hashed file names. | Fix reading from hashed file names.
Bioformats expects file extensions to exist, so flag that we should
always appear as actual, fully-pathed files.
| Python | apache-2.0 | girder/large_image,DigitalSlideArchive/large_image,girder/large_image,girder/large_image,DigitalSlideArchive/large_image,DigitalSlideArchive/large_image |
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
+ def mayHaveAdjacentFiles(self, largeImageFile):
+ # bioformats uses extensions to determine how to open a file, so this
+ # needs to be set for all file formats.
+ return True
+ | Fix reading from hashed file names. | ## Code Before:
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
## Instruction:
Fix reading from hashed file names.
## Code After:
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
def mayHaveAdjacentFiles(self, largeImageFile):
# bioformats uses extensions to determine how to open a file, so this
# needs to be set for all file formats.
return True
|
82f5a5cccb8a7a36adc6f880d3cc1e11b8e596ee | envelope/templatetags/envelope_tags.py | envelope/templatetags/envelope_tags.py |
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
form = context['form']
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
|
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
try:
form = context['form']
except KeyError:
raise template.TemplateSyntaxError("There is no 'form' variable in the template context.")
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
| Raise a more specific error when form is not passed to the template. | Raise a more specific error when form is not passed to the template.
| Python | mit | r4ts0n/django-envelope,r4ts0n/django-envelope,affan2/django-envelope,affan2/django-envelope,zsiciarz/django-envelope,zsiciarz/django-envelope |
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
+ try:
- form = context['form']
+ form = context['form']
+ except KeyError:
+ raise template.TemplateSyntaxError("There is no 'form' variable in the template context.")
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
| Raise a more specific error when form is not passed to the template. | ## Code Before:
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
form = context['form']
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
## Instruction:
Raise a more specific error when form is not passed to the template.
## Code After:
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
try:
form = context['form']
except KeyError:
raise template.TemplateSyntaxError("There is no 'form' variable in the template context.")
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
|
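Aside: the point of this change is the error type: a missing context variable now surfaces as a `TemplateSyntaxError` naming the variable instead of a bare `KeyError`. The same guard pattern in isolation, with the Django exception stubbed so the snippet runs standalone:

class TemplateSyntaxError(Exception):
    """Stand-in for django.template.TemplateSyntaxError."""

def render_contact_form(context):
    try:
        form = context['form']
    except KeyError:
        raise TemplateSyntaxError("There is no 'form' variable in the template context.")
    return {'form': form}

try:
    render_contact_form({})                # no 'form' supplied
except TemplateSyntaxError as exc:
    print(exc)                             # readable message instead of KeyError: 'form'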
ea324a30823fbf18c72dd639b9c43d3ecb57b034 | txircd/modules/extra/services/account_extban.py | txircd/modules/extra/services/account_extban.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
if fnmatchcase(userAccount, mask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
lowerMask = ircLower(mask)
if fnmatchcase(userAccount, lowerMask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() | Fix matching users against R: extbans | Fix matching users against R: extbans
| Python | bsd-3-clause | Heufneutje/txircd | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
+ lowerMask = ircLower(mask)
- if fnmatchcase(userAccount, mask):
+ if fnmatchcase(userAccount, lowerMask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() | Fix matching users against R: extbans | ## Code Before:
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
if fnmatchcase(userAccount, mask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban()
## Instruction:
Fix matching users against R: extbans
## Code After:
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
lowerMask = ircLower(mask)
if fnmatchcase(userAccount, lowerMask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() |
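Aside: `fnmatch.fnmatchcase` never folds case, so the fix lowercases the mask as well as the account before matching. The same idea with plain `str.lower` standing in for `ircLower` (which additionally folds IRC-specific characters such as `[`, `]`, `\` against `{`, `}`, `|`):

from fnmatch import fnmatchcase

def matches(account, mask):
    return fnmatchcase(account.lower(), mask.lower())  # lower both sides explicitly

assert matches("Alice", "ali*")    # account needed lowering
assert matches("alice", "ALI*")    # mask needed lowering; this was the broken case
assert not matches("bob", "ali*")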
d649e0ff501604d9b8b24bd69a7545528332c05c | polling_stations/apps/pollingstations/models.py | polling_stations/apps/pollingstations/models.py | from django.contrib.gis.db import models
from councils.models import Council
class PollingStation(models.Model):
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
postcode = models.CharField(blank=True, null=True, max_length=100)
address = models.TextField(blank=True, null=True)
location = models.PointField(null=True, blank=True)
objects = models.GeoManager()
class PollingDistrict(models.Model):
name = models.CharField(blank=True, null=True, max_length=255)
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
extra_id = models.CharField(blank=True, null=True, max_length=100)
area = models.MultiPolygonField(null=True, blank=True, geography=True)
objects = models.GeoManager()
def __unicode__(self):
return self.name
| from django.contrib.gis.db import models
from councils.models import Council
class PollingStation(models.Model):
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
postcode = models.CharField(blank=True, null=True, max_length=100)
address = models.TextField(blank=True, null=True)
location = models.PointField(null=True, blank=True)
objects = models.GeoManager()
class PollingDistrict(models.Model):
name = models.CharField(blank=True, null=True, max_length=255)
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
extra_id = models.CharField(blank=True, null=True, max_length=100)
area = models.MultiPolygonField(null=True, blank=True, geography=True)
objects = models.GeoManager()
def __unicode__(self):
name = self.name or "Unnamed"
return "%s (%s)" % (name, self.council)
| Fix unicode for unknown names | Fix unicode for unknown names
| Python | bsd-3-clause | andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | from django.contrib.gis.db import models
from councils.models import Council
class PollingStation(models.Model):
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
postcode = models.CharField(blank=True, null=True, max_length=100)
address = models.TextField(blank=True, null=True)
location = models.PointField(null=True, blank=True)
objects = models.GeoManager()
class PollingDistrict(models.Model):
name = models.CharField(blank=True, null=True, max_length=255)
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
extra_id = models.CharField(blank=True, null=True, max_length=100)
area = models.MultiPolygonField(null=True, blank=True, geography=True)
objects = models.GeoManager()
def __unicode__(self):
- return self.name
+ name = self.name or "Unnamed"
+ return "%s (%s)" % (name, self.council)
| Fix unicode for unknown names | ## Code Before:
from django.contrib.gis.db import models
from councils.models import Council
class PollingStation(models.Model):
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
postcode = models.CharField(blank=True, null=True, max_length=100)
address = models.TextField(blank=True, null=True)
location = models.PointField(null=True, blank=True)
objects = models.GeoManager()
class PollingDistrict(models.Model):
name = models.CharField(blank=True, null=True, max_length=255)
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
extra_id = models.CharField(blank=True, null=True, max_length=100)
area = models.MultiPolygonField(null=True, blank=True, geography=True)
objects = models.GeoManager()
def __unicode__(self):
return self.name
## Instruction:
Fix unicode for unknown names
## Code After:
from django.contrib.gis.db import models
from councils.models import Council
class PollingStation(models.Model):
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
postcode = models.CharField(blank=True, null=True, max_length=100)
address = models.TextField(blank=True, null=True)
location = models.PointField(null=True, blank=True)
objects = models.GeoManager()
class PollingDistrict(models.Model):
name = models.CharField(blank=True, null=True, max_length=255)
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
extra_id = models.CharField(blank=True, null=True, max_length=100)
area = models.MultiPolygonField(null=True, blank=True, geography=True)
objects = models.GeoManager()
def __unicode__(self):
name = self.name or "Unnamed"
return "%s (%s)" % (name, self.council)
|
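Aside: the new `__unicode__` guards a null or blank `name` (the field allows both) and appends the council for disambiguation. The fallback pattern in isolation:

class LabelDemo(object):
    """Toy stand-in for the null-safe label pattern above."""
    def __init__(self, name, council):
        self.name = name
        self.council = council
    def __str__(self):
        name = self.name or "Unnamed"            # None and "" both fall back
        return "%s (%s)" % (name, self.council)

print(LabelDemo(None, "Anytown"))      # Unnamed (Anytown)
print(LabelDemo("Ward 3", "Anytown"))  # Ward 3 (Anytown)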
c5996b4a933f2d27251e8d85f3392b715e130759 | mapentity/templatetags/convert_tags.py | mapentity/templatetags/convert_tags.py | import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| Support conversion format as extension, instead of mimetype | Support conversion format as extension, instead of mimetype
| Python | bsd-3-clause | Anaethelion/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,Anaethelion/django-mapentity | import urllib
+ from mimetypes import types_map
+
from django import template
from django.conf import settings
+
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
+ if '/' not in format:
+ extension = '.' + format if not format.startswith('.') else format
+ format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| Support conversion format as extension, instead of mimetype | ## Code Before:
import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
## Instruction:
Support conversion format as extension, instead of mimetype
## Code After:
import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
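Aside: `mimetypes.types_map` keys are dot-prefixed extensions, which is why the new code normalizes the format before the lookup. A standalone check of the same normalization (unknown extensions raise `KeyError`; `mimetypes.guess_type` would be the forgiving alternative):

from mimetypes import types_map

def to_mimetype(fmt):
    if '/' in fmt:                              # already a MIME type
        return fmt
    ext = fmt if fmt.startswith('.') else '.' + fmt
    return types_map[ext]                       # KeyError for unknown extensions

assert to_mimetype('pdf') == 'application/pdf'
assert to_mimetype('.csv') == 'text/csv'
assert to_mimetype('image/png') == 'image/png'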
5885c053e9bf20c7b91ebc2c8aebd1dfb9c78a46 | avalonstar/components/broadcasts/models.py | avalonstar/components/broadcasts/models.py | from django.db import models
from components.games.models import Game
class Broadcast(models.Model):
airdate = models.DateField()
status = models.CharField(max_length=200)
number = models.IntegerField(blank=True, null=True)
# ...
games = models.ManyToManyField(Game, related_name='appears_on')
def __unicode__(self):
return 'Episode %s' % self.number
| from django.db import models
from components.games.models import Game
class Series(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return '%s' % self.name
class Broadcast(models.Model):
airdate = models.DateField()
status = models.CharField(max_length=200)
number = models.IntegerField(blank=True, null=True)
# ...
games = models.ManyToManyField(Game, related_name='appears_on')
series = models.ForeignKey(Series, related_name='broadcasts')
# ...
def __unicode__(self):
return 'Episode %s' % self.number
| Add the concept of series (like Whatever Wednesday). | Add the concept of series (like Whatever Wednesday).
| Python | apache-2.0 | bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv | from django.db import models
from components.games.models import Game
+
+
+ class Series(models.Model):
+ name = models.CharField(max_length=200)
+
+ def __unicode__(self):
+ return '%s' % self.name
class Broadcast(models.Model):
airdate = models.DateField()
status = models.CharField(max_length=200)
number = models.IntegerField(blank=True, null=True)
# ...
games = models.ManyToManyField(Game, related_name='appears_on')
+ series = models.ForeignKey(Series, related_name='broadcasts')
+ # ...
def __unicode__(self):
return 'Episode %s' % self.number
| Add the concept of series (like Whatever Wednesday). | ## Code Before:
from django.db import models
from components.games.models import Game
class Broadcast(models.Model):
airdate = models.DateField()
status = models.CharField(max_length=200)
number = models.IntegerField(blank=True, null=True)
# ...
games = models.ManyToManyField(Game, related_name='appears_on')
def __unicode__(self):
return 'Episode %s' % self.number
## Instruction:
Add the concept of series (like Whatever Wednesday).
## Code After:
from django.db import models
from components.games.models import Game
class Series(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return '%s' % self.name
class Broadcast(models.Model):
airdate = models.DateField()
status = models.CharField(max_length=200)
number = models.IntegerField(blank=True, null=True)
# ...
games = models.ManyToManyField(Game, related_name='appears_on')
series = models.ForeignKey(Series, related_name='broadcasts')
# ...
def __unicode__(self):
return 'Episode %s' % self.number
|
c7f6e0c2e9c5be112a7576c3d2a1fc8a79eb9f18 | brasilcomvc/settings/staticfiles.py | brasilcomvc/settings/staticfiles.py | import os
import sys
# Disable django-pipeline when in test mode
PIPELINE_ENABLED = 'test' not in sys.argv
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
| import os
import sys
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
if 'test' in sys.argv:
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
| Fix django-pipeline configuration for development/test | fix(set): Fix django-pipeline configuration for development/test
| Python | apache-2.0 | brasilcomvc/brasilcomvc,brasilcomvc/brasilcomvc,brasilcomvc/brasilcomvc | import os
import sys
-
- # Disable django-pipeline when in test mode
- PIPELINE_ENABLED = 'test' not in sys.argv
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
+
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
+ if 'test' in sys.argv:
+ STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
+
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
| Fix django-pipeline configuration for development/test | ## Code Before:
import os
import sys
# Disable django-pipeline when in test mode
PIPELINE_ENABLED = 'test' not in sys.argv
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
## Instruction:
Fix django-pipeline configuration for development/test
## Code After:
import os
import sys
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
if 'test' in sys.argv:
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
|
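Aside: switching the storage class keeps asset compilation working in tests while skipping packaging, unlike the old `PIPELINE_ENABLED` toggle. The `'test' in sys.argv` check only matches `manage.py test`; a sketch of the same switch with an environment-variable escape hatch for other runners (the variable name is made up):

import os
import sys

STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
if 'test' in sys.argv or os.environ.get('DJANGO_TEST_MODE') == '1':
    # NonPackagingPipelineStorage still compiles assets but skips packaging/hashing.
    STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'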
a5274f0628bec7a77fc2722ced723c4f35f3fb4b | microcosm_flask/fields/query_string_list.py | microcosm_flask/fields/query_string_list.py | from marshmallow.fields import List, ValidationError
class SelfSerializableList(list):
def __str__(self):
return ",".join(str(item) for item in self)
class QueryStringList(List):
def _deserialize(self, value, attr, obj):
"""
_deserialize handles multiple formats of query string parameter lists
including:
/foo?bars=1,2
/foo?bars[]=1&bars[]2
and returns a list of values
"""
if value is None:
return None
try:
attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)]
attribute_params = SelfSerializableList(param for attr_param in attribute_elements for param in attr_param)
return attribute_params
except ValueError:
raise ValidationError("Invalid query string list argument")
| from marshmallow.fields import List, ValidationError
class PrintableList(list):
def __str__(self):
return ",".join(str(item) for item in self)
class QueryStringList(List):
def _deserialize(self, value, attr, obj):
"""
_deserialize handles multiple formats of query string parameter lists
including:
/foo?bars=1,2
/foo?bars[]=1&bars[]2
and returns a list of values
"""
if value is None:
return None
try:
attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)]
attribute_params = PrintableList(param for attr_param in attribute_elements for param in attr_param)
return attribute_params
except ValueError:
raise ValidationError("Invalid query string list argument")
| Change the name of SelfSerializableList to PrintableList | Change the name of SelfSerializableList to PrintableList
| Python | apache-2.0 | globality-corp/microcosm-flask,globality-corp/microcosm-flask | from marshmallow.fields import List, ValidationError
- class SelfSerializableList(list):
+ class PrintableList(list):
def __str__(self):
return ",".join(str(item) for item in self)
class QueryStringList(List):
def _deserialize(self, value, attr, obj):
"""
_deserialize handles multiple formats of query string parameter lists
including:
/foo?bars=1,2
/foo?bars[]=1&bars[]2
and returns a list of values
"""
if value is None:
return None
try:
attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)]
- attribute_params = SelfSerializableList(param for attr_param in attribute_elements for param in attr_param)
+ attribute_params = PrintableList(param for attr_param in attribute_elements for param in attr_param)
return attribute_params
except ValueError:
raise ValidationError("Invalid query string list argument")
| Change the name of SelfSerializableList to PrintableList | ## Code Before:
from marshmallow.fields import List, ValidationError
class SelfSerializableList(list):
def __str__(self):
return ",".join(str(item) for item in self)
class QueryStringList(List):
def _deserialize(self, value, attr, obj):
"""
_deserialize handles multiple formats of query string parameter lists
including:
/foo?bars=1,2
/foo?bars[]=1&bars[]2
and returns a list of values
"""
if value is None:
return None
try:
attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)]
attribute_params = SelfSerializableList(param for attr_param in attribute_elements for param in attr_param)
return attribute_params
except ValueError:
raise ValidationError("Invalid query string list argument")
## Instruction:
Change the name of SelfSerializableList to PrintableList
## Code After:
from marshmallow.fields import List, ValidationError
class PrintableList(list):
def __str__(self):
return ",".join(str(item) for item in self)
class QueryStringList(List):
def _deserialize(self, value, attr, obj):
"""
_deserialize handles multiple formats of query string parameter lists
including:
/foo?bars=1,2
/foo?bars[]=1&bars[]2
and returns a list of values
"""
if value is None:
return None
try:
attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)]
attribute_params = PrintableList(param for attr_param in attribute_elements for param in attr_param)
return attribute_params
except ValueError:
raise ValidationError("Invalid query string list argument")
|
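Aside: only the class name changes; behavior is identical. `__str__` re-joins items with commas while `__repr__` stays the inherited list form:

class PrintableList(list):
    def __str__(self):
        return ",".join(str(item) for item in self)

params = PrintableList([1, 2, 3])
assert str(params) == "1,2,3"        # custom comma join
assert repr(params) == "[1, 2, 3]"   # repr is inherited from list unchanged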
faa74af66ff0542c5a08d85caf2e2b897506b1d0 | custom/ewsghana/handlers/help.py | custom/ewsghana/handlers/help.py | from corehq.apps.products.models import SQLProduct
from custom.ewsghana.handlers import HELP_TEXT
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
class HelpHandler(KeywordHandler):
def help(self):
self.respond(HELP_TEXT)
def handle(self):
topic = self.args[0].lower()
if topic == 'stock':
self.respond("Please send your receipts in the format "
"' <Commodity code> <stock on hand > . <quantity received>'")
elif topic == 'stop':
self.respond("Text 'stop' to stop receiving text message reminders.")
elif topic == 'start':
self.respond("Text 'start' to get text message reminders every week to submit your stock reports.")
elif 'code' in topic:
codes = [c.code for c in SQLProduct.by_domain(self.domain).order_by('code')]
self.respond("Available commodity codes: %(codes)s", codes=", ".join(codes))
else:
try:
sql_product = SQLProduct.objects.get(domain=self.domain, code=topic)
msg = "%s is the commodity code for %s" % (topic, sql_product.name)
if sql_product.units:
msg += " (%s)" % sql_product.units
if sql_product.description and sql_product.description not in sql_product.name:
msg += " %s" % sql_product.description
self.respond(msg)
except SQLProduct.DoesNotExist:
self.help()
| from corehq.apps.products.models import SQLProduct
from custom.ewsghana.handlers import HELP_TEXT
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
class HelpHandler(KeywordHandler):
def help(self):
self.respond(HELP_TEXT)
def handle(self):
topic = self.args[0].lower()
if topic == 'stock':
self.respond("Please send your receipts in the format "
"' <Commodity code> <stock on hand > . <quantity received>'")
elif topic == 'stop':
self.respond("Text 'stop' to stop receiving text message reminders.")
elif topic == 'start':
self.respond("Text 'start' to get text message reminders every week to submit your stock reports.")
elif 'code' in topic:
codes = SQLProduct.by_domain(self.domain).order_by('code').values_list('code', flat=True)
self.respond("Available commodity codes: %(codes)s", codes=", ".join(codes))
else:
try:
sql_product = SQLProduct.objects.get(domain=self.domain, code=topic)
msg = "%s is the commodity code for %s" % (topic, sql_product.name)
if sql_product.units:
msg += " (%s)" % sql_product.units
if sql_product.description and sql_product.description not in sql_product.name:
msg += " %s" % sql_product.description
self.respond(msg)
except SQLProduct.DoesNotExist:
self.help()
| Use values_list instead of iterating over | Use values_list instead of iterating over
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | from corehq.apps.products.models import SQLProduct
from custom.ewsghana.handlers import HELP_TEXT
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
class HelpHandler(KeywordHandler):
def help(self):
self.respond(HELP_TEXT)
def handle(self):
topic = self.args[0].lower()
if topic == 'stock':
self.respond("Please send your receipts in the format "
"' <Commodity code> <stock on hand > . <quantity received>'")
elif topic == 'stop':
self.respond("Text 'stop' to stop receiving text message reminders.")
elif topic == 'start':
self.respond("Text 'start' to get text message reminders every week to submit your stock reports.")
elif 'code' in topic:
- codes = [c.code for c in SQLProduct.by_domain(self.domain).order_by('code')]
+ codes = SQLProduct.by_domain(self.domain).order_by('code').values_list('code', flat=True)
self.respond("Available commodity codes: %(codes)s", codes=", ".join(codes))
else:
try:
sql_product = SQLProduct.objects.get(domain=self.domain, code=topic)
msg = "%s is the commodity code for %s" % (topic, sql_product.name)
if sql_product.units:
msg += " (%s)" % sql_product.units
if sql_product.description and sql_product.description not in sql_product.name:
msg += " %s" % sql_product.description
self.respond(msg)
except SQLProduct.DoesNotExist:
self.help()
| Use values_list instead of iterating over | ## Code Before:
from corehq.apps.products.models import SQLProduct
from custom.ewsghana.handlers import HELP_TEXT
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
class HelpHandler(KeywordHandler):
def help(self):
self.respond(HELP_TEXT)
def handle(self):
topic = self.args[0].lower()
if topic == 'stock':
self.respond("Please send your receipts in the format "
"' <Commodity code> <stock on hand > . <quantity received>'")
elif topic == 'stop':
self.respond("Text 'stop' to stop receiving text message reminders.")
elif topic == 'start':
self.respond("Text 'start' to get text message reminders every week to submit your stock reports.")
elif 'code' in topic:
codes = [c.code for c in SQLProduct.by_domain(self.domain).order_by('code')]
self.respond("Available commodity codes: %(codes)s", codes=", ".join(codes))
else:
try:
sql_product = SQLProduct.objects.get(domain=self.domain, code=topic)
msg = "%s is the commodity code for %s" % (topic, sql_product.name)
if sql_product.units:
msg += " (%s)" % sql_product.units
if sql_product.description and sql_product.description not in sql_product.name:
msg += " %s" % sql_product.description
self.respond(msg)
except SQLProduct.DoesNotExist:
self.help()
## Instruction:
Use values_list instead of iterating over
## Code After:
from corehq.apps.products.models import SQLProduct
from custom.ewsghana.handlers import HELP_TEXT
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
class HelpHandler(KeywordHandler):
def help(self):
self.respond(HELP_TEXT)
def handle(self):
topic = self.args[0].lower()
if topic == 'stock':
self.respond("Please send your receipts in the format "
"' <Commodity code> <stock on hand > . <quantity received>'")
elif topic == 'stop':
self.respond("Text 'stop' to stop receiving text message reminders.")
elif topic == 'start':
self.respond("Text 'start' to get text message reminders every week to submit your stock reports.")
elif 'code' in topic:
codes = SQLProduct.by_domain(self.domain).order_by('code').values_list('code', flat=True)
self.respond("Available commodity codes: %(codes)s", codes=", ".join(codes))
else:
try:
sql_product = SQLProduct.objects.get(domain=self.domain, code=topic)
msg = "%s is the commodity code for %s" % (topic, sql_product.name)
if sql_product.units:
msg += " (%s)" % sql_product.units
if sql_product.description and sql_product.description not in sql_product.name:
msg += " %s" % sql_product.description
self.respond(msg)
except SQLProduct.DoesNotExist:
self.help()
|
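Aside: `values_list('code', flat=True)` selects a single column in SQL and skips building `SQLProduct` instances, which is the point of the change. Usage contrast, assuming a configured Django project with this model (not standalone-runnable):

# Before: full model instances materialized, one attribute read from each.
codes = [c.code for c in SQLProduct.by_domain(domain).order_by('code')]

# After: SELECT only "code"; flat=True yields strings rather than 1-tuples.
codes = SQLProduct.by_domain(domain).order_by('code').values_list('code', flat=True)

# values_list returns a lazy QuerySet; ", ".join(codes) iterates it directly,
# or wrap it in list(codes) when a plain list is needed.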
b0bed22c3ccafe596cf715f2be56c3261b4a6853 | reporting_scripts/course_completers.py | reporting_scripts/course_completers.py | '''
This module extracts the student IDs from the collection certificates_generatedcertificate
of the students who completed the course and achieved a certificate. The ids
are then used to extract the usernames of the course completers
Usage:
python course_completers.py
'''
from collections import defaultdict
from base_edx import EdXConnection
from generate_csv_report import CSV
connection = EdXConnection('certificates_generatedcertificate', 'auth_user')
collection = connection.get_access_to_collection()
completers = collection['certificates_generatedcertificate'].find({'status' : 'downloadable'})
result = []
for document in completers:
user_document = collection['auth_user'].find_one({"id" : document['user_id']})
result.append([user_document['username'], document['name'], document['grade']])
output = CSV(result, ['Username', 'Name', 'Grade'], output_file='course_completers.csv')
output.generate_csv()
| '''
This module extracts the student IDs from the collection certificates_generatedcertificate
of the students who completed the course and achieved a certificate. The ids
are then used to extract the usernames of the course completers
Usage:
python course_completers.py
'''
from collections import defaultdict
from base_edx import EdXConnection
from generate_csv_report import CSV
connection = EdXConnection('certificates_generatedcertificate', 'auth_user')
collection = connection.get_access_to_collection()
completers = collection['certificates_generatedcertificate'].find({'status' : 'downloadable'})
result = []
for document in completers:
user_document = collection['auth_user'].find_one({"id" : document['user_id']})
result.append([user_document['id'],user_document['username'], document['name'], document['grade']])
output = CSV(result, ['User ID','Username', 'Name', 'Grade'], output_file='course_completers.csv')
output.generate_csv()
| Update to include User ID in result | Update to include User ID in result
| Python | mit | McGillX/edx_data_research,andyzsf/edx_data_research,McGillX/edx_data_research,andyzsf/edx_data_research,McGillX/edx_data_research | '''
This module extracts the student IDs from the collection certificates_generatedcertificate
of the students who completed the course and achieved a certificate. The ids
are then used to extract the usernames of the course completers
Usage:
python course_completers.py
'''
from collections import defaultdict
from base_edx import EdXConnection
from generate_csv_report import CSV
connection = EdXConnection('certificates_generatedcertificate', 'auth_user')
collection = connection.get_access_to_collection()
completers = collection['certificates_generatedcertificate'].find({'status' : 'downloadable'})
result = []
for document in completers:
user_document = collection['auth_user'].find_one({"id" : document['user_id']})
- result.append([user_document['username'], document['name'], document['grade']])
+ result.append([user_document['id'],user_document['username'], document['name'], document['grade']])
- output = CSV(result, ['Username', 'Name', 'Grade'], output_file='course_completers.csv')
+ output = CSV(result, ['User ID','Username', 'Name', 'Grade'], output_file='course_completers.csv')
output.generate_csv()
| Update to include User ID in result | ## Code Before:
'''
This module extracts the student IDs from the collection certificates_generatedcertificate
of the students who completed the course and achieved a certificate. The ids
are then used to extract the usernames of the course completers
Usage:
python course_completers.py
'''
from collections import defaultdict
from base_edx import EdXConnection
from generate_csv_report import CSV
connection = EdXConnection('certificates_generatedcertificate', 'auth_user')
collection = connection.get_access_to_collection()
completers = collection['certificates_generatedcertificate'].find({'status' : 'downloadable'})
result = []
for document in completers:
user_document = collection['auth_user'].find_one({"id" : document['user_id']})
result.append([user_document['username'], document['name'], document['grade']])
output = CSV(result, ['Username', 'Name', 'Grade'], output_file='course_completers.csv')
output.generate_csv()
## Instruction:
Update to include User ID in result
## Code After:
'''
This module extracts the student IDs from the collection certificates_generatedcertificate
of the students who completed the course and achieved a certificate. The ids
are then used to extract the usernames of the course completers
Usage:
python course_completers.py
'''
from collections import defaultdict
from base_edx import EdXConnection
from generate_csv_report import CSV
connection = EdXConnection('certificates_generatedcertificate', 'auth_user')
collection = connection.get_access_to_collection()
completers = collection['certificates_generatedcertificate'].find({'status' : 'downloadable'})
result = []
for document in completers:
user_document = collection['auth_user'].find_one({"id" : document['user_id']})
result.append([user_document['id'],user_document['username'], document['name'], document['grade']])
output = CSV(result, ['User ID','Username', 'Name', 'Grade'], output_file='course_completers.csv')
output.generate_csv()
|
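Aside: the loop performs one `find_one` per completer. With pymongo the lookups can be batched through `$in` and joined in memory; a sketch reusing the script's names (note the cursor must be materialized before being iterated twice):

completers = list(completers)                      # cursors are single-pass
user_ids = [doc['user_id'] for doc in completers]
users_by_id = {
    u['id']: u
    for u in collection['auth_user'].find({'id': {'$in': user_ids}})
}                                                  # one round trip instead of N
result = [
    [doc['user_id'], users_by_id[doc['user_id']]['username'],
     doc['name'], doc['grade']]
    for doc in completers
]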
fd9c73fc65a7234732ed55a7ae89365aec6cf123 | behave_django/runner.py | behave_django/runner.py | from django.test.runner import DiscoverRunner
from behave_django.environment import BehaveHooksMixin
from behave_django.testcase import (BehaviorDrivenTestCase,
ExistingDatabaseTestCase)
class BehaviorDrivenTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the BehaviorDrivenTestCase
"""
testcase_class = BehaviorDrivenTestCase
class ExistingDatabaseTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the ExistingDatabaseTestCase
This test runner nullifies Django's test database setup methods. Using this
test runner would make your tests run with the default configured database
in settings.py.
"""
testcase_class = ExistingDatabaseTestCase
def setup_databases(*args, **kwargs):
pass
def teardown_databases(*args, **kwargs):
pass
| from django.test.runner import DiscoverRunner
from behave_django.environment import BehaveHooksMixin
from behave_django.testcase import (BehaviorDrivenTestCase,
ExistingDatabaseTestCase)
class BehaviorDrivenTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the BehaviorDrivenTestCase
"""
testcase_class = BehaviorDrivenTestCase
class ExistingDatabaseTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the ExistingDatabaseTestCase
This test runner nullifies Django's test database setup methods. Using this
test runner would make your tests run with the default configured database
in settings.py.
"""
testcase_class = ExistingDatabaseTestCase
def setup_databases(self, **kwargs):
pass
def teardown_databases(self, old_config, **kwargs):
pass
| Fix Landscape complaint "Method has no argument" | Fix Landscape complaint "Method has no argument"
| Python | mit | bittner/behave-django,behave/behave-django,behave/behave-django,bittner/behave-django | from django.test.runner import DiscoverRunner
from behave_django.environment import BehaveHooksMixin
from behave_django.testcase import (BehaviorDrivenTestCase,
ExistingDatabaseTestCase)
class BehaviorDrivenTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the BehaviorDrivenTestCase
"""
testcase_class = BehaviorDrivenTestCase
class ExistingDatabaseTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the ExistingDatabaseTestCase
This test runner nullifies Django's test database setup methods. Using this
test runner would make your tests run with the default configured database
in settings.py.
"""
testcase_class = ExistingDatabaseTestCase
- def setup_databases(*args, **kwargs):
+ def setup_databases(self, **kwargs):
pass
- def teardown_databases(*args, **kwargs):
+ def teardown_databases(self, old_config, **kwargs):
pass
| Fix Landscape complaint "Method has no argument" | ## Code Before:
from django.test.runner import DiscoverRunner
from behave_django.environment import BehaveHooksMixin
from behave_django.testcase import (BehaviorDrivenTestCase,
ExistingDatabaseTestCase)
class BehaviorDrivenTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the BehaviorDrivenTestCase
"""
testcase_class = BehaviorDrivenTestCase
class ExistingDatabaseTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the ExistingDatabaseTestCase
This test runner nullifies Django's test database setup methods. Using this
test runner would make your tests run with the default configured database
in settings.py.
"""
testcase_class = ExistingDatabaseTestCase
def setup_databases(*args, **kwargs):
pass
def teardown_databases(*args, **kwargs):
pass
## Instruction:
Fix Landscape complaint "Method has no argument"
## Code After:
from django.test.runner import DiscoverRunner
from behave_django.environment import BehaveHooksMixin
from behave_django.testcase import (BehaviorDrivenTestCase,
ExistingDatabaseTestCase)
class BehaviorDrivenTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the BehaviorDrivenTestCase
"""
testcase_class = BehaviorDrivenTestCase
class ExistingDatabaseTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the ExistingDatabaseTestCase
This test runner nullifies Django's test database setup methods. Using this
test runner would make your tests run with the default configured database
in settings.py.
"""
testcase_class = ExistingDatabaseTestCase
def setup_databases(self, **kwargs):
pass
def teardown_databases(self, old_config, **kwargs):
pass
|
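Aside: the Landscape warning comes from `*args`-only method signatures hiding the implicit `self`. The fix mirrors the hook signatures on Django's `DiscoverRunner`, where `teardown_databases` receives whatever `setup_databases` returned. The pattern independent of Django:

class BaseRunner(object):
    def setup_databases(self, **kwargs):
        return "old_config"                    # handed back to teardown later

    def teardown_databases(self, old_config, **kwargs):
        print("tearing down", old_config)

class NullDatabaseRunner(BaseRunner):
    # Explicit self plus named parameters keep overrides signature-compatible.
    def setup_databases(self, **kwargs):
        pass

    def teardown_databases(self, old_config, **kwargs):
        pass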
dfc7c7ae72b91f3bc7724da6b0d8071b3e9253b7 | altair/vegalite/v2/examples/us_state_capitals.py | altair/vegalite/v2/examples/us_state_capitals.py |
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text().encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points
|
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text(dy=-5, align='right').encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points + points.mark_point(color='black')
| Add points for capital locations> | Add points for capital locations>
| Python | bsd-3-clause | ellisonbg/altair,jakevdp/altair,altair-viz/altair |
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
- points = alt.Chart(capitals).mark_text().encode(
+ points = alt.Chart(capitals).mark_text(dy=-5, align='right').encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
- chart = background + points
+ chart = background + points + points.mark_point(color='black')
| Add points for capital locations | ## Code Before:
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text().encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points
## Instruction:
Add points for capital locations
## Code After:
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text(dy=-5, align='right').encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points + points.mark_point(color='black')
|
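Aside: `background + points` is shorthand for a layer chart, and `points.mark_point(...)` reuses the existing data and lon/lat encodings with a different mark, so labels and dots come from one definition. The equivalent explicit form, assuming the same `background` and `points` objects built above:

chart = alt.layer(
    background,                           # gray state shapes underneath
    points,                               # city-name text labels
    points.mark_point(color='black'),     # same encodings, rendered as dots
)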
80a940305765a22f96b0c0af0b0b46f1e3f5c377 | tests/unit/models/listing/test_generator.py | tests/unit/models/listing/test_generator.py | """Test praw.models.front."""
from praw.models.listing.generator import ListingGenerator
from ... import UnitTest
class TestListingGenerator(UnitTest):
def test_params_are_not_modified(self):
params = {"prawtest": "yes"}
generator = ListingGenerator(None, None, params=params)
assert "limit" in generator.params
assert "limit" not in params
assert ("prawtest", "yes") in generator.params.items()
| """Test praw.models.listing.generator."""
from praw.models.listing.generator import ListingGenerator
from ... import UnitTest
class TestListingGenerator(UnitTest):
def test_params_are_not_modified(self):
params = {"prawtest": "yes"}
generator = ListingGenerator(None, None, params=params)
assert "limit" in generator.params
assert "limit" not in params
assert ("prawtest", "yes") in generator.params.items()
| Fix docstring typo in ListingGenerator unit tests | Fix docstring typo in ListingGenerator unit tests
| Python | bsd-2-clause | praw-dev/praw,praw-dev/praw | - """Test praw.models.front."""
+ """Test praw.models.listing.generator."""
from praw.models.listing.generator import ListingGenerator
from ... import UnitTest
class TestListingGenerator(UnitTest):
def test_params_are_not_modified(self):
params = {"prawtest": "yes"}
generator = ListingGenerator(None, None, params=params)
assert "limit" in generator.params
assert "limit" not in params
assert ("prawtest", "yes") in generator.params.items()
| Fix docstring typo in ListingGenerator unit tests | ## Code Before:
"""Test praw.models.front."""
from praw.models.listing.generator import ListingGenerator
from ... import UnitTest
class TestListingGenerator(UnitTest):
def test_params_are_not_modified(self):
params = {"prawtest": "yes"}
generator = ListingGenerator(None, None, params=params)
assert "limit" in generator.params
assert "limit" not in params
assert ("prawtest", "yes") in generator.params.items()
## Instruction:
Fix docstring typo in ListingGenerator unit tests
## Code After:
"""Test praw.models.listing.generator."""
from praw.models.listing.generator import ListingGenerator
from ... import UnitTest
class TestListingGenerator(UnitTest):
def test_params_are_not_modified(self):
params = {"prawtest": "yes"}
generator = ListingGenerator(None, None, params=params)
assert "limit" in generator.params
assert "limit" not in params
assert ("prawtest", "yes") in generator.params.items()
|
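Beyond the docstring fix, the test pins down useful behavior: `ListingGenerator` must not mutate the caller's `params` dict when it adds its own defaults. A hedged sketch of the defensive-copy pattern the assertions imply (illustrative only, not PRAW's actual implementation):

from copy import deepcopy

class Generator:
    def __init__(self, params=None):
        # Copy first, then add defaults, so the caller's dict stays untouched.
        self.params = deepcopy(params) if params else {}
        self.params.setdefault("limit", 100)

caller = {"prawtest": "yes"}
gen = Generator(caller)
assert "limit" in gen.params and "limit" not in caller
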
e5a94d2902a66d55be62b92e35ac90ac7aed7991 | javascript/navigator/__init__.py | javascript/navigator/__init__.py | __author__ = 'katharine'
import PyV8 as v8
from geolocation import Geolocation
class Navigator(v8.JSClass):
def __init__(self, runtime):
# W3C spec says that if geolocation is disabled, navigator.geolocation should not exist.
# if 'location' in runtime.manifest.get('capabilities', []):
if True:
self.geolocation = Geolocation(runtime)
| __author__ = 'katharine'
import PyV8 as v8
from geolocation import Geolocation
from javascript.exceptions import JSRuntimeException
class Navigator(v8.JSClass):
def __init__(self, runtime):
self._runtime = runtime
# W3C spec says that if geolocation is disabled, navigator.geolocation should not exist.
if 'location' in runtime.manifest.get('capabilities', []):
self.geolocation = Geolocation(runtime)
def __getattr__(self, item):
# __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation
# does not exist.
# This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`),
# but throw an informative exception if it's accessed.
if item == 'geolocation':
raise JSRuntimeException(
self._runtime,
"You must add 'location' to the appinfo.json capabilities array to access geolocation."
)
else:
raise AttributeError
| Implement location restriction more thoroughly. | Implement location restriction more thoroughly.
| Python | mit | youtux/pypkjs,pebble/pypkjs | __author__ = 'katharine'
import PyV8 as v8
from geolocation import Geolocation
-
+ from javascript.exceptions import JSRuntimeException
class Navigator(v8.JSClass):
def __init__(self, runtime):
+ self._runtime = runtime
# W3C spec says that if geolocation is disabled, navigator.geolocation should not exist.
- # if 'location' in runtime.manifest.get('capabilities', []):
+ if 'location' in runtime.manifest.get('capabilities', []):
- if True:
self.geolocation = Geolocation(runtime)
+ def __getattr__(self, item):
+ # __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation
+ # does not exist.
+ # This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`),
+ # but throw an informative exception if it's accessed.
+ if item == 'geolocation':
+ raise JSRuntimeException(
+ self._runtime,
+ "You must add 'location' to the appinfo.json capabilities array to access geolocation."
+ )
+ else:
+ raise AttributeError
+
+ | Implement location restriction more thoroughly. | ## Code Before:
__author__ = 'katharine'
import PyV8 as v8
from geolocation import Geolocation
class Navigator(v8.JSClass):
def __init__(self, runtime):
# W3C spec says that if geolocation is disabled, navigator.geolocation should not exist.
# if 'location' in runtime.manifest.get('capabilities', []):
if True:
self.geolocation = Geolocation(runtime)
## Instruction:
Implement location restriction more thoroughly.
## Code After:
__author__ = 'katharine'
import PyV8 as v8
from geolocation import Geolocation
from javascript.exceptions import JSRuntimeException
class Navigator(v8.JSClass):
def __init__(self, runtime):
self._runtime = runtime
# W3C spec says that if geolocation is disabled, navigator.geolocation should not exist.
if 'location' in runtime.manifest.get('capabilities', []):
self.geolocation = Geolocation(runtime)
def __getattr__(self, item):
# __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation
# does not exist.
# This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`),
# but throw an informative exception if it's accessed.
if item == 'geolocation':
raise JSRuntimeException(
self._runtime,
"You must add 'location' to the appinfo.json capabilities array to access geolocation."
)
else:
raise AttributeError
|
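The `__getattr__` hook above fires only for attributes that do not exist, which is what lets existence checks report `geolocation` as absent while direct access raises an informative error. A standalone illustration of the same pattern in plain Python (the class name and message are hypothetical):

class FakeNavigator(object):
    def __getattr__(self, item):  # reached only when the attribute is missing
        if item == 'geolocation':
            raise RuntimeError("add 'location' to the capabilities array")
        raise AttributeError(item)

nav = FakeNavigator()
assert not hasattr(nav, 'does_not_exist')  # plain AttributeError -> False
try:
    nav.geolocation
except RuntimeError as err:
    print(err)  # informative failure instead of a bare AttributeError
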
70847e9d88f086d52e167629666aebe5137c7a2e | debileweb/blueprints/forms.py | debileweb/blueprints/forms.py | from wtforms import TextField, BooleanField, Form
from wtforms.validators import Required
class SearchPackageForm(Form):
package = TextField('package', validators = [Required()])
maintainer = TextField('maintainer', validators = [Required()])
|
from wtforms import TextField, Form
from wtforms.validators import Required
class SearchPackageForm(Form):
package = TextField('package', validators = [Required()])
maintainer = TextField('maintainer', validators = [Required()])
| Add license + remove useless declaration | Add license + remove useless declaration
| Python | mit | opencollab/debile-web,opencollab/debile-web,opencollab/debile-web | +
- from wtforms import TextField, BooleanField, Form
+ from wtforms import TextField, Form
from wtforms.validators import Required
+
class SearchPackageForm(Form):
package = TextField('package', validators = [Required()])
maintainer = TextField('maintainer', validators = [Required()])
| Add license + remove useless declaration | ## Code Before:
from wtforms import TextField, BooleanField, Form
from wtforms.validators import Required
class SearchPackageForm(Form):
package = TextField('package', validators = [Required()])
maintainer = TextField('maintainer', validators = [Required()])
## Instruction:
Add license + remove useless declaration
## Code After:
from wtforms import TextField, Form
from wtforms.validators import Required
class SearchPackageForm(Form):
package = TextField('package', validators = [Required()])
maintainer = TextField('maintainer', validators = [Required()])
|
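For context, a hedged sketch of driving this form from request data; the `MultiDict` import assumes Werkzeug is available, which is typical in Flask-based apps like this one:

from werkzeug.datastructures import MultiDict

form = SearchPackageForm(MultiDict({'package': 'hello', 'maintainer': 'someone@debian.org'}))
if form.validate():
    print(form.package.data, form.maintainer.data)
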
13a2ea421b761b9009fb7e1328e54cf0ae5cc54f | gapipy/resources/booking/agency.py | gapipy/resources/booking/agency.py | from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [('documents', AgencyDocument)]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
| from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from ...models.base import BaseModel
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class AgencyEmail(BaseModel):
_as_is_fields = ['type', 'address']
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude', 'transactional_email']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [
('documents', AgencyDocument),
('emails', AgencyEmail),
]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
| Add new Agency resource fields | Add new Agency resource fields
| Python | mit | gadventures/gapipy | from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
+ from ...models.base import BaseModel
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
+
+
+ class AgencyEmail(BaseModel):
+ _as_is_fields = ['type', 'address']
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
- _as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude']
+ _as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude', 'transactional_email']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
- _model_collection_fields = [('documents', AgencyDocument)]
+ _model_collection_fields = [
+ ('documents', AgencyDocument),
+ ('emails', AgencyEmail),
+ ]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
| Add new Agency resource fields | ## Code Before:
from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [('documents', AgencyDocument)]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
## Instruction:
Add new Agency resource fields
## Code After:
from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from ...models.base import BaseModel
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class AgencyEmail(BaseModel):
_as_is_fields = ['type', 'address']
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude', 'transactional_email']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [
('documents', AgencyDocument),
('emails', AgencyEmail),
]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
|
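A hedged sketch of how the new fields surface to client code, assuming a configured gapipy `Client` (the credentials and agency id are made up):

from gapipy import Client

api = Client(application_key='...')   # hypothetical credentials
agency = api.agencies.get(12345)      # hypothetical id
print(agency.transactional_email)     # new pass-through field
for email in agency.emails:           # new AgencyEmail collection
    print(email.type, email.address)
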
3b9508ff6546974ffb2aee8fe38aae15799aafc5 | cellcounter/accounts/urls.py | cellcounter/accounts/urls.py | from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse
from .views import RegistrationView, PasswordChangeView, password_reset_done
urlpatterns = patterns('',
url('^new/$', RegistrationView.as_view(), name='register'),
url('^password/reset/$', 'django.contrib.auth.views.password_reset', {
'template_name': 'accounts/reset_form.html',
'email_template_name': 'accounts/reset_email.txt',
'subject_template_name': 'accounts/reset_subject.txt',
'current_app': 'cellcounter.accounts',
'post_reset_redirect': '/',
},
name='reset-request'),
url('^password/reset/confirm/(?P<uidb64>\d+)/(?P<token>[\d\w-]+)/$',
'django.contrib.auth.views.password_reset_confirm', {
'template_name': 'accounts/reset_confirm.html',
'post_reset_redirect': password_reset_done,
},
name='password-reset-confirm'),
url('^password/change/$', PasswordChangeView.as_view(), name='change-password'),
) | from django.conf.urls import patterns, url
from .views import RegistrationView, PasswordChangeView, password_reset_sent, password_reset_done
urlpatterns = patterns('',
url('^new/$', RegistrationView.as_view(), name='register'),
url('^password/reset/$', 'django.contrib.auth.views.password_reset', {
'template_name': 'accounts/reset_form.html',
'email_template_name': 'accounts/reset_email.txt',
'subject_template_name': 'accounts/reset_subject.txt',
'current_app': 'cellcounter.accounts',
'post_reset_redirect': 'password-reset-sent',
},
name='password-reset'),
url('^password/reset/sent/$', password_reset_sent, name='password-reset-sent'),
url('^password/reset/done/$', password_reset_done, name='password-reset-done'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
'django.contrib.auth.views.password_reset_confirm', {
'template_name': 'accounts/reset_confirm.html',
'post_reset_redirect': 'password-reset-done',
},
name='password-reset-confirm'),
url('^password/change/$', PasswordChangeView.as_view(), name='change-password'),
) | Add correct reset-sent and reset-done redirect views, tidy regex | Add correct reset-sent and reset-done redirect views, tidy regex
| Python | mit | haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter,haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter | from django.conf.urls import patterns, url
- from django.core.urlresolvers import reverse
- from .views import RegistrationView, PasswordChangeView, password_reset_done
+ from .views import RegistrationView, PasswordChangeView, password_reset_sent, password_reset_done
urlpatterns = patterns('',
url('^new/$', RegistrationView.as_view(), name='register'),
url('^password/reset/$', 'django.contrib.auth.views.password_reset', {
'template_name': 'accounts/reset_form.html',
'email_template_name': 'accounts/reset_email.txt',
'subject_template_name': 'accounts/reset_subject.txt',
'current_app': 'cellcounter.accounts',
- 'post_reset_redirect': '/',
+ 'post_reset_redirect': 'password-reset-sent',
},
- name='reset-request'),
+ name='password-reset'),
+ url('^password/reset/sent/$', password_reset_sent, name='password-reset-sent'),
+ url('^password/reset/done/$', password_reset_done, name='password-reset-done'),
- url('^password/reset/confirm/(?P<uidb64>\d+)/(?P<token>[\d\w-]+)/$',
+ url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
'django.contrib.auth.views.password_reset_confirm', {
'template_name': 'accounts/reset_confirm.html',
- 'post_reset_redirect': password_reset_done,
+ 'post_reset_redirect': 'password-reset-done',
},
name='password-reset-confirm'),
url('^password/change/$', PasswordChangeView.as_view(), name='change-password'),
) | Add correct reset-sent and reset-done redirect views, tidy regex | ## Code Before:
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse
from .views import RegistrationView, PasswordChangeView, password_reset_done
urlpatterns = patterns('',
url('^new/$', RegistrationView.as_view(), name='register'),
url('^password/reset/$', 'django.contrib.auth.views.password_reset', {
'template_name': 'accounts/reset_form.html',
'email_template_name': 'accounts/reset_email.txt',
'subject_template_name': 'accounts/reset_subject.txt',
'current_app': 'cellcounter.accounts',
'post_reset_redirect': '/',
},
name='reset-request'),
url('^password/reset/confirm/(?P<uidb64>\d+)/(?P<token>[\d\w-]+)/$',
'django.contrib.auth.views.password_reset_confirm', {
'template_name': 'accounts/reset_confirm.html',
'post_reset_redirect': password_reset_done,
},
name='password-reset-confirm'),
url('^password/change/$', PasswordChangeView.as_view(), name='change-password'),
)
## Instruction:
Add correct reset-sent and reset-done redirect views, tidy regex
## Code After:
from django.conf.urls import patterns, url
from .views import RegistrationView, PasswordChangeView, password_reset_sent, password_reset_done
urlpatterns = patterns('',
url('^new/$', RegistrationView.as_view(), name='register'),
url('^password/reset/$', 'django.contrib.auth.views.password_reset', {
'template_name': 'accounts/reset_form.html',
'email_template_name': 'accounts/reset_email.txt',
'subject_template_name': 'accounts/reset_subject.txt',
'current_app': 'cellcounter.accounts',
'post_reset_redirect': 'password-reset-sent',
},
name='password-reset'),
url('^password/reset/sent/$', password_reset_sent, name='password-reset-sent'),
url('^password/reset/done/$', password_reset_done, name='password-reset-done'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
'django.contrib.auth.views.password_reset_confirm', {
'template_name': 'accounts/reset_confirm.html',
'post_reset_redirect': 'password-reset-done',
},
name='password-reset-confirm'),
url('^password/change/$', PasswordChangeView.as_view(), name='change-password'),
) |
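The widened `uidb64` pattern now matches URL-safe base64 identifiers rather than digits only. A quick standalone check of the new regex:

import re

pattern = r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$'
match = re.match(pattern, 'password/reset/confirm/MjM/3z4-deadbeefcafe/')
assert match and match.group('uidb64') == 'MjM'  # 'MjM' is urlsafe base64 for '23'
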
2f0627c1e5c087cf5b712e846b4f687259342063 | credentials/management/commands/import_sshkeypair.py | credentials/management/commands/import_sshkeypair.py | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from credentials.management.helpers import import_sshkeypair
class Command(BaseCommand):
help = "Import ssh keypair"
args = "[public key filename] [private key filename] [name]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if label already exists."),
)
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError(
"must provide a label, public keyfile and private keyfile")
label, public_key, private_key = args
import_sshkeypair(
label, public_key, private_key,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
| from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from credentials.management.helpers import import_sshkeypair
class Command(BaseCommand):
help = "Import ssh keypair"
args = "[public key filename] [private key filename] [name]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if label already exists."),
)
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError(
"must provide a public keyfile, private keyfile and label")
public_key, private_key, name = args
import_sshkeypair(
label, public_key, private_key,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
| Change the help and assignments to match. | Change the help and assignments to match.
| Python | mit | caio1982/capomastro,caio1982/capomastro,timrchavez/capomastro,timrchavez/capomastro,caio1982/capomastro | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from credentials.management.helpers import import_sshkeypair
class Command(BaseCommand):
help = "Import ssh keypair"
args = "[public key filename] [private key filename] [name]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if label already exists."),
)
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError(
- "must provide a label, public keyfile and private keyfile")
+ "must provide a public keyfile, private keyfile and label")
- label, public_key, private_key = args
+ public_key, private_key, name = args
import_sshkeypair(
label, public_key, private_key,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
| Change the help and assignments to match. | ## Code Before:
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from credentials.management.helpers import import_sshkeypair
class Command(BaseCommand):
help = "Import ssh keypair"
args = "[public key filename] [private key filename] [name]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if label already exists."),
)
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError(
"must provide a label, public keyfile and private keyfile")
label, public_key, private_key = args
import_sshkeypair(
label, public_key, private_key,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
## Instruction:
Change the help and assignments to match.
## Code After:
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from credentials.management.helpers import import_sshkeypair
class Command(BaseCommand):
help = "Import ssh keypair"
args = "[public key filename] [private key filename] [name]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if label already exists."),
)
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError(
"must provide a public keyfile, private keyfile and label")
public_key, private_key, name = args
import_sshkeypair(
label, public_key, private_key,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
|
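One caveat worth flagging: after this change the handler unpacks `public_key, private_key, name` but still passes `label` to `import_sshkeypair`, so the command would raise a `NameError` when run. A hedged sketch of the presumably intended method body, assuming the helper still expects the label as its first argument:

def handle(self, *args, **options):
    if len(args) != 3:
        raise CommandError(
            "must provide a public keyfile, private keyfile and label")
    public_key, private_key, name = args
    import_sshkeypair(
        name, public_key, private_key,  # pass the unpacked name, not the stale `label`
        update=options["update"], stdout=self.stdout)
    transaction.commit_unless_managed()
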
58078b1d4eb64c7104715352fc11bf7abffd48a4 | feincms/management/commands/update_rsscontent.py | feincms/management/commands/update_rsscontent.py | from django.core.management.base import NoArgsCommand
from feincms.content.rss.models import RSSContent
class Command(NoArgsCommand):
help = "Run this as a cronjob."
def handle_noargs(self, **options):
for cls in RSSContent._feincms_content_models:
for content in cls.objects.all():
content.cache_content()
| from django.core.management.base import NoArgsCommand
from feincms.content.rss.models import RSSContent
class Command(NoArgsCommand):
help = "Run this as a cronjob."
def handle_noargs(self, **options):
# find all concrete content types of RSSContent
for cls in RSSContent._feincms_content_models:
for content in cls.objects.all():
content.cache_content()
| Add small explaining note to the RSSContent updating management command | Add small explaining note to the RSSContent updating management command
| Python | bsd-3-clause | hgrimelid/feincms,nickburlett/feincms,feincms/feincms,nickburlett/feincms,hgrimelid/feincms,joshuajonah/feincms,pjdelport/feincms,mjl/feincms,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor,matthiask/django-content-editor,mjl/feincms,nickburlett/feincms,matthiask/feincms2-content,matthiask/feincms2-content,feincms/feincms,nickburlett/feincms,michaelkuty/feincms,hgrimelid/feincms,matthiask/django-content-editor,michaelkuty/feincms,feincms/feincms,matthiask/feincms2-content,michaelkuty/feincms,pjdelport/feincms,joshuajonah/feincms,matthiask/django-content-editor,michaelkuty/feincms,joshuajonah/feincms,mjl/feincms | from django.core.management.base import NoArgsCommand
from feincms.content.rss.models import RSSContent
class Command(NoArgsCommand):
help = "Run this as a cronjob."
def handle_noargs(self, **options):
+ # find all concrete content types of RSSContent
for cls in RSSContent._feincms_content_models:
for content in cls.objects.all():
content.cache_content()
| Add small explaining note to the RSSContent updating management command | ## Code Before:
from django.core.management.base import NoArgsCommand
from feincms.content.rss.models import RSSContent
class Command(NoArgsCommand):
help = "Run this as a cronjob."
def handle_noargs(self, **options):
for cls in RSSContent._feincms_content_models:
for content in cls.objects.all():
content.cache_content()
## Instruction:
Add small explaining note to the RSSContent updating management command
## Code After:
from django.core.management.base import NoArgsCommand
from feincms.content.rss.models import RSSContent
class Command(NoArgsCommand):
help = "Run this as a cronjob."
def handle_noargs(self, **options):
# find all concrete content types of RSSContent
for cls in RSSContent._feincms_content_models:
for content in cls.objects.all():
content.cache_content()
|
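Since a management command's name comes from its module name, a scheduled job can also trigger this one programmatically; a minimal sketch:

from django.core.management import call_command

call_command('update_rsscontent')  # same effect as `manage.py update_rsscontent`
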
308bc2add0cc9d2d8af1d1851d71caa284094f62 | helusers/tests/test_oidc_api_token_authentication.py | helusers/tests/test_oidc_api_token_authentication.py | import json
import time
import uuid
import pytest
from jose import jwt
from helusers.oidc import ApiTokenAuthentication
from .keys import rsa_key
ISSUER = "test_issuer"
class _TestableApiTokenAuthentication(ApiTokenAuthentication):
@property
def oidc_config(self):
return {
"issuer": ISSUER,
}
def jwks_data(self):
return json.dumps({"keys": [rsa_key.public_key_jwk]})
@pytest.mark.django_db
def test_valid_jwt_is_accepted(rf):
sut = _TestableApiTokenAuthentication()
unix_timestamp_now = int(time.time())
user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
jwt_data = {
"iss": ISSUER,
"aud": "test_audience",
"iat": unix_timestamp_now - 10,
"exp": unix_timestamp_now + 1000,
"sub": str(user_uuid),
}
encoded_jwt = jwt.encode(
jwt_data, key=rsa_key.private_key_pem, algorithm=rsa_key.jose_algorithm
)
request = rf.get("/path", HTTP_AUTHORIZATION=f"Bearer {encoded_jwt}")
(user, auth) = sut.authenticate(request)
assert user.uuid == user_uuid
assert auth.user == user
| import json
import uuid
import pytest
from helusers.oidc import ApiTokenAuthentication
from .conftest import encoded_jwt_factory, ISSUER1
from .keys import rsa_key
class _TestableApiTokenAuthentication(ApiTokenAuthentication):
@property
def oidc_config(self):
return {
"issuer": ISSUER1,
}
def jwks_data(self):
return json.dumps({"keys": [rsa_key.public_key_jwk]})
@pytest.mark.django_db
def test_valid_jwt_is_accepted(rf, unix_timestamp_now):
sut = _TestableApiTokenAuthentication()
user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
encoded_jwt = encoded_jwt_factory(
iss=ISSUER1,
aud="test_audience",
iat=unix_timestamp_now - 10,
exp=unix_timestamp_now + 1000,
sub=str(user_uuid),
)
request = rf.get("/path", HTTP_AUTHORIZATION=f"Bearer {encoded_jwt}")
(user, auth) = sut.authenticate(request)
assert user.uuid == user_uuid
assert auth.user == user
| Use common test helpers in a test | Use common test helpers in a test
| Python | bsd-2-clause | City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers | import json
- import time
import uuid
import pytest
- from jose import jwt
from helusers.oidc import ApiTokenAuthentication
+ from .conftest import encoded_jwt_factory, ISSUER1
from .keys import rsa_key
-
- ISSUER = "test_issuer"
class _TestableApiTokenAuthentication(ApiTokenAuthentication):
@property
def oidc_config(self):
return {
- "issuer": ISSUER,
+ "issuer": ISSUER1,
}
def jwks_data(self):
return json.dumps({"keys": [rsa_key.public_key_jwk]})
@pytest.mark.django_db
- def test_valid_jwt_is_accepted(rf):
+ def test_valid_jwt_is_accepted(rf, unix_timestamp_now):
sut = _TestableApiTokenAuthentication()
- unix_timestamp_now = int(time.time())
+ user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
+ encoded_jwt = encoded_jwt_factory(
- user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
- jwt_data = {
- "iss": ISSUER,
+ iss=ISSUER1,
- "aud": "test_audience",
+ aud="test_audience",
- "iat": unix_timestamp_now - 10,
+ iat=unix_timestamp_now - 10,
- "exp": unix_timestamp_now + 1000,
+ exp=unix_timestamp_now + 1000,
- "sub": str(user_uuid),
+ sub=str(user_uuid),
- }
-
- encoded_jwt = jwt.encode(
- jwt_data, key=rsa_key.private_key_pem, algorithm=rsa_key.jose_algorithm
)
request = rf.get("/path", HTTP_AUTHORIZATION=f"Bearer {encoded_jwt}")
(user, auth) = sut.authenticate(request)
assert user.uuid == user_uuid
assert auth.user == user
| Use common test helpers in a test | ## Code Before:
import json
import time
import uuid
import pytest
from jose import jwt
from helusers.oidc import ApiTokenAuthentication
from .keys import rsa_key
ISSUER = "test_issuer"
class _TestableApiTokenAuthentication(ApiTokenAuthentication):
@property
def oidc_config(self):
return {
"issuer": ISSUER,
}
def jwks_data(self):
return json.dumps({"keys": [rsa_key.public_key_jwk]})
@pytest.mark.django_db
def test_valid_jwt_is_accepted(rf):
sut = _TestableApiTokenAuthentication()
unix_timestamp_now = int(time.time())
user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
jwt_data = {
"iss": ISSUER,
"aud": "test_audience",
"iat": unix_timestamp_now - 10,
"exp": unix_timestamp_now + 1000,
"sub": str(user_uuid),
}
encoded_jwt = jwt.encode(
jwt_data, key=rsa_key.private_key_pem, algorithm=rsa_key.jose_algorithm
)
request = rf.get("/path", HTTP_AUTHORIZATION=f"Bearer {encoded_jwt}")
(user, auth) = sut.authenticate(request)
assert user.uuid == user_uuid
assert auth.user == user
## Instruction:
Use common test helpers in a test
## Code After:
import json
import uuid
import pytest
from helusers.oidc import ApiTokenAuthentication
from .conftest import encoded_jwt_factory, ISSUER1
from .keys import rsa_key
class _TestableApiTokenAuthentication(ApiTokenAuthentication):
@property
def oidc_config(self):
return {
"issuer": ISSUER1,
}
def jwks_data(self):
return json.dumps({"keys": [rsa_key.public_key_jwk]})
@pytest.mark.django_db
def test_valid_jwt_is_accepted(rf, unix_timestamp_now):
sut = _TestableApiTokenAuthentication()
user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
encoded_jwt = encoded_jwt_factory(
iss=ISSUER1,
aud="test_audience",
iat=unix_timestamp_now - 10,
exp=unix_timestamp_now + 1000,
sub=str(user_uuid),
)
request = rf.get("/path", HTTP_AUTHORIZATION=f"Bearer {encoded_jwt}")
(user, auth) = sut.authenticate(request)
assert user.uuid == user_uuid
assert auth.user == user
|
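The shared helpers are imported from `conftest` but not shown in this record. Based on the inline code this commit replaces, a plausible sketch of what `encoded_jwt_factory` could look like (the real definition and the `ISSUER1` value may differ):

from jose import jwt

from .keys import rsa_key

ISSUER1 = "test_issuer_1"  # assumed value; actually defined in conftest.py

def encoded_jwt_factory(**claims):
    # Mirrors the inline jwt.encode(...) call the test used before.
    return jwt.encode(
        claims, key=rsa_key.private_key_pem, algorithm=rsa_key.jose_algorithm
    )
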
44f1e6ec95305bd7b4d69bbcdfb386f5ca958bdc | imagedownloader/stations/tests/units/test_devices.py | imagedownloader/stations/tests/units/test_devices.py | from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestProducts(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + self.device.product.name + ")") | from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestDevices(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + str(self.device.product) + ")") | Correct the name of the devices' test case to TestDevices (copy&paste bug). | stations: Correct the name of the devices' test case to TestDevices (copy&paste bug).
| Python | mit | gersolar/solar_radiation_model,ahMarrone/solar_radiation_model,scottlittle/solar_radiation_model | from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
- class TestProducts(TestCase):
+ class TestDevices(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
- self.assertEquals(str(self.device), self.device.serial_number + " (" + self.device.product.name + ")")
+ self.assertEquals(str(self.device), self.device.serial_number + " (" + str(self.device.product) + ")") | Correct the name of the devices' test case to TestDevices (copy&paste bug). | ## Code Before:
from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestProducts(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + self.device.product.name + ")")
## Instruction:
Correct the name of the devices' test case to TestDevices (copy&paste bug).
## Code After:
from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestDevices(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + str(self.device.product) + ")") |
2d698b1df6da2d5a0b3697891744d3c05e99cb95 | sympy/core/tests/test_compatibility.py | sympy/core/tests/test_compatibility.py | from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 7210 - this had been failing with python2/3 problems
assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
# warnings should not be raised for identical items
l = [1, 1]
assert list(ordered(l, warn=True)) == l
l = [[1], [2], [1]]
assert list(ordered(l, warn=True)) == [[1], [1], [2]]
raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]],
default=False, warn=True)))
| from sympy.core.compatibility import default_sort_key, as_int, ordered, iterable
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_iterable():
assert iterable(0) == False
assert iterable(1) == False
assert iterable(None) == False
def test_ordered():
# Issue 7210 - this had been failing with python2/3 problems
assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
# warnings should not be raised for identical items
l = [1, 1]
assert list(ordered(l, warn=True)) == l
l = [[1], [2], [1]]
assert list(ordered(l, warn=True)) == [[1], [1], [2]]
raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]],
default=False, warn=True)))
| Test some basic properties of iterable() | Test some basic properties of iterable()
| Python | bsd-3-clause | Gadal/sympy,jerli/sympy,souravsingh/sympy,Curious72/sympy,wanglongqi/sympy,chaffra/sympy,atsao72/sympy,sahilshekhawat/sympy,madan96/sympy,atreyv/sympy,lindsayad/sympy,skidzo/sympy,asm666/sympy,kaushik94/sympy,kumarkrishna/sympy,Gadal/sympy,aktech/sympy,sahilshekhawat/sympy,jamesblunt/sympy,kaushik94/sympy,Davidjohnwilson/sympy,Curious72/sympy,mcdaniel67/sympy,cswiercz/sympy,Shaswat27/sympy,Shaswat27/sympy,rahuldan/sympy,jaimahajan1997/sympy,maniteja123/sympy,VaibhavAgarwalVA/sympy,kumarkrishna/sympy,yukoba/sympy,ChristinaZografou/sympy,Gadal/sympy,iamutkarshtiwari/sympy,emon10005/sympy,sampadsaha5/sympy,atreyv/sympy,jerli/sympy,Mitchkoens/sympy,Davidjohnwilson/sympy,liangjiaxing/sympy,garvitr/sympy,wanglongqi/sympy,jaimahajan1997/sympy,beni55/sympy,souravsingh/sympy,emon10005/sympy,aktech/sympy,MridulS/sympy,jamesblunt/sympy,meghana1995/sympy,ga7g08/sympy,jerli/sympy,drufat/sympy,skidzo/sympy,beni55/sympy,cccfran/sympy,grevutiu-gabriel/sympy,jaimahajan1997/sympy
+ from sympy.core.compatibility import default_sort_key, as_int, ordered, iterable
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
+ def test_iterable():
+ assert iterable(0) == False
+ assert iterable(1) == False
+ assert iterable(None) == False
+
+
def test_ordered():
# Issue 7210 - this had been failing with python2/3 problems
assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
# warnings should not be raised for identical items
l = [1, 1]
assert list(ordered(l, warn=True)) == l
l = [[1], [2], [1]]
assert list(ordered(l, warn=True)) == [[1], [1], [2]]
raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]],
default=False, warn=True)))
| Test some basic properties of iterable() | ## Code Before:
from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 7210 - this had been failing with python2/3 problems
assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
# warnings should not be raised for identical items
l = [1, 1]
assert list(ordered(l, warn=True)) == l
l = [[1], [2], [1]]
assert list(ordered(l, warn=True)) == [[1], [1], [2]]
raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]],
default=False, warn=True)))
## Instruction:
Test some basic properties of iterable()
## Code After:
from sympy.core.compatibility import default_sort_key, as_int, ordered, iterable
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_iterable():
assert iterable(0) == False
assert iterable(1) == False
assert iterable(None) == False
def test_ordered():
# Issue 7210 - this had been failing with python2/3 problems
assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
# warnings should not be raised for identical items
l = [1, 1]
assert list(ordered(l, warn=True)) == l
l = [[1], [2], [1]]
assert list(ordered(l, warn=True)) == [[1], [1], [2]]
raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]],
default=False, warn=True)))
|
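For contrast with the new negative cases, a few positive checks of the same helper (behavior per SymPy's `core.compatibility` module at the time):

from sympy.core.compatibility import iterable

assert iterable([1, 2])               # containers qualify
assert iterable(i for i in range(3))  # so do generators
assert not iterable(5)
assert not iterable(None)             # the cases the new test pins down
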
e632fa3e12d3627abaf26f41a9f0483aaea24adf | imager/ImagerProfile/tests.py | imager/ImagerProfile/tests.py | from django.test import TestCase
import factory
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = 'imagerprofile.ImagerProfile'
django_get_or_create = ('username',)
username = 'John'
| from django.test import TestCase
import factory
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = 'imagerprofile.User'
django_get_or_create = ('username',)
username = factory.Sequence(lambda n: "Agent %03d" % n)
| Change test UserFactory model to point to User | Change test UserFactory model to point to User
| Python | mit | nbeck90/django-imager,nbeck90/django-imager | from django.test import TestCase
import factory
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
- model = 'imagerprofile.ImagerProfile'
+ model = 'imagerprofile.User'
django_get_or_create = ('username',)
- username = 'John'
+ username = factory.Sequence(lambda n: "Agent %03d" % n)
| Change test UserFactory model to point to User | ## Code Before:
from django.test import TestCase
import factory
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = 'imagerprofile.ImagerProfile'
django_get_or_create = ('username',)
username = 'John'
## Instruction:
Change test UserFactory model to point to User
## Code After:
from django.test import TestCase
import factory
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = 'imagerprofile.User'
django_get_or_create = ('username',)
username = factory.Sequence(lambda n: "Agent %03d" % n)
|
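`factory.Sequence` feeds each new object a distinct counter value, so repeated factory calls yield distinct usernames. A standalone illustration using factory_boy's `DictFactory` (the starting counter value varies between factory_boy versions):

import factory

class AgentFactory(factory.DictFactory):
    username = factory.Sequence(lambda n: "Agent %03d" % n)

print(AgentFactory()["username"])  # e.g. 'Agent 000'
print(AgentFactory()["username"])  # e.g. 'Agent 001'
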
a0e8c92a9d12846c8cfe6819ea26d1e08dd4098a | example/models.py | example/models.py | import i18n
from i18n.models import TranslatableModel
class Document(TranslatableModel):
charfield = i18n.LocalizedCharField(max_length=50)
textfield = i18n.LocalizedTextField(max_length=512)
filefield = i18n.LocalizedFileField(null=True, upload_to='files')
imagefield = i18n.LocalizedImageField(null=True, upload_to='images')
booleanfield = i18n.LocalizedBooleanField()
datefield = i18n.LocalizedDateField()
fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True,
related_name='+')
urlfied = i18n.LocalizedURLField()
decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2)
integerfield = i18n.LocalizedIntegerField()
def __str__(self):
return '%d, %s' % (self.pk, self.charfield)
class Meta:
app_label = 'example'
| from django.db import models
import i18n
from i18n.models import TranslatableModel
class Document(TranslatableModel):
untranslated_charfield = models.CharField(max_length=50, blank=True)
charfield = i18n.LocalizedCharField(max_length=50)
textfield = i18n.LocalizedTextField(max_length=500, blank=True)
filefield = i18n.LocalizedFileField(null=True, upload_to='files', blank=True)
imagefield = i18n.LocalizedImageField(null=True, upload_to='images', blank=True)
booleanfield = i18n.LocalizedBooleanField()
datefield = i18n.LocalizedDateField(blank=True, null=True)
fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True,
related_name='+')
urlfied = i18n.LocalizedURLField(null=True, blank=True)
decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2, null=True,
blank=True)
integerfield = i18n.LocalizedIntegerField(null=True, blank=True)
def __str__(self):
return '%d, %s' % (self.pk, self.charfield)
class Meta:
app_label = 'example'
| Make fields in example app non required | Make fields in example app non required
| Python | bsd-3-clause | jonasundderwolf/django-localizedfields,jonasundderwolf/django-localizedfields | + from django.db import models
import i18n
from i18n.models import TranslatableModel
class Document(TranslatableModel):
-
+ untranslated_charfield = models.CharField(max_length=50, blank=True)
charfield = i18n.LocalizedCharField(max_length=50)
- textfield = i18n.LocalizedTextField(max_length=512)
+ textfield = i18n.LocalizedTextField(max_length=500, blank=True)
- filefield = i18n.LocalizedFileField(null=True, upload_to='files')
+ filefield = i18n.LocalizedFileField(null=True, upload_to='files', blank=True)
- imagefield = i18n.LocalizedImageField(null=True, upload_to='images')
+ imagefield = i18n.LocalizedImageField(null=True, upload_to='images', blank=True)
booleanfield = i18n.LocalizedBooleanField()
- datefield = i18n.LocalizedDateField()
+ datefield = i18n.LocalizedDateField(blank=True, null=True)
fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True,
related_name='+')
- urlfied = i18n.LocalizedURLField()
+ urlfied = i18n.LocalizedURLField(null=True, blank=True)
- decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2)
+ decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2, null=True,
+ blank=True)
- integerfield = i18n.LocalizedIntegerField()
+ integerfield = i18n.LocalizedIntegerField(null=True, blank=True)
def __str__(self):
return '%d, %s' % (self.pk, self.charfield)
class Meta:
app_label = 'example'
| Make fields in example app non required | ## Code Before:
import i18n
from i18n.models import TranslatableModel
class Document(TranslatableModel):
charfield = i18n.LocalizedCharField(max_length=50)
textfield = i18n.LocalizedTextField(max_length=512)
filefield = i18n.LocalizedFileField(null=True, upload_to='files')
imagefield = i18n.LocalizedImageField(null=True, upload_to='images')
booleanfield = i18n.LocalizedBooleanField()
datefield = i18n.LocalizedDateField()
fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True,
related_name='+')
urlfied = i18n.LocalizedURLField()
decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2)
integerfield = i18n.LocalizedIntegerField()
def __str__(self):
return '%d, %s' % (self.pk, self.charfield)
class Meta:
app_label = 'example'
## Instruction:
Make fields in example app non required
## Code After:
from django.db import models
import i18n
from i18n.models import TranslatableModel
class Document(TranslatableModel):
untranslated_charfield = models.CharField(max_length=50, blank=True)
charfield = i18n.LocalizedCharField(max_length=50)
textfield = i18n.LocalizedTextField(max_length=500, blank=True)
filefield = i18n.LocalizedFileField(null=True, upload_to='files', blank=True)
imagefield = i18n.LocalizedImageField(null=True, upload_to='images', blank=True)
booleanfield = i18n.LocalizedBooleanField()
datefield = i18n.LocalizedDateField(blank=True, null=True)
fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True,
related_name='+')
urlfied = i18n.LocalizedURLField(null=True, blank=True)
decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2, null=True,
blank=True)
integerfield = i18n.LocalizedIntegerField(null=True, blank=True)
def __str__(self):
return '%d, %s' % (self.pk, self.charfield)
class Meta:
app_label = 'example'
|
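The options added here follow Django's usual optional-field convention, assuming these localized wrappers forward their keyword arguments to the underlying fields: text-ish fields take `blank=True` alone (stored as an empty string), while date, decimal, and integer fields also need `null=True` to allow NULL in the database. In plain Django terms:

from django.db import models

class Note(models.Model):
    title = models.CharField(max_length=50, blank=True)   # '' when omitted
    due = models.DateField(null=True, blank=True)         # NULL when omitted
    rating = models.DecimalField(max_digits=4, decimal_places=2,
                                 null=True, blank=True)
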
d93014618636ba23ebfd99c466072e8b4c265a42 | wikiwhere/plot_data_generation/count_generation.py | wikiwhere/plot_data_generation/count_generation.py | '''
Created on May 3, 2016
@author: Martin Koerner <info@mkoerner.de>
'''
class CountGeneration(object):
def generate_counts(self,collected_features_array,feature_name):
feature_counts = {}
for instance in collected_features_array:
if feature_name in instance:
feature = instance[feature_name]
if feature in feature_counts:
feature_counts[feature] += 1
else:
feature_counts[feature] = 1
return feature_counts
def get_as_array(self,feature_counts):
feature_count_array = []
for label in feature_counts:
dict_for_label = {}
dict_for_label["label"] = label
dict_for_label["count"] = feature_counts[label]
feature_count_array.append(dict_for_label)
return feature_count_array | '''
Created on May 3, 2016
@author: Martin Koerner <info@mkoerner.de>
'''
import operator
class CountGeneration(object):
def generate_counts(self,collected_features_array,feature_name):
feature_counts = {}
for instance in collected_features_array:
if feature_name in instance:
feature = instance[feature_name]
if feature in feature_counts:
feature_counts[feature] += 1
else:
feature_counts[feature] = 1
return feature_counts
def get_as_array(self,feature_counts):
feature_count_array = []
sorted_feature_counts = sorted(feature_counts.items(), key=operator.itemgetter(1),reverse=True)
for feature_count_tuple in sorted_feature_counts:
dict_for_label = {}
dict_for_label["label"] = feature_count_tuple[0]
dict_for_label["count"] = feature_count_tuple[1]
feature_count_array.append(dict_for_label)
return feature_count_array
| Add reverse sorting of count_array | Add reverse sorting of count_array | Python | mit | mkrnr/wikiwhere | '''
Created on May 3, 2016
@author: Martin Koerner <info@mkoerner.de>
'''
+ import operator
class CountGeneration(object):
def generate_counts(self,collected_features_array,feature_name):
feature_counts = {}
for instance in collected_features_array:
if feature_name in instance:
feature = instance[feature_name]
if feature in feature_counts:
feature_counts[feature] += 1
else:
feature_counts[feature] = 1
return feature_counts
def get_as_array(self,feature_counts):
feature_count_array = []
-
- for label in feature_counts:
+
+ sorted_feature_counts = sorted(feature_counts.items(), key=operator.itemgetter(1),reverse=True)
+
+ for feature_count_tuple in sorted_feature_counts:
dict_for_label = {}
- dict_for_label["label"] = label
+ dict_for_label["label"] = feature_count_tuple[0]
- dict_for_label["count"] = feature_counts[label]
+ dict_for_label["count"] = feature_count_tuple[1]
feature_count_array.append(dict_for_label)
return feature_count_array
+ | Add reverse sorting of count_array | ## Code Before:
'''
Created on May 3, 2016
@author: Martin Koerner <info@mkoerner.de>
'''
class CountGeneration(object):
def generate_counts(self,collected_features_array,feature_name):
feature_counts = {}
for instance in collected_features_array:
if feature_name in instance:
feature = instance[feature_name]
if feature in feature_counts:
feature_counts[feature] += 1
else:
feature_counts[feature] = 1
return feature_counts
def get_as_array(self,feature_counts):
feature_count_array = []
for label in feature_counts:
dict_for_label = {}
dict_for_label["label"] = label
dict_for_label["count"] = feature_counts[label]
feature_count_array.append(dict_for_label)
return feature_count_array
## Instruction:
Add reverse sorting of count_array
## Code After:
'''
Created on May 3, 2016
@author: Martin Koerner <info@mkoerner.de>
'''
import operator
class CountGeneration(object):
def generate_counts(self,collected_features_array,feature_name):
feature_counts = {}
for instance in collected_features_array:
if feature_name in instance:
feature = instance[feature_name]
if feature in feature_counts:
feature_counts[feature] += 1
else:
feature_counts[feature] = 1
return feature_counts
def get_as_array(self,feature_counts):
feature_count_array = []
sorted_feature_counts = sorted(feature_counts.items(), key=operator.itemgetter(1),reverse=True)
for feature_count_tuple in sorted_feature_counts:
dict_for_label = {}
dict_for_label["label"] = feature_count_tuple[0]
dict_for_label["count"] = feature_count_tuple[1]
feature_count_array.append(dict_for_label)
return feature_count_array
|
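With the `operator.itemgetter(1)` sort in place, `get_as_array` now returns labels ordered by descending count. A quick sketch using the class above:

cg = CountGeneration()
counts = cg.generate_counts(
    [{"lang": "en"}, {"lang": "de"}, {"lang": "en"}], "lang")
print(cg.get_as_array(counts))
# -> [{'label': 'en', 'count': 2}, {'label': 'de', 'count': 1}]
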
ad7d331868706c97caa0bf0abff88d6ab5537d8d | pyramid_skosprovider/__init__.py | pyramid_skosprovider/__init__.py |
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
|
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_request_method(get_skos_registry, 'skos_registry', reify=True)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
| Add skos_registry to the request. | Add skos_registry to the request.
Add the skos_registry to the request through the add_request_method
directive.
| Python | mit | koenedaele/pyramid_skosprovider |
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
+ config.add_request_method(get_skos_registry, 'skos_registry', reify=True)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
| Add skos_registry to the request. | ## Code Before:
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
## Instruction:
Add skos_registry to the request.
## Code After:
from zope.interface import Interface
from skosprovider.registry import Registry
class ISkosRegistry(Interface):
pass
def _build_skos_registry(registry):
skos_registry = registry.queryUtility(ISkosRegistry)
if skos_registry is not None:
return skos_registry
skos_registry = Registry()
registry.registerUtility(skos_registry, ISkosRegistry)
return registry.queryUtility(ISkosRegistry)
def get_skos_registry(registry):
#Argument might be a config or request
regis = getattr(registry, 'registry', None)
if regis is None:
regis = registry
return regis.queryUtility(ISkosRegistry)
def includeme(config):
_build_skos_registry(config.registry)
config.add_directive('get_skos_registry', get_skos_registry)
config.add_request_method(get_skos_registry, 'skos_registry', reify=True)
config.add_route('skosprovider.conceptschemes', '/conceptschemes')
config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}')
config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts')
config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}')
config.scan()
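Editor's note on the record above, an illustrative sketch rather than part of the dataset: once `add_request_method(get_skos_registry, 'skos_registry', reify=True)` is in place, a view can read the registry straight off the request. The view callable and renderer below are hypothetical.
from pyramid.view import view_config

@view_config(route_name='skosprovider.conceptschemes', renderer='json')
def conceptschemes(request):
    # reify=True means get_skos_registry(request) runs once per request
    # and the result is cached on the request object afterwards.
    registry = request.skos_registry
    return [p.get_vocabulary_id() for p in registry.get_providers()]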
|
638ea1b12b71f74b357d60b09f1284625db73b2d | migrations/versions/0040_adjust_mmg_provider_rate.py | migrations/versions/0040_adjust_mmg_provider_rate.py |
# revision identifiers, used by Alembic.
revision = '0040_adjust_mmg_provider_rate'
down_revision = '0039_fix_notifications'
import uuid
from datetime import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(
sa.sql.text(("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES (:id, :valid_from, :rate, (SELECT id FROM provider_details WHERE identifier = 'mmg'))")),
id=uuid.uuid4(),
valid_from=datetime.utcnow(),
rate=1.65
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(("DELETE FROM provider_rates "
"WHERE provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg') "
"AND rate = 1.65"))
### end Alembic commands ###
|
# revision identifiers, used by Alembic.
revision = '0040_adjust_mmg_provider_rate'
down_revision = '0039_fix_notifications'
import uuid
from datetime import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(
sa.sql.text(("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES (:id, :valid_from, :rate, (SELECT id FROM provider_details WHERE identifier = 'mmg'))")),
id=uuid.uuid4(),
valid_from=datetime(2016, 7, 1),
rate=1.65
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(("DELETE FROM provider_rates "
"WHERE provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg') "
"AND rate = 1.65"))
### end Alembic commands ###
| Set the start date for the new rate as July 1 | Set the start date for the new rate as July 1
| Python | mit | alphagov/notifications-api,alphagov/notifications-api |
# revision identifiers, used by Alembic.
revision = '0040_adjust_mmg_provider_rate'
down_revision = '0039_fix_notifications'
import uuid
from datetime import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(
sa.sql.text(("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES (:id, :valid_from, :rate, (SELECT id FROM provider_details WHERE identifier = 'mmg'))")),
id=uuid.uuid4(),
- valid_from=datetime.utcnow(),
+ valid_from=datetime(2016, 7, 1),
rate=1.65
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(("DELETE FROM provider_rates "
"WHERE provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg') "
"AND rate = 1.65"))
### end Alembic commands ###
| Set the start date for the new rate as July 1 | ## Code Before:
# revision identifiers, used by Alembic.
revision = '0040_adjust_mmg_provider_rate'
down_revision = '0039_fix_notifications'
import uuid
from datetime import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(
sa.sql.text(("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES (:id, :valid_from, :rate, (SELECT id FROM provider_details WHERE identifier = 'mmg'))")),
id=uuid.uuid4(),
valid_from=datetime.utcnow(),
rate=1.65
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(("DELETE FROM provider_rates "
"WHERE provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg') "
"AND rate = 1.65"))
### end Alembic commands ###
## Instruction:
Set the start date for the new rate as July 1
## Code After:
# revision identifiers, used by Alembic.
revision = '0040_adjust_mmg_provider_rate'
down_revision = '0039_fix_notifications'
import uuid
from datetime import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(
sa.sql.text(("INSERT INTO provider_rates (id, valid_from, rate, provider_id) "
"VALUES (:id, :valid_from, :rate, (SELECT id FROM provider_details WHERE identifier = 'mmg'))")),
id=uuid.uuid4(),
valid_from=datetime(2016, 7, 1),
rate=1.65
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute(("DELETE FROM provider_rates "
"WHERE provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg') "
"AND rate = 1.65"))
### end Alembic commands ###
|
0ceedd5b22a42634889b572018db1153e1ef2855 | tests/integration/services/user_avatar/test_update_avatar_image.py | tests/integration/services/user_avatar/test_update_avatar_image.py |
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
assert avatar.path == data_path / 'global/users/avatars' / expected_filename
|
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
expected = data_path / 'global' / 'users' / 'avatars' / expected_filename
assert avatar.path == expected
| Use `/` operator to assemble path | Use `/` operator to assemble path
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
+ expected = data_path / 'global' / 'users' / 'avatars' / expected_filename
- assert avatar.path == data_path / 'global/users/avatars' / expected_filename
+ assert avatar.path == expected
| Use `/` operator to assemble path | ## Code Before:
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
assert avatar.path == data_path / 'global/users/avatars' / expected_filename
## Instruction:
Use `/` operator to assemble path
## Code After:
from pathlib import Path
import pytest
from byceps.services.user_avatar import service as user_avatar_service
from byceps.util.image.models import ImageType
@pytest.mark.parametrize(
'image_extension, image_type',
[
('jpeg', ImageType.jpeg),
('png', ImageType.png),
],
)
def test_path(data_path, site_app, user, image_extension, image_type):
with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f:
avatar_id = user_avatar_service.update_avatar_image(
user.id, f, {image_type}
)
avatar = user_avatar_service.get_db_avatar(avatar_id)
expected_filename = f'{avatar.id}.{image_extension}'
expected = data_path / 'global' / 'users' / 'avatars' / expected_filename
assert avatar.path == expected
|
1da520787717117b0413715f9a6df834f2d9e7e1 | press_releases/migrations/0009_auto_20170519_1308.py | press_releases/migrations/0009_auto_20170519_1308.py | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
| from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
| Change help text wording to follow WorkflowStateMixin | Change help text wording to follow WorkflowStateMixin
| Python | mit | ic-labs/django-icekit,ic-labs/icekit-press-releases,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/icekit-press-releases | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
- field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
+ field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
- field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
+ field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
| Change help text wording to follow WorkflowStateMixin | ## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
## Instruction:
Change help text wording to follow WorkflowStateMixin
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
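Editor's aside on the record above, a hedged sketch and not part of the dataset: Django keeps help_text in migration state, so even wording-only edits like this one surface as field operations. A hypothetical follow-up change would typically be an AlterField rather than AddField:
from django.db import migrations, models

class Migration(migrations.Migration):  # hypothetical follow-up migration
    dependencies = [('icekit_press_releases', '0009_auto_20170519_1308')]
    operations = [
        migrations.AlterField(
            model_name='pressreleaselisting',
            name='admin_notes',
            field=models.TextField(help_text="Administrator's notes about this content", blank=True),
        ),
    ]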
|
199f9ace071b95822a9a0fb53c9becfb0ab4abd2 | tests/pytests/unit/modules/test_win_servermanager.py | tests/pytests/unit/modules/test_win_servermanager.py | import os
import pytest
import salt.modules.win_servermanager as win_servermanager
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {win_servermanager: {}}
def test_install():
mock_out = {
"FeatureResult": {
}
}
with patch.object(win_servermanager, "_pshell_json", return_value=""):
| import os
import pytest
import salt.modules.win_servermanager as win_servermanager
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {
win_servermanager: {
"__grains__": {"osversion": "6.2"}
}
}
def test_install():
mock_out = {
'Success': True,
'RestartNeeded': 1,
'FeatureResult': [
{
'Id': 338,
'Name': 'XPS-Viewer',
'DisplayName': 'XPS Viewer',
'Success': True,
'RestartNeeded': False,
'Message': '',
'SkipReason': 0
}
],
'ExitCode': 0
}
expected = {
"ExitCode": 0,
"RestartNeeded": False,
"Restarted": False,
"Features": {
"XPS-Viewer": {
"DisplayName": "XPS Viewer",
"Message": "",
"RestartNeeded": False,
"SkipReason": 0,
"Success": True
}
},
"Success": True}
mock_reboot = MagicMock(return_value=True)
with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
result = win_servermanager.install("XPS-Viewer")
assert result == expected
def test_install_restart():
mock_out = {
'Success': True,
'RestartNeeded': 1,
'FeatureResult': [
{
'Id': 338,
'Name': 'XPS-Viewer',
'DisplayName': 'XPS Viewer',
'Success': True,
'RestartNeeded': True,
'Message': '',
'SkipReason': 0
}
],
'ExitCode': 0
}
expected = {
"ExitCode": 0,
"RestartNeeded": True,
"Restarted": True,
"Features": {
"XPS-Viewer": {
"DisplayName": "XPS Viewer",
"Message": "",
"RestartNeeded": True,
"SkipReason": 0,
"Success": True
}
},
"Success": True}
mock_reboot = MagicMock(return_value=True)
with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
result = win_servermanager.install("XPS-Viewer", restart=True)
mock_reboot.assert_called_once()
assert result == expected
| Add some unit tests for install | Add some unit tests for install
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | import os
import pytest
import salt.modules.win_servermanager as win_servermanager
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
+ return {
- return {win_servermanager: {}}
+ win_servermanager: {
+ "__grains__": {"osversion": "6.2"}
+ }
+ }
def test_install():
mock_out = {
+ 'Success': True,
+ 'RestartNeeded': 1,
+ 'FeatureResult': [
+ {
+ 'Id': 338,
+ 'Name': 'XPS-Viewer',
+ 'DisplayName': 'XPS Viewer',
+ 'Success': True,
+ 'RestartNeeded': False,
+ 'Message': '',
+ 'SkipReason': 0
+ }
+ ],
+ 'ExitCode': 0
+ }
+ expected = {
+ "ExitCode": 0,
+ "RestartNeeded": False,
+ "Restarted": False,
- "FeatureResult": {
+ "Features": {
+ "XPS-Viewer": {
+ "DisplayName": "XPS Viewer",
+ "Message": "",
+ "RestartNeeded": False,
+ "SkipReason": 0,
+ "Success": True
+ }
+ },
+ "Success": True}
+ mock_reboot = MagicMock(return_value=True)
- }
- }
-
- with patch.object(win_servermanager, "_pshell_json", return_value=""):
+ with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
+ patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
+ result = win_servermanager.install("XPS-Viewer")
+ assert result == expected
+ def test_install_restart():
+ mock_out = {
+ 'Success': True,
+ 'RestartNeeded': 1,
+ 'FeatureResult': [
+ {
+ 'Id': 338,
+ 'Name': 'XPS-Viewer',
+ 'DisplayName': 'XPS Viewer',
+ 'Success': True,
+ 'RestartNeeded': True,
+ 'Message': '',
+ 'SkipReason': 0
+ }
+ ],
+ 'ExitCode': 0
+ }
+ expected = {
+ "ExitCode": 0,
+ "RestartNeeded": True,
+ "Restarted": True,
+ "Features": {
+ "XPS-Viewer": {
+ "DisplayName": "XPS Viewer",
+ "Message": "",
+ "RestartNeeded": True,
+ "SkipReason": 0,
+ "Success": True
+ }
+ },
+ "Success": True}
+
+ mock_reboot = MagicMock(return_value=True)
+ with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
+ patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
+ result = win_servermanager.install("XPS-Viewer", restart=True)
+ mock_reboot.assert_called_once()
+ assert result == expected
+ | Add some unit tests for install | ## Code Before:
import os
import pytest
import salt.modules.win_servermanager as win_servermanager
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {win_servermanager: {}}
def test_install():
mock_out = {
"FeatureResult": {
}
}
with patch.object(win_servermanager, "_pshell_json", return_value=""):
## Instruction:
Add some unit tests for install
## Code After:
import os
import pytest
import salt.modules.win_servermanager as win_servermanager
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {
win_servermanager: {
"__grains__": {"osversion": "6.2"}
}
}
def test_install():
mock_out = {
'Success': True,
'RestartNeeded': 1,
'FeatureResult': [
{
'Id': 338,
'Name': 'XPS-Viewer',
'DisplayName': 'XPS Viewer',
'Success': True,
'RestartNeeded': False,
'Message': '',
'SkipReason': 0
}
],
'ExitCode': 0
}
expected = {
"ExitCode": 0,
"RestartNeeded": False,
"Restarted": False,
"Features": {
"XPS-Viewer": {
"DisplayName": "XPS Viewer",
"Message": "",
"RestartNeeded": False,
"SkipReason": 0,
"Success": True
}
},
"Success": True}
mock_reboot = MagicMock(return_value=True)
with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
result = win_servermanager.install("XPS-Viewer")
assert result == expected
def test_install_restart():
mock_out = {
'Success': True,
'RestartNeeded': 1,
'FeatureResult': [
{
'Id': 338,
'Name': 'XPS-Viewer',
'DisplayName': 'XPS Viewer',
'Success': True,
'RestartNeeded': True,
'Message': '',
'SkipReason': 0
}
],
'ExitCode': 0
}
expected = {
"ExitCode": 0,
"RestartNeeded": True,
"Restarted": True,
"Features": {
"XPS-Viewer": {
"DisplayName": "XPS Viewer",
"Message": "",
"RestartNeeded": True,
"SkipReason": 0,
"Success": True
}
},
"Success": True}
mock_reboot = MagicMock(return_value=True)
with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
result = win_servermanager.install("XPS-Viewer", restart=True)
mock_reboot.assert_called_once()
assert result == expected
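Editor's note: a hypothetical companion test, not part of the record. It reuses the imports and module under test from the record above and exercises the same patch.dict pattern for the __salt__ loader dict, this time checking the negative path: with restart left at its default, system.reboot must never fire.
def test_install_does_not_reboot_by_default():
    mock_out = {'Success': True, 'RestartNeeded': 1, 'ExitCode': 0,
                'FeatureResult': [{'Id': 338, 'Name': 'XPS-Viewer',
                                   'DisplayName': 'XPS Viewer', 'Success': True,
                                   'RestartNeeded': False, 'Message': '',
                                   'SkipReason': 0}]}
    mock_reboot = MagicMock(return_value=True)
    with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
            patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
        win_servermanager.install("XPS-Viewer")
        # restart defaults to False, so the mocked reboot is never invoked
        mock_reboot.assert_not_called()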
|
6c40079139e714ff145e0a4adff8c3a537172ef5 | erpnext/patches/v4_1/fix_delivery_and_billing_status_for_draft_so.py | erpnext/patches/v4_1/fix_delivery_and_billing_status_for_draft_so.py |
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered'
where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0""")
frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed'
where billing_status = 'Billed' and ifnull(per_billed, 0) = 0""") |
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered'
where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0 and docstatus = 0""")
frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed'
where billing_status = 'Billed' and ifnull(per_billed, 0) = 0 and docstatus = 0""") | Update delivery and billing status in SO | Update delivery and billing status in SO
| Python | agpl-3.0 | gangadharkadam/saloon_erp,njmube/erpnext,Tejal011089/fbd_erpnext,anandpdoshi/erpnext,SPKian/Testing,indictranstech/focal-erpnext,mbauskar/helpdesk-erpnext,4commerce-technologies-AG/erpnext,mbauskar/helpdesk-erpnext,indictranstech/vestasi-erpnext,indictranstech/internal-erpnext,indictranstech/phrerp,indictranstech/buyback-erp,suyashphadtare/test,indictranstech/vestasi-erpnext,ShashaQin/erpnext,indictranstech/erpnext,gangadhar-kadam/verve-erp,BhupeshGupta/erpnext,gangadharkadam/saloon_erp,shitolepriya/test-erp,rohitwaghchaure/erpnext-receipher,BhupeshGupta/erpnext,gangadhar-kadam/smrterp,mbauskar/alec_frappe5_erpnext,mbauskar/Das_Erpnext,gangadhar-kadam/verve-erp,indictranstech/buyback-erp,indictranstech/Das_Erpnext,Tejal011089/digitales_erpnext,anandpdoshi/erpnext,SPKian/Testing,sheafferusa/erpnext,SPKian/Testing,gangadharkadam/office_erp,Tejal011089/paypal_erpnext,ThiagoGarciaAlves/erpnext,netfirms/erpnext,gangadharkadam/letzerp,indictranstech/biggift-erpnext,gangadharkadam/v6_erp,SPKian/Testing2,Drooids/erpnext,saurabh6790/test-erp,hernad/erpnext,pombredanne/erpnext,mahabuber/erpnext,indictranstech/internal-erpnext,mahabuber/erpnext,gangadharkadam/saloon_erp_install,mahabuber/erpnext,susuchina/ERPNEXT,indictranstech/tele-erpnext,gangadharkadam/tailorerp,gangadhar-kadam/helpdesk-erpnext,indictranstech/erpnext,mbauskar/omnitech-erpnext,gangadhar-kadam/verve_test_erp,suyashphadtare/vestasi-update-erp,sheafferusa/erpnext,sagar30051991/ozsmart-erp,hanselke/erpnext-1,tmimori/erpnext,mahabuber/erpnext,Tejal011089/paypal_erpnext,Tejal011089/osmosis_erpnext,netfirms/erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/osmosis_erpnext,njmube/erpnext,mbauskar/omnitech-erpnext,indictranstech/Das_Erpnext,gangadharkadam/v4_erp,fuhongliang/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/v4_erp,gangadharkadam/verveerp,gangadhar-kadam/verve_erp,mbauskar/sapphire-erpnext,suyashphadtare/gd-erp,indictranstech/osmosis-erpnext,mbauskar/omnitech-demo-erpnext,gangadharkadam/verveerp,Drooids/erpnext,rohitwaghchaure/digitales_erpnext,BhupeshGupta/erpnext,indictranstech/biggift-erpnext,rohitwaghchaure/erpnext_smart,gangadharkadam/sher,hanselke/erpnext-1,suyashphadtare/vestasi-erp-jan-end,ShashaQin/erpnext,Tejal011089/huntercamp_erpnext,fuhongliang/erpnext,Tejal011089/digitales_erpnext,anandpdoshi/erpnext,Tejal011089/trufil-erpnext,gangadhar-kadam/smrterp,MartinEnder/erpnext-de,rohitwaghchaure/erpnext-receipher,Tejal011089/huntercamp_erpnext,rohitwaghchaure/digitales_erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_live_erp,indictranstech/phrerp,gangadharkadam/smrterp,mbauskar/helpdesk-erpnext,suyashphadtare/sajil-erp,indictranstech/tele-erpnext,gangadhar-kadam/verve_test_erp,fuhongliang/erpnext,netfirms/erpnext,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/GenieManager-erpnext,hatwar/Das_erpnext,dieface/erpnext,hatwar/focal-erpnext,gangadhar-kadam/latestchurcherp,treejames/erpnext,susuchina/ERPNEXT,4commerce-technologies-AG/erpnext,hatwar/focal-erpnext,shitolepriya/test-erp,hatwar/Das_erpnext,mbauskar/sapphire-erpnext,mbauskar/phrerp,dieface/erpnext,shitolepriya/test-erp,Tejal011089/fbd_erpnext,indictranstech/Das_Erpnext,pawaranand/phrerp,BhupeshGupta/erpnext,fuhongliang/erpnext,suyashphadtare/vestasi-erp-final,gangadharkadam/v4_erp,suyashphadtare/sajil-final-erp,gangadhar-kadam/latestchurcherp,meisterkleister/erpnext,suyashphadtare/vestasi-erp-1,hernad/erpnext,pawaranand/phrerp,mbauskar/Das_Erpnext,gangadharkadam/saloon_erp_install,gangadharkadam/johnerp,geekroot/erpnext,indictranstech
/osmosis-erpnext,gangadhar-kadam/verve_erp,gangadharkadam/v6_erp,gangadharkadam/verveerp,gmarke/erpnext,rohitwaghchaure/New_Theme_Erp,rohitwaghchaure/GenieManager-erpnext,indictranstech/reciphergroup-erpnext,indictranstech/biggift-erpnext,mbauskar/phrerp,njmube/erpnext,hatwar/Das_erpnext,suyashphadtare/sajil-final-erp,ShashaQin/erpnext,suyashphadtare/gd-erp,mbauskar/alec_frappe5_erpnext,indictranstech/tele-erpnext,Tejal011089/huntercamp_erpnext,indictranstech/reciphergroup-erpnext,suyashphadtare/vestasi-erp-jan-end,mbauskar/phrerp,treejames/erpnext,rohitwaghchaure/New_Theme_Erp,ThiagoGarciaAlves/erpnext,gangadhar-kadam/laganerp,gangadhar-kadam/verve_live_erp,gangadhar-kadam/verve_live_erp,anandpdoshi/erpnext,indictranstech/focal-erpnext,gangadharkadam/v4_erp,sagar30051991/ozsmart-erp,gangadharkadam/v5_erp,tmimori/erpnext,MartinEnder/erpnext-de,4commerce-technologies-AG/erpnext,gangadhar-kadam/verve_test_erp,suyashphadtare/test,gangadhar-kadam/laganerp,Tejal011089/digitales_erpnext,Tejal011089/huntercamp_erpnext,pombredanne/erpnext,shitolepriya/test-erp,gangadhar-kadam/verve_live_erp,rohitwaghchaure/digitales_erpnext,mbauskar/alec_frappe5_erpnext,SPKian/Testing2,shft117/SteckerApp,suyashphadtare/vestasi-update-erp,suyashphadtare/sajil-erp,Tejal011089/osmosis_erpnext,gangadharkadam/letzerp,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/contributionerp,netfirms/erpnext,indictranstech/phrerp,Tejal011089/trufil-erpnext,indictranstech/tele-erpnext,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/latestchurcherp,meisterkleister/erpnext,Tejal011089/digitales_erpnext,aruizramon/alec_erpnext,indictranstech/focal-erpnext,treejames/erpnext,gangadharkadam/v5_erp,mbauskar/omnitech-erpnext,indictranstech/vestasi-erpnext,Tejal011089/trufil-erpnext,hanselke/erpnext-1,rohitwaghchaure/GenieManager-erpnext,gangadharkadam/vlinkerp,gangadharkadam/smrterp,suyashphadtare/gd-erp,Drooids/erpnext,gangadharkadam/saloon_erp,sheafferusa/erpnext,rohitwaghchaure/New_Theme_Erp,indictranstech/vestasi-erpnext,hatwar/focal-erpnext,gangadharkadam/v5_erp,Tejal011089/paypal_erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/reciphergroup-erpnext,hatwar/focal-erpnext,aruizramon/alec_erpnext,saurabh6790/test-erp,gangadharkadam/saloon_erp,indictranstech/osmosis-erpnext,indictranstech/internal-erpnext,hanselke/erpnext-1,sheafferusa/erpnext,gangadharkadam/sher,mbauskar/Das_Erpnext,susuchina/ERPNEXT,gangadhar-kadam/verve_erp,gangadhar-kadam/helpdesk-erpnext,gangadhar-kadam/latestchurcherp,rohitwaghchaure/erpnext_smart,suyashphadtare/vestasi-erp-final,gmarke/erpnext,indictranstech/fbd_erpnext,gangadharkadam/contributionerp,sagar30051991/ozsmart-erp,suyashphadtare/test,mbauskar/omnitech-demo-erpnext,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/v6_erp,gmarke/erpnext,Suninus/erpnext,hatwar/buyback-erpnext,mbauskar/helpdesk-erpnext,gangadhar-kadam/verve-erp,gangadharkadam/vlinkerp,gangadharkadam/vlinkerp,ShashaQin/erpnext,pombredanne/erpnext,geekroot/erpnext,gangadharkadam/letzerp,rohitwaghchaure/GenieManager-erpnext,indictranstech/erpnext,Suninus/erpnext,tmimori/erpnext,suyashphadtare/vestasi-update-erp,indictranstech/trufil-erpnext,indictranstech/erpnext,indictranstech/trufil-erpnext,indictranstech/trufil-erpnext,indictranstech/fbd_erpnext,gangadharkadam/letzerp,MartinEnder/erpnext-de,indictranstech/internal-erpnext,rohitwaghchaure/New_Theme_Erp,aruizramon/alec_erpnext,MartinEnder/erpnext-de,gangadharkadam/office_erp,indictranstech/fbd_erpnext,Tejal011089/trufil-erpnext,Tejal011089/osmosis_erpnext,ThiagoGarciaAlves/erpnext,ha
twar/buyback-erpnext,indictranstech/biggift-erpnext,Suninus/erpnext,SPKian/Testing,saurabh6790/test-erp,mbauskar/alec_frappe5_erpnext,shft117/SteckerApp,gangadharkadam/sterp,geekroot/erpnext,treejames/erpnext,gangadharkadam/johnerp,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,gsnbng/erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/fbd_erpnext,Suninus/erpnext,mbauskar/omnitech-erpnext,suyashphadtare/vestasi-erp-1,gangadharkadam/saloon_erp_install,suyashphadtare/vestasi-erp-1,gangadharkadam/sterp,suyashphadtare/vestasi-erp-jan-end,indictranstech/phrerp,ThiagoGarciaAlves/erpnext,meisterkleister/erpnext,gangadharkadam/v5_erp,Tejal011089/paypal_erpnext,hernad/erpnext,hernad/erpnext,gangadharkadam/verveerp,gangadhar-kadam/verve_test_erp,hatwar/Das_erpnext,suyashphadtare/gd-erp,indictranstech/reciphergroup-erpnext,susuchina/ERPNEXT,mbauskar/sapphire-erpnext,mbauskar/omnitech-demo-erpnext,gmarke/erpnext,indictranstech/osmosis-erpnext,Tejal011089/fbd_erpnext,Aptitudetech/ERPNext,gsnbng/erpnext,shft117/SteckerApp,gangadhar-kadam/verve_erp,suyashphadtare/sajil-erp,geekroot/erpnext,gangadharkadam/tailorerp,aruizramon/alec_erpnext,gangadharkadam/v6_erp,saurabh6790/test-erp,meisterkleister/erpnext,SPKian/Testing2,suyashphadtare/sajil-final-erp,mbauskar/phrerp,indictranstech/focal-erpnext,gangadharkadam/contributionerp,gangadharkadam/vlinkerp,gangadhar-kadam/laganerp,indictranstech/buyback-erp,njmube/erpnext,pombredanne/erpnext,dieface/erpnext,gsnbng/erpnext,mbauskar/Das_Erpnext,SPKian/Testing2,hatwar/buyback-erpnext,gangadharkadam/contributionerp,gsnbng/erpnext,tmimori/erpnext,sagar30051991/ozsmart-erp,suyashphadtare/vestasi-erp-final,rohitwaghchaure/erpnext_smart,indictranstech/buyback-erp,pawaranand/phrerp,pawaranand/phrerp,gangadharkadam/saloon_erp_install,gangadharkadam/office_erp,Drooids/erpnext,shft117/SteckerApp,dieface/erpnext,indictranstech/trufil-erpnext |
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered'
- where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0""")
+ where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0 and docstatus = 0""")
frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed'
- where billing_status = 'Billed' and ifnull(per_billed, 0) = 0""")
+ where billing_status = 'Billed' and ifnull(per_billed, 0) = 0 and docstatus = 0""") | Update delivery and billing status in SO | ## Code Before:
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered'
where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0""")
frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed'
where billing_status = 'Billed' and ifnull(per_billed, 0) = 0""")
## Instruction:
Update delivery and billing status in SO
## Code After:
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered'
where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0 and docstatus = 0""")
frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed'
where billing_status = 'Billed' and ifnull(per_billed, 0) = 0 and docstatus = 0""") |
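Editor's aside on the record above, hedged and not part of the dataset: the added `docstatus = 0` guard limits the repair to draft documents (in Frappe, docstatus 0/1/2 mark draft/submitted/cancelled). A quick read-back after running the patch might look like:
remaining = frappe.db.sql("""select count(*) from `tabSales Order`
    where docstatus = 0 and delivery_status = 'Delivered'
    and ifnull(per_delivered, 0) = 0""")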
a6f8e42d3e297776a19c8e76dd7f1cfded32a266 | pycon/tutorials/tests/test_utils.py | pycon/tutorials/tests/test_utils.py | """Test for the tutorials.utils package"""
import datetime
import unittest
from mock import patch
from django.template import Template
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(unittest.TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
| """Test for the tutorials.utils package"""
import datetime
from mock import patch
from django.template import Template
from django.test import TestCase
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
| Use django TestCase in tutorial send email test | Use django TestCase in tutorial send email test
It was using regular Python unittest.TestCase for some
reason, resulting in leaving old BulkEmail objects in
the database that other tests weren't expecting.
| Python | bsd-3-clause | PyCon/pycon,PyCon/pycon,PyCon/pycon,njl/pycon,PyCon/pycon,njl/pycon,njl/pycon,njl/pycon | """Test for the tutorials.utils package"""
import datetime
- import unittest
from mock import patch
from django.template import Template
+ from django.test import TestCase
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
- class TestSendEmailMessage(unittest.TestCase):
+ class TestSendEmailMessage(TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
| Use django TestCase in tutorial send email test | ## Code Before:
"""Test for the tutorials.utils package"""
import datetime
import unittest
from mock import patch
from django.template import Template
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(unittest.TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
## Instruction:
Use django TestCase in tutorial send email test
## Code After:
"""Test for the tutorials.utils package"""
import datetime
from mock import patch
from django.template import Template
from django.test import TestCase
from pycon.bulkemail.models import BulkEmail
from ..utils import queue_email_message
today = datetime.date.today()
class TestSendEmailMessage(TestCase):
@patch('django.core.mail.message.EmailMessage.send')
@patch('pycon.tutorials.utils.get_template')
def test_send_email_message(self, get_template, send_mail):
# queue_email_message comes up with the expected template names
# and calls send_mail with the expected arguments
test_template = Template("test template")
get_template.return_value = test_template
context = {'a': 1, 'b': 2}
queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context)
args, kwargs = get_template.call_args_list[0]
expected_template_name = "tutorials/email/TESTNAME/subject.txt"
self.assertEqual(expected_template_name, args[0])
args, kwargs = get_template.call_args_list[1]
expected_template_name = "tutorials/email/TESTNAME/body.txt"
self.assertEqual(expected_template_name, args[0])
# Creates a BulkEmail object
self.assertEqual(1, BulkEmail.objects.count())
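Editor's note on the record above, an editor-added sketch: the switch matters because django.test.TestCase wraps each test in a transaction that is rolled back afterwards, so the BulkEmail row created by queue_email_message can no longer leak into later tests. Minimal illustration, assuming BulkEmail can be created bare, which may not hold for the real model:
class RollbackIllustration(TestCase):
    def test_row_is_rolled_back(self):
        BulkEmail.objects.create()
        self.assertEqual(BulkEmail.objects.count(), 1)
        # the wrapping transaction is rolled back when the test ends,
        # leaving the table empty for the next test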
|
f869cf9a94749ea210d38178317d196fbdd15fac | resolwe/flow/tests/test_backend.py | resolwe/flow/tests/test_backend.py | import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
shutil.rmtree(settings.FLOW['BACKEND']['DATA_PATH'])
os.makedirs(settings.FLOW['BACKEND']['DATA_PATH'])
def test_manager(self):
manager.communicate()
| import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
data_path = settings.FLOW['BACKEND']['DATA_PATH']
if os.path.exists(data_path):
shutil.rmtree(data_path)
os.makedirs(data_path)
def test_manager(self):
manager.communicate()
| Fix error if no data path | Fix error if no data path
| Python | apache-2.0 | jberci/resolwe,jberci/resolwe,genialis/resolwe,genialis/resolwe | import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
- shutil.rmtree(settings.FLOW['BACKEND']['DATA_PATH'])
- os.makedirs(settings.FLOW['BACKEND']['DATA_PATH'])
+ data_path = settings.FLOW['BACKEND']['DATA_PATH']
+
+ if os.path.exists(data_path):
+ shutil.rmtree(data_path)
+
+ os.makedirs(data_path)
def test_manager(self):
manager.communicate()
| Fix error if no data path | ## Code Before:
import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
shutil.rmtree(settings.FLOW['BACKEND']['DATA_PATH'])
os.makedirs(settings.FLOW['BACKEND']['DATA_PATH'])
def test_manager(self):
manager.communicate()
## Instruction:
Fix error if no data path
## Code After:
import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
data_path = settings.FLOW['BACKEND']['DATA_PATH']
if os.path.exists(data_path):
shutil.rmtree(data_path)
os.makedirs(data_path)
def test_manager(self):
manager.communicate()
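Editor's aside, not part of the record: on Python 3 the same "reset the data directory" setup can drop the explicit exists() check entirely, a sketch of which follows.
import os
import shutil

def reset_dir(path):
    shutil.rmtree(path, ignore_errors=True)  # silently skips a missing path
    os.makedirs(path)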
|
a2920b9bf5386b3f92a8e2cd5f7c4251439b2c42 | newswall/admin.py | newswall/admin.py | from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
| from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_editable=('is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_editable=('is_active',),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
| Make a few fields editable from the changelist | Make a few fields editable from the changelist
| Python | bsd-3-clause | matthiask/django-newswall,registerguard/django-newswall,michaelkuty/django-newswall,HerraLampila/django-newswall,matthiask/django-newswall,HerraLampila/django-newswall,registerguard/django-newswall,michaelkuty/django-newswall | from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
+ list_editable=('is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
+ list_editable=('is_active',),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
| Make a few fields editable from the changelist | ## Code Before:
from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
## Instruction:
Make a few fields editable from the changelist
## Code After:
from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_editable=('is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_editable=('is_active',),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
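Editor's caveat on the record above, not part of the dataset: Django validates list_editable at startup; every entry must also appear in list_display and must not be the link column. An equivalent class-based registration, editor-added with fields taken from the record, makes the constraint explicit:
from django.contrib import admin
from newswall.models import Story

class StoryAdmin(admin.ModelAdmin):
    # 'title' stays the link column; every list_editable entry is also in
    # list_display, which Django's admin checks require.
    list_display = ('title', 'source', 'is_active', 'timestamp')
    list_editable = ('is_active',)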
|
ce052f8e19d46f6db202e7eee054d5b88af01d9b | nanagogo/__init__.py | nanagogo/__init__.py |
from nanagogo.api import NanagogoRequest, NanagogoError
def get(path, params={}):
r = NanagogoRequest(path,
method="GET",
params=params)
return r.wrap()
def post(path, params={}, data=None):
r = NanagogoRequest(path,
method="POST",
params=params,
data=data)
return r.wrap()
class NanagogoTalk(object):
def __init__(self, name):
self.name = name
@property
def info(self):
path = ("talks", self.name)
return get(path)
def feed(self, count=30, targetid=None, direction="PREV"):
path = ("talks", self.name, "posts")
params = {'limit': count,
'targetId': targetid,
'direction': direction}
return get(path, params=params)
def iterfeed(self, count=200, targetid=None):
while True:
feed = self.feed(count=count,
targetid=targetid,
direction="PREV")
if len(feed) == 0:
break
yield feed
targetid = feed[-1]['post']['postId'] - 1
if targetid <= 0:
break
if __name__ == "__main__":
tani = NanagogoTalk('tani-marika')
print(tani.info)
|
from nanagogo.api import NanagogoRequest, NanagogoError, s
def get(path, params={}):
r = NanagogoRequest(path,
method="GET",
params=params)
return r.wrap()
def post(path, params={}, data=None):
r = NanagogoRequest(path,
method="POST",
params=params,
data=data)
return r.wrap()
class NanagogoTalk(object):
def __init__(self, name):
self.name = name
@property
def info(self):
path = ("talks", self.name)
return get(path)
def feed(self, count=30, targetid=None, direction="PREV"):
path = ("talks", self.name, "posts")
params = {'limit': count,
'targetId': targetid,
'direction': direction.upper()}
return get(path, params=params)
def iterfeed(self, count=200, targetid=None):
while True:
feed = self.feed(count=count,
targetid=targetid,
direction="PREV")
if len(feed) == 0:
break
yield feed
targetid = feed[-1]['post']['postId'] - 1
if targetid <= 0:
break
if __name__ == "__main__":
tani = NanagogoTalk('tani-marika')
print(tani.info)
| Convert direction to upper case | Convert direction to upper case
| Python | mit | kastden/nanagogo |
- from nanagogo.api import NanagogoRequest, NanagogoError
+ from nanagogo.api import NanagogoRequest, NanagogoError, s
def get(path, params={}):
r = NanagogoRequest(path,
method="GET",
params=params)
return r.wrap()
def post(path, params={}, data=None):
r = NanagogoRequest(path,
method="POST",
params=params,
data=data)
return r.wrap()
class NanagogoTalk(object):
def __init__(self, name):
self.name = name
@property
def info(self):
path = ("talks", self.name)
return get(path)
def feed(self, count=30, targetid=None, direction="PREV"):
path = ("talks", self.name, "posts")
params = {'limit': count,
'targetId': targetid,
- 'direction': direction}
+ 'direction': direction.upper()}
return get(path, params=params)
def iterfeed(self, count=200, targetid=None):
while True:
feed = self.feed(count=count,
targetid=targetid,
direction="PREV")
if len(feed) == 0:
break
yield feed
targetid = feed[-1]['post']['postId'] - 1
if targetid <= 0:
break
if __name__ == "__main__":
tani = NanagogoTalk('tani-marika')
print(tani.info)
| Convert direction to upper case | ## Code Before:
from nanagogo.api import NanagogoRequest, NanagogoError
def get(path, params={}):
r = NanagogoRequest(path,
method="GET",
params=params)
return r.wrap()
def post(path, params={}, data=None):
r = NanagogoRequest(path,
method="POST",
params=params,
data=data)
return r.wrap()
class NanagogoTalk(object):
def __init__(self, name):
self.name = name
@property
def info(self):
path = ("talks", self.name)
return get(path)
def feed(self, count=30, targetid=None, direction="PREV"):
path = ("talks", self.name, "posts")
params = {'limit': count,
'targetId': targetid,
'direction': direction}
return get(path, params=params)
def iterfeed(self, count=200, targetid=None):
while True:
feed = self.feed(count=count,
targetid=targetid,
direction="PREV")
if len(feed) == 0:
break
yield feed
targetid = feed[-1]['post']['postId'] - 1
if targetid <= 0:
break
if __name__ == "__main__":
tani = NanagogoTalk('tani-marika')
print(tani.info)
## Instruction:
Convert direction to upper case
## Code After:
from nanagogo.api import NanagogoRequest, NanagogoError, s
def get(path, params={}):
r = NanagogoRequest(path,
method="GET",
params=params)
return r.wrap()
def post(path, params={}, data=None):
r = NanagogoRequest(path,
method="POST",
params=params,
data=data)
return r.wrap()
class NanagogoTalk(object):
def __init__(self, name):
self.name = name
@property
def info(self):
path = ("talks", self.name)
return get(path)
def feed(self, count=30, targetid=None, direction="PREV"):
path = ("talks", self.name, "posts")
params = {'limit': count,
'targetId': targetid,
'direction': direction.upper()}
return get(path, params=params)
def iterfeed(self, count=200, targetid=None):
while True:
feed = self.feed(count=count,
targetid=targetid,
direction="PREV")
if len(feed) == 0:
break
yield feed
targetid = feed[-1]['post']['postId'] - 1
if targetid <= 0:
break
if __name__ == "__main__":
tani = NanagogoTalk('tani-marika')
print(tani.info)
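Editor's note, an illustrative call rather than part of the record: after the change above, lowercase direction arguments are normalized before the request is built.
feed = tani.feed(count=10, direction='prev')  # sent to the API as 'PREV'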
|
04cca2c87cc8e56ecd84e1b3125a7a7b8c67b026 | norc_utils/backup.py | norc_utils/backup.py |
import os
from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
import os
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
| Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them. | Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
| Python | bsd-3-clause | darrellsilver/norc,darrellsilver/norc |
import os
- from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
+ from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
- AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
+ from norc.settings import (AWS_ACCESS_KEY_ID,
+ AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
| Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them. | ## Code Before:
import os
from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
## Instruction:
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
## Code After:
import os
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
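Editor's sketch on the record above, hedged and not part of the dataset: the BACKUP_SYSTEMS dict makes new backends a one-line registration, so a hypothetical local-disk backend could plug in alongside s3_backup without touching _backup_file.
import shutil

def local_backup(fp, target):
    with open(target, 'wb') as out:
        shutil.copyfileobj(fp, out)
    return True

BACKUP_SYSTEMS['LocalDisk'] = local_backup  # selected via the BACKUP_SYSTEM setting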
|
428fda845c79f70c6e3d64302bbc716da5130625 | src/django_richenum/forms/fields.py | src/django_richenum/forms/fields.py | from abc import ABCMeta
from abc import abstractmethod
from django import forms
class _BaseEnumField(forms.TypedChoiceField):
__metaclass__ = ABCMeta
def __init__(self, enum, *args, **kwargs):
self.enum = enum
kwargs.setdefault('empty_value', None)
if 'choices' in kwargs:
raise ValueError('Cannot explicitly supply choices to enum fields.')
if 'coerce' in kwargs:
raise ValueError('Cannot explicitly supply coercion function to enum fields.')
kwargs['choices'] = self.get_choices()
kwargs['coerce'] = self.coerce_value
super(_BaseEnumField, self).__init__(*args, **kwargs)
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def coerce_value(self, val):
pass
class CanonicalEnumField(_BaseEnumField):
"""
Uses the RichEnum/OrderedRichEnum canonical_name as form field values
"""
def get_choices(self):
return self.enum.choices()
def coerce_value(self, name):
return self.enum.from_canonical(name)
class IndexEnumField(_BaseEnumField):
"""
Uses the OrderedRichEnum index as form field values
"""
def get_choices(self):
return self.enum.choices(value_field='index')
def coerce_value(self, index):
return self.enum.from_index(int(index))
| from abc import ABCMeta
from abc import abstractmethod
from django import forms
class _BaseEnumField(forms.TypedChoiceField):
__metaclass__ = ABCMeta
def __init__(self, enum, *args, **kwargs):
self.enum = enum
kwargs.setdefault('empty_value', None)
if 'choices' in kwargs:
raise ValueError('Cannot explicitly supply choices to enum fields.')
if 'coerce' in kwargs:
raise ValueError('Cannot explicitly supply coercion function to enum fields.')
kwargs['choices'] = self.get_choices()
kwargs['coerce'] = self.coerce_value
super(_BaseEnumField, self).__init__(*args, **kwargs)
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def coerce_value(self, val):
pass
def run_validators(self, value):
# These have to be from a set, so it's hard for me to imagine a useful
# custom validator.
# The run_validators method in the superclass checks the value against
# None, [], {}, etc, which causes warnings in the RichEnum.__eq__
# method... arguably we shouldn't warn in those cases, but for now we
# do.
pass
class CanonicalEnumField(_BaseEnumField):
"""
Uses the RichEnum/OrderedRichEnum canonical_name as form field values
"""
def get_choices(self):
return self.enum.choices()
def coerce_value(self, name):
return self.enum.from_canonical(name)
class IndexEnumField(_BaseEnumField):
"""
Uses the OrderedRichEnum index as form field values
"""
def get_choices(self):
return self.enum.choices(value_field='index')
def coerce_value(self, index):
return self.enum.from_index(int(index))
| Make run_validators method a no-op | _BaseEnumField: Make run_validators method a no-op
See the comment in this commit-- I can't see value in allowing custom
validators on EnumFields and the implementation in the superclass causes
warnings in RichEnum.__eq__.
Arguably those warnings aren't useful (warning against []/falsy compare).
In that case, we can revert this when they're silenced.
Alternatively, if we need the warnings and need this functionality, we'd have to
re-implement the method in the superclass without said check, or live with
warnings every time a form containing an EnumField is validated, which sucks.
| Python | mit | hearsaycorp/django-richenum,adepue/django-richenum,dhui/django-richenum,asherf/django-richenum,hearsaycorp/django-richenum | from abc import ABCMeta
from abc import abstractmethod
from django import forms
class _BaseEnumField(forms.TypedChoiceField):
__metaclass__ = ABCMeta
def __init__(self, enum, *args, **kwargs):
self.enum = enum
kwargs.setdefault('empty_value', None)
if 'choices' in kwargs:
raise ValueError('Cannot explicitly supply choices to enum fields.')
if 'coerce' in kwargs:
raise ValueError('Cannot explicitly supply coercion function to enum fields.')
kwargs['choices'] = self.get_choices()
kwargs['coerce'] = self.coerce_value
super(_BaseEnumField, self).__init__(*args, **kwargs)
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def coerce_value(self, val):
pass
+ def run_validators(self, value):
+ # These have to be from a set, so it's hard for me to imagine a useful
+ # custom validator.
+ # The run_validators method in the superclass checks the value against
+ # None, [], {}, etc, which causes warnings in the RichEnum.__eq__
+ # method... arguably we shouldn't warn in those cases, but for now we
+ # do.
+ pass
+
class CanonicalEnumField(_BaseEnumField):
"""
Uses the RichEnum/OrderedRichEnum canonical_name as form field values
"""
def get_choices(self):
return self.enum.choices()
def coerce_value(self, name):
return self.enum.from_canonical(name)
class IndexEnumField(_BaseEnumField):
"""
Uses the OrderedRichEnum index as form field values
"""
def get_choices(self):
return self.enum.choices(value_field='index')
def coerce_value(self, index):
return self.enum.from_index(int(index))
| Make run_validators method a no-op | ## Code Before:
from abc import ABCMeta
from abc import abstractmethod
from django import forms
class _BaseEnumField(forms.TypedChoiceField):
__metaclass__ = ABCMeta
def __init__(self, enum, *args, **kwargs):
self.enum = enum
kwargs.setdefault('empty_value', None)
if 'choices' in kwargs:
raise ValueError('Cannot explicitly supply choices to enum fields.')
if 'coerce' in kwargs:
raise ValueError('Cannot explicitly supply coercion function to enum fields.')
kwargs['choices'] = self.get_choices()
kwargs['coerce'] = self.coerce_value
super(_BaseEnumField, self).__init__(*args, **kwargs)
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def coerce_value(self, val):
pass
class CanonicalEnumField(_BaseEnumField):
"""
Uses the RichEnum/OrderedRichEnum canonical_name as form field values
"""
def get_choices(self):
return self.enum.choices()
def coerce_value(self, name):
return self.enum.from_canonical(name)
class IndexEnumField(_BaseEnumField):
"""
Uses the OrderedRichEnum index as form field values
"""
def get_choices(self):
return self.enum.choices(value_field='index')
def coerce_value(self, index):
return self.enum.from_index(int(index))
## Instruction:
Make run_validators method a no-op
## Code After:
from abc import ABCMeta
from abc import abstractmethod
from django import forms
class _BaseEnumField(forms.TypedChoiceField):
__metaclass__ = ABCMeta
def __init__(self, enum, *args, **kwargs):
self.enum = enum
kwargs.setdefault('empty_value', None)
if 'choices' in kwargs:
raise ValueError('Cannot explicitly supply choices to enum fields.')
if 'coerce' in kwargs:
raise ValueError('Cannot explicitly supply coercion function to enum fields.')
kwargs['choices'] = self.get_choices()
kwargs['coerce'] = self.coerce_value
super(_BaseEnumField, self).__init__(*args, **kwargs)
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def coerce_value(self, val):
pass
def run_validators(self, value):
# These have to be from a set, so it's hard for me to imagine a useful
# custom validator.
# The run_validators method in the superclass checks the value against
# None, [], {}, etc, which causes warnings in the RichEnum.__eq__
# method... arguably we shouldn't warn in those cases, but for now we
# do.
pass
class CanonicalEnumField(_BaseEnumField):
"""
Uses the RichEnum/OrderedRichEnum canonical_name as form field values
"""
def get_choices(self):
return self.enum.choices()
def coerce_value(self, name):
return self.enum.from_canonical(name)
class IndexEnumField(_BaseEnumField):
"""
Uses the OrderedRichEnum index as form field values
"""
def get_choices(self):
return self.enum.choices(value_field='index')
def coerce_value(self, index):
return self.enum.from_index(int(index))
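
The reasoning in the commit message comes from the base class: Django's Field.run_validators first tests the value against its empty values (None, '', [], (), {}), and each of those membership comparisons goes through RichEnum.__eq__. A hedged sketch of the override in isolation (field name hypothetical):

from django import forms

class SilentEnumField(forms.TypedChoiceField):
    def run_validators(self, value):
        # The inherited implementation guards with a value-in-empty-values
        # membership test; every `in` comparison invokes value.__eq__, which
        # is what triggered the RichEnum warnings. A no-op override silences
        # them at the cost of also dropping custom validators on this field.
        pass
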
|
0782ab8774f840c7ab2e66ddd168ac3ccfa3fc4f | openprescribing/pipeline/management/commands/clean_up_bq_test_data.py | openprescribing/pipeline/management/commands/clean_up_bq_test_data.py | import os
from django.core.management import BaseCommand, CommandError
from gcutils.bigquery import Client
class Command(BaseCommand):
help = 'Removes any datasets whose tables have all expired'
def handle(self, *args, **kwargs):
if os.environ['DJANGO_SETTINGS_MODULE'] != \
'openprescribing.settings.test':
raise CommandError('Command must run with test settings')
gcbq_client = Client().gcbq_client
for dataset_list_item in gcbq_client.list_datasets():
dataset_ref = dataset_list_item.reference
tables = list(gcbq_client.list_tables(dataset_ref))
if len(tables) == 0:
gcbq_client.delete_dataset(dataset_ref)
| import os
from django.core.management import BaseCommand, CommandError
from gcutils.bigquery import Client
class Command(BaseCommand):
help = 'Removes any datasets whose tables have all expired'
def handle(self, *args, **kwargs):
if os.environ['DJANGO_SETTINGS_MODULE'] != \
'openprescribing.settings.test':
raise CommandError('Command must run with test settings')
gcbq_client = Client().gcbq_client
datasets = list(gcbq_client.list_datasets())
for dataset_list_item in datasets:
dataset_ref = dataset_list_item.reference
tables = list(gcbq_client.list_tables(dataset_ref))
if len(tables) == 0:
gcbq_client.delete_dataset(dataset_ref)
| Clean up BQ test data properly | Clean up BQ test data properly
If you delete datasets while iterating over them, you eventually get
errors. This fixes that by building a list of all datasets before we
delete any.
| Python | mit | ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc | import os
from django.core.management import BaseCommand, CommandError
from gcutils.bigquery import Client
class Command(BaseCommand):
help = 'Removes any datasets whose tables have all expired'
def handle(self, *args, **kwargs):
if os.environ['DJANGO_SETTINGS_MODULE'] != \
'openprescribing.settings.test':
raise CommandError('Command must run with test settings')
gcbq_client = Client().gcbq_client
+ datasets = list(gcbq_client.list_datasets())
+
- for dataset_list_item in gcbq_client.list_datasets():
+ for dataset_list_item in datasets:
dataset_ref = dataset_list_item.reference
tables = list(gcbq_client.list_tables(dataset_ref))
if len(tables) == 0:
gcbq_client.delete_dataset(dataset_ref)
| Clean up BQ test data properly | ## Code Before:
import os
from django.core.management import BaseCommand, CommandError
from gcutils.bigquery import Client
class Command(BaseCommand):
help = 'Removes any datasets whose tables have all expired'
def handle(self, *args, **kwargs):
if os.environ['DJANGO_SETTINGS_MODULE'] != \
'openprescribing.settings.test':
raise CommandError('Command must run with test settings')
gcbq_client = Client().gcbq_client
for dataset_list_item in gcbq_client.list_datasets():
dataset_ref = dataset_list_item.reference
tables = list(gcbq_client.list_tables(dataset_ref))
if len(tables) == 0:
gcbq_client.delete_dataset(dataset_ref)
## Instruction:
Clean up BQ test data properly
## Code After:
import os
from django.core.management import BaseCommand, CommandError
from gcutils.bigquery import Client
class Command(BaseCommand):
help = 'Removes any datasets whose tables have all expired'
def handle(self, *args, **kwargs):
if os.environ['DJANGO_SETTINGS_MODULE'] != \
'openprescribing.settings.test':
raise CommandError('Command must run with test settings')
gcbq_client = Client().gcbq_client
datasets = list(gcbq_client.list_datasets())
for dataset_list_item in datasets:
dataset_ref = dataset_list_item.reference
tables = list(gcbq_client.list_tables(dataset_ref))
if len(tables) == 0:
gcbq_client.delete_dataset(dataset_ref)
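
The message describes a classic mutate-while-iterating bug: list() materializes the lazy iterator up front, so later deletions can no longer invalidate it. The same shape with a plain dict (illustrative, no BigQuery involved):

items = {'empty_ds': 0, 'live_ds': 3}

for name in list(items):      # snapshot the keys first
    if items[name] == 0:
        del items[name]       # safe: the loop walks the snapshot, not the dict

assert items == {'live_ds': 3}
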
|
90dfa38014ba91de2e8c0c75d63788aab3c95f38 | Python/python2_version/klampt/__init__.py | Python/python2_version/klampt/__init__.py | from robotsim import *
import atexit
atexit.register(destroy)
__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
'SimRobotController','SimRobotSensor','SimBody','Simulator',
'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
'model','math','io','plan','sim']
| from __future__ import print_function,division
from robotsim import *
import atexit
atexit.register(destroy)
__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
'SimRobotController','SimRobotSensor','SimBody','Simulator',
'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
'model','math','io','plan','sim']
| Allow some compatibility between python2 and updated python 3 files | Allow some compatibility between python2 and updated python 3 files
| Python | bsd-3-clause | krishauser/Klampt,krishauser/Klampt,krishauser/Klampt,krishauser/Klampt,krishauser/Klampt,krishauser/Klampt | + from __future__ import print_function,division
from robotsim import *
import atexit
atexit.register(destroy)
__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
'SimRobotController','SimRobotSensor','SimBody','Simulator',
'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
'model','math','io','plan','sim']
| Allow some compatibility between python2 and updated python 3 files | ## Code Before:
from robotsim import *
import atexit
atexit.register(destroy)
__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
'SimRobotController','SimRobotSensor','SimBody','Simulator',
'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
'model','math','io','plan','sim']
## Instruction:
Allow some compatibility between python2 and updated python 3 files
## Code After:
from __future__ import print_function,division
from robotsim import *
import atexit
atexit.register(destroy)
__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
'SimRobotController','SimRobotSensor','SimBody','Simulator',
'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
'model','math','io','plan','sim']
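
What the added __future__ line changes on a Python 2 interpreter, in miniature (behaviour sketch, not Klampt code):

from __future__ import print_function, division

print('ratio:', 3 / 4)   # 0.75 on both Python 2 and 3, not 0
print('floor:', 7 // 2)  # explicit floor division stays spelled //
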
|
3a321a93f9779f9e27da8e85e3ffc7460bbbef12 | src/python/yalix/test/utils_test.py | src/python/yalix/test/utils_test.py |
import unittest
import yalix.utils as utils
class UtilsTest(unittest.TestCase):
def test_log_progress_reports_FAILED(self):
with utils.capture() as out:
with self.assertRaises(KeyError):
with utils.log_progress("Testing log message"):
raise KeyError
self.assertTrue('Testing log message' in out[0])
self.assertTrue('FAILED' in out[0])
def test_log_progress_reports_DONE(self):
with utils.capture() as out:
with utils.log_progress("Testing log message"):
pass
self.assertTrue('Testing log message' in out[0])
self.assertTrue('DONE' in out[0])
def test_syntax_highligher(self):
import hashlib
sample_code = "(define (identity x) x)"
output = utils.highlight_syntax(sample_code)
m = hashlib.sha224(bytes(output))
self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())
if __name__ == '__main__':
unittest.main()
|
import unittest
import yalix.utils as utils
class UtilsTest(unittest.TestCase):
def test_log_progress_reports_FAILED(self):
with utils.capture() as out:
with self.assertRaises(KeyError):
with utils.log_progress("Testing log message"):
raise KeyError
self.assertTrue('Testing log message' in out[0])
self.assertTrue('FAILED' in out[0])
def test_log_progress_reports_DONE(self):
with utils.capture() as out:
with utils.log_progress("Testing log message"):
pass
self.assertTrue('Testing log message' in out[0])
self.assertTrue('DONE' in out[0])
# def test_syntax_highligher(self):
# import hashlib
# sample_code = "(define (identity x) x)"
# output = utils.highlight_syntax(sample_code)
# if output != sample_code:
# # Pygments in action
# m = hashlib.sha224(output.encode('utf-8'))
# self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())
if __name__ == '__main__':
unittest.main()
| Comment out failing test on Python3 env | Comment out failing test on Python3 env
| Python | mit | rm-hull/yalix |
import unittest
import yalix.utils as utils
class UtilsTest(unittest.TestCase):
def test_log_progress_reports_FAILED(self):
with utils.capture() as out:
with self.assertRaises(KeyError):
with utils.log_progress("Testing log message"):
raise KeyError
self.assertTrue('Testing log message' in out[0])
self.assertTrue('FAILED' in out[0])
def test_log_progress_reports_DONE(self):
with utils.capture() as out:
with utils.log_progress("Testing log message"):
pass
self.assertTrue('Testing log message' in out[0])
self.assertTrue('DONE' in out[0])
- def test_syntax_highligher(self):
+ # def test_syntax_highligher(self):
- import hashlib
+ # import hashlib
- sample_code = "(define (identity x) x)"
+ # sample_code = "(define (identity x) x)"
- output = utils.highlight_syntax(sample_code)
+ # output = utils.highlight_syntax(sample_code)
- m = hashlib.sha224(bytes(output))
+ # if output != sample_code:
+ # # Pygments in action
+ # m = hashlib.sha224(output.encode('utf-8'))
- self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())
+ # self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())
if __name__ == '__main__':
unittest.main()
| Comment out failing test on Python3 env | ## Code Before:
import unittest
import yalix.utils as utils
class UtilsTest(unittest.TestCase):
def test_log_progress_reports_FAILED(self):
with utils.capture() as out:
with self.assertRaises(KeyError):
with utils.log_progress("Testing log message"):
raise KeyError
self.assertTrue('Testing log message' in out[0])
self.assertTrue('FAILED' in out[0])
def test_log_progress_reports_DONE(self):
with utils.capture() as out:
with utils.log_progress("Testing log message"):
pass
self.assertTrue('Testing log message' in out[0])
self.assertTrue('DONE' in out[0])
def test_syntax_highligher(self):
import hashlib
sample_code = "(define (identity x) x)"
output = utils.highlight_syntax(sample_code)
m = hashlib.sha224(bytes(output))
self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())
if __name__ == '__main__':
unittest.main()
## Instruction:
Comment out failing test on Python3 env
## Code After:
import unittest
import yalix.utils as utils
class UtilsTest(unittest.TestCase):
def test_log_progress_reports_FAILED(self):
with utils.capture() as out:
with self.assertRaises(KeyError):
with utils.log_progress("Testing log message"):
raise KeyError
self.assertTrue('Testing log message' in out[0])
self.assertTrue('FAILED' in out[0])
def test_log_progress_reports_DONE(self):
with utils.capture() as out:
with utils.log_progress("Testing log message"):
pass
self.assertTrue('Testing log message' in out[0])
self.assertTrue('DONE' in out[0])
# def test_syntax_highligher(self):
# import hashlib
# sample_code = "(define (identity x) x)"
# output = utils.highlight_syntax(sample_code)
# if output != sample_code:
# # Pygments in action
# m = hashlib.sha224(output.encode('utf-8'))
# self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())
if __name__ == '__main__':
unittest.main()
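
The disabled assertion fails on Python 3 because hashlib only accepts bytes and bytes(output) no longer coerces text (it raises TypeError for a str without an encoding); the commented replacement encodes explicitly. A sketch of the working form, reusing the test's sample string:

import hashlib

output = "(define (identity x) x)"
digest = hashlib.sha224(output.encode('utf-8')).hexdigest()  # bytes in, hex digest out
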
|
94790371e7ec8dc189409e39e193680b9c6b1a08 | raven/contrib/django/apps.py | raven/contrib/django/apps.py | from django.apps import AppConfig
class RavenConfig(AppConfig):
name = 'raven.contrib.django'
label = 'raven.contrib.django'
verbose_name = 'Raven'
| from __future__ import absolute_import
from django.apps import AppConfig
class RavenConfig(AppConfig):
name = 'raven.contrib.django'
label = 'raven.contrib.django'
verbose_name = 'Raven'
| Add missing __future__ import to pass coding guidelines. | Add missing __future__ import to pass coding guidelines.
| Python | bsd-3-clause | getsentry/raven-python,lepture/raven-python,smarkets/raven-python,Photonomie/raven-python,akalipetis/raven-python,danriti/raven-python,jbarbuto/raven-python,akheron/raven-python,ronaldevers/raven-python,johansteffner/raven-python,smarkets/raven-python,jmagnusson/raven-python,akheron/raven-python,jbarbuto/raven-python,Photonomie/raven-python,smarkets/raven-python,percipient/raven-python,Photonomie/raven-python,arthurlogilab/raven-python,arthurlogilab/raven-python,nikolas/raven-python,johansteffner/raven-python,lepture/raven-python,ewdurbin/raven-python,hzy/raven-python,jmp0xf/raven-python,arthurlogilab/raven-python,ronaldevers/raven-python,jbarbuto/raven-python,dbravender/raven-python,jmagnusson/raven-python,getsentry/raven-python,ronaldevers/raven-python,hzy/raven-python,johansteffner/raven-python,jmp0xf/raven-python,jbarbuto/raven-python,akalipetis/raven-python,recht/raven-python,hzy/raven-python,arthurlogilab/raven-python,jmagnusson/raven-python,percipient/raven-python,smarkets/raven-python,recht/raven-python,ewdurbin/raven-python,percipient/raven-python,dbravender/raven-python,ewdurbin/raven-python,akalipetis/raven-python,getsentry/raven-python,nikolas/raven-python,someonehan/raven-python,akheron/raven-python,danriti/raven-python,lepture/raven-python,danriti/raven-python,nikolas/raven-python,someonehan/raven-python,jmp0xf/raven-python,recht/raven-python,someonehan/raven-python,dbravender/raven-python,nikolas/raven-python | + from __future__ import absolute_import
from django.apps import AppConfig
+
class RavenConfig(AppConfig):
name = 'raven.contrib.django'
label = 'raven.contrib.django'
verbose_name = 'Raven'
| Add missing __future__ import to pass coding guidelines. | ## Code Before:
from django.apps import AppConfig
class RavenConfig(AppConfig):
name = 'raven.contrib.django'
label = 'raven.contrib.django'
verbose_name = 'Raven'
## Instruction:
Add missing __future__ import to pass coding guidelines.
## Code After:
from __future__ import absolute_import
from django.apps import AppConfig
class RavenConfig(AppConfig):
name = 'raven.contrib.django'
label = 'raven.contrib.django'
verbose_name = 'Raven'
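
On Python 2 the added line switches bare imports to absolute resolution, which is what the guideline enforces; a sketch of the distinction (sibling module name hypothetical):

from __future__ import absolute_import

import json        # always the stdlib module, even if a local json.py
                   # happens to sit next to this file
from . import apps # intra-package imports must now be written explicitly
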
|
ba3c46dc19afe79647ea07d80c495fbf7ad47514 | rocketleaguereplayanalysis/util/transcode.py | rocketleaguereplayanalysis/util/transcode.py | def render_video(render_type,
out_frame_rate=30, overlay=None, extra_cmd=None):
import os
import subprocess
from rocketleaguereplayanalysis.render.do_render import get_video_prefix
from rocketleaguereplayanalysis.parser.frames import get_frames
from rocketleaguereplayanalysis.util.sync import get_sync_time_type
video_prefix = get_video_prefix()
cmd = ['ffmpeg',
'-loop', '1',
'-i', os.path.join('assets', overlay + '.png'),
'-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
cmd += extra_cmd
cmd += ['-r', str(out_frame_rate), render_type + '.mp4', '-y']
print('FFmpeg Command:', cmd)
p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
p.communicate()
| def render_video(render_type,
out_frame_rate=30, overlay=None, extra_cmd=None):
import os
import subprocess
from rocketleaguereplayanalysis.render.do_render import get_video_prefix
from rocketleaguereplayanalysis.parser.frames import get_frames
from rocketleaguereplayanalysis.util.sync import get_sync_time_type
video_prefix = get_video_prefix()
cmd = ['ffmpeg',
'-loop', '1',
'-i', os.path.join('assets', overlay + '.png'),
'-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
cmd += extra_cmd
cmd += ['-r', str(out_frame_rate),
'-crf', '18',
render_type + '.mp4', '-y']
print('FFmpeg Command:', cmd)
p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
p.communicate()
 | Fix render output (missing crf value) | Fix render output (missing crf value)
| Python | agpl-3.0 | enzanki-ars/rocket-league-minimap-generator | def render_video(render_type,
out_frame_rate=30, overlay=None, extra_cmd=None):
import os
import subprocess
from rocketleaguereplayanalysis.render.do_render import get_video_prefix
from rocketleaguereplayanalysis.parser.frames import get_frames
from rocketleaguereplayanalysis.util.sync import get_sync_time_type
video_prefix = get_video_prefix()
cmd = ['ffmpeg',
'-loop', '1',
'-i', os.path.join('assets', overlay + '.png'),
'-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
cmd += extra_cmd
- cmd += ['-r', str(out_frame_rate), render_type + '.mp4', '-y']
+ cmd += ['-r', str(out_frame_rate),
+ '-crf', '18',
+ render_type + '.mp4', '-y']
print('FFmpeg Command:', cmd)
p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
p.communicate()
 | Fix render output (missing crf value) | ## Code Before:
def render_video(render_type,
out_frame_rate=30, overlay=None, extra_cmd=None):
import os
import subprocess
from rocketleaguereplayanalysis.render.do_render import get_video_prefix
from rocketleaguereplayanalysis.parser.frames import get_frames
from rocketleaguereplayanalysis.util.sync import get_sync_time_type
video_prefix = get_video_prefix()
cmd = ['ffmpeg',
'-loop', '1',
'-i', os.path.join('assets', overlay + '.png'),
'-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
cmd += extra_cmd
cmd += ['-r', str(out_frame_rate), render_type + '.mp4', '-y']
print('FFmpeg Command:', cmd)
p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
p.communicate()
## Instruction:
Fix render output (missing crf value)
## Code After:
def render_video(render_type,
out_frame_rate=30, overlay=None, extra_cmd=None):
import os
import subprocess
from rocketleaguereplayanalysis.render.do_render import get_video_prefix
from rocketleaguereplayanalysis.parser.frames import get_frames
from rocketleaguereplayanalysis.util.sync import get_sync_time_type
video_prefix = get_video_prefix()
cmd = ['ffmpeg',
'-loop', '1',
'-i', os.path.join('assets', overlay + '.png'),
'-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
cmd += extra_cmd
cmd += ['-r', str(out_frame_rate),
'-crf', '18',
render_type + '.mp4', '-y']
print('FFmpeg Command:', cmd)
p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
p.communicate()
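
For context on the restored flag: -crf selects x264's constant-rate-factor mode, where 18 is commonly treated as visually near-lossless and larger values trade quality for file size. A hedged sketch of the flag in a minimal command (file names and durations illustrative):

cmd = ['ffmpeg',
       '-loop', '1', '-i', 'overlay.png',
       '-t', '10',
       '-r', '30',
       '-crf', '18',   # the option the earlier command was missing
       'render.mp4', '-y']
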
|
b870028ce8edcb5001f1a4823517d866db0324a8 | pyglab/apirequest.py | pyglab/apirequest.py | import enum
import json
from pyglab.exceptions import RequestError
import requests
@enum.unique
class RequestType(enum.Enum):
GET = 1
POST = 2
PUT = 3
DELETE = 4
class ApiRequest:
_request_creators = {
RequestType.GET: requests.get,
RequestType.POST: requests.post,
RequestType.PUT: requests.put,
RequestType.DELETE: requests.delete,
}
def __init__(self, request_type, url, token, params={}, sudo=None,
page=None, per_page=None):
# Build header
header = {'PRIVATE-TOKEN': token}
if sudo is not None:
header['SUDO', sudo]
# Build parameters
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
r = self._request_creators[request_type](url, params=params,
headers=header)
content = json.loads(r.text)
if RequestError.is_error(r.status_code):
raise RequestError.error_class(r.status_code)(content)
self._content = content
@property
def content(self):
return self._content
| import json
from pyglab.exceptions import RequestError
import requests
class RequestType(object):
GET = 1
POST = 2
PUT = 3
DELETE = 4
class ApiRequest:
_request_creators = {
RequestType.GET: requests.get,
RequestType.POST: requests.post,
RequestType.PUT: requests.put,
RequestType.DELETE: requests.delete,
}
def __init__(self, request_type, url, token, params={}, sudo=None,
page=None, per_page=None):
# Build header
header = {'PRIVATE-TOKEN': token}
if sudo is not None:
header['SUDO', sudo]
# Build parameters
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
r = self._request_creators[request_type](url, params=params,
headers=header)
content = json.loads(r.text)
if RequestError.is_error(r.status_code):
raise RequestError.error_class(r.status_code)(content)
self._content = content
@property
def content(self):
return self._content
| Make RequestType a normal class, not an enum. | Make RequestType a normal class, not an enum.
This removes the restriction of needing Python >= 3.4. RequestType is
now a normal class with class variables (fixes #19).
| Python | mit | sloede/pyglab,sloede/pyglab | - import enum
import json
from pyglab.exceptions import RequestError
import requests
- @enum.unique
- class RequestType(enum.Enum):
+ class RequestType(object):
GET = 1
POST = 2
PUT = 3
DELETE = 4
class ApiRequest:
_request_creators = {
RequestType.GET: requests.get,
RequestType.POST: requests.post,
RequestType.PUT: requests.put,
RequestType.DELETE: requests.delete,
}
def __init__(self, request_type, url, token, params={}, sudo=None,
page=None, per_page=None):
# Build header
header = {'PRIVATE-TOKEN': token}
if sudo is not None:
header['SUDO', sudo]
# Build parameters
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
r = self._request_creators[request_type](url, params=params,
headers=header)
content = json.loads(r.text)
if RequestError.is_error(r.status_code):
raise RequestError.error_class(r.status_code)(content)
self._content = content
@property
def content(self):
return self._content
| Make RequestType a normal class, not an enum. | ## Code Before:
import enum
import json
from pyglab.exceptions import RequestError
import requests
@enum.unique
class RequestType(enum.Enum):
GET = 1
POST = 2
PUT = 3
DELETE = 4
class ApiRequest:
_request_creators = {
RequestType.GET: requests.get,
RequestType.POST: requests.post,
RequestType.PUT: requests.put,
RequestType.DELETE: requests.delete,
}
def __init__(self, request_type, url, token, params={}, sudo=None,
page=None, per_page=None):
# Build header
header = {'PRIVATE-TOKEN': token}
if sudo is not None:
header['SUDO', sudo]
# Build parameters
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
r = self._request_creators[request_type](url, params=params,
headers=header)
content = json.loads(r.text)
if RequestError.is_error(r.status_code):
raise RequestError.error_class(r.status_code)(content)
self._content = content
@property
def content(self):
return self._content
## Instruction:
Make RequestType a normal class, not an enum.
## Code After:
import json
from pyglab.exceptions import RequestError
import requests
class RequestType(object):
GET = 1
POST = 2
PUT = 3
DELETE = 4
class ApiRequest:
_request_creators = {
RequestType.GET: requests.get,
RequestType.POST: requests.post,
RequestType.PUT: requests.put,
RequestType.DELETE: requests.delete,
}
def __init__(self, request_type, url, token, params={}, sudo=None,
page=None, per_page=None):
# Build header
header = {'PRIVATE-TOKEN': token}
if sudo is not None:
header['SUDO', sudo]
# Build parameters
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
r = self._request_creators[request_type](url, params=params,
headers=header)
content = json.loads(r.text)
if RequestError.is_error(r.status_code):
raise RequestError.error_class(r.status_code)(content)
self._content = content
@property
def content(self):
return self._content
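
Dropping enum.Enum removes the Python >= 3.4 floor because plain class attributes work everywhere, and the members become ordinary ints, so the _request_creators dict keys behave exactly as before. The bare pattern (sketch):

class RequestType(object):   # no enum34 backport required
    GET = 1
    POST = 2

handlers = {RequestType.GET: 'fetch'}
assert handlers[1] == 'fetch'   # plain ints: hashable and comparable as usual
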
|
8ecb32004aca75c0b6cb70bd1a00e38f3a65c8c8 | sound/irc/auth/controller.py | sound/irc/auth/controller.py |
from __future__ import unicode_literals
from web.auth import authenticate, deauthenticate
from web.core import config, url
from web.core.http import HTTPFound
from brave.api.client import API
log = __import__('logging').getLogger(__name__)
class AuthenticationMixIn(object):
def authorize(self):
# Perform the initial API call and direct the user.
api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public'])
success = str(url.complete('/authorized'))
failure = str(url.complete('/nolove'))
result = api.core.authorize(success=success, failure=failure)
raise HTTPFound(location=result.location)
def ciao(self):
deauthenticate(True)
raise HTTPFound(location='/')
def authorized(self, token):
# Capture the returned token and use it to look up the user details.
# If we don't have this character, create them.
# Store the token against this user account.
# Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'.
# (Though they can be shorter!)
# We request an authenticated session from the server.
authenticate(token)
raise HTTPFound(location='/')
def nolove(self, token):
return 'sound.irc.template.whynolove', dict()
|
from __future__ import unicode_literals
from web.auth import authenticate, deauthenticate
from web.core import config, url, session
from web.core.http import HTTPFound
from brave.api.client import API
log = __import__('logging').getLogger(__name__)
class AuthenticationMixIn(object):
def authorize(self):
# Perform the initial API call and direct the user.
api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public'])
success = str(url.complete('/authorized'))
failure = str(url.complete('/nolove'))
result = api.core.authorize(success=success, failure=failure)
raise HTTPFound(location=result.location)
def ciao(self):
deauthenticate(True)
raise HTTPFound(location='/')
def authorized(self, token):
# Capture the returned token and use it to look up the user details.
# If we don't have this character, create them.
# Store the token against this user account.
# Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'.
# (Though they can be shorter!)
# Prevent users from specifying their session IDs (Some user-agents were sending null ids, leading to users
# authenticated with a session id of null
session.regenerate_id()
# We request an authenticated session from the server.
authenticate(token)
raise HTTPFound(location='/')
def nolove(self, token):
return 'sound.irc.template.whynolove', dict()
| Fix a bug where user-agents could specify their own session ID. | Fix a bug where user-agents could specify their own session ID.
| Python | mit | eve-val/irc,eve-val/irc,eve-val/irc |
from __future__ import unicode_literals
from web.auth import authenticate, deauthenticate
- from web.core import config, url
+ from web.core import config, url, session
from web.core.http import HTTPFound
from brave.api.client import API
log = __import__('logging').getLogger(__name__)
class AuthenticationMixIn(object):
def authorize(self):
# Perform the initial API call and direct the user.
api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public'])
success = str(url.complete('/authorized'))
failure = str(url.complete('/nolove'))
result = api.core.authorize(success=success, failure=failure)
raise HTTPFound(location=result.location)
def ciao(self):
deauthenticate(True)
raise HTTPFound(location='/')
def authorized(self, token):
# Capture the returned token and use it to look up the user details.
# If we don't have this character, create them.
# Store the token against this user account.
# Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'.
# (Though they can be shorter!)
+ # Prevent users from specifying their session IDs (Some user-agents were sending null ids, leading to users
+ # authenticated with a session id of null
+ session.regenerate_id()
+
# We request an authenticated session from the server.
authenticate(token)
raise HTTPFound(location='/')
def nolove(self, token):
return 'sound.irc.template.whynolove', dict()
| Fix a bug where user-agents could specify their own session ID. | ## Code Before:
from __future__ import unicode_literals
from web.auth import authenticate, deauthenticate
from web.core import config, url
from web.core.http import HTTPFound
from brave.api.client import API
log = __import__('logging').getLogger(__name__)
class AuthenticationMixIn(object):
def authorize(self):
# Perform the initial API call and direct the user.
api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public'])
success = str(url.complete('/authorized'))
failure = str(url.complete('/nolove'))
result = api.core.authorize(success=success, failure=failure)
raise HTTPFound(location=result.location)
def ciao(self):
deauthenticate(True)
raise HTTPFound(location='/')
def authorized(self, token):
# Capture the returned token and use it to look up the user details.
# If we don't have this character, create them.
# Store the token against this user account.
# Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'.
# (Though they can be shorter!)
# We request an authenticated session from the server.
authenticate(token)
raise HTTPFound(location='/')
def nolove(self, token):
return 'sound.irc.template.whynolove', dict()
## Instruction:
Fix a bug where user-agents could specify their own session ID.
## Code After:
from __future__ import unicode_literals
from web.auth import authenticate, deauthenticate
from web.core import config, url, session
from web.core.http import HTTPFound
from brave.api.client import API
log = __import__('logging').getLogger(__name__)
class AuthenticationMixIn(object):
def authorize(self):
# Perform the initial API call and direct the user.
api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public'])
success = str(url.complete('/authorized'))
failure = str(url.complete('/nolove'))
result = api.core.authorize(success=success, failure=failure)
raise HTTPFound(location=result.location)
def ciao(self):
deauthenticate(True)
raise HTTPFound(location='/')
def authorized(self, token):
# Capture the returned token and use it to look up the user details.
# If we don't have this character, create them.
# Store the token against this user account.
# Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'.
# (Though they can be shorter!)
# Prevent users from specifying their session IDs (Some user-agents were sending null ids, leading to users
# authenticated with a session id of null
session.regenerate_id()
# We request an authenticated session from the server.
authenticate(token)
raise HTTPFound(location='/')
def nolove(self, token):
return 'sound.irc.template.whynolove', dict()
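
The fix is a session-fixation guard: rotate the identifier at the moment of authentication so an id chosen (or nulled) by the user-agent never survives into an authenticated session. Framework-agnostic sketch (session API hypothetical):

def on_login(session, token):
    session.regenerate_id()       # discard whatever id the client presented
    session['identity'] = token   # bind identity only to the fresh id
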
|
8befea283830f76dfa41cfd10d7eb916c68f7ef9 | intern/views.py | intern/views.py | from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all()
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) | from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all().order_by("-modified_at")
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) | Sort files by last modification | Sort files by last modification
| Python | mit | n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb | from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
- files = File.objects.all()
+ files = File.objects.all().order_by("-modified_at")
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) | Sort files by last modification | ## Code Before:
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all()
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
## Instruction:
Sort files by last modification
## Code After:
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all().order_by("-modified_at")
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) |
71b7885bc1e3740adf8c07c23b41835e1e69f8a2 | sqlobject/tests/test_class_hash.py | sqlobject/tests/test_class_hash.py | from sqlobject import *
from sqlobject.tests.dbtest import *
########################################
# Test hashing a column instance
########################################
class ClassHashTest(SQLObject):
name = StringCol(length=50, alternateID=True, dbName='name_col')
def test_class_hash():
setupClass(ClassHashTest)
ClassHashTest(name='bob')
conn = ClassHashTest._connection
b = ClassHashTest.byName('bob')
hashed = hash(b)
b.expire()
b = ClassHashTest.byName('bob')
assert hash(b) == hashed
| from sqlobject import *
from sqlobject.tests.dbtest import *
########################################
# Test hashing a column instance
########################################
class ClassHashTest(SQLObject):
name = StringCol(length=50, alternateID=True, dbName='name_col')
def test_class_hash():
setupClass(ClassHashTest)
ClassHashTest(name='bob')
b = ClassHashTest.byName('bob')
hashed = hash(b)
b.expire()
b = ClassHashTest.byName('bob')
assert hash(b) == hashed
| Fix flake8 warning in test case | Fix flake8 warning in test case
| Python | lgpl-2.1 | drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject | from sqlobject import *
from sqlobject.tests.dbtest import *
########################################
# Test hashing a column instance
########################################
class ClassHashTest(SQLObject):
name = StringCol(length=50, alternateID=True, dbName='name_col')
def test_class_hash():
setupClass(ClassHashTest)
ClassHashTest(name='bob')
- conn = ClassHashTest._connection
b = ClassHashTest.byName('bob')
hashed = hash(b)
b.expire()
b = ClassHashTest.byName('bob')
assert hash(b) == hashed
| Fix flake8 warning in test case | ## Code Before:
from sqlobject import *
from sqlobject.tests.dbtest import *
########################################
# Test hashing a column instance
########################################
class ClassHashTest(SQLObject):
name = StringCol(length=50, alternateID=True, dbName='name_col')
def test_class_hash():
setupClass(ClassHashTest)
ClassHashTest(name='bob')
conn = ClassHashTest._connection
b = ClassHashTest.byName('bob')
hashed = hash(b)
b.expire()
b = ClassHashTest.byName('bob')
assert hash(b) == hashed
## Instruction:
Fix flake8 warning in test case
## Code After:
from sqlobject import *
from sqlobject.tests.dbtest import *
########################################
# Test hashing a column instance
########################################
class ClassHashTest(SQLObject):
name = StringCol(length=50, alternateID=True, dbName='name_col')
def test_class_hash():
setupClass(ClassHashTest)
ClassHashTest(name='bob')
b = ClassHashTest.byName('bob')
hashed = hash(b)
b.expire()
b = ClassHashTest.byName('bob')
assert hash(b) == hashed
|
725605cd20b29e200f6aaa90f29053bc623b0e51 | thefuck/rules/unknown_command.py | thefuck/rules/unknown_command.py | import re
from thefuck.utils import replace_command
def match(command):
return (re.search(r"([^:]*): Unknown command.*", command.stderr) != None
and re.search(r"Did you mean ([^?]*)?", command.stderr) != None)
def get_new_command(command):
broken_cmd = re.findall(r"([^:]*): Unknown command.*", command.stderr)[0]
matched = re.findall(r"Did you mean ([^?]*)?", command.stderr)
return replace_command(command, broken_cmd, matched)
| import re
from thefuck.utils import replace_command
def match(command):
return (re.search(r"([^:]*): Unknown command.*", command.stderr) is not None
and re.search(r"Did you mean ([^?]*)?", command.stderr) is not None)
def get_new_command(command):
broken_cmd = re.findall(r"([^:]*): Unknown command.*", command.stderr)[0]
matched = re.findall(r"Did you mean ([^?]*)?", command.stderr)
return replace_command(command, broken_cmd, matched)
| Fix flake8 errors: E711 comparison to None should be 'if cond is not None:' | Fix flake8 errors: E711 comparison to None should be 'if cond is not None:'
| Python | mit | mlk/thefuck,mlk/thefuck,nvbn/thefuck,Clpsplug/thefuck,SimenB/thefuck,nvbn/thefuck,scorphus/thefuck,Clpsplug/thefuck,SimenB/thefuck,scorphus/thefuck | import re
from thefuck.utils import replace_command
def match(command):
- return (re.search(r"([^:]*): Unknown command.*", command.stderr) != None
+ return (re.search(r"([^:]*): Unknown command.*", command.stderr) is not None
- and re.search(r"Did you mean ([^?]*)?", command.stderr) != None)
+ and re.search(r"Did you mean ([^?]*)?", command.stderr) is not None)
def get_new_command(command):
broken_cmd = re.findall(r"([^:]*): Unknown command.*", command.stderr)[0]
matched = re.findall(r"Did you mean ([^?]*)?", command.stderr)
return replace_command(command, broken_cmd, matched)
| Fix flake8 errors: E711 comparison to None should be 'if cond is not None:' | ## Code Before:
import re
from thefuck.utils import replace_command
def match(command):
return (re.search(r"([^:]*): Unknown command.*", command.stderr) != None
and re.search(r"Did you mean ([^?]*)?", command.stderr) != None)
def get_new_command(command):
broken_cmd = re.findall(r"([^:]*): Unknown command.*", command.stderr)[0]
matched = re.findall(r"Did you mean ([^?]*)?", command.stderr)
return replace_command(command, broken_cmd, matched)
## Instruction:
Fix flake8 errors: E711 comparison to None should be 'if cond is not None:'
## Code After:
import re
from thefuck.utils import replace_command
def match(command):
return (re.search(r"([^:]*): Unknown command.*", command.stderr) is not None
and re.search(r"Did you mean ([^?]*)?", command.stderr) is not None)
def get_new_command(command):
broken_cmd = re.findall(r"([^:]*): Unknown command.*", command.stderr)[0]
matched = re.findall(r"Did you mean ([^?]*)?", command.stderr)
return replace_command(command, broken_cmd, matched)
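
E711 exists because == None dispatches to __eq__, which a class may override arbitrarily, while `is` tests identity. A small demonstration of the difference (illustrative class):

class Always(object):
    def __eq__(self, other):
        return True               # equality can lie

a = Always()
assert (a == None) is True        # __eq__ says yes  # noqa: E711
assert (a is None) is False       # identity cannot be faked
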
|
27065fd302c20937d44b840472d943ce8aa652e7 | plugins/candela/girder_plugin_candela/__init__.py | plugins/candela/girder_plugin_candela/__init__.py |
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
|
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
DISPLAY_NAME = 'Candela Visualization'
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
| Add a plugin displayName property | Add a plugin displayName property
This allows the web client to display an arbitrary plugin title rather
than to be restricted to valid python/javascript tokens.
| Python | apache-2.0 | Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela |
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
+ DISPLAY_NAME = 'Candela Visualization'
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
| Add a plugin displayName property | ## Code Before:
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
## Instruction:
Add a plugin displayName property
## Code After:
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
DISPLAY_NAME = 'Candela Visualization'
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
|
65b7d1f1eafd32d3895e3ec15a559dca608b5c23 | addons/sale_coupon/models/mail_compose_message.py | addons/sale_coupon/models/mail_compose_message.py |
from odoo import models
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
def send_mail(self, **kwargs):
for wizard in self:
if self._context.get('mark_coupon_as_sent') and wizard.model == 'sale.coupon' and wizard.partner_ids:
self.env[wizard.model].browse(wizard.res_id).state = 'sent'
return super().send_mail(**kwargs)
|
from odoo import models
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
def send_mail(self, **kwargs):
for wizard in self:
if self._context.get('mark_coupon_as_sent') and wizard.model == 'sale.coupon' and wizard.partner_ids:
# Mark coupon as sent in sudo, as helpdesk users don't have the right to write on coupons
self.env[wizard.model].sudo().browse(wizard.res_id).state = 'sent'
return super().send_mail(**kwargs)
| Allow helpdesk users to send coupon by email | [IMP] sale_coupon: Allow helpdesk users to send coupon by email
Purpose
=======
Helpdesk users don't have the right to write on a coupon.
When sending a coupon by email, the coupon is marked as 'sent'.
Allow users to send coupons by executing the state change in sudo.
closes odoo/odoo#45091
Taskid: 2179609
Related: odoo/enterprise#8143
Signed-off-by: Yannick Tivisse (yti) <200a91eb0e5cc4726d6a3430713b580138f34298@odoo.com>
| Python | agpl-3.0 | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo |
from odoo import models
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
def send_mail(self, **kwargs):
for wizard in self:
if self._context.get('mark_coupon_as_sent') and wizard.model == 'sale.coupon' and wizard.partner_ids:
+ # Mark coupon as sent in sudo, as helpdesk users don't have the right to write on coupons
- self.env[wizard.model].browse(wizard.res_id).state = 'sent'
+ self.env[wizard.model].sudo().browse(wizard.res_id).state = 'sent'
return super().send_mail(**kwargs)
| Allow helpdesk users to send coupon by email | ## Code Before:
from odoo import models
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
def send_mail(self, **kwargs):
for wizard in self:
if self._context.get('mark_coupon_as_sent') and wizard.model == 'sale.coupon' and wizard.partner_ids:
self.env[wizard.model].browse(wizard.res_id).state = 'sent'
return super().send_mail(**kwargs)
## Instruction:
Allow helpdesk users to send coupon by email
## Code After:
from odoo import models
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
def send_mail(self, **kwargs):
for wizard in self:
if self._context.get('mark_coupon_as_sent') and wizard.model == 'sale.coupon' and wizard.partner_ids:
# Mark coupon as sent in sudo, as helpdesk users don't have the right to write on coupons
self.env[wizard.model].sudo().browse(wizard.res_id).state = 'sent'
return super().send_mail(**kwargs)
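
The sudo() hop is the standard Odoo escape hatch when an access rule forbids a write the business flow still needs: it returns the same recordset evaluated with superuser rights, scoped to just that operation. Hedged sketch as a hypothetical model method:

def mark_coupon_sent(self, coupon_id):
    coupon = self.env['sale.coupon'].sudo().browse(coupon_id)
    coupon.state = 'sent'   # succeeds even if the caller lacks write access
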
|
96b554c62fb9449760d423f7420ae75d78998269 | nodeconductor/quotas/handlers.py | nodeconductor/quotas/handlers.py |
def add_quotas_to_scope(sender, instance, created=False, **kwargs):
if created:
from nodeconductor.quotas import models
for quota_name in sender.QUOTAS_NAMES:
models.Quota.objects.create(name=quota_name, scope=instance)
| from django.db.models import signals
def add_quotas_to_scope(sender, instance, created=False, **kwargs):
if created:
from nodeconductor.quotas import models
for quota_name in sender.QUOTAS_NAMES:
models.Quota.objects.create(name=quota_name, scope=instance)
def quantity_quota_handler_fabric(path_to_quota_scope, quota_name, count=1):
"""
Return signal handler that increases or decreases quota usage by <count> on object creation or deletion
:param path_to_quota_scope: path to object with quotas from created object
:param quota_name: name of changed quota
:param count: value, that will be added to quota usage
Example.
This code will add 1 to customer "nc-instances" quotas on instance creation and remove 1 on instance deletion:
.. code-block:: python
# handlers.py:
increase_customer_nc_instances_quota = quotas_handlers.quantity_quota_handler_fabric(
path_to_quota_scope='cloud_project_membership.project.customer',
quota_name='nc-instances',
count=1,
)
# apps.py
signals.post_save.connect(
handlers.increase_customer_nc_instances_quota,
sender=Instance,
dispatch_uid='nodeconductor.iaas.handlers.increase_customer_nc_instances_quota',
)
"""
def handler(sender, instance, **kwargs):
signal = kwargs['signal']
assert signal in (signals.post_save, signals.post_delete), \
'"quantity_quota_handler" can be used only with post_delete or post_save signals'
scope = reduce(getattr, path_to_quota_scope.split("."), instance)
if signal == signals.post_save and kwargs.get('created'):
scope.add_quota_usage(quota_name, count)
elif signal == signals.post_delete:
scope.add_quota_usage(quota_name, -count)
return handler
 | Create generic quantity quota handler (saas-217) | Create generic quantity quota handler (saas-217)
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | + from django.db.models import signals
+
def add_quotas_to_scope(sender, instance, created=False, **kwargs):
if created:
from nodeconductor.quotas import models
for quota_name in sender.QUOTAS_NAMES:
models.Quota.objects.create(name=quota_name, scope=instance)
+
+ def quantity_quota_handler_fabric(path_to_quota_scope, quota_name, count=1):
+ """
+ Return signal handler that increases or decreases quota usage by <count> on object creation or deletion
+
+ :param path_to_quota_scope: path to object with quotas from created object
+ :param quota_name: name of changed quota
+ :param count: value, that will be added to quota usage
+
+ Example.
+ This code will add 1 to customer "nc-instances" quotas on instance creation and remove 1 on instance deletion:
+
+ .. code-block:: python
+
+ # handlers.py:
+
+ increase_customer_nc_instances_quota = quotas_handlers.quantity_quota_handler_fabric(
+ path_to_quota_scope='cloud_project_membership.project.customer',
+ quota_name='nc-instances',
+ count=1,
+ )
+
+ # apps.py
+
+ signals.post_save.connect(
+ handlers.increase_customer_nc_instances_quota,
+ sender=Instance,
+ dispatch_uid='nodeconductor.iaas.handlers.increase_customer_nc_instances_quota',
+ )
+
+ """
+ def handler(sender, instance, **kwargs):
+ signal = kwargs['signal']
+ assert signal in (signals.post_save, signals.post_delete), \
+ '"quantity_quota_handler" can be used only with post_delete or post_save signals'
+
+ scope = reduce(getattr, path_to_quota_scope.split("."), instance)
+ if signal == signals.post_save and kwargs.get('created'):
+ scope.add_quota_usage(quota_name, count)
+ elif signal == signals.post_delete:
+ scope.add_quota_usage(quota_name, -count)
+
+ return handler
 | Create generic quantity quota handler (saas-217) | ## Code Before:
def add_quotas_to_scope(sender, instance, created=False, **kwargs):
if created:
from nodeconductor.quotas import models
for quota_name in sender.QUOTAS_NAMES:
models.Quota.objects.create(name=quota_name, scope=instance)
## Instruction:
Create generic quantity quota handler (saas-217)
## Code After:
from django.db.models import signals
def add_quotas_to_scope(sender, instance, created=False, **kwargs):
if created:
from nodeconductor.quotas import models
for quota_name in sender.QUOTAS_NAMES:
models.Quota.objects.create(name=quota_name, scope=instance)
def quantity_quota_handler_fabric(path_to_quota_scope, quota_name, count=1):
"""
Return signal handler that increases or decreases quota usage by <count> on object creation or deletion
:param path_to_quota_scope: path to object with quotas from created object
:param quota_name: name of changed quota
:param count: value, that will be added to quota usage
Example.
This code will add 1 to customer "nc-instances" quotas on instance creation and remove 1 on instance deletion:
.. code-block:: python
# handlers.py:
increase_customer_nc_instances_quota = quotas_handlers.quantity_quota_handler_fabric(
path_to_quota_scope='cloud_project_membership.project.customer',
quota_name='nc-instances',
count=1,
)
# apps.py
signals.post_save.connect(
handlers.increase_customer_nc_instances_quota,
sender=Instance,
dispatch_uid='nodeconductor.iaas.handlers.increase_customer_nc_instances_quota',
)
"""
def handler(sender, instance, **kwargs):
signal = kwargs['signal']
assert signal in (signals.post_save, signals.post_delete), \
'"quantity_quota_handler" can be used only with post_delete or post_save signals'
scope = reduce(getattr, path_to_quota_scope.split("."), instance)
if signal == signals.post_save and kwargs.get('created'):
scope.add_quota_usage(quota_name, count)
elif signal == signals.post_delete:
scope.add_quota_usage(quota_name, -count)
return handler
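
Two details worth noting in the factory: the dotted-path walk is a reduce over getattr, and the bare reduce call is the Python 2 builtin (Python 3 code would import it from functools). The traversal in isolation, with a stub object standing in for the saved instance:

from functools import reduce

class _Stub(object):
    pass

instance = _Stub()
instance.project = _Stub()
instance.project.customer = 'customer-record'

# equivalent to instance.project.customer
scope = reduce(getattr, 'project.customer'.split('.'), instance)
assert scope == 'customer-record'
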
|
8be551ad39f3aedff5ea0ceb536378ea0e851864 | src/waldur_auth_openid/management/commands/import_openid_accounts.py | src/waldur_auth_openid/management/commands/import_openid_accounts.py | from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import transaction
from waldur_core.core.utils import DryRunCommand
User = get_user_model()
class Command(DryRunCommand):
help_text = 'Append civil number with country code for OpenID users.'
def handle(self, dry_run, *args, **options):
conf = settings.WALDUR_AUTH_OPENID
country_code = conf['COUNTRY_CODE']
registration_method = conf['NAME']
with transaction.atomic():
users = User.objects.filter(registration_method=registration_method)\
.exclude(civil_number__startswith=country_code)\
.exclude(civil_number='') \
.exclude(civil_number=None)
count = users.count()
if not dry_run:
for user in users:
user.civil_number = '%s%s' % (country_code, user.civil_number)
user.save(update_fields=['civil_number'])
self.stdout.write(self.style.SUCCESS('Civil numbers have been updated for %s users.' % count))
| from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import transaction
from waldur_core.core.utils import DryRunCommand
User = get_user_model()
class Command(DryRunCommand):
help_text = 'Append civil number with country code for OpenID users.'
def handle(self, dry_run, *args, **options):
conf = settings.WALDUR_AUTH_OPENID
country_code = conf['COUNTRY_CODE']
registration_method = conf['NAME']
with transaction.atomic():
users = User.objects.filter(registration_method=registration_method)\
.exclude(civil_number__startswith=country_code)\
.exclude(civil_number='') \
.exclude(civil_number=None)
count = users.count()
for user in users:
new_civil_number = '%s%s' % (country_code, user.civil_number)
self.stdout.write('Username: %s, before: %s, after: %s' % (
user.username, user.civil_number, new_civil_number))
if not dry_run:
user.civil_number = new_civil_number
user.save(update_fields=['civil_number'])
self.stdout.write(self.style.SUCCESS('Civil numbers have been updated for %s users.' % count))
| Print out civil_number before and after | Print out civil_number before and after [WAL-2172]
| Python | mit | opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind | from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import transaction
from waldur_core.core.utils import DryRunCommand
User = get_user_model()
class Command(DryRunCommand):
help_text = 'Append civil number with country code for OpenID users.'
def handle(self, dry_run, *args, **options):
conf = settings.WALDUR_AUTH_OPENID
country_code = conf['COUNTRY_CODE']
registration_method = conf['NAME']
with transaction.atomic():
users = User.objects.filter(registration_method=registration_method)\
.exclude(civil_number__startswith=country_code)\
.exclude(civil_number='') \
.exclude(civil_number=None)
count = users.count()
- if not dry_run:
- for user in users:
+ for user in users:
- user.civil_number = '%s%s' % (country_code, user.civil_number)
+ new_civil_number = '%s%s' % (country_code, user.civil_number)
+ self.stdout.write('Username: %s, before: %s, after: %s' % (
+ user.username, user.civil_number, new_civil_number))
+ if not dry_run:
+ user.civil_number = new_civil_number
user.save(update_fields=['civil_number'])
self.stdout.write(self.style.SUCCESS('Civil numbers have been updated for %s users.' % count))
| Print out civil_number before and after | ## Code Before:
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import transaction
from waldur_core.core.utils import DryRunCommand
User = get_user_model()
class Command(DryRunCommand):
help_text = 'Append civil number with country code for OpenID users.'
def handle(self, dry_run, *args, **options):
conf = settings.WALDUR_AUTH_OPENID
country_code = conf['COUNTRY_CODE']
registration_method = conf['NAME']
with transaction.atomic():
users = User.objects.filter(registration_method=registration_method)\
.exclude(civil_number__startswith=country_code)\
.exclude(civil_number='') \
.exclude(civil_number=None)
count = users.count()
if not dry_run:
for user in users:
user.civil_number = '%s%s' % (country_code, user.civil_number)
user.save(update_fields=['civil_number'])
self.stdout.write(self.style.SUCCESS('Civil numbers have been updated for %s users.' % count))
## Instruction:
Print out civil_number before and after
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import transaction
from waldur_core.core.utils import DryRunCommand
User = get_user_model()
class Command(DryRunCommand):
help_text = 'Append civil number with country code for OpenID users.'
def handle(self, dry_run, *args, **options):
conf = settings.WALDUR_AUTH_OPENID
country_code = conf['COUNTRY_CODE']
registration_method = conf['NAME']
with transaction.atomic():
users = User.objects.filter(registration_method=registration_method)\
.exclude(civil_number__startswith=country_code)\
.exclude(civil_number='') \
.exclude(civil_number=None)
count = users.count()
for user in users:
new_civil_number = '%s%s' % (country_code, user.civil_number)
self.stdout.write('Username: %s, before: %s, after: %s' % (
user.username, user.civil_number, new_civil_number))
if not dry_run:
user.civil_number = new_civil_number
user.save(update_fields=['civil_number'])
self.stdout.write(self.style.SUCCESS('Civil numbers have been updated for %s users.' % count))
|
53c4d10ecb7a9592f3cdf311ca2ddc5cb52c413c | gitlabform/gitlabform/test/test_project_settings.py | gitlabform/gitlabform/test/test_project_settings.py | import pytest
from gitlabform.gitlabform import GitLabForm
from gitlabform.gitlabform.test import create_group, create_project_in_group, get_gitlab, GROUP_NAME
PROJECT_NAME = 'project_settings_project'
GROUP_AND_PROJECT_NAME = GROUP_NAME + '/' + PROJECT_NAME
@pytest.fixture(scope="module")
def gitlab(request):
create_group(GROUP_NAME)
create_project_in_group(GROUP_NAME, PROJECT_NAME)
gl = get_gitlab()
def fin():
gl.delete_project(GROUP_AND_PROJECT_NAME)
request.addfinalizer(fin)
return gl # provide fixture value
config_builds_for_private_projects = """
gitlab:
api_version: 4
project_settings:
project_settings:
builds_access_level: private
visibility: private
"""
class TestProjectSettings:
def test__builds_for_private_projects(self, gitlab):
gf = GitLabForm(config_string=config_builds_for_private_projects,
project_or_group=GROUP_AND_PROJECT_NAME)
gf.main()
settings = gitlab.get_project_settings(GROUP_AND_PROJECT_NAME)
assert settings['builds_access_level'] is 'private'
assert settings['visibility'] is 'private'
| import pytest
from gitlabform.gitlabform import GitLabForm
from gitlabform.gitlabform.test import create_group, create_project_in_group, get_gitlab, GROUP_NAME
PROJECT_NAME = 'project_settings_project'
GROUP_AND_PROJECT_NAME = GROUP_NAME + '/' + PROJECT_NAME
@pytest.fixture(scope="module")
def gitlab(request):
create_group(GROUP_NAME)
create_project_in_group(GROUP_NAME, PROJECT_NAME)
gl = get_gitlab()
def fin():
gl.delete_project(GROUP_AND_PROJECT_NAME)
request.addfinalizer(fin)
return gl # provide fixture value
config_builds_for_private_projects = """
gitlab:
api_version: 4
project_settings:
project_settings:
builds_access_level: private
visibility: private
"""
class TestProjectSettings:
def test__builds_for_private_projects(self, gitlab):
gf = GitLabForm(config_string=config_builds_for_private_projects,
project_or_group=GROUP_AND_PROJECT_NAME)
gf.main()
settings = gitlab.get_project_settings(GROUP_AND_PROJECT_NAME)
assert settings['visibility'] is 'private'
# there is no such field in the "Get single project" API :/
#assert settings['builds_access_level'] is 'private'
| Comment out what can't be checked | Comment out what can't be checked
| Python | mit | egnyte/gitlabform,egnyte/gitlabform | import pytest
from gitlabform.gitlabform import GitLabForm
from gitlabform.gitlabform.test import create_group, create_project_in_group, get_gitlab, GROUP_NAME
PROJECT_NAME = 'project_settings_project'
GROUP_AND_PROJECT_NAME = GROUP_NAME + '/' + PROJECT_NAME
@pytest.fixture(scope="module")
def gitlab(request):
create_group(GROUP_NAME)
create_project_in_group(GROUP_NAME, PROJECT_NAME)
gl = get_gitlab()
def fin():
gl.delete_project(GROUP_AND_PROJECT_NAME)
request.addfinalizer(fin)
return gl # provide fixture value
config_builds_for_private_projects = """
gitlab:
api_version: 4
project_settings:
project_settings:
builds_access_level: private
visibility: private
"""
class TestProjectSettings:
def test__builds_for_private_projects(self, gitlab):
gf = GitLabForm(config_string=config_builds_for_private_projects,
project_or_group=GROUP_AND_PROJECT_NAME)
gf.main()
settings = gitlab.get_project_settings(GROUP_AND_PROJECT_NAME)
- assert settings['builds_access_level'] is 'private'
assert settings['visibility'] is 'private'
+ # there is no such field in the "Get single project" API :/
+ #assert settings['builds_access_level'] is 'private'
| Comment out what can't be checked | ## Code Before:
import pytest
from gitlabform.gitlabform import GitLabForm
from gitlabform.gitlabform.test import create_group, create_project_in_group, get_gitlab, GROUP_NAME
PROJECT_NAME = 'project_settings_project'
GROUP_AND_PROJECT_NAME = GROUP_NAME + '/' + PROJECT_NAME
@pytest.fixture(scope="module")
def gitlab(request):
create_group(GROUP_NAME)
create_project_in_group(GROUP_NAME, PROJECT_NAME)
gl = get_gitlab()
def fin():
gl.delete_project(GROUP_AND_PROJECT_NAME)
request.addfinalizer(fin)
return gl # provide fixture value
config_builds_for_private_projects = """
gitlab:
api_version: 4
project_settings:
project_settings:
builds_access_level: private
visibility: private
"""
class TestProjectSettings:
def test__builds_for_private_projects(self, gitlab):
gf = GitLabForm(config_string=config_builds_for_private_projects,
project_or_group=GROUP_AND_PROJECT_NAME)
gf.main()
settings = gitlab.get_project_settings(GROUP_AND_PROJECT_NAME)
assert settings['builds_access_level'] is 'private'
assert settings['visibility'] is 'private'
## Instruction:
Comment out what can't be checked
## Code After:
import pytest
from gitlabform.gitlabform import GitLabForm
from gitlabform.gitlabform.test import create_group, create_project_in_group, get_gitlab, GROUP_NAME
PROJECT_NAME = 'project_settings_project'
GROUP_AND_PROJECT_NAME = GROUP_NAME + '/' + PROJECT_NAME
@pytest.fixture(scope="module")
def gitlab(request):
create_group(GROUP_NAME)
create_project_in_group(GROUP_NAME, PROJECT_NAME)
gl = get_gitlab()
def fin():
gl.delete_project(GROUP_AND_PROJECT_NAME)
request.addfinalizer(fin)
return gl # provide fixture value
config_builds_for_private_projects = """
gitlab:
api_version: 4
project_settings:
project_settings:
builds_access_level: private
visibility: private
"""
class TestProjectSettings:
def test__builds_for_private_projects(self, gitlab):
gf = GitLabForm(config_string=config_builds_for_private_projects,
project_or_group=GROUP_AND_PROJECT_NAME)
gf.main()
settings = gitlab.get_project_settings(GROUP_AND_PROJECT_NAME)
assert settings['visibility'] is 'private'
# there is no such field in the "Get single project" API :/
#assert settings['builds_access_level'] is 'private'
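A side note on the assertion style kept above: comparing strings with `is` tests object identity rather than value, and only passes when CPython happens to intern both strings; `==` is the dependable form. A tiny illustration, independent of GitLab:

literal = 'private'
built = ''.join(['pri', 'vate'])  # same text, but a freshly built object
print(built == literal)           # True  -- value comparison
print(built is literal)           # False in CPython -- identity comparison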
|
e5fb2f327b5ec51cd908e5915ef5415ff2b9dcc3 | stackviz/views/dstat/api.py | stackviz/views/dstat/api.py | from django.http import HttpResponse
from django.views.generic import View
from stackviz import settings
_cached_csv = None
def _load_csv():
global _cached_csv
if _cached_csv:
return _cached_csv
with open(settings.DSTAT_CSV, 'r') as f:
_cached_csv = f.readlines()
return _cached_csv
class DStatCSVEndpoint(View):
def get(self, request):
return HttpResponse(_load_csv(), content_type="text/csv")
| import os
from django.http import HttpResponse, Http404
from django.views.generic import View
from stackviz import settings
_cached_csv = None
def _load_csv():
global _cached_csv
if _cached_csv:
return _cached_csv
try:
with open(settings.DSTAT_CSV, 'r') as f:
_cached_csv = f.readlines()
return _cached_csv
except IOError:
return None
class DStatCSVEndpoint(View):
def get(self, request):
csv = _load_csv()
if not csv:
raise Http404("DStat log not loaded.")
return HttpResponse(csv, content_type="text/csv")
| Return a 404 error when no dstat csv can be loaded | Return a 404 error when no dstat csv can be loaded
| Python | apache-2.0 | openstack/stackviz,timothyb89/stackviz-ng,dklyle/stackviz-ng,timothyb89/stackviz-ng,timothyb89/stackviz-ng,timothyb89/stackviz,timothyb89/stackviz,timothyb89/stackviz,dklyle/stackviz-ng,openstack/stackviz,openstack/stackviz | + import os
+
- from django.http import HttpResponse
+ from django.http import HttpResponse, Http404
from django.views.generic import View
from stackviz import settings
_cached_csv = None
def _load_csv():
global _cached_csv
if _cached_csv:
return _cached_csv
+ try:
- with open(settings.DSTAT_CSV, 'r') as f:
+ with open(settings.DSTAT_CSV, 'r') as f:
- _cached_csv = f.readlines()
+ _cached_csv = f.readlines()
- return _cached_csv
+ return _cached_csv
+ except IOError:
+ return None
class DStatCSVEndpoint(View):
def get(self, request):
- return HttpResponse(_load_csv(), content_type="text/csv")
+ csv = _load_csv()
+ if not csv:
+ raise Http404("DStat log not loaded.")
+
+ return HttpResponse(csv, content_type="text/csv")
+ | Return a 404 error when no dstat csv can be loaded | ## Code Before:
from django.http import HttpResponse
from django.views.generic import View
from stackviz import settings
_cached_csv = None
def _load_csv():
global _cached_csv
if _cached_csv:
return _cached_csv
with open(settings.DSTAT_CSV, 'r') as f:
_cached_csv = f.readlines()
return _cached_csv
class DStatCSVEndpoint(View):
def get(self, request):
return HttpResponse(_load_csv(), content_type="text/csv")
## Instruction:
Return a 404 error when no dstat csv can be loaded
## Code After:
import os
from django.http import HttpResponse, Http404
from django.views.generic import View
from stackviz import settings
_cached_csv = None
def _load_csv():
global _cached_csv
if _cached_csv:
return _cached_csv
try:
with open(settings.DSTAT_CSV, 'r') as f:
_cached_csv = f.readlines()
return _cached_csv
except IOError:
return None
class DStatCSVEndpoint(View):
def get(self, request):
csv = _load_csv()
if not csv:
raise Http404("DStat log not loaded.")
return HttpResponse(csv, content_type="text/csv")
|
fe0d86df9c4be9d33a461578b71c43865f79c715 | tests/builtins/test_input.py | tests/builtins/test_input.py | from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class InputTests(TranspileTestCase):
pass
class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["input"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_list',
'test_set',
'test_str',
'test_tuple',
]
| from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class InputTests(TranspileTestCase):
pass
# class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
# functions = ["input"]
# not_implemented = [
# 'test_bool',
# 'test_bytearray',
# 'test_bytes',
# 'test_class',
# 'test_complex',
# 'test_dict',
# 'test_float',
# 'test_frozenset',
# 'test_int',
# 'test_list',
# 'test_set',
# 'test_str',
# 'test_tuple',
# ]
| Disable builtin tests for input() as it hangs | Disable builtin tests for input() as it hangs
| Python | bsd-3-clause | cflee/voc,Felix5721/voc,ASP1234/voc,cflee/voc,glasnt/voc,ASP1234/voc,glasnt/voc,freakboy3742/voc,freakboy3742/voc,gEt-rIgHt-jR/voc,Felix5721/voc,gEt-rIgHt-jR/voc,pombredanne/voc,pombredanne/voc | from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class InputTests(TranspileTestCase):
pass
- class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
+ # class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
- functions = ["input"]
+ # functions = ["input"]
- not_implemented = [
+ # not_implemented = [
- 'test_bool',
+ # 'test_bool',
- 'test_bytearray',
+ # 'test_bytearray',
- 'test_bytes',
+ # 'test_bytes',
- 'test_class',
+ # 'test_class',
- 'test_complex',
+ # 'test_complex',
- 'test_dict',
+ # 'test_dict',
- 'test_float',
+ # 'test_float',
- 'test_frozenset',
+ # 'test_frozenset',
- 'test_int',
+ # 'test_int',
- 'test_list',
+ # 'test_list',
- 'test_set',
+ # 'test_set',
- 'test_str',
+ # 'test_str',
- 'test_tuple',
+ # 'test_tuple',
- ]
+ # ]
| Disable builtin tests for input() as it hangs | ## Code Before:
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class InputTests(TranspileTestCase):
pass
class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["input"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_list',
'test_set',
'test_str',
'test_tuple',
]
## Instruction:
Disable builtin tests for input() as it hangs
## Code After:
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class InputTests(TranspileTestCase):
pass
# class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
# functions = ["input"]
# not_implemented = [
# 'test_bool',
# 'test_bytearray',
# 'test_bytes',
# 'test_class',
# 'test_complex',
# 'test_dict',
# 'test_float',
# 'test_frozenset',
# 'test_int',
# 'test_list',
# 'test_set',
# 'test_str',
# 'test_tuple',
# ]
|
a72468f6988ba3fc5f815b68a07c990809f80864 | main.py | main.py |
import obd
connection = obd.OBD()
while true:
request = connection.query(obd.commands.RPM)
if not r.is_null():
print(r.value) |
import obd
import signal
import sys
#What to do when we receive a signal
def signal_handler(signal, frame):
connection.close()
sys.exit(0)
#Register our signal handler
signal.signal(signal.SIGINT, signal_handler)
#Find and connect OBD adapter
connection = obd.OBD()
while True:
request = connection.query(obd.commands.RPM)
if not request.is_null():
print(request.value) | Handle ctrl+c with signal Fix more typos | Handle ctrl+c with signal
Fix more typos
| Python | mit | ProtaconSolutions/iot-hackday-2015-obd2 |
import obd
+ import signal
+ import sys
+ #What to do when we receive a signal
+ def signal_handler(signal, frame):
+ connection.close()
+ sys.exit(0)
+
+ #Register our signal handler
+ signal.signal(signal.SIGINT, signal_handler)
+
+ #Find and connect OBD adapter
connection = obd.OBD()
- while true:
+ while True:
request = connection.query(obd.commands.RPM)
- if not r.is_null():
+ if not request.is_null():
- print(r.value)
+ print(request.value) | Handle ctrl+c with signal Fix more typos | ## Code Before:
import obd
connection = obd.OBD()
while true:
request = connection.query(obd.commands.RPM)
if not r.is_null():
print(r.value)
## Instruction:
Handle ctrl+c with signal Fix more typos
## Code After:
import obd
import signal
import sys
#What to do when we receive a signal
def signal_handler(signal, frame):
connection.close()
sys.exit(0)
#Register our signal handler
signal.signal(signal.SIGINT, signal_handler)
#Find and connect OBD adapter
connection = obd.OBD()
while True:
request = connection.query(obd.commands.RPM)
if not request.is_null():
print(request.value) |
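An alternative to installing a SIGINT handler is to let Python's default KeyboardInterrupt fire and release the port in a finally block; this sketch reuses only the obd calls already shown above:

import obd

connection = obd.OBD()
try:
    while True:
        request = connection.query(obd.commands.RPM)
        if not request.is_null():
            print(request.value)
except KeyboardInterrupt:
    pass
finally:
    connection.close()  # runs on Ctrl+C and on any other exit path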
8d7657ed52a40070136bbbe3da7069dcbe3fc1c3 | altair/vegalite/v2/examples/stem_and_leaf.py | altair/vegalite/v2/examples/stem_and_leaf.py |
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating Random Data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting Steam and Leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
# Grouping Leafs for each Stem
grouped_data = pd.DataFrame(columns=['stem', 'leaf'])
for key, group in original_data.groupby('stem'):
grouped_data = grouped_data.append({'stem':key,
'leaf': ''.join(group['leaf'].sort_values())},
ignore_index=True)
# Plotting Stems and Leafs
chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode(
y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)),
text = 'leaf'
).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20) |
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating random data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting steam and leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
original_data.sort_values(by=['stem', 'leaf'], inplace=True)
# Determining position
position = np.array([], dtype=np.int64)
for key, group in original_data.groupby('stem'):
position = np.hstack([position, [*group.reset_index().index.values]])
original_data['position'] = position + 1
# Creating stem and leaf plot
chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode(
y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)),
x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)),
text = 'leaf:N'
).configure_axis(labelFontSize=20).configure_text(fontSize=20)
| Modify example to calculate leaf position | Modify example to calculate leaf position
| Python | bsd-3-clause | altair-viz/altair,ellisonbg/altair,jakevdp/altair |
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
- # Generating Random Data
+ # Generating random data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
- # Splitting Steam and Leaf
+ # Splitting steam and leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
- # Grouping Leafs for each Stem
- grouped_data = pd.DataFrame(columns=['stem', 'leaf'])
+ original_data.sort_values(by=['stem', 'leaf'], inplace=True)
+
+ # Determining position
+ position = np.array([], dtype=np.int64)
for key, group in original_data.groupby('stem'):
- grouped_data = grouped_data.append({'stem':key,
- 'leaf': ''.join(group['leaf'].sort_values())},
- ignore_index=True)
+ position = np.hstack([position, [*group.reset_index().index.values]])
+
+ original_data['position'] = position + 1
- # Plotting Stems and Leafs
+ # Creating stem and leaf plot
- chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode(
+ chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode(
- y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)),
+ y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)),
+ x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)),
- text = 'leaf'
+ text = 'leaf:N'
- ).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20)
+ ).configure_axis(labelFontSize=20).configure_text(fontSize=20)
+ | Modify example to calculate leaf position | ## Code Before:
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating Random Data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting Steam and Leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
# Grouping Leafs for each Stem
grouped_data = pd.DataFrame(columns=['stem', 'leaf'])
for key, group in original_data.groupby('stem'):
grouped_data = grouped_data.append({'stem':key,
'leaf': ''.join(group['leaf'].sort_values())},
ignore_index=True)
# Plotting Stems and Leafs
chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode(
y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)),
text = 'leaf'
).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20)
## Instruction:
Modify example to calculate leaf position
## Code After:
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating random data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting steam and leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
original_data.sort_values(by=['stem', 'leaf'], inplace=True)
# Determining position
position = np.array([], dtype=np.int64)
for key, group in original_data.groupby('stem'):
position = np.hstack([position, [*group.reset_index().index.values]])
original_data['position'] = position + 1
# Creating stem and leaf plot
chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode(
y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)),
x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)),
text = 'leaf:N'
).configure_axis(labelFontSize=20).configure_text(fontSize=20)
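The groupby/hstack loop above computes each leaf's 1-based rank within its stem; pandas can express that directly with groupby().cumcount(), which may be the more idiomatic spelling. A small self-contained sketch with toy data:

import pandas as pd

df = pd.DataFrame({'stem': ['4', '4', '5'], 'leaf': ['1', '7', '2']})
df = df.sort_values(['stem', 'leaf'])
# 1-based position of each row within its stem group
df['position'] = df.groupby('stem').cumcount() + 1
print(df)  # positions: 1, 2 for stem '4'; 1 for stem '5'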
|
4c3fee1ebce086d93424592f7145a378c40fd794 | medical_prescription_disease/models/medical_prescription_order_line.py | medical_prescription_disease/models/medical_prescription_order_line.py |
from openerp import models, fields, api
class MedicalPrescriptionOrderLine(models.Model):
_inherit = 'medical.prescription.order.line'
disease_id = fields.Many2one(
string='Disease',
comodel_name='medical.patient.disease',
required=True,
help='Disease diagnosis related to prescription.',
)
@api.multi
@api.onchange('patient_id')
def _onchange_patient_id(self, ):
self.ensure_one()
return {
'domain': {
'disease_id': [('patient_id', '=', self.patient_id.id)],
'prescription_order_id': [
('patient_id', '=', self.patient_id.id)
],
}
}
@api.multi
@api.onchange('disease_id')
def _onchange_disease_id(self, ):
for rec_id in self:
rec_id.patient_id = rec_id.disease_id.patient_id.id
|
from openerp import models, fields, api
class MedicalPrescriptionOrderLine(models.Model):
_inherit = 'medical.prescription.order.line'
disease_id = fields.Many2one(
string='Disease',
comodel_name='medical.patient.disease',
help='Disease diagnosis related to prescription.',
)
@api.multi
@api.onchange('patient_id')
def _onchange_patient_id(self, ):
self.ensure_one()
return {
'domain': {
'disease_id': [('patient_id', '=', self.patient_id.id)],
'prescription_order_id': [
('patient_id', '=', self.patient_id.id)
],
}
}
@api.multi
@api.onchange('disease_id')
def _onchange_disease_id(self, ):
for rec_id in self:
rec_id.patient_id = rec_id.disease_id.patient_id.id
| Remove required from disease_id in medical_prescription_disease | Remove required from disease_id in medical_prescription_disease
| Python | agpl-3.0 | laslabs/vertical-medical,laslabs/vertical-medical |
from openerp import models, fields, api
class MedicalPrescriptionOrderLine(models.Model):
_inherit = 'medical.prescription.order.line'
disease_id = fields.Many2one(
string='Disease',
comodel_name='medical.patient.disease',
- required=True,
help='Disease diagnosis related to prescription.',
)
@api.multi
@api.onchange('patient_id')
def _onchange_patient_id(self, ):
self.ensure_one()
return {
'domain': {
'disease_id': [('patient_id', '=', self.patient_id.id)],
'prescription_order_id': [
('patient_id', '=', self.patient_id.id)
],
}
}
@api.multi
@api.onchange('disease_id')
def _onchange_disease_id(self, ):
for rec_id in self:
rec_id.patient_id = rec_id.disease_id.patient_id.id
| Remove required from disease_id in medical_prescription_disease | ## Code Before:
from openerp import models, fields, api
class MedicalPrescriptionOrderLine(models.Model):
_inherit = 'medical.prescription.order.line'
disease_id = fields.Many2one(
string='Disease',
comodel_name='medical.patient.disease',
required=True,
help='Disease diagnosis related to prescription.',
)
@api.multi
@api.onchange('patient_id')
def _onchange_patient_id(self, ):
self.ensure_one()
return {
'domain': {
'disease_id': [('patient_id', '=', self.patient_id.id)],
'prescription_order_id': [
('patient_id', '=', self.patient_id.id)
],
}
}
@api.multi
@api.onchange('disease_id')
def _onchange_disease_id(self, ):
for rec_id in self:
rec_id.patient_id = rec_id.disease_id.patient_id.id
## Instruction:
Remove required from disease_id in medical_prescription_disease
## Code After:
from openerp import models, fields, api
class MedicalPrescriptionOrderLine(models.Model):
_inherit = 'medical.prescription.order.line'
disease_id = fields.Many2one(
string='Disease',
comodel_name='medical.patient.disease',
help='Disease diagnosis related to prescription.',
)
@api.multi
@api.onchange('patient_id')
def _onchange_patient_id(self, ):
self.ensure_one()
return {
'domain': {
'disease_id': [('patient_id', '=', self.patient_id.id)],
'prescription_order_id': [
('patient_id', '=', self.patient_id.id)
],
}
}
@api.multi
@api.onchange('disease_id')
def _onchange_disease_id(self, ):
for rec_id in self:
rec_id.patient_id = rec_id.disease_id.patient_id.id
|
f9b2f8cd60af9b37ad80db10c42b36059ca5a10f | tests/unit/core/migrations_tests.py | tests/unit/core/migrations_tests.py |
import os
from django.test import TestCase
import oscar.apps
class TestMigrations(TestCase):
def check_for_auth_model(self, filepath):
with open(filepath) as f:
s = f.read()
return 'auth.User' in s or 'auth.user' in s
def test_dont_contain_hardcoded_user_model(self):
root_path = os.path.dirname(oscar.apps.__file__)
matches = []
for dir, __, migrations in os.walk(root_path):
if dir.endswith('migrations'):
paths = [os.path.join(dir, migration) for migration in migrations
if migration.endswith('.py')]
matches += filter(self.check_for_auth_model, paths)
if matches:
pretty_matches = '\n'.join(
[match.replace(root_path, '') for match in matches])
self.fail('References to hardcoded User model found in the '
'following migration(s):\n' + pretty_matches)
| import os
import re
from django.test import TestCase
import oscar.apps
class TestMigrations(TestCase):
def setUp(self):
self.root_path = os.path.dirname(oscar.apps.__file__)
self.migration_filenames = []
for path, __, migrations in os.walk(self.root_path):
if path.endswith('migrations'):
paths = [
os.path.join(path, migration) for migration in migrations
if migration.endswith('.py') and migration != '__init__.py']
self.migration_filenames += paths
def test_dont_contain_hardcoded_user_model(self):
def check_for_auth_model(filepath):
with open(filepath) as f:
s = f.read()
return 'auth.User' in s or 'auth.user' in s
matches = filter(check_for_auth_model, self.migration_filenames)
if matches:
pretty_matches = '\n'.join(
[match.replace(self.root_path, '') for match in matches])
self.fail('References to hardcoded User model found in the '
'following migration(s):\n' + pretty_matches)
def test_no_duplicate_migration_numbers(self):
# pull app name and migration number
regexp = re.compile(r'^.+oscar/apps/([\w/]+)/migrations/(\d{4}).+$')
keys = []
for migration in self.migration_filenames:
match = regexp.match(migration)
keys.append(match.group(1) + match.group(2))
self.assertEqual(len(keys), len(set(keys)))
| Add unit test for duplicate migration numbers | Add unit test for duplicate migration numbers
Duplicate migration numbers can happen when merging changes from
different branches. This test ensures that we address the issue right
away.
| Python | bsd-3-clause | django-oscar/django-oscar,django-oscar/django-oscar,Bogh/django-oscar,anentropic/django-oscar,pdonadeo/django-oscar,manevant/django-oscar,nickpack/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,faratro/django-oscar,QLGu/django-oscar,eddiep1101/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,jmt4/django-oscar,solarissmoke/django-oscar,dongguangming/django-oscar,amirrpp/django-oscar,vovanbo/django-oscar,ka7eh/django-oscar,john-parton/django-oscar,rocopartners/django-oscar,ahmetdaglarbas/e-commerce,adamend/django-oscar,jmt4/django-oscar,thechampanurag/django-oscar,binarydud/django-oscar,django-oscar/django-oscar,bschuon/django-oscar,machtfit/django-oscar,monikasulik/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,itbabu/django-oscar,sonofatailor/django-oscar,pasqualguerrero/django-oscar,MatthewWilkes/django-oscar,rocopartners/django-oscar,spartonia/django-oscar,spartonia/django-oscar,kapari/django-oscar,anentropic/django-oscar,QLGu/django-oscar,manevant/django-oscar,mexeniz/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,jinnykoo/wuyisj.com,manevant/django-oscar,spartonia/django-oscar,nickpack/django-oscar,itbabu/django-oscar,pasqualguerrero/django-oscar,eddiep1101/django-oscar,thechampanurag/django-oscar,jinnykoo/wuyisj,rocopartners/django-oscar,django-oscar/django-oscar,jlmadurga/django-oscar,saadatqadri/django-oscar,jinnykoo/christmas,sasha0/django-oscar,jmt4/django-oscar,solarissmoke/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,mexeniz/django-oscar,michaelkuty/django-oscar,rocopartners/django-oscar,dongguangming/django-oscar,kapt/django-oscar,faratro/django-oscar,QLGu/django-oscar,bnprk/django-oscar,eddiep1101/django-oscar,sasha0/django-oscar,faratro/django-oscar,josesanch/django-oscar,MatthewWilkes/django-oscar,bnprk/django-oscar,jinnykoo/wuyisj.com,sasha0/django-oscar,jinnykoo/wuyisj,adamend/django-oscar,saadatqadri/django-oscar,dongguangming/django-oscar,marcoantoniooliveira/labweb,WadeYuChen/django-oscar,taedori81/django-oscar,QLGu/django-oscar,john-parton/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,john-parton/django-oscar,mexeniz/django-oscar,amirrpp/django-oscar,marcoantoniooliveira/labweb,marcoantoniooliveira/labweb,kapt/django-oscar,josesanch/django-oscar,Jannes123/django-oscar,WillisXChen/django-oscar,binarydud/django-oscar,lijoantony/django-oscar,adamend/django-oscar,bschuon/django-oscar,michaelkuty/django-oscar,machtfit/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,vovanbo/django-oscar,bnprk/django-oscar,itbabu/django-oscar,john-parton/django-oscar,pdonadeo/django-oscar,okfish/django-oscar,WadeYuChen/django-oscar,kapari/django-oscar,marcoantoniooliveira/labweb,bschuon/django-oscar,jlmadurga/django-oscar,ademuk/django-oscar,machtfit/django-oscar,jinnykoo/wuyisj,ademuk/django-oscar,pdonadeo/django-oscar,dongguangming/django-oscar,spartonia/django-oscar,kapari/django-oscar,adamend/django-oscar,bnprk/django-oscar,amirrpp/django-oscar,ka7eh/django-oscar,ka7eh/django-oscar,jlmadurga/django-oscar,okfish/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,saadatqadri/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj,nfletton/django-oscar,WillisXChen/django-oscar,jlmadurga/django-oscar,WadeYuChen/django-oscar,Bogh/django-oscar,nickpack/django-oscar,solarissmoke/django-oscar,Bogh/django-oscar,Bogh/django-oscar,okfish/django-oscar,WadeYuChen/djan
go-oscar,kapt/django-oscar,manevant/django-oscar,sasha0/django-oscar,amirrpp/django-oscar,monikasulik/django-oscar,okfish/django-oscar,jinnykoo/christmas,Jannes123/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,michaelkuty/django-oscar,nfletton/django-oscar,lijoantony/django-oscar,thechampanurag/django-oscar,anentropic/django-oscar,vovanbo/django-oscar,sonofatailor/django-oscar,taedori81/django-oscar,nickpack/django-oscar,josesanch/django-oscar,kapari/django-oscar,lijoantony/django-oscar,MatthewWilkes/django-oscar,thechampanurag/django-oscar,jinnykoo/christmas,taedori81/django-oscar,faratro/django-oscar,taedori81/django-oscar,ahmetdaglarbas/e-commerce,saadatqadri/django-oscar,bschuon/django-oscar,pdonadeo/django-oscar,ahmetdaglarbas/e-commerce,pasqualguerrero/django-oscar,vovanbo/django-oscar,MatthewWilkes/django-oscar,monikasulik/django-oscar,Jannes123/django-oscar,ademuk/django-oscar | -
import os
+ import re
from django.test import TestCase
import oscar.apps
class TestMigrations(TestCase):
- def check_for_auth_model(self, filepath):
- with open(filepath) as f:
- s = f.read()
- return 'auth.User' in s or 'auth.user' in s
+ def setUp(self):
+ self.root_path = os.path.dirname(oscar.apps.__file__)
+ self.migration_filenames = []
+ for path, __, migrations in os.walk(self.root_path):
+ if path.endswith('migrations'):
+ paths = [
+ os.path.join(path, migration) for migration in migrations
+ if migration.endswith('.py') and migration != '__init__.py']
+ self.migration_filenames += paths
def test_dont_contain_hardcoded_user_model(self):
+ def check_for_auth_model(filepath):
+ with open(filepath) as f:
+ s = f.read()
+ return 'auth.User' in s or 'auth.user' in s
+
+ matches = filter(check_for_auth_model, self.migration_filenames)
- root_path = os.path.dirname(oscar.apps.__file__)
- matches = []
- for dir, __, migrations in os.walk(root_path):
- if dir.endswith('migrations'):
- paths = [os.path.join(dir, migration) for migration in migrations
- if migration.endswith('.py')]
- matches += filter(self.check_for_auth_model, paths)
if matches:
pretty_matches = '\n'.join(
- [match.replace(root_path, '') for match in matches])
+ [match.replace(self.root_path, '') for match in matches])
self.fail('References to hardcoded User model found in the '
'following migration(s):\n' + pretty_matches)
+ def test_no_duplicate_migration_numbers(self):
+ # pull app name and migration number
+ regexp = re.compile(r'^.+oscar/apps/([\w/]+)/migrations/(\d{4}).+$')
+ keys = []
+ for migration in self.migration_filenames:
+ match = regexp.match(migration)
+ keys.append(match.group(1) + match.group(2))
+ self.assertEqual(len(keys), len(set(keys)))
+ | Add unit test for duplicate migration numbers | ## Code Before:
import os
from django.test import TestCase
import oscar.apps
class TestMigrations(TestCase):
def check_for_auth_model(self, filepath):
with open(filepath) as f:
s = f.read()
return 'auth.User' in s or 'auth.user' in s
def test_dont_contain_hardcoded_user_model(self):
root_path = os.path.dirname(oscar.apps.__file__)
matches = []
for dir, __, migrations in os.walk(root_path):
if dir.endswith('migrations'):
paths = [os.path.join(dir, migration) for migration in migrations
if migration.endswith('.py')]
matches += filter(self.check_for_auth_model, paths)
if matches:
pretty_matches = '\n'.join(
[match.replace(root_path, '') for match in matches])
self.fail('References to hardcoded User model found in the '
'following migration(s):\n' + pretty_matches)
## Instruction:
Add unit test for duplicate migration numbers
## Code After:
import os
import re
from django.test import TestCase
import oscar.apps
class TestMigrations(TestCase):
def setUp(self):
self.root_path = os.path.dirname(oscar.apps.__file__)
self.migration_filenames = []
for path, __, migrations in os.walk(self.root_path):
if path.endswith('migrations'):
paths = [
os.path.join(path, migration) for migration in migrations
if migration.endswith('.py') and migration != '__init__.py']
self.migration_filenames += paths
def test_dont_contain_hardcoded_user_model(self):
def check_for_auth_model(filepath):
with open(filepath) as f:
s = f.read()
return 'auth.User' in s or 'auth.user' in s
matches = filter(check_for_auth_model, self.migration_filenames)
if matches:
pretty_matches = '\n'.join(
[match.replace(self.root_path, '') for match in matches])
self.fail('References to hardcoded User model found in the '
'following migration(s):\n' + pretty_matches)
def test_no_duplicate_migration_numbers(self):
# pull app name and migration number
regexp = re.compile(r'^.+oscar/apps/([\w/]+)/migrations/(\d{4}).+$')
keys = []
for migration in self.migration_filenames:
match = regexp.match(migration)
keys.append(match.group(1) + match.group(2))
self.assertEqual(len(keys), len(set(keys)))
|
36f4144a01ed56baea9036e4e09a5d90b1c13372 | crits/core/management/commands/mapreduces.py | crits/core/management/commands/mapreduces.py | from django.core.management.base import BaseCommand
import crits.stats.handlers as stats
class Command(BaseCommand):
"""
Script Class.
"""
help = "Runs mapreduces for CRITs."
def handle(self, *args, **options):
"""
Script Execution.
"""
stats.generate_yara_hits()
stats.generate_sources()
stats.generate_filetypes()
stats.generate_filetypes()
stats.generate_campaign_stats()
stats.generate_counts()
stats.target_user_stats()
stats.campaign_date_stats()
| from django.core.management.base import BaseCommand
import crits.stats.handlers as stats
class Command(BaseCommand):
"""
Script Class.
"""
help = "Runs mapreduces for CRITs."
def handle(self, *args, **options):
"""
Script Execution.
"""
stats.generate_yara_hits()
stats.generate_sources()
stats.generate_filetypes()
stats.generate_campaign_stats()
stats.generate_counts()
stats.target_user_stats()
stats.campaign_date_stats()
| Remove duplicate call to generate_filetypes() | Remove duplicate call to generate_filetypes() | Python | mit | Magicked/crits,lakiw/cripts,Magicked/crits,lakiw/cripts,lakiw/cripts,Magicked/crits,Magicked/crits,lakiw/cripts | from django.core.management.base import BaseCommand
import crits.stats.handlers as stats
class Command(BaseCommand):
"""
Script Class.
"""
help = "Runs mapreduces for CRITs."
def handle(self, *args, **options):
"""
Script Execution.
"""
stats.generate_yara_hits()
stats.generate_sources()
stats.generate_filetypes()
- stats.generate_filetypes()
stats.generate_campaign_stats()
stats.generate_counts()
stats.target_user_stats()
stats.campaign_date_stats()
| Remove duplicate call to generate_filetypes() | ## Code Before:
from django.core.management.base import BaseCommand
import crits.stats.handlers as stats
class Command(BaseCommand):
"""
Script Class.
"""
help = "Runs mapreduces for CRITs."
def handle(self, *args, **options):
"""
Script Execution.
"""
stats.generate_yara_hits()
stats.generate_sources()
stats.generate_filetypes()
stats.generate_filetypes()
stats.generate_campaign_stats()
stats.generate_counts()
stats.target_user_stats()
stats.campaign_date_stats()
## Instruction:
Remove duplicate call to generate_filetypes()
## Code After:
from django.core.management.base import BaseCommand
import crits.stats.handlers as stats
class Command(BaseCommand):
"""
Script Class.
"""
help = "Runs mapreduces for CRITs."
def handle(self, *args, **options):
"""
Script Execution.
"""
stats.generate_yara_hits()
stats.generate_sources()
stats.generate_filetypes()
stats.generate_campaign_stats()
stats.generate_counts()
stats.target_user_stats()
stats.campaign_date_stats()
|
027f89292c1d8e334e9e69222d1ec8753020e8bd | candidates/management/commands/candidates_check_for_inconsistent_data.py | candidates/management/commands/candidates_check_for_inconsistent_data.py | from __future__ import print_function, unicode_literals
import sys
from django.core.management.base import BaseCommand
from candidates.models import check_paired_models
class Command(BaseCommand):
def handle(self, *args, **options):
errors = check_paired_models()
if errors:
for error in errors:
print(error)
sys.exit(1)
| from __future__ import print_function, unicode_literals
import sys
from django.core.management.base import BaseCommand
from candidates.models import (
check_paired_models, check_membership_elections_consistent)
class Command(BaseCommand):
def handle(self, *args, **options):
errors = check_paired_models() + check_membership_elections_consistent()
if errors:
for error in errors:
print(error)
sys.exit(1)
| Add check_membership_elections_consistent to the data checking command | Add check_membership_elections_consistent to the data checking command
| Python | agpl-3.0 | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative | from __future__ import print_function, unicode_literals
import sys
from django.core.management.base import BaseCommand
- from candidates.models import check_paired_models
+ from candidates.models import (
+ check_paired_models, check_membership_elections_consistent)
class Command(BaseCommand):
def handle(self, *args, **options):
- errors = check_paired_models()
+ errors = check_paired_models() + check_membership_elections_consistent()
if errors:
for error in errors:
print(error)
sys.exit(1)
| Add check_membership_elections_consistent to the data checking command | ## Code Before:
from __future__ import print_function, unicode_literals
import sys
from django.core.management.base import BaseCommand
from candidates.models import check_paired_models
class Command(BaseCommand):
def handle(self, *args, **options):
errors = check_paired_models()
if errors:
for error in errors:
print(error)
sys.exit(1)
## Instruction:
Add check_membership_elections_consistent to the data checking command
## Code After:
from __future__ import print_function, unicode_literals
import sys
from django.core.management.base import BaseCommand
from candidates.models import (
check_paired_models, check_membership_elections_consistent)
class Command(BaseCommand):
def handle(self, *args, **options):
errors = check_paired_models() + check_membership_elections_consistent()
if errors:
for error in errors:
print(error)
sys.exit(1)
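As more checks accumulate, chaining return values with + grows awkward; holding the check callables in a list keeps handle() unchanged when checks are added. A sketch of that shape, with stub functions standing in for the imported checks:

def check_paired_models():
    return []  # stub: the real check returns a list of error strings

def check_membership_elections_consistent():
    return ['example inconsistency']  # stub

checks = [check_paired_models, check_membership_elections_consistent]
errors = [error for check in checks for error in check()]
assert errors == ['example inconsistency']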
|
0bff34400d912806a9d831f5e0436082d359a531 | tomviz/python/tomviz/state/_pipeline.py | tomviz/python/tomviz/state/_pipeline.py | from tomviz._wrapping import PipelineStateManagerBase
class PipelineStateManager(PipelineStateManagerBase):
_instance = None
# Need to define a constructor as the implementation on the C++ side is
# static.
def __init__(self):
pass
def __call__(cls):
if cls._instance is None:
cls._instance = super(PipelineStateManager, cls).__call__()
return cls._instances | from tomviz._wrapping import PipelineStateManagerBase
class PipelineStateManager(PipelineStateManagerBase):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = PipelineStateManagerBase.__new__(cls, *args, **kwargs)
return cls._instance
| Fix singleton to work with wrapped manager class | Fix singleton to work with wrapped manager class
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com>
| Python | bsd-3-clause | OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz | from tomviz._wrapping import PipelineStateManagerBase
class PipelineStateManager(PipelineStateManagerBase):
_instance = None
+ def __new__(cls, *args, **kwargs):
+ if cls._instance is None:
+ cls._instance = PipelineStateManagerBase.__new__(cls, *args, **kwargs)
- # Need to define a constructor as the implementation on the C++ side is
- # static.
- def __init__(self):
- pass
+ return cls._instance
- def __call__(cls):
- if cls._instance is None:
- cls._instance = super(PipelineStateManager, cls).__call__()
- return cls._instances | Fix singleton to work with wrapped manager class | ## Code Before:
from tomviz._wrapping import PipelineStateManagerBase
class PipelineStateManager(PipelineStateManagerBase):
_instance = None
# Need to define a constructor as the implementation on the C++ side is
# static.
def __init__(self):
pass
def __call__(cls):
if cls._instance is None:
cls._instance = super(PipelineStateManager, cls).__call__()
return cls._instances
## Instruction:
Fix singleton to work with wrapped manager class
## Code After:
from tomviz._wrapping import PipelineStateManagerBase
class PipelineStateManager(PipelineStateManagerBase):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = PipelineStateManagerBase.__new__(cls, *args, **kwargs)
return cls._instance
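The fix above works because __new__ is the hook that actually controls allocation, while __init__ only runs on an object that already exists (and here the C++-side implementation is static). The same pattern in a dependency-free form:

class Singleton(object):
    _instance = None

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super(Singleton, cls).__new__(cls)
        return cls._instance

a, b = Singleton(), Singleton()
assert a is b  # one shared instance
# caveat: __init__ still runs on every call, so keep it idempotent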
|
cbdcdf16285823a8e13a68c8e86d6957aa7aa6d8 | kivy/tools/packaging/pyinstaller_hooks/pyi_rth_kivy.py | kivy/tools/packaging/pyinstaller_hooks/pyi_rth_kivy.py | import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = '{};{}'.format(
sys._MEIPASS, os.path.join(sys._MEIPASS, 'gst-plugins'))
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
| import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins')
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
| Fix GST_PLUGIN_PATH in runtime hook | Fix GST_PLUGIN_PATH in runtime hook
- Only include `gst-plugins`
- Also, semicolon was only correct on Windows
| Python | mit | inclement/kivy,inclement/kivy,kivy/kivy,kivy/kivy,akshayaurora/kivy,akshayaurora/kivy,kivy/kivy,matham/kivy,rnixx/kivy,matham/kivy,inclement/kivy,matham/kivy,matham/kivy,rnixx/kivy,akshayaurora/kivy,rnixx/kivy | import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
+ os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins')
- os.environ['GST_PLUGIN_PATH'] = '{};{}'.format(
- sys._MEIPASS, os.path.join(sys._MEIPASS, 'gst-plugins'))
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
| Fix GST_PLUGIN_PATH in runtime hook | ## Code Before:
import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = '{};{}'.format(
sys._MEIPASS, os.path.join(sys._MEIPASS, 'gst-plugins'))
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
## Instruction:
Fix GST_PLUGIN_PATH in runtime hook
## Code After:
import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins')
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
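On the portability point in the message -- ';' separates search paths only on Windows -- the safe way to combine several directories is os.pathsep, which is ':' on POSIX and ';' on Windows. A one-line sketch with placeholder paths:

import os

plugin_dirs = ['/app/gst-plugins', '/app/more-plugins']  # placeholder paths
os.environ['GST_PLUGIN_PATH'] = os.pathsep.join(plugin_dirs)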
|
f22cabf494f13535cdbb489f12e98c7358a29f74 | openstack/tests/functional/telemetry/v2/test_sample.py | openstack/tests/functional/telemetry/v2/test_sample.py |
import unittest
from openstack.telemetry.v2 import sample
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestSample(base.BaseFunctionalTest):
def test_list(self):
for meter in self.conn.telemetry.meters():
sot = next(self.conn.telemetry.samples(meter))
assert isinstance(sot, sample.Sample)
|
import unittest
from openstack.telemetry.v2 import sample
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestSample(base.BaseFunctionalTest):
def test_list(self):
for meter in self.conn.telemetry.meters():
for sot in self.conn.telemetry.samples(meter):
assert isinstance(sot, sample.Sample)
| Fix the telemetry sample test | Fix the telemetry sample test
This test works fine on devstack, but on the test gate not all
the meters have samples, so only iterate over them if there are
samples.
Partial-bug: #1665495
Change-Id: I8f327737a53194aeba08925391f1976f1b506aa0
| Python | apache-2.0 | dtroyer/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,dtroyer/python-openstacksdk |
import unittest
from openstack.telemetry.v2 import sample
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestSample(base.BaseFunctionalTest):
def test_list(self):
for meter in self.conn.telemetry.meters():
- sot = next(self.conn.telemetry.samples(meter))
+ for sot in self.conn.telemetry.samples(meter):
- assert isinstance(sot, sample.Sample)
+ assert isinstance(sot, sample.Sample)
| Fix the telemetry sample test | ## Code Before:
import unittest
from openstack.telemetry.v2 import sample
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestSample(base.BaseFunctionalTest):
def test_list(self):
for meter in self.conn.telemetry.meters():
sot = next(self.conn.telemetry.samples(meter))
assert isinstance(sot, sample.Sample)
## Instruction:
Fix the telemetry sample test
## Code After:
import unittest
from openstack.telemetry.v2 import sample
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestSample(base.BaseFunctionalTest):
def test_list(self):
for meter in self.conn.telemetry.meters():
for sot in self.conn.telemetry.samples(meter):
assert isinstance(sot, sample.Sample)
|
618a1f520f2584ec3cf56b29cf71c9ad6b4240fd | tests/acceptance/assignments/one_second_timeout/correct_solution/sleep.py | tests/acceptance/assignments/one_second_timeout/correct_solution/sleep.py | from time import sleep
sleep(1)
| from time import sleep
# Due to the overhead of Python, sleeping for 1 second will cause testing to
# time out if the timeout is 1 second
sleep(1)
| Add comment to one_second_timeout assignment | Add comment to one_second_timeout assignment
| Python | agpl-3.0 | git-keeper/git-keeper,git-keeper/git-keeper | from time import sleep
+ # Due to the overhead of Python, sleeping for 1 second will cause testing to
+ # time out if the timeout is 1 second
sleep(1)
| Add comment to one_second_timeout assignment | ## Code Before:
from time import sleep
sleep(1)
## Instruction:
Add comment to one_second_timeout assignment
## Code After:
from time import sleep
# Due to the overhead of Python, sleeping for 1 second will cause testing to
# time out if the timeout is 1 second
sleep(1)
|
008f0a2b0a7823e619410c5af70061d093c6f3de | timeseries.py | timeseries.py |
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
x.append(obj['timestamp'])
autoscale(True, 'both')
plot(x, y, label = y_key)
legend(loc='upper left')
show()
if __name__ == "__main__":
main()
|
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
parser.add_argument("-x",
help = "the key to use for the function being plotted",
default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
if x_key is None:
x.append(obj['timestamp'])
if obj['name'] == x_key:
x.append(obj['value'])
autoscale(True, 'both')
xlabel(x_key or 'timestamp')
ylabel(y_key)
plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
| Allow plotting two types against one another. | Allow plotting two types against one another.
| Python | bsd-3-clause | openxc/openxc-data-tools |
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
+ parser.add_argument("-x",
+                         help = "the key to use for the x axis (defaults to timestamp)",
+ default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
+ x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
+ if x_key is None:
- x.append(obj['timestamp'])
+ x.append(obj['timestamp'])
+ if obj['name'] == x_key:
+ x.append(obj['value'])
autoscale(True, 'both')
- plot(x, y, label = y_key)
- legend(loc='upper left')
+ xlabel(x_key or 'timestamp')
+ ylabel(y_key)
+ plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
| Allow plotting two types against one another. | ## Code Before:
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
x.append(obj['timestamp'])
autoscale(True, 'both')
plot(x, y, label = y_key)
legend(loc='upper left')
show()
if __name__ == "__main__":
main()
## Instruction:
Allow plotting two types against one another.
## Code After:
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
parser.add_argument("-x",
help = "the key to use for the function being plotted",
default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
if x_key is None:
x.append(obj['timestamp'])
if obj['name'] == x_key:
x.append(obj['value'])
autoscale(True, 'both')
xlabel(x_key or 'timestamp')
ylabel(y_key)
plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
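A sketch of how the new option is exercised; the trace below is invented but follows the one-object-per-line JSON shape that main() parses, with name/value/timestamp keys. Note that pairing x against y relies on both keys appearing the same number of times in the trace.

import json

# Hypothetical OpenXC-style trace with two signals.
with open("trace.json", "w") as f:
    for t in range(5):
        f.write(json.dumps({"name": "engine_speed", "value": 1000 + 200 * t,
                            "timestamp": t}) + "\n")
        f.write(json.dumps({"name": "vehicle_speed", "value": 10 * t,
                            "timestamp": t}) + "\n")

# python timeseries.py trace.json -y vehicle_speed                  # y vs. time
# python timeseries.py trace.json -y vehicle_speed -x engine_speed  # y vs. x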
|
7481c6aad4cd844b0c3fab6f05e4d24aa3c17770 | src/nodeconductor_assembly_waldur/invoices/log.py | src/nodeconductor_assembly_waldur/invoices/log.py | from nodeconductor.logging.loggers import EventLogger, event_logger
class InvoiceLogger(EventLogger):
month = int
year = int
customer = 'structure.Customer'
class Meta:
event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled')
event_logger.register('invoice', InvoiceLogger)
| from nodeconductor.logging.loggers import EventLogger, event_logger
class InvoiceLogger(EventLogger):
month = int
year = int
customer = 'structure.Customer'
class Meta:
event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled')
event_groups = {
'customers': event_types,
'invoices': event_types,
}
event_logger.register('invoice', InvoiceLogger)
| Define groups for the invoice events. | Define groups for the invoice events.
- wal-202
| Python | mit | opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind | from nodeconductor.logging.loggers import EventLogger, event_logger
class InvoiceLogger(EventLogger):
month = int
year = int
customer = 'structure.Customer'
class Meta:
event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled')
+ event_groups = {
+ 'customers': event_types,
+ 'invoices': event_types,
+ }
+
event_logger.register('invoice', InvoiceLogger)
| Define groups for the invoice events. | ## Code Before:
from nodeconductor.logging.loggers import EventLogger, event_logger
class InvoiceLogger(EventLogger):
month = int
year = int
customer = 'structure.Customer'
class Meta:
event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled')
event_logger.register('invoice', InvoiceLogger)
## Instruction:
Define groups for the invoice events.
## Code After:
from nodeconductor.logging.loggers import EventLogger, event_logger
class InvoiceLogger(EventLogger):
month = int
year = int
customer = 'structure.Customer'
class Meta:
event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled')
event_groups = {
'customers': event_types,
'invoices': event_types,
}
event_logger.register('invoice', InvoiceLogger)
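Registering under the name 'invoice' makes the logger addressable as event_logger.invoice. Emitting one of the grouped events would then look roughly like the sketch below; the invoice variable is a placeholder and the exact call signature should be checked against the NodeConductor version in use.

# Hypothetical emission site, e.g. right after an invoice is created.
event_logger.invoice.info(
    'Invoice for {month}/{year} has been created.',
    event_type='invoice_created',
    event_context={
        'month': invoice.month,      # `invoice` is a placeholder instance
        'year': invoice.year,
        'customer': invoice.customer,
    })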
|
6618b12cef2759174148d1c7f69cbb91b8ea4482 | mygpo/podcasts/migrations/0015_auto_20140616_2126.py | mygpo/podcasts/migrations/0015_auto_20140616_2126.py | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
]
| from __future__ import unicode_literals
from django.db import models, migrations
def set_scope(apps, schema_editor):
URL = apps.get_model('podcasts', 'URL')
Slug = apps.get_model('podcasts', 'Slug')
URL.objects.filter(scope__isnull=True).update(scope='')
Slug.objects.filter(scope__isnull=True).update(scope='')
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.RunPython(set_scope),
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
]
| Fix data migration when making scope non-null | [DB] Fix data migration when making scope non-null
| Python | agpl-3.0 | gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo | from __future__ import unicode_literals
from django.db import models, migrations
+
+
+ def set_scope(apps, schema_editor):
+ URL = apps.get_model('podcasts', 'URL')
+ Slug = apps.get_model('podcasts', 'Slug')
+
+ URL.objects.filter(scope__isnull=True).update(scope='')
+ Slug.objects.filter(scope__isnull=True).update(scope='')
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
- field=models.CharField(db_index=True, max_length=32, blank=True),
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.AlterField(
model_name='url',
name='scope',
- field=models.CharField(db_index=True, max_length=32, blank=True),
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
+ migrations.RunPython(set_scope),
+ migrations.AlterField(
+ model_name='slug',
+ name='scope',
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
+ ),
+ migrations.AlterField(
+ model_name='url',
+ name='scope',
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
+ ),
+
]
| Fix data migration when making scope non-null | ## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
]
## Instruction:
Fix data migration when making scope non-null
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
def set_scope(apps, schema_editor):
URL = apps.get_model('podcasts', 'URL')
Slug = apps.get_model('podcasts', 'Slug')
URL.objects.filter(scope__isnull=True).update(scope='')
Slug.objects.filter(scope__isnull=True).update(scope='')
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.RunPython(set_scope),
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
]
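The three-step shape above is the standard Django recipe for tightening a nullable column: a field default is applied by the ORM to new rows only, so existing NULLs must be rewritten before NOT NULL can be enforced. A generic sketch of the same pattern, with placeholder app, model, and field names:

from django.db import migrations, models

def backfill(apps, schema_editor):
    Thing = apps.get_model('myapp', 'Thing')   # placeholder model
    Thing.objects.filter(scope__isnull=True).update(scope='')

class Migration(migrations.Migration):
    dependencies = [('myapp', '0001_initial')]
    operations = [
        # 1. loosen: temporarily allow NULL
        migrations.AlterField('thing', 'scope',
            models.CharField(max_length=32, blank=True, null=True)),
        # 2. backfill: rewrite existing NULLs as empty strings
        migrations.RunPython(backfill),
        # 3. tighten: NOT NULL is now safe to enforce
        migrations.AlterField('thing', 'scope',
            models.CharField(max_length=32, blank=True, null=False)),
    ]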
|
d2a0d0d22a8369c99626ca754a337ea8076f7efa | aybu/core/models/migrations/versions/587c89cfa8ea_added_column_weight_.py | aybu/core/models/migrations/versions/587c89cfa8ea_added_column_weight_.py |
# revision identifiers, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight', sa.Integer(),
nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
|
# revision identifiers, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight',
sa.Integer(),
nullable=True,
default=0))
connection = op.get_bind()
connection.execute('UPDATE files SET weight=0')
op.alter_column('files',
'weight',
existing_type=sa.Integer,
nullable=False)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
| Fix bug in migration script | Fix bug in migration script
| Python | apache-2.0 | asidev/aybu-core |
# revision identifiers, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
- op.add_column('files', sa.Column('weight', sa.Integer(),
+ op.add_column('files', sa.Column('weight',
+ sa.Integer(),
+ nullable=True,
- nullable=False, default=0))
+ default=0))
- ### end Alembic commands ###
+ connection = op.get_bind()
+ connection.execute('UPDATE files SET weight=0')
+ op.alter_column('files',
+ 'weight',
+ existing_type=sa.Integer,
+ nullable=False)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
| Fix bug in migration script | ## Code Before:
# revision identifiers, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight', sa.Integer(),
nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
## Instruction:
Fix bug in migration script
## Code After:
# revision identifiers, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight',
sa.Integer(),
nullable=True,
default=0))
connection = op.get_bind()
connection.execute('UPDATE files SET weight=0')
op.alter_column('files',
'weight',
existing_type=sa.Integer,
nullable=False)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
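This is the same loosen, backfill, tighten sequence as the Django migration above, expressed in Alembic. One small simplification worth noting: for a plain statement the explicit bind is unnecessary, since op.execute() runs against the migration connection directly. A sketch of the equivalent upgrade:

def upgrade():
    op.add_column('files', sa.Column('weight', sa.Integer(),
                                     nullable=True, default=0))
    op.execute('UPDATE files SET weight=0')   # same effect as op.get_bind()
    op.alter_column('files', 'weight',
                    existing_type=sa.Integer, nullable=False)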
|
29c437e15f7793886c80b71ca6764184caff2597 | readthedocs/oauth/management/commands/load_project_remote_repo_relation.py | readthedocs/oauth/management/commands/load_project_remote_repo_relation.py | import json
from django.core.management.base import BaseCommand
from readthedocs.oauth.models import RemoteRepository
class Command(BaseCommand):
help = "Load Project and RemoteRepository Relationship from JSON file"
def add_arguments(self, parser):
# File path of the json file containing relationship data
parser.add_argument(
'--file',
required=True,
nargs=1,
type=str,
help='File path of the json file containing relationship data.',
)
def handle(self, *args, **options):
file = options.get('file')[0]
try:
# Load data from the json file
with open(file, 'r') as f:
data = json.load(f)
except Exception as e:
self.stdout.write(
self.style.ERROR(
f'Exception occurred while trying to load the file "{file}". '
f'Exception: {e}.'
)
)
return
for item in data:
try:
RemoteRepository.objects.filter(
remote_id=item['remote_id']
).update(project_id=item['project_id'])
except Exception as e:
self.stdout.write(
self.style.ERROR(
f"Exception occurred while trying to update {item['slug']}'s "
f"relationship with {item['html_url']}, "
f"username: {item['username']}, Exception: {e}."
)
)
| import json
from django.core.management.base import BaseCommand
from readthedocs.oauth.models import RemoteRepository
class Command(BaseCommand):
help = "Load Project and RemoteRepository Relationship from JSON file"
def add_arguments(self, parser):
# File path of the json file containing relationship data
parser.add_argument(
'--file',
required=True,
nargs=1,
type=str,
help='File path of the json file containing relationship data.',
)
def handle(self, *args, **options):
file = options.get('file')[0]
try:
# Load data from the json file
with open(file, 'r') as f:
data = json.load(f)
except Exception as e:
self.stdout.write(
self.style.ERROR(
f'Exception occurred while trying to load the file "{file}". '
f'Exception: {e}.'
)
)
return
for item in data:
try:
update_count = RemoteRepository.objects.filter(
remote_id=item['remote_id']
).update(project_id=item['project_id'])
if update_count < 1:
self.stdout.write(
self.style.ERROR(
f"Could not update {item['slug']}'s "
f"relationship with {item['html_url']}, "
f"remote_id {item['remote_id']}, "
f"username: {item['username']}."
)
)
except Exception as e:
self.stdout.write(
self.style.ERROR(
f"Exception occurred while trying to update {item['slug']}'s "
f"relationship with {item['html_url']}, "
f"username: {item['username']}, Exception: {e}."
)
)
| Check if the remote_repo was updated or not and log error | Check if the remote_repo was updated or not and log error
| Python | mit | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | import json
from django.core.management.base import BaseCommand
from readthedocs.oauth.models import RemoteRepository
class Command(BaseCommand):
help = "Load Project and RemoteRepository Relationship from JSON file"
def add_arguments(self, parser):
# File path of the json file containing relationship data
parser.add_argument(
'--file',
required=True,
nargs=1,
type=str,
help='File path of the json file containing relationship data.',
)
def handle(self, *args, **options):
file = options.get('file')[0]
try:
# Load data from the json file
with open(file, 'r') as f:
data = json.load(f)
except Exception as e:
self.stdout.write(
self.style.ERROR(
f'Exception occurred while trying to load the file "{file}". '
f'Exception: {e}.'
)
)
return
for item in data:
try:
- RemoteRepository.objects.filter(
+ update_count = RemoteRepository.objects.filter(
remote_id=item['remote_id']
).update(project_id=item['project_id'])
+
+ if update_count < 1:
+ self.stdout.write(
+ self.style.ERROR(
+ f"Could not update {item['slug']}'s "
+ f"relationship with {item['html_url']}, "
+ f"remote_id {item['remote_id']}, "
+ f"username: {item['username']}."
+ )
+ )
except Exception as e:
self.stdout.write(
self.style.ERROR(
f"Exception occurred while trying to update {item['slug']}'s "
f"relationship with {item['html_url']}, "
f"username: {item['username']}, Exception: {e}."
)
)
| Check if the remote_repo was updated or not and log error | ## Code Before:
import json
from django.core.management.base import BaseCommand
from readthedocs.oauth.models import RemoteRepository
class Command(BaseCommand):
help = "Load Project and RemoteRepository Relationship from JSON file"
def add_arguments(self, parser):
# File path of the json file containing relationship data
parser.add_argument(
'--file',
required=True,
nargs=1,
type=str,
help='File path of the json file containing relationship data.',
)
def handle(self, *args, **options):
file = options.get('file')[0]
try:
# Load data from the json file
with open(file, 'r') as f:
data = json.load(f)
except Exception as e:
self.stdout.write(
self.style.ERROR(
f'Exception occurred while trying to load the file "{file}". '
f'Exception: {e}.'
)
)
return
for item in data:
try:
RemoteRepository.objects.filter(
remote_id=item['remote_id']
).update(project_id=item['project_id'])
except Exception as e:
self.stdout.write(
self.style.ERROR(
f"Exception occurred while trying to update {item['slug']}'s "
f"relationship with {item['html_url']}, "
f"username: {item['username']}, Exception: {e}."
)
)
## Instruction:
Check if the remote_repo was updated or not and log error
## Code After:
import json
from django.core.management.base import BaseCommand
from readthedocs.oauth.models import RemoteRepository
class Command(BaseCommand):
help = "Load Project and RemoteRepository Relationship from JSON file"
def add_arguments(self, parser):
# File path of the json file containing relationship data
parser.add_argument(
'--file',
required=True,
nargs=1,
type=str,
help='File path of the json file containing relationship data.',
)
def handle(self, *args, **options):
file = options.get('file')[0]
try:
# Load data from the json file
with open(file, 'r') as f:
data = json.load(f)
except Exception as e:
self.stdout.write(
self.style.ERROR(
f'Exception occurred while trying to load the file "{file}". '
f'Exception: {e}.'
)
)
return
for item in data:
try:
update_count = RemoteRepository.objects.filter(
remote_id=item['remote_id']
).update(project_id=item['project_id'])
if update_count < 1:
self.stdout.write(
self.style.ERROR(
f"Could not update {item['slug']}'s "
f"relationship with {item['html_url']}, "
f"remote_id {item['remote_id']}, "
f"username: {item['username']}."
)
)
except Exception as e:
self.stdout.write(
self.style.ERROR(
f"Exception occurred while trying to update {item['slug']}'s "
f"relationship with {item['html_url']}, "
f"username: {item['username']}, Exception: {e}."
)
)
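From the keys the command reads, the input must be a JSON array of objects carrying remote_id, project_id, slug, html_url and username. A sketch of producing such a file and invoking the command; all values are invented.

import json

data = [{
    "remote_id": "12345",        # hypothetical identifiers
    "project_id": 42,
    "slug": "my-project",
    "html_url": "https://github.com/example/my-project",
    "username": "example",
}]
with open("relationships.json", "w") as f:
    json.dump(data, f)

# python manage.py load_project_remote_repo_relation --file relationships.json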
|
dd42c1c1b1cd0cbe55c27cafe9d2db5466782bc4 | server/users-microservice/src/api/users/userModel.py | server/users-microservice/src/api/users/userModel.py | from index import db
class UserModel(db.Model):
__tablename__ = 'User'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.String(80), unique=True, nullable=False)
fullname = db.Column(db.String(80), unique=True, nullable=False)
initials = db.Column(db.String(10), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
password = db.Column(db.String(80), unique=True, nullable=False)
application = db.Column(db.String(80), unique=True, nullable=False)
def __init__(self, name, fullname, initials, email, password, application):
self.name = name
self.fullname = fullname
self.initials = initials
self.email = email
self.password = password
self.application = application
def __repr__(self):
        return self.name | from index import db, bcrypt
class UserModel(db.Model):
__tablename__ = 'User'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.String(80), unique=True, nullable=False)
fullname = db.Column(db.String(80), unique=True, nullable=False)
initials = db.Column(db.String(10), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
password = db.Column(db.String(80), unique=True, nullable=False)
application = db.Column(db.String(80), unique=True, nullable=False)
def __init__(self, name, fullname, initials, email, password, application):
self.name = name
self.fullname = fullname
self.initials = initials
self.email = email
self.application = application
self.set_password(password)
def __repr__(self):
return self.name
def set_password(self, password):
self.password = bcrypt.generate_password_hash(password)
def check_password(self, password):
return bcrypt.check_password_hash(self.password, password) | Encrypt password before saving user | Encrypt password before saving user
| Python | mit | Madmous/Trello-Clone,Madmous/madClones,Madmous/madClones,Madmous/madClones,Madmous/madClones,Madmous/Trello-Clone,Madmous/Trello-Clone | - from index import db
+ from index import db, bcrypt
class UserModel(db.Model):
__tablename__ = 'User'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.String(80), unique=True, nullable=False)
fullname = db.Column(db.String(80), unique=True, nullable=False)
initials = db.Column(db.String(10), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
password = db.Column(db.String(80), unique=True, nullable=False)
application = db.Column(db.String(80), unique=True, nullable=False)
def __init__(self, name, fullname, initials, email, password, application):
self.name = name
self.fullname = fullname
self.initials = initials
self.email = email
- self.password = password
self.application = application
+
+ self.set_password(password)
def __repr__(self):
return self.name
+
+ def set_password(self, password):
+ self.password = bcrypt.generate_password_hash(password)
+
+ def check_password(self, password):
+ return bcrypt.check_password_hash(self.password, password) | Encrypt password before saving user | ## Code Before:
from index import db
class UserModel(db.Model):
__tablename__ = 'User'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.String(80), unique=True, nullable=False)
fullname = db.Column(db.String(80), unique=True, nullable=False)
initials = db.Column(db.String(10), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
password = db.Column(db.String(80), unique=True, nullable=False)
application = db.Column(db.String(80), unique=True, nullable=False)
def __init__(self, name, fullname, initials, email, password, application):
self.name = name
self.fullname = fullname
self.initials = initials
self.email = email
self.password = password
self.application = application
def __repr__(self):
return self.name
## Instruction:
Encrypt password before saving user
## Code After:
from index import db, bcrypt
class UserModel(db.Model):
__tablename__ = 'User'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.String(80), unique=True, nullable=False)
fullname = db.Column(db.String(80), unique=True, nullable=False)
initials = db.Column(db.String(10), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
password = db.Column(db.String(80), unique=True, nullable=False)
application = db.Column(db.String(80), unique=True, nullable=False)
def __init__(self, name, fullname, initials, email, password, application):
self.name = name
self.fullname = fullname
self.initials = initials
self.email = email
self.application = application
self.set_password(password)
def __repr__(self):
return self.name
def set_password(self, password):
self.password = bcrypt.generate_password_hash(password)
def check_password(self, password):
return bcrypt.check_password_hash(self.password, password) |
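Assuming bcrypt here is a Flask-Bcrypt instance created next to db in the index module, the model would be exercised roughly as below; all values are invented and the session handling follows the usual Flask-SQLAlchemy pattern.

# Hypothetical round-trip through the hashed-password API above.
user = UserModel('jdoe', 'Jane Doe', 'JD', 'jane@example.com',
                 's3cret', 'trello-clone')
db.session.add(user)
db.session.commit()

assert user.check_password('s3cret')      # correct password verifies
assert not user.check_password('wrong')   # wrong password is rejected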
17ab8c01a88bda8dba4aaa5e57c857babfeb9444 | debtcollector/fixtures/disable.py | debtcollector/fixtures/disable.py |
from __future__ import absolute_import
import fixtures
from debtcollector import _utils
class DisableFixture(fixtures.Fixture):
"""Fixture that disables debtcollector triggered warnings.
This does **not** disable warnings calls emitted by other libraries.
This can be used like::
from debtcollector.fixtures import disable
with disable.DisableFixture():
            <some code that calls into deprecated code>
"""
def _setUp(self):
self.addCleanup(setattr, _utils, "_enabled", True)
_utils._enabled = False
|
import fixtures
from debtcollector import _utils
class DisableFixture(fixtures.Fixture):
"""Fixture that disables debtcollector triggered warnings.
This does **not** disable warnings calls emitted by other libraries.
This can be used like::
from debtcollector.fixtures import disable
with disable.DisableFixture():
            <some code that calls into deprecated code>
"""
def _setUp(self):
self.addCleanup(setattr, _utils, "_enabled", True)
_utils._enabled = False
| Stop using the __future__ module.
The __future__ module [1] was used in this context to ensure compatibility
between python 2 and python 3.
We previously dropped support for Python 2.7 [2]; since we now support only
Python 3, we no longer need this module or the imports listed below.
Imports commonly used and their related PEPs:
- `division` is related to PEP 238 [3]
- `print_function` is related to PEP 3105 [4]
- `unicode_literals` is related to PEP 3112 [5]
- `with_statement` is related to PEP 343 [6]
- `absolute_import` is related to PEP 328 [7]
[1] https://docs.python.org/3/library/__future__.html
[2] https://governance.openstack.org/tc/goals/selected/ussuri/drop-py27.html
[3] https://www.python.org/dev/peps/pep-0238
[4] https://www.python.org/dev/peps/pep-3105
[5] https://www.python.org/dev/peps/pep-3112
[6] https://www.python.org/dev/peps/pep-0343
[7] https://www.python.org/dev/peps/pep-0328
Change-Id: I2b2f006e0ec145730bec843add4147345797b920
| Python | apache-2.0 | openstack/debtcollector | -
- from __future__ import absolute_import
import fixtures
from debtcollector import _utils
class DisableFixture(fixtures.Fixture):
"""Fixture that disables debtcollector triggered warnings.
This does **not** disable warnings calls emitted by other libraries.
This can be used like::
from debtcollector.fixtures import disable
with disable.DisableFixture():
            <some code that calls into deprecated code>
"""
def _setUp(self):
self.addCleanup(setattr, _utils, "_enabled", True)
_utils._enabled = False
| Stop using the __future__ module. | ## Code Before:
from __future__ import absolute_import
import fixtures
from debtcollector import _utils
class DisableFixture(fixtures.Fixture):
"""Fixture that disables debtcollector triggered warnings.
This does **not** disable warnings calls emitted by other libraries.
This can be used like::
from debtcollector.fixtures import disable
with disable.DisableFixture():
            <some code that calls into deprecated code>
"""
def _setUp(self):
self.addCleanup(setattr, _utils, "_enabled", True)
_utils._enabled = False
## Instruction:
Stop using the __future__ module.
## Code After:
import fixtures
from debtcollector import _utils
class DisableFixture(fixtures.Fixture):
"""Fixture that disables debtcollector triggered warnings.
This does **not** disable warnings calls emitted by other libraries.
This can be used like::
from debtcollector.fixtures import disable
with disable.DisableFixture():
            <some code that calls into deprecated code>
"""
def _setUp(self):
self.addCleanup(setattr, _utils, "_enabled", True)
_utils._enabled = False
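In a test suite the fixture is usually consumed through useFixture rather than a bare with-block. A sketch, using debtcollector's own removals helper to build a deprecated callable:

import warnings
import fixtures
from debtcollector import removals
from debtcollector.fixtures import disable

class DisableFixtureTest(fixtures.TestWithFixtures):
    def test_deprecation_warnings_are_suppressed(self):
        self.useFixture(disable.DisableFixture())
        deprecated = removals.remove(lambda: None)  # would normally warn
        with warnings.catch_warnings(record=True) as captured:
            warnings.simplefilter("always")
            deprecated()
        self.assertEqual([], list(captured))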
|
5aba92fff0303546be0850f786a25659453674a6 | masters/master.chromium.webkit/master_source_cfg.py | masters/master.chromium.webkit/master_source_cfg.py |
from buildbot.changes import svnpoller
from buildbot.scheduler import AnyBranchScheduler
from common import chromium_utils
from master import build_utils
from master import gitiles_poller
def WebkitFileSplitter(path):
"""split_file for webkit.org repository."""
projects = ['trunk']
return build_utils.SplitPath(projects, path)
def Update(config, _active_master, c):
# Polls config.Master.trunk_url for changes
cr_poller = gitiles_poller.GitilesPoller(
'https://chromium.googlesource.com/chromium/src',
pollInterval=30, project='chromium')
c['change_source'].append(cr_poller)
webkit_url = 'http://src.chromium.org/viewvc/blink?view=rev&revision=%s'
webkit_poller = svnpoller.SVNPoller(
svnurl=config.Master.webkit_root_url,
svnbin=chromium_utils.SVN_BIN,
split_file=WebkitFileSplitter,
pollinterval=30,
revlinktmpl=webkit_url,
cachepath='webkit.svnrev',
project='webkit')
c['change_source'].append(webkit_poller)
c['schedulers'].append(AnyBranchScheduler(
name='global_scheduler', branches=['trunk', 'master'], treeStableTimer=60,
builderNames=[]))
c['schedulers'].append(AnyBranchScheduler(
name='global_deps_scheduler', branches=['master'], treeStableTimer=60,
builderNames=[]))
|
from buildbot.scheduler import AnyBranchScheduler
from master import gitiles_poller
def Update(config, _active_master, c):
# Polls config.Master.trunk_url for changes
cr_poller = gitiles_poller.GitilesPoller(
'https://chromium.googlesource.com/chromium/src',
pollInterval=30, project='chromium')
c['change_source'].append(cr_poller)
c['schedulers'].append(AnyBranchScheduler(
name='global_scheduler', branches=['trunk', 'master'], treeStableTimer=60,
builderNames=[]))
| Remove blink scheduler from chromium.webkit | Remove blink scheduler from chromium.webkit
For context, please see:
https://groups.google.com/a/chromium.org/d/msg/blink-dev/S-P3N0kdkMM/ohfRyTNyAwAJ
https://groups.google.com/a/chromium.org/d/msg/blink-dev/3APcgCM52JQ/OyqNugnFAAAJ
BUG=431478
Review URL: https://codereview.chromium.org/1351623005
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@296754 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build |
- from buildbot.changes import svnpoller
from buildbot.scheduler import AnyBranchScheduler
- from common import chromium_utils
-
- from master import build_utils
from master import gitiles_poller
- def WebkitFileSplitter(path):
- """split_file for webkit.org repository."""
- projects = ['trunk']
- return build_utils.SplitPath(projects, path)
def Update(config, _active_master, c):
# Polls config.Master.trunk_url for changes
cr_poller = gitiles_poller.GitilesPoller(
'https://chromium.googlesource.com/chromium/src',
pollInterval=30, project='chromium')
c['change_source'].append(cr_poller)
- webkit_url = 'http://src.chromium.org/viewvc/blink?view=rev&revision=%s'
- webkit_poller = svnpoller.SVNPoller(
- svnurl=config.Master.webkit_root_url,
- svnbin=chromium_utils.SVN_BIN,
- split_file=WebkitFileSplitter,
- pollinterval=30,
- revlinktmpl=webkit_url,
- cachepath='webkit.svnrev',
- project='webkit')
- c['change_source'].append(webkit_poller)
-
c['schedulers'].append(AnyBranchScheduler(
name='global_scheduler', branches=['trunk', 'master'], treeStableTimer=60,
builderNames=[]))
- c['schedulers'].append(AnyBranchScheduler(
- name='global_deps_scheduler', branches=['master'], treeStableTimer=60,
- builderNames=[]))
- | Remove blink scheduler from chromium.webkit | ## Code Before:
from buildbot.changes import svnpoller
from buildbot.scheduler import AnyBranchScheduler
from common import chromium_utils
from master import build_utils
from master import gitiles_poller
def WebkitFileSplitter(path):
"""split_file for webkit.org repository."""
projects = ['trunk']
return build_utils.SplitPath(projects, path)
def Update(config, _active_master, c):
# Polls config.Master.trunk_url for changes
cr_poller = gitiles_poller.GitilesPoller(
'https://chromium.googlesource.com/chromium/src',
pollInterval=30, project='chromium')
c['change_source'].append(cr_poller)
webkit_url = 'http://src.chromium.org/viewvc/blink?view=rev&revision=%s'
webkit_poller = svnpoller.SVNPoller(
svnurl=config.Master.webkit_root_url,
svnbin=chromium_utils.SVN_BIN,
split_file=WebkitFileSplitter,
pollinterval=30,
revlinktmpl=webkit_url,
cachepath='webkit.svnrev',
project='webkit')
c['change_source'].append(webkit_poller)
c['schedulers'].append(AnyBranchScheduler(
name='global_scheduler', branches=['trunk', 'master'], treeStableTimer=60,
builderNames=[]))
c['schedulers'].append(AnyBranchScheduler(
name='global_deps_scheduler', branches=['master'], treeStableTimer=60,
builderNames=[]))
## Instruction:
Remove blink scheduler from chromium.webkit
## Code After:
from buildbot.scheduler import AnyBranchScheduler
from master import gitiles_poller
def Update(config, _active_master, c):
# Polls config.Master.trunk_url for changes
cr_poller = gitiles_poller.GitilesPoller(
'https://chromium.googlesource.com/chromium/src',
pollInterval=30, project='chromium')
c['change_source'].append(cr_poller)
c['schedulers'].append(AnyBranchScheduler(
name='global_scheduler', branches=['trunk', 'master'], treeStableTimer=60,
builderNames=[]))
|
659614a6b845a95ce7188e86adae4bdc2c5416e7 | examples/benchmark/__init__.py | examples/benchmark/__init__.py | import benchmark_twisted_names
__all__ = ['benchmark_fibonacci', 'benchmark_twisted_names']
| import benchmark_fibonacci
import benchmark_twisted_names
__all__ = ['benchmark_fibonacci', 'benchmark_twisted_names']
| Add back commented out Fibonacci benchmark. | Add back commented out Fibonacci benchmark.
| Python | mit | AlekSi/benchmarking-py | + import benchmark_fibonacci
import benchmark_twisted_names
__all__ = ['benchmark_fibonacci', 'benchmark_twisted_names']
| Add back commented out Fibonacci benchmark. | ## Code Before:
import benchmark_twisted_names
__all__ = ['benchmark_fibonacci', 'benchmark_twisted_names']
## Instruction:
Add back commented out Fibonacci benchmark.
## Code After:
import benchmark_fibonacci
import benchmark_twisted_names
__all__ = ['benchmark_fibonacci', 'benchmark_twisted_names']
|
aaaaa3a143c370f387edf42ebd6b22c924845afa | falcom/luhn/check_digit_number.py | falcom/luhn/check_digit_number.py |
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
|
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def generate_from_int (self, n):
raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
| Make it clear that the user must implement generate_from_int | Make it clear that the user must implement generate_from_int
| Python | bsd-3-clause | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation |
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
+
+ def generate_from_int (self, n):
+ raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
| Make it clear that the user must implement generate_from_int | ## Code Before:
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
## Instruction:
Make it clear that the user must implement generate_from_int
## Code After:
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def generate_from_int (self, n):
raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
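With generate_from_int now explicitly abstract, a concrete subclass supplies the checksum. A minimal Luhn implementation might look like the sketch below, using the usual convention of doubling every second digit from the right of the payload.

class LuhnNumber(CheckDigitNumber):
    def generate_from_int(self, n):
        # Double every second digit starting from the rightmost, fold
        # anything above 9 back into one digit, then take the tens
        # complement of the running total.
        total, position = 0, 0
        while n > 0:
            digit = n % 10
            if position % 2 == 0:
                digit *= 2
                if digit > 9:
                    digit -= 9
            total += digit
            n //= 10
            position += 1
        return (10 - (total % 10)) % 10

assert LuhnNumber(7992739871).get_check_digit() == 3
assert LuhnNumber(79927398713).has_valid_check_digit()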
|
0dcecfbd1e6ce9e35febc9f4ee9bcbfac1fb8f6a | hytra/util/skimage_tifffile_hack.py | hytra/util/skimage_tifffile_hack.py | from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
def hack(input_tif):
"""
    This method makes it possible to bypass the strange faulty behaviour of
    skimage.external.tifffile.imread() when it gets a list of paths or
    a glob pattern. This function extracts the image names and the path.
    Then, one can os.chdir(path) and call tifffile.imread(name),
    which will now behave well.
"""
name = []; path = str()
for i in input_tif:
name.append(i.split('/')[-1])
path_split = list(input_tif)[0].split('/')[0:-1]
for i in path_split:
path += i+'/'
return path, name | from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
import os.path
def hack(input_tif):
"""
    This method makes it possible to bypass the strange faulty behaviour of
    skimage.external.tifffile.imread() when it gets a list of paths or
    a glob pattern. This function extracts the image names and the path.
    Then, one can os.chdir(path) and call tifffile.imread(names),
    which will now behave well.
"""
assert len(input_tif) > 0
names = []
path = str()
for i in input_tif:
names.append(os.path.basename(i))
path = os.path.dirname(input_tif[0])
return path, names | Fix tiffile hack to use os.path | Fix tiffile hack to use os.path
| Python | mit | chaubold/hytra,chaubold/hytra,chaubold/hytra | from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
+ import os.path
def hack(input_tif):
"""
    This method makes it possible to bypass the strange faulty behaviour of
    skimage.external.tifffile.imread() when it gets a list of paths or
    a glob pattern. This function extracts the image names and the path.
-     Then, one can os.chdir(path) and call tifffile.imread(name),
+     Then, one can os.chdir(path) and call tifffile.imread(names),
    which will now behave well.
"""
- name = []; path = str()
+ assert len(input_tif) > 0
+ names = []
+ path = str()
for i in input_tif:
+ names.append(os.path.basename(i))
+ path = os.path.dirname(input_tif[0])
- name.append(i.split('/')[-1])
- path_split = list(input_tif)[0].split('/')[0:-1]
- for i in path_split:
- path += i+'/'
- return path, name
+ return path, names | Fix tiffile hack to use os.path | ## Code Before:
from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
def hack(input_tif):
"""
    This method makes it possible to bypass the strange faulty behaviour of
    skimage.external.tifffile.imread() when it gets a list of paths or
    a glob pattern. This function extracts the image names and the path.
    Then, one can os.chdir(path) and call tifffile.imread(name),
    which will now behave well.
"""
name = []; path = str()
for i in input_tif:
name.append(i.split('/')[-1])
path_split = list(input_tif)[0].split('/')[0:-1]
for i in path_split:
path += i+'/'
return path, name
## Instruction:
Fix tiffile hack to use os.path
## Code After:
from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
import os.path
def hack(input_tif):
"""
    This method makes it possible to bypass the strange faulty behaviour of
    skimage.external.tifffile.imread() when it gets a list of paths or
    a glob pattern. This function extracts the image names and the path.
    Then, one can os.chdir(path) and call tifffile.imread(names),
    which will now behave well.
"""
assert len(input_tif) > 0
names = []
path = str()
for i in input_tif:
names.append(os.path.basename(i))
path = os.path.dirname(input_tif[0])
return path, names |
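The intended call pattern, sketched with invented paths:

import os
from glob import glob
from skimage.external import tifffile
from hytra.util.skimage_tifffile_hack import hack

frames = sorted(glob('/data/movie/frame_*.tif'))   # hypothetical frames
path, names = hack(frames)
os.chdir(path)                  # work around tifffile's path handling
stack = tifffile.imread(names)  # reads the whole list correctly now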
f48063cfb9674c1e5f1f94e62ff43b239f687abd | examples/plot_tot_histogram.py | examples/plot_tot_histogram.py |
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import pandas as pd
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
hits = pd.read_hdf(filename, 'hits', mode='r')
hits.hist("tot", bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
|
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import tables as tb
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
with tb.File(filename) as f:
tots = f.get_node("/hits/tot")[:]
plt.hist(tots, bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
| Fix for new km3hdf5 version 4 | Fix for new km3hdf5 version 4
| Python | mit | tamasgal/km3pipe,tamasgal/km3pipe |
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
- import pandas as pd
+ import tables as tb
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
- hits = pd.read_hdf(filename, 'hits', mode='r')
+ with tb.File(filename) as f:
+ tots = f.get_node("/hits/tot")[:]
+
- hits.hist("tot", bins=254, log=True, edgecolor='none')
+ plt.hist(tots, bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
| Fix for new km3hdf5 version 4 | ## Code Before:
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import pandas as pd
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
hits = pd.read_hdf(filename, 'hits', mode='r')
hits.hist("tot", bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
## Instruction:
Fix for new km3hdf5 version 4
## Code After:
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import tables as tb
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
with tb.File(filename) as f:
tots = f.get_node("/hits/tot")[:]
plt.hist(tots, bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
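The move to PyTables matches the km3hdf5 v4 layout, where each hit attribute is stored as its own array under /hits rather than as a single table that pandas can read. If h5py is preferred over PyTables, an equivalent read would be (a sketch under that same layout assumption):

import h5py

filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
with h5py.File(filename, "r") as f:
    tots = f["/hits/tot"][:]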
|
f0b188f398d82b000fdaa40e0aa776520a962a65 | integration_tests/testpyagglom.py | integration_tests/testpyagglom.py | import sys
import platform
import h5py
import numpy
segh5 = sys.argv[1]
predh5 = sys.argv[2]
classifier = sys.argv[3]
threshold = float(sys.argv[4])
from neuroproof import Agglomeration
# open as uint32 and float respectively
seg = numpy.array(h5py.File(segh5)['stack'], numpy.uint32)
pred = numpy.array(h5py.File(predh5)['volume/predictions'], numpy.float32)
pred = pred.transpose((2,1,0,3))
pred = pred.copy()
res = Agglomeration.agglomerate(seg, pred, classifier, threshold)
# The 'golden' results depend on std::unordered, and therefore
# the expected answer is different on Mac and Linux.
if platform.system() == "Darwin":
expected_unique = 239
else:
expected_unique = 233
result_unique = len(numpy.unique(res))
assert result_unique == expected_unique, \
"Expected {} unique labels (including 0) in the resulting segmentation, but got {}"\
.format(expected_unique, len(numpy.unique(res)))
print("SUCCESS")
| import sys
import platform
import h5py
import numpy
segh5 = sys.argv[1]
predh5 = sys.argv[2]
classifier = sys.argv[3]
threshold = float(sys.argv[4])
from neuroproof import Agglomeration
# open as uint32 and float respectively
seg = numpy.array(h5py.File(segh5)['stack'], numpy.uint32)
pred = numpy.array(h5py.File(predh5)['volume/predictions'], numpy.float32)
pred = pred.transpose((2,1,0,3))
pred = pred.copy()
res = Agglomeration.agglomerate(seg, pred, classifier, threshold)
# The 'golden' results depend on std::unordered, and therefore
# the expected answer is different on Mac and Linux.
if platform.system() == "Darwin":
expected_unique = [239]
else:
# Depending on which linux stdlib we use, we might get different results
expected_unique = [232, 233]
result_unique = len(numpy.unique(res))
assert result_unique in expected_unique, \
"Wrong number of unique labels in the segmentation. Expected one of {}, but got {}"\
.format(expected_unique, len(numpy.unique(res)))
print("SUCCESS")
| Allow multiple 'golden' results for agglomeration test on Linux | tests: Allow multiple 'golden' results for agglomeration test on Linux
| Python | bsd-3-clause | janelia-flyem/NeuroProof,janelia-flyem/NeuroProof,janelia-flyem/NeuroProof,janelia-flyem/NeuroProof | import sys
import platform
import h5py
import numpy
segh5 = sys.argv[1]
predh5 = sys.argv[2]
classifier = sys.argv[3]
threshold = float(sys.argv[4])
from neuroproof import Agglomeration
# open as uint32 and float respectively
seg = numpy.array(h5py.File(segh5)['stack'], numpy.uint32)
pred = numpy.array(h5py.File(predh5)['volume/predictions'], numpy.float32)
pred = pred.transpose((2,1,0,3))
pred = pred.copy()
res = Agglomeration.agglomerate(seg, pred, classifier, threshold)
# The 'golden' results depend on std::unordered, and therefore
# the expected answer is different on Mac and Linux.
if platform.system() == "Darwin":
- expected_unique = 239
+ expected_unique = [239]
else:
+ # Depending on which linux stdlib we use, we might get different results
- expected_unique = 233
+ expected_unique = [232, 233]
result_unique = len(numpy.unique(res))
- assert result_unique == expected_unique, \
+ assert result_unique in expected_unique, \
- "Expected {} unique labels (including 0) in the resulting segmentation, but got {}"\
+ "Wrong number of unique labels in the segmentation. Expected one of {}, but got {}"\
.format(expected_unique, len(numpy.unique(res)))
print("SUCCESS")
| Allow multiple 'golden' results for agglomeration test on Linux | ## Code Before:
import sys
import platform
import h5py
import numpy
segh5 = sys.argv[1]
predh5 = sys.argv[2]
classifier = sys.argv[3]
threshold = float(sys.argv[4])
from neuroproof import Agglomeration
# open as uint32 and float respectively
seg = numpy.array(h5py.File(segh5)['stack'], numpy.uint32)
pred = numpy.array(h5py.File(predh5)['volume/predictions'], numpy.float32)
pred = pred.transpose((2,1,0,3))
pred = pred.copy()
res = Agglomeration.agglomerate(seg, pred, classifier, threshold)
# The 'golden' results depend on std::unordered, and therefore
# the expected answer is different on Mac and Linux.
if platform.system() == "Darwin":
expected_unique = 239
else:
expected_unique = 233
result_unique = len(numpy.unique(res))
assert result_unique == expected_unique, \
"Expected {} unique labels (including 0) in the resulting segmentation, but got {}"\
.format(expected_unique, len(numpy.unique(res)))
print("SUCCESS")
## Instruction:
Allow multiple 'golden' results for agglomeration test on Linux
## Code After:
import sys
import platform
import h5py
import numpy
segh5 = sys.argv[1]
predh5 = sys.argv[2]
classifier = sys.argv[3]
threshold = float(sys.argv[4])
from neuroproof import Agglomeration
# open as uint32 and float respectively
seg = numpy.array(h5py.File(segh5)['stack'], numpy.uint32)
pred = numpy.array(h5py.File(predh5)['volume/predictions'], numpy.float32)
pred = pred.transpose((2,1,0,3))
pred = pred.copy()
res = Agglomeration.agglomerate(seg, pred, classifier, threshold)
# The 'golden' results depend on std::unordered, and therefore
# the expected answer is different on Mac and Linux.
if platform.system() == "Darwin":
expected_unique = [239]
else:
# Depending on which linux stdlib we use, we might get different results
expected_unique = [232, 233]
result_unique = len(numpy.unique(res))
assert result_unique in expected_unique, \
"Wrong number of unique labels in the segmentation. Expected one of {}, but got {}"\
.format(expected_unique, len(numpy.unique(res)))
print("SUCCESS")
|
c2a1ce0ad4e2f2e9ff5ec72b89eb98967e445ea5 | labsys/utils/custom_fields.py | labsys/utils/custom_fields.py | from wtforms.fields import RadioField
class NullBooleanField(RadioField):
DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)
def iter_choices(self):
for value, label in self.choices:
yield (value, label, value == self.data)
def process_data(self, value):
if isinstance(value, bool) is False and value is not None:
self.data = None
else:
self.data = value
def process_formdata(self, valuelist):
if valuelist:
try:
self.data = valuelist[0]
except ValueError:
raise ValueError(self.gettext('Invalid Choice: could not coerce'))
def pre_validate(self, form):
for value, _ in self.choices:
if self.data == value:
break
else:
raise ValueError(self.gettext('Not a valid choice'))
| from wtforms.fields import RadioField
class NullBooleanField(RadioField):
DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
TRUE_VALUES = ('True', 'true')
FALSE_VALUES = ('False', 'false')
NONE_VALUES = ('None', 'none', 'null', '')
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)
def iter_choices(self):
for value, label in self.choices:
yield (value, label, value == self.data)
def process_data(self, value):
if value not in (True, False):
self.data = None
else:
self.data = value
def _parse_str_to_null_bool(self, input_str):
if input_str in self.TRUE_VALUES:
return True
if input_str in self.FALSE_VALUES:
return False
if input_str in self.NONE_VALUES:
return None
raise ValueError
def process_formdata(self, valuelist):
if valuelist:
try:
self.data = self._parse_str_to_null_bool(valuelist[0])
except ValueError:
raise ValueError(self.gettext(
'Invalid Choice: could not coerce'))
def pre_validate(self, form):
for value, _ in self.choices:
if self.data == value:
break
else:
raise ValueError(self.gettext('Not a valid choice'))
| Improve NullBooleanField with Truthy/Falsy values | :art: Improve NullBooleanField with Truthy/Falsy values
| Python | mit | gems-uff/labsys,gems-uff/labsys,gems-uff/labsys | from wtforms.fields import RadioField
+
class NullBooleanField(RadioField):
DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
+ TRUE_VALUES = ('True', 'true')
+ FALSE_VALUES = ('False', 'false')
+ NONE_VALUES = ('None', 'none', 'null', '')
+
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)
def iter_choices(self):
for value, label in self.choices:
yield (value, label, value == self.data)
def process_data(self, value):
- if isinstance(value, bool) is False and value is not None:
+ if value not in (True, False):
self.data = None
else:
self.data = value
+ def _parse_str_to_null_bool(self, input_str):
+ if input_str in self.TRUE_VALUES:
+ return True
+ if input_str in self.FALSE_VALUES:
+ return False
+ if input_str in self.NONE_VALUES:
+ return None
+ raise ValueError
+
def process_formdata(self, valuelist):
if valuelist:
try:
- self.data = valuelist[0]
+ self.data = self._parse_str_to_null_bool(valuelist[0])
except ValueError:
+ raise ValueError(self.gettext(
- raise ValueError(self.gettext('Invalid Choice: could not coerce'))
+ 'Invalid Choice: could not coerce'))
def pre_validate(self, form):
for value, _ in self.choices:
if self.data == value:
break
else:
raise ValueError(self.gettext('Not a valid choice'))
| Improve NullBooleanField with Truthy/Falsy values | ## Code Before:
from wtforms.fields import RadioField
class NullBooleanField(RadioField):
DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)
def iter_choices(self):
for value, label in self.choices:
yield (value, label, value == self.data)
def process_data(self, value):
if isinstance(value, bool) is False and value is not None:
self.data = None
else:
self.data = value
def process_formdata(self, valuelist):
if valuelist:
try:
self.data = valuelist[0]
except ValueError:
raise ValueError(self.gettext('Invalid Choice: could not coerce'))
def pre_validate(self, form):
for value, _ in self.choices:
if self.data == value:
break
else:
raise ValueError(self.gettext('Not a valid choice'))
## Instruction:
Improve NullBooleanField with Truthy/Falsy values
## Code After:
from wtforms.fields import RadioField
class NullBooleanField(RadioField):
DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
TRUE_VALUES = ('True', 'true')
FALSE_VALUES = ('False', 'false')
NONE_VALUES = ('None', 'none', 'null', '')
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)
def iter_choices(self):
for value, label in self.choices:
yield (value, label, value == self.data)
def process_data(self, value):
if value not in (True, False):
self.data = None
else:
self.data = value
def _parse_str_to_null_bool(self, input_str):
if input_str in self.TRUE_VALUES:
return True
if input_str in self.FALSE_VALUES:
return False
if input_str in self.NONE_VALUES:
return None
raise ValueError
def process_formdata(self, valuelist):
if valuelist:
try:
self.data = self._parse_str_to_null_bool(valuelist[0])
except ValueError:
raise ValueError(self.gettext(
'Invalid Choice: could not coerce'))
def pre_validate(self, form):
for value, _ in self.choices:
if self.data == value:
break
else:
raise ValueError(self.gettext('Not a valid choice'))
|
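A minimal usage sketch for the field above. The form class and field name are illustrative only (not part of the commit), and it assumes wtforms is installed; in a real request cycle wtforms calls process_formdata itself.

from wtforms import Form

class SampleForm(Form):                       # hypothetical form, for illustration
    flag = NullBooleanField(label='Flag')

form = SampleForm()
form.flag.process_formdata(['true'])          # form.flag.data -> True
form.flag.process_formdata(['None'])          # form.flag.data -> None
form.flag.process_formdata(['maybe'])         # raises ValueError: Invalid Choice: could not coerce
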
b4e3461277669bf42225d278d491b7c714968491 | vm_server/test/execute_macro/code/execute.py | vm_server/test/execute_macro/code/execute.py | import os
import shutil
import win32com.client
import pythoncom
import repackage
repackage.up()
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = current_path + "\\action\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, current_path +
"\\action\\output\\excelsheet.xlsm")
shutil.move(current_path + "\\action\\data\\output.txt", current_path +
"\\action\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| import os
import shutil
import win32com.client
import pythoncom
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = ".\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, ".\\output\\excelsheet.xlsm")
shutil.move(".\\data\\output.txt", ".\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| Modify excel screenshot test so that it works with the new directory structure | Modify excel screenshot test so that it works with the new directory structure
| Python | apache-2.0 | googleinterns/automated-windows-vms,googleinterns/automated-windows-vms | import os
import shutil
import win32com.client
import pythoncom
- import repackage
- repackage.up()
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
- path_to_file = current_path + "\\action\\data\\excelsheet.xlsm"
+ path_to_file = ".\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
+ shutil.move(path_to_file, ".\\output\\excelsheet.xlsm")
+ shutil.move(".\\data\\output.txt", ".\\output\\output.txt")
- shutil.move(path_to_file, current_path +
- "\\action\\output\\excelsheet.xlsm")
- shutil.move(current_path + "\\action\\data\\output.txt", current_path +
- "\\action\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| Modify excel screenshot test so that it works with the new directory structure | ## Code Before:
import os
import shutil
import win32com.client
import pythoncom
import repackage
repackage.up()
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = current_path + "\\action\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, current_path +
"\\action\\output\\excelsheet.xlsm")
shutil.move(current_path + "\\action\\data\\output.txt", current_path +
"\\action\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
## Instruction:
Modify excel screenshot test so that it works with the new directory structure
## Code After:
import os
import shutil
import win32com.client
import pythoncom
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = ".\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, ".\\output\\excelsheet.xlsm")
shutil.move(".\\data\\output.txt", ".\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
|
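The relative paths above imply a particular working-directory layout; the commit message references a "new directory structure" without spelling it out, so the sketch below makes the inferred layout explicit and checks it before dispatching Excel (stdlib only):

import os

# Inferred layout, relative to the process's working directory:
#   .\data\excelsheet.xlsm  macro-enabled workbook (input)
#   .\data\output.txt       file written by the VBA macro
#   .\output\               destination for both files after the run
for required in (".\\data", ".\\output"):
    if not os.path.isdir(required):
        raise FileNotFoundError("expected directory missing: " + required)
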
c242ad95221c9c5b2f76795abd7dcbad5145cb2a | datagrid_gtk3/tests/utils/test_transformations.py | datagrid_gtk3/tests/utils/test_transformations.py | """Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
| """Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('1234567'),
'1.234567',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
self.assertEqual(
degree_decimal_str_transform('12345'),
'0.012345',
)
| Add more test cases to verify transformer behavior | Add more test cases to verify transformer behavior
| Python | mit | nowsecure/datagrid-gtk3,jcollado/datagrid-gtk3 | """Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
+ degree_decimal_str_transform('1234567'),
+ '1.234567',
+ )
+ self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
+ self.assertEqual(
+ degree_decimal_str_transform('12345'),
+ '0.012345',
+ )
| Add more test cases to verify transformer behavior | ## Code Before:
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
## Instruction:
Add more test cases to verify transformer behavior
## Code After:
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('1234567'),
'1.234567',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
self.assertEqual(
degree_decimal_str_transform('12345'),
'0.012345',
)
|
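The transform under test is imported from datagrid_gtk3 and not shown in this record. From the assertions alone, a plausible reconstruction (a sketch under Python 2, where basestring exists; the project's actual implementation may differ) is:

def degree_decimal_str_transform(value):
    """Insert a decimal point six digits from the right, zero-padding as needed."""
    assert isinstance(value, basestring)  # rejects 0, 1.23, True
    assert value.isdigit()                # rejects '.', '+', '-'
    assert len(value) <= 8                # rejects '123456789'
    padded = value.zfill(7)               # '12345' -> '0012345'
    return padded[:-6] + '.' + padded[-6:]
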
f4d66a5820582c995f1d31fe6a2442fc42d71077 | saulify/scrapers/newspaper.py | saulify/scrapers/newspaper.py | from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
article = Article(url_to_clean)
article.download()
article.parse()
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
| from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
""" Parse an article at a given url using newspaper.
Args:
url (str): Url where the article is found.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url_to_clean)
article.download()
article.parse()
return construct_result(article)
def clean_source(url, source):
""" Parse a pre-downloaded article using newspaper.
Args:
url (str): The url where the article was sourced (necessary for the
newspaper API).
source (str): Html source of the article page.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url)
article.set_html(source)
article.parse()
return construct_result(article)
def construct_result(article):
""" Construct article extraction result dictionary in standard format.
Args:
article (Article): A parsed `newspaper` `Article` object.
Returns:
Dictionary providing cleaned article and extracted content;
author, title, markdown, plaintext, html.
"""
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
| Split `clean_content` into component functions | Split `clean_content` into component functions
Provides ability to use newspaper to parse articles whose source has
already been downloaded.
| Python | agpl-3.0 | asm-products/saulify-web,asm-products/saulify-web,asm-products/saulify-web | from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
+ """ Parse an article at a given url using newspaper.
+
+ Args:
+ url (str): Url where the article is found.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content
+ (see `construct_result`).
+ """
article = Article(url_to_clean)
article.download()
article.parse()
+
+ return construct_result(article)
+
+
+ def clean_source(url, source):
+ """ Parse a pre-downloaded article using newspaper.
+
+ Args:
+ url (str): The url where the article was sourced (necessary for the
+ newspaper API).
+
+ source (str): Html source of the article page.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content
+ (see `construct_result`).
+ """
+ article = Article(url)
+ article.set_html(source)
+ article.parse()
+
+ return construct_result(article)
+
+
+ def construct_result(article):
+ """ Construct article extraction result dictionary in standard format.
+
+ Args:
+ article (Article): A parsed `newspaper` `Article` object.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content;
+ author, title, markdown, plaintext, html.
+ """
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
| Split `clean_content` into component functions | ## Code Before:
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
article = Article(url_to_clean)
article.download()
article.parse()
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
## Instruction:
Split `clean_content` into component functions
## Code After:
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
""" Parse an article at a given url using newspaper.
Args:
url (str): Url where the article is found.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url_to_clean)
article.download()
article.parse()
return construct_result(article)
def clean_source(url, source):
""" Parse a pre-downloaded article using newspaper.
Args:
url (str): The url where the article was sourced (necessary for the
newspaper API).
source (str): Html source of the article page.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url)
article.set_html(source)
article.parse()
return construct_result(article)
def construct_result(article):
""" Construct article extraction result dictionary in standard format.
Args:
article (Article): A parsed `newspaper` `Article` object.
Returns:
Dictionary providing cleaned article and extracted content;
author, title, markdown, plaintext, html.
"""
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
|
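A hedged usage sketch of the new split: fetch the page yourself and hand the raw markup to clean_source. The requests dependency and the URL are assumptions for illustration; the record only requires that source be the article page's HTML:

import requests  # assumed HTTP client, not part of the commit

url = 'http://example.com/article'    # placeholder URL
source = requests.get(url).text       # pre-downloaded page source
result = clean_source(url, source)    # same dict shape as clean_content(url)
print(result['title'])
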
412dc6e29e47148758382646dd65e0a9c5ff4505 | pymanopt/tools/autodiff/__init__.py | pymanopt/tools/autodiff/__init__.py | class Function(object):
def __init__(self, function, arg, backend):
self._function = function
self._arg = arg
self._backend = backend
self._verify_backend()
self._compile()
def _verify_backend(self):
if not self._backend.is_available():
raise ValueError("Backend `{:s}' is not available".format(
str(self._backend)))
if not self._backend.is_compatible(self._function, self._arg):
raise ValueError("Backend `{:s}' is not compatible with cost "
"function of type `{:s}'".format(
str(self._backend),
self._function.__class__.__name__))
def _compile(self):
assert self._backend is not None
self._compiled_function = self._backend.compile_function(
self._function, self._arg)
def _perform_differentiation(self, attr):
assert self._backend is not None
method = getattr(self._backend, attr)
return method(self._function, self._arg)
def compute_gradient(self):
return self._perform_differentiation("compute_gradient")
def compute_hessian(self):
return self._perform_differentiation("compute_hessian")
def __call__(self, *args, **kwargs):
assert self._compiled_function is not None
return self._compiled_function(*args, **kwargs)
| from ._callable import CallableBackend
from ._autograd import AutogradBackend
from ._pytorch import PyTorchBackend
from ._theano import TheanoBackend
from ._tensorflow import TensorflowBackend
class Function(object):
def __init__(self, function, arg, backend):
self._function = function
self._arg = arg
self._backend = backend
self._verify_backend()
self._compile()
def _verify_backend(self):
if not self._backend.is_available():
raise ValueError("Backend `{:s}' is not available".format(
str(self._backend)))
if not self._backend.is_compatible(self._function, self._arg):
raise ValueError("Backend `{:s}' is not compatible with cost "
"function of type `{:s}'".format(
str(self._backend),
self._function.__class__.__name__))
def _compile(self):
assert self._backend is not None
self._compiled_function = self._backend.compile_function(
self._function, self._arg)
def _perform_differentiation(self, attr):
assert self._backend is not None
method = getattr(self._backend, attr)
return method(self._function, self._arg)
def compute_gradient(self):
return self._perform_differentiation("compute_gradient")
def compute_hessian(self):
return self._perform_differentiation("compute_hessian")
def __call__(self, *args, **kwargs):
assert self._compiled_function is not None
return self._compiled_function(*args, **kwargs)
| Revert "autodiff: remove unused imports" | Revert "autodiff: remove unused imports"
This reverts commit d0ad4944671d94673d0051bd8faf4f3cf5d93ca9.
| Python | bsd-3-clause | pymanopt/pymanopt,pymanopt/pymanopt,nkoep/pymanopt,nkoep/pymanopt,nkoep/pymanopt | + from ._callable import CallableBackend
+ from ._autograd import AutogradBackend
+ from ._pytorch import PyTorchBackend
+ from ._theano import TheanoBackend
+ from ._tensorflow import TensorflowBackend
+
+
class Function(object):
def __init__(self, function, arg, backend):
self._function = function
self._arg = arg
self._backend = backend
self._verify_backend()
self._compile()
def _verify_backend(self):
if not self._backend.is_available():
raise ValueError("Backend `{:s}' is not available".format(
str(self._backend)))
if not self._backend.is_compatible(self._function, self._arg):
raise ValueError("Backend `{:s}' is not compatible with cost "
"function of type `{:s}'".format(
str(self._backend),
self._function.__class__.__name__))
def _compile(self):
assert self._backend is not None
self._compiled_function = self._backend.compile_function(
self._function, self._arg)
def _perform_differentiation(self, attr):
assert self._backend is not None
method = getattr(self._backend, attr)
return method(self._function, self._arg)
def compute_gradient(self):
return self._perform_differentiation("compute_gradient")
def compute_hessian(self):
return self._perform_differentiation("compute_hessian")
def __call__(self, *args, **kwargs):
assert self._compiled_function is not None
return self._compiled_function(*args, **kwargs)
| Revert "autodiff: remove unused imports" | ## Code Before:
class Function(object):
def __init__(self, function, arg, backend):
self._function = function
self._arg = arg
self._backend = backend
self._verify_backend()
self._compile()
def _verify_backend(self):
if not self._backend.is_available():
raise ValueError("Backend `{:s}' is not available".format(
str(self._backend)))
if not self._backend.is_compatible(self._function, self._arg):
raise ValueError("Backend `{:s}' is not compatible with cost "
"function of type `{:s}'".format(
str(self._backend),
self._function.__class__.__name__))
def _compile(self):
assert self._backend is not None
self._compiled_function = self._backend.compile_function(
self._function, self._arg)
def _perform_differentiation(self, attr):
assert self._backend is not None
method = getattr(self._backend, attr)
return method(self._function, self._arg)
def compute_gradient(self):
return self._perform_differentiation("compute_gradient")
def compute_hessian(self):
return self._perform_differentiation("compute_hessian")
def __call__(self, *args, **kwargs):
assert self._compiled_function is not None
return self._compiled_function(*args, **kwargs)
## Instruction:
Revert "autodiff: remove unused imports"
## Code After:
from ._callable import CallableBackend
from ._autograd import AutogradBackend
from ._pytorch import PyTorchBackend
from ._theano import TheanoBackend
from ._tensorflow import TensorflowBackend
class Function(object):
def __init__(self, function, arg, backend):
self._function = function
self._arg = arg
self._backend = backend
self._verify_backend()
self._compile()
def _verify_backend(self):
if not self._backend.is_available():
raise ValueError("Backend `{:s}' is not available".format(
str(self._backend)))
if not self._backend.is_compatible(self._function, self._arg):
raise ValueError("Backend `{:s}' is not compatible with cost "
"function of type `{:s}'".format(
str(self._backend),
self._function.__class__.__name__))
def _compile(self):
assert self._backend is not None
self._compiled_function = self._backend.compile_function(
self._function, self._arg)
def _perform_differentiation(self, attr):
assert self._backend is not None
method = getattr(self._backend, attr)
return method(self._function, self._arg)
def compute_gradient(self):
return self._perform_differentiation("compute_gradient")
def compute_hessian(self):
return self._perform_differentiation("compute_hessian")
def __call__(self, *args, **kwargs):
assert self._compiled_function is not None
return self._compiled_function(*args, **kwargs)
|
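These re-exports are what the revert restores, so downstream code can import the backends from this package directly. A rough driving sketch (heavily hedged: the backends' constructor and argument conventions are assumptions, not shown in this record):

import autograd.numpy as np  # assumes the autograd extra is installed

def cost(x):
    return np.sum(x ** 2)

# arg describes the cost function's input; its exact form is backend-specific.
fn = Function(cost, arg=None, backend=AutogradBackend())
gradient = fn.compute_gradient()  # differentiated callable, per the wrapper above
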
b16474b4523e8e804f28188ba74c992896748efe | broctl/Napatech.py | broctl/Napatech.py | import BroControl.plugin
import BroControl.config
class Napatech(BroControl.plugin.Plugin):
def __init__(self):
super(Napatech, self).__init__(apiversion=1)
def name(self):
return 'napatech'
def pluginVersion(self):
return 1
def init(self):
# Use this plugin only if there is a Napatech interface in use
for nn in self.nodes():
if nn.type == 'worker' and nn.interface.startswith('napatech::'):
return True
return False
def nodeKeys(self):
return ['dedupe_lru_size', 'host_buffer_allowance']
def options(self):
return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'),
('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')]
def broctl_config(self):
        script += '# Settings for configuring Napatech interactions'
script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size'))
script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance'))
return script
| import BroControl.plugin
import BroControl.config
class Napatech(BroControl.plugin.Plugin):
def __init__(self):
super(Napatech, self).__init__(apiversion=1)
def name(self):
return 'napatech'
def pluginVersion(self):
return 1
def init(self):
# Use this plugin only if there is a Napatech interface in use
for nn in self.nodes():
if nn.type == 'worker' and nn.interface.startswith('napatech::'):
return True
return False
def nodeKeys(self):
return ['dedupe_lru_size', 'host_buffer_allowance']
def options(self):
return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'),
('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')]
def broctl_config(self):
script = ''
        script += '# Settings for configuring Napatech interactions'
script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size'))
script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance'))
return script
| Fix minor bug in broctl plugin. | Fix minor bug in broctl plugin.
| Python | bsd-3-clause | hosom/bro-napatech,hosom/bro-napatech | import BroControl.plugin
import BroControl.config
class Napatech(BroControl.plugin.Plugin):
def __init__(self):
super(Napatech, self).__init__(apiversion=1)
def name(self):
return 'napatech'
def pluginVersion(self):
return 1
def init(self):
# Use this plugin only if there is a Napatech interface in use
for nn in self.nodes():
if nn.type == 'worker' and nn.interface.startswith('napatech::'):
return True
return False
def nodeKeys(self):
return ['dedupe_lru_size', 'host_buffer_allowance']
def options(self):
return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'),
('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')]
def broctl_config(self):
+ script = ''
         script += '# Settings for configuring Napatech interactions'
script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size'))
script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance'))
return script
| Fix minor bug in broctl plugin. | ## Code Before:
import BroControl.plugin
import BroControl.config
class Napatech(BroControl.plugin.Plugin):
def __init__(self):
super(Napatech, self).__init__(apiversion=1)
def name(self):
return 'napatech'
def pluginVersion(self):
return 1
def init(self):
# Use this plugin only if there is a Napatech interface in use
for nn in self.nodes():
if nn.type == 'worker' and nn.interface.startswith('napatech::'):
return True
return False
def nodeKeys(self):
return ['dedupe_lru_size', 'host_buffer_allowance']
def options(self):
return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'),
('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')]
def broctl_config(self):
        script += '# Settings for configuring Napatech interactions'
script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size'))
script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance'))
return script
## Instruction:
Fix minor bug in broctl plugin.
## Code After:
import BroControl.plugin
import BroControl.config
class Napatech(BroControl.plugin.Plugin):
def __init__(self):
super(Napatech, self).__init__(apiversion=1)
def name(self):
return 'napatech'
def pluginVersion(self):
return 1
def init(self):
# Use this plugin only if there is a Napatech interface in use
for nn in self.nodes():
if nn.type == 'worker' and nn.interface.startswith('napatech::'):
return True
return False
def nodeKeys(self):
return ['dedupe_lru_size', 'host_buffer_allowance']
def options(self):
return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'),
('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')]
def broctl_config(self):
script = ''
        script += '# Settings for configuring Napatech interactions'
script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size'))
script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance'))
return script
|
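The bug being fixed is the classic augmented-assignment-before-binding error: inside a function, script += ... with no prior script = ... makes script a local name and fails at runtime. A two-line reproduction:

def broken():
    script += 'x'  # UnboundLocalError: local variable 'script' referenced before assignment

broken()
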
7c894c716cb712bbcb137df3a5df5548bdca9d93 | wafer/sponsors/migrations/0005_sponsorshippackage_symbol.py | wafer/sponsors/migrations/0005_sponsorshippackage_symbol.py | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0004_auto_20160813_1328'),
]
operations = [
migrations.AddField(
model_name='sponsorshippackage',
name='symbol',
field=models.CharField(help_text='Optional symbol to display next to sponsors backing at this level sponsors list', max_length=1, blank=True),
),
]
| from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0004_auto_20160813_1328'),
]
operations = [
migrations.AddField(
model_name='sponsorshippackage',
name='symbol',
field=models.CharField(blank=True, help_text='Optional symbol to display in the sponsors list next to sponsors who have sponsored at this list, (for example *).', max_length=1),
),
]
| Update the migration to changed text | Update the migration to changed text
| Python | isc | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0004_auto_20160813_1328'),
]
operations = [
migrations.AddField(
model_name='sponsorshippackage',
name='symbol',
- field=models.CharField(help_text='Optional symbol to display next to sponsors backing at this level sponsors list', max_length=1, blank=True),
+ field=models.CharField(blank=True, help_text='Optional symbol to display in the sponsors list next to sponsors who have sponsored at this list, (for example *).', max_length=1),
),
]
| Update the migration to changed text | ## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0004_auto_20160813_1328'),
]
operations = [
migrations.AddField(
model_name='sponsorshippackage',
name='symbol',
field=models.CharField(help_text='Optional symbol to display next to sponsors backing at this level sponsors list', max_length=1, blank=True),
),
]
## Instruction:
Update the migration to changed text
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0004_auto_20160813_1328'),
]
operations = [
migrations.AddField(
model_name='sponsorshippackage',
name='symbol',
field=models.CharField(blank=True, help_text='Optional symbol to display in the sponsors list next to sponsors who have sponsored at this list, (for example *).', max_length=1),
),
]
|
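A migration like this normally mirrors a field on the model; the corresponding model-side declaration would look roughly like the stub below (reconstructed for illustration, not taken from the wafer source):

from django.db import models

class SponsorshipPackage(models.Model):  # illustrative stub; the real model has more fields
    symbol = models.CharField(
        max_length=1, blank=True,
        help_text='Optional symbol to display in the sponsors list next to '
                  'sponsors who have sponsored at this list, (for example *).')
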
216216df9e3b42766a755f63519c84fda2fcebe0 | amy/workshops/migrations/0221_workshoprequest_rq_jobs.py | amy/workshops/migrations/0221_workshoprequest_rq_jobs.py |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0220_event_public_status'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0221_auto_20201025_1113'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
| Fix migrations conflict after rebase | Fix migrations conflict after rebase
| Python | mit | swcarpentry/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy,swcarpentry/amy,swcarpentry/amy |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
- ('workshops', '0220_event_public_status'),
+ ('workshops', '0221_auto_20201025_1113'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
| Fix migrations conflict after rebase | ## Code Before:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0220_event_public_status'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
## Instruction:
Fix migrations conflict after rebase
## Code After:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0221_auto_20201025_1113'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
|
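Re-pointing dependencies by hand, as this commit does, is one way to resolve a conflict; Django can also generate a merge migration that keeps both branches, via the standard management command (shown as the usual alternative, not as what this commit did):

python manage.py makemigrations --merge workshops
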
355d71bb600df850b3914772d0dca9e0a68e64c8 | setup.py | setup.py |
from distutils.core import setup
setup(name="django-sanitizer",
version="0.3",
description="Django template filter application for sanitizing user submitted HTML",
author="Calvin Spealman",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
|
from distutils.core import setup
setup(name="django-sanitizer",
version="0.4",
description="Django template filter application for sanitizing user submitted HTML",
author="Caktus Consulting Group",
maintainer="Calvin Spealman",
maintainer_email="calvin@caktusgroup.com",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
| Make caktus the owner, listing myself as a maintainer. | Make caktus the owner, listing myself as a maintainer.
| Python | bsd-3-clause | caktus/django-sanitizer |
from distutils.core import setup
setup(name="django-sanitizer",
- version="0.3",
+ version="0.4",
description="Django template filter application for sanitizing user submitted HTML",
+ author="Caktus Consulting Group",
- author="Calvin Spealman",
+ maintainer="Calvin Spealman",
+ maintainer_email="calvin@caktusgroup.com",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
| Make caktus the owner, listing myself as a maintainer. | ## Code Before:
from distutils.core import setup
setup(name="django-sanitizer",
version="0.3",
description="Django template filter application for sanitizing user submitted HTML",
author="Calvin Spealman",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
## Instruction:
Make caktus the owner, listing myself as a maintainer.
## Code After:
from distutils.core import setup
setup(name="django-sanitizer",
version="0.4",
description="Django template filter application for sanitizing user submitted HTML",
author="Caktus Consulting Group",
maintainer="Calvin Spealman",
maintainer_email="calvin@caktusgroup.com",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
|