commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
01d4279b40eb9e3029f857bf9d81d66d0314532d | Bump version to 1.5.1 | Rockhopper-Technologies/enlighten | enlighten/__init__.py | enlighten/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2017 - 2020 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten Progress Bar**
Provides progress bars and counters which play nice in a TTY console
"""
from enlighten.counter import Counter, SubCounter
from enlighten._manager import Manager, get_manager
__version__ = '1.5.1'
__all__ = ('Counter', 'Manager', 'SubCounter', 'get_manager')
| # -*- coding: utf-8 -*-
# Copyright 2017 - 2020 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten Progress Bar**
Provides progress bars and counters which play nice in a TTY console
"""
from enlighten.counter import Counter, SubCounter
from enlighten._manager import Manager, get_manager
__version__ = '1.5.0'
__all__ = ('Counter', 'Manager', 'SubCounter', 'get_manager')
| mpl-2.0 | Python |
b54d7b8079bf414b1fe79061b33e41c6350707d6 | use integer instead of string | KreMat/mopidy-rotaryencoder | mopidy_rotaryencoder/__init__.py | mopidy_rotaryencoder/__init__.py | from __future__ import unicode_literals
import logging
import os
from mopidy import config, ext
__version__ = '0.1.0'
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
dist_name = 'Mopidy-RotaryEncoder'
ext_name = 'rotaryencoder'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['datapin'] = config.Integer()
schema['clkpin'] = config.Integer()
schema['swpin'] = config.Integer()
return schema
def setup(self, registry):
from .frontend import RotaryEncoderFrontend
registry.add('frontend', RotaryEncoderFrontend)
| from __future__ import unicode_literals
import logging
import os
from mopidy import config, ext
__version__ = '0.1.0'
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
dist_name = 'Mopidy-RotaryEncoder'
ext_name = 'rotaryencoder'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['datapin'] = config.String()
schema['clkpin'] = config.String()
schema['swpin'] = config.String()
return schema
def setup(self, registry):
from .frontend import RotaryEncoderFrontend
registry.add('frontend', RotaryEncoderFrontend)
| apache-2.0 | Python |
c2a79d8cbbb174530991d8b59578169ee9b2be44 | use absolute paths for external scripts in Spidermonkey wrapper | FND/jslint-cli,FND/jslint-cli | wrapper_spidermonkey.py | wrapper_spidermonkey.py | #!/usr/bin/env python
"""
wrapper for JSLint
requires Spidermonkey
Usage:
$ wrapper_spidermonkey.py <filepath>
TODO:
* support for JSLint options
"""
import sys
import os
import spidermonkey
from simplejson import loads as json
cwd = sys.path[0]
lint_path = os.path.join(cwd, "fulljslint.js")
json_path = os.path.join(cwd, "json2.js") # XXX: built in from Spidermonkey 1.8
def main(args=None):
filepath = args[1]
status, errors = lint(filepath)
print format(errors, filepath)
return status
def lint(filepath):
rt = spidermonkey.Runtime()
cx = rt.new_context()
options = {} # TODO: read from argument
cx.add_global("options", options)
cx.add_global("getFileContents", get_file_contents)
# load JavaScript code
for path in (lint_path, json_path):
cx.execute('eval(getFileContents("%s"));' % path)
cx.execute('var code = getFileContents("%s");' % filepath)
# lint code
status = cx.execute("JSLINT(code, options);") # True if clean, False otherwise
errors = cx.execute("JSON.stringify(JSLINT.errors);");
# XXX: errors incomplete (e.g. not reporting missing var)!?
return status, errors
def format(errors, file):
"""
convert JSLint errors object into report using standard error format
<filepath>:<line>:<column>:<message>
"""
lines = [":".join([
file,
str(error["line"] + 1),
str(error["character"] + 1),
error["reason"]
]) for error in json(errors)] # XXX: don't use generator expression!?
# XXX: ignoring members id, evidence, raw, a, b, c, d
return "\n".join(lines)
def get_file_contents(filepath):
return open(filepath).read()
if __name__ == "__main__":
status = not main(sys.argv)
sys.exit(status)
| #!/usr/bin/env python
"""
wrapper for JSLint
requires Spidermonkey
Usage:
$ wrapper_spidermonkey.py <filepath>
TODO:
* support for JSLint options
"""
import sys
import spidermonkey
from simplejson import loads as json
lint_path = "fulljslint.js"
json_path = "json2.js"
def main(args=None):
filepath = args[1]
status, errors = lint(filepath)
print format(errors, filepath)
return status
def lint(filepath):
rt = spidermonkey.Runtime()
cx = rt.new_context()
options = {} # TODO: read from argument
cx.add_global("options", options)
cx.add_global("getFileContents", get_file_contents)
# load JavaScript code
for path in (lint_path, json_path):
cx.execute('eval(getFileContents("%s"));' % path)
cx.execute('var code = getFileContents("%s");' % filepath)
# lint code
status = cx.execute("JSLINT(code, options);") # True if clean, False otherwise
errors = cx.execute("JSON.stringify(JSLINT.errors);");
# XXX: errors incomplete (e.g. not reporting missing var)!?
return status, errors
def format(errors, file):
"""
convert JSLint errors object into report using standard error format
<filepath>:<line>:<column>:<message>
"""
lines = [":".join([
file,
str(error["line"] + 1),
str(error["character"] + 1),
error["reason"]
]) for error in json(errors)] # XXX: don't use generator expression!?
# XXX: ignoring members id, evidence, raw, a, b, c, d
return "\n".join(lines)
def get_file_contents(filepath):
return open(filepath).read()
if __name__ == "__main__":
status = not main(sys.argv)
sys.exit(status)
| bsd-3-clause | Python |
2271131d5c2794eeba256a9d9547fa925f7bdf73 | bump __version__ | dougnd/matplotlib2tikz,danielhkl/matplotlib2tikz,m-rossi/matplotlib2tikz,nschloe/matplotlib2tikz | matplotlib2tikz/__init__.py | matplotlib2tikz/__init__.py | # -*- coding: utf-8 -*-
#
'''Script to convert Matplotlib generated figures into TikZ/PGFPlots figures.
'''
__author__ = 'Nico Schlömer'
__email__ = 'nico.schloemer@gmail.com'
__copyright__ = 'Copyright (c) 2010-2016, %s <%s>' % (__author__, __email__)
__credits__ = []
__license__ = 'MIT License'
__version__ = '0.5.7'
__maintainer__ = 'Nico Schlömer'
__status__ = 'Production'
from matplotlib2tikz.save import save
| # -*- coding: utf-8 -*-
#
'''Script to convert Matplotlib generated figures into TikZ/PGFPlots figures.
'''
__author__ = 'Nico Schlömer'
__email__ = 'nico.schloemer@gmail.com'
__copyright__ = 'Copyright (c) 2010-2016, %s <%s>' % (__author__, __email__)
__credits__ = []
__license__ = 'MIT License'
__version__ = '0.5.6'
__maintainer__ = 'Nico Schlömer'
__status__ = 'Production'
from matplotlib2tikz.save import save
| mit | Python |
cd25fd1bd40a98886b92f5e3b357ee0ab2796c7b | add /query route, with plain text for mongo | pdpino/FlaskApp,pdpino/FlaskApp,pdpino/FlaskApp | flaskr/__init__.py | flaskr/__init__.py | #!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
app = Flask(__name__)
return app
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "dbEscuchas"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
#Cambiar por Path Absoluto en el servidor
QUERIES_FILENAME = '/var/www/FlaskApp/queries'
@app.route("/")
def home():
with open(QUERIES_FILENAME, 'r', encoding='utf-8') as queries_file:
json_file = json.load(queries_file)
pairs = [(x["name"],
x["database"],
x["description"],
x["query"]) for x in json_file]
return render_template('file.html', results=pairs)
@app.route("/mongo")
def mongo():
query = request.args.get("query")
if not query is None:
results = eval('mongodb.'+query)
results = json_util.dumps(results, sort_keys=True, indent=4)
if "find" in query:
return render_template('mongo.html', results=results)
else:
return "no query"
@app.route("/query")
def ruta_query():
query = request.args.get("query")
if not query is None:
results = eval('mongodb.'+query)
results = json_util.dumps(results, sort_keys=True, indent=4)
if "find" in query:
# return render_template('mongo.html', results=results)
return str(results);
else:
return "{}" # No query
@app.route("/postgres")
def postgres():
return "Postgres API is not available"
query = request.args.get("query")
if not query is None:
cursor = postgresdb.cursor()
cursor.execute(query)
results = [[a for a in result] for result in cursor]
print(results)
return render_template('postgres.html', results=results)
else:
return "no query"
@app.route("/example")
def example():
return render_template('example.html')
if __name__ == "__main__":
app.run()
| #!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
app = Flask(__name__)
return app
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "dbEscuchas"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
#Cambiar por Path Absoluto en el servidor
QUERIES_FILENAME = '/var/www/FlaskApp/queries'
@app.route("/")
def home():
with open(QUERIES_FILENAME, 'r', encoding='utf-8') as queries_file:
json_file = json.load(queries_file)
pairs = [(x["name"],
x["database"],
x["description"],
x["query"]) for x in json_file]
return render_template('file.html', results=pairs)
@app.route("/mongo")
def mongo():
query = request.args.get("query")
if not query is None:
results = eval('mongodb.'+query)
results = json_util.dumps(results, sort_keys=True, indent=4)
if "find" in query:
return render_template('mongo.html', results=results)
else:
return "no query"
@app.route("/postgres")
def postgres():
return "Postgres API is not available"
query = request.args.get("query")
if not query is None:
cursor = postgresdb.cursor()
cursor.execute(query)
results = [[a for a in result] for result in cursor]
print(results)
return render_template('postgres.html', results=results)
else:
return "no query"
@app.route("/example")
def example():
return render_template('example.html')
if __name__ == "__main__":
app.run()
| mit | Python |
af4b53a85aec95c9ec7bf20b1c019ec0f397eacb | Bump version to 0.2.2 | flav-io/flavio,flav-io/flavio | flavio/_version.py | flavio/_version.py | __version__='0.2.2'
| __version__='0.2.1'
| mit | Python |
90571c86f39fee14fafcc9c030de66d4255c5d82 | Change naming style | WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos | lexos/interfaces/statistics_interface.py | lexos/interfaces/statistics_interface.py | from flask import request, session, render_template, Blueprint
from lexos.helpers import constants as constants
from lexos.interfaces.base_interface import detect_active_docs
from lexos.managers import utility, session_manager as session_manager
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
stats_view = Blueprint('statistics', __name__)
# Tells Flask to load this function when someone is at '/statsgenerator'
@stats_view.route("/statistics", methods=["GET", "POST"])
def statistics():
"""
Handles the functionality on the Statistics page ...
Note: Returns a response object (often a render_template call) to flask and
eventually to the browser.
"""
# Detect the number of active documents.
num_active_docs = detect_active_docs()
file_manager = utility.load_file_manager()
labels = file_manager.get_active_labels()
if request.method == "GET":
# "GET" request occurs when the page is first loaded.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
if 'statisticoption' not in session:
session['statisticoption'] = {'segmentlist': list(
map(str,
list(file_manager.files.keys())))} # default is all on
return render_template(
'statistics.html',
labels=labels,
labels2=labels,
itm="statistics",
numActiveDocs=num_active_docs)
if request.method == "POST":
token = request.form['tokenType']
file_info_list, corpus_info = utility.generate_statistics(
file_manager)
session_manager.cache_analysis_option()
session_manager.cache_statistic_option()
# DO NOT save fileManager!
return render_template(
'statistics.html',
labels=labels,
FileInfoList=file_info_list,
corpusInfo=corpus_info,
token=token,
itm="statistics",
numActiveDocs=num_active_docs)
| from flask import request, session, render_template, Blueprint
from lexos.helpers import constants as constants
from lexos.managers import utility, session_manager as session_manager
from lexos.interfaces.base_interface import detect_active_docs
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
stats_view = Blueprint('statistics', __name__)
# Tells Flask to load this function when someone is at '/statsgenerator'
@stats_view.route("/statistics", methods=["GET", "POST"])
def statistics():
"""
Handles the functionality on the Statistics page ...
Note: Returns a response object (often a render_template call) to flask and
eventually to the browser.
"""
# Detect the number of active documents.
num_active_docs = detect_active_docs()
file_manager = utility.load_file_manager()
labels = file_manager.get_active_labels()
if request.method == "GET":
# "GET" request occurs when the page is first loaded.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
if 'statisticoption' not in session:
session['statisticoption'] = {'segmentlist': list(
map(str,
list(file_manager.files.keys())))} # default is all on
return render_template(
'statistics.html',
labels=labels,
labels2=labels,
itm="statistics",
numActiveDocs=num_active_docs)
if request.method == "POST":
token = request.form['tokenType']
file_info_dict, corpus_info_dict = utility.generate_statistics(
file_manager)
session_manager.cache_analysis_option()
session_manager.cache_statistic_option()
# DO NOT save fileManager!
return render_template(
'statistics.html',
labels=labels,
FileInfoDict=file_info_dict,
corpusInfoDict=corpus_info_dict,
token=token,
itm="statistics",
numActiveDocs=num_active_docs)
| mit | Python |
009cdf804f0f730ed081c6003eedb1015283948f | update to test for non categorized event publishing | EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes | lg_replay/test/offline/test_lg_replay.py | lg_replay/test/offline/test_lg_replay.py | #!/usr/bin/env python
PKG = 'lg_replay'
NAME = 'test_lg_replay'
import rospy
import unittest
import json
from evdev import InputEvent
from lg_replay import DeviceReplay
from interactivespaces_msgs.msg import GenericMessage
class MockDevice:
def __init__(self):
self.events = [
InputEvent(1441716733L, 879280L, 3, 0, 9888L),
InputEvent(1441716733L, 879280L, 3, 1, 15600L),
InputEvent(1441716733L, 879280L, 0, 0, 0L),
InputEvent(1441716733L, 981276L, 3, 53, 9872L),
InputEvent(1441716733L, 981276L, 3, 54, 15664L),
InputEvent(1441716733L, 981276L, 3, 0, 9872L),
InputEvent(1441716733L, 981276L, 3, 1, 15664L),
InputEvent(1441716733L, 981276L, 0, 0, 0L),
InputEvent(1441716733L, 982263L, 3, 57, -1L),
InputEvent(1441716733L, 982263L, 1, 330, 0L) # < this event gets tested
]
def read_loop(self):
return self.events
class MockPublisher:
def __init__(self):
self.published_messages = []
def get_published_messages(self):
return self.published_messages
def publish_event(self, message):
self.published_messages.append(message)
class TestReplay(unittest.TestCase):
def setUp(self):
self.mock_device = MockDevice()
self.mock_publisher = MockPublisher()
self.replay = DeviceReplay(self.mock_publisher, 'blah', event_ecode='EV_KEY', device=self.mock_device)
def test_events_get_filtered_and_published(self):
self.replay.run()
self.assertEqual(type(self.mock_publisher.get_published_messages()), list)
self.assertEqual(len(self.mock_publisher.get_published_messages()), 1)
self.assertEqual(type(self.mock_publisher.get_published_messages()[0]), dict)
message = self.mock_publisher.get_published_messages()[0]
self.assertEqual(message['code'], 330)
self.assertEqual(message['value'], 0)
if __name__ == '__main__':
import rostest
rostest.rosrun(PKG, NAME, TestReplay)
| #!/usr/bin/env python
PKG = 'lg_replay'
NAME = 'test_lg_replay'
import rospy
import unittest
import json
from evdev import InputEvent
from lg_replay import DeviceReplay
from interactivespaces_msgs.msg import GenericMessage
class MockDevice:
def __init__(self):
self.events = [
InputEvent(1441716733L, 879280L, 3, 0, 9888L),
InputEvent(1441716733L, 879280L, 3, 1, 15600L),
InputEvent(1441716733L, 879280L, 0, 0, 0L),
InputEvent(1441716733L, 981276L, 3, 53, 9872L),
InputEvent(1441716733L, 981276L, 3, 54, 15664L),
InputEvent(1441716733L, 981276L, 3, 0, 9872L),
InputEvent(1441716733L, 981276L, 3, 1, 15664L),
InputEvent(1441716733L, 981276L, 0, 0, 0L),
InputEvent(1441716733L, 982263L, 3, 57, -1L),
InputEvent(1441716733L, 982263L, 1, 330, 0L) # < this event gets tested
]
def read_loop(self):
return self.events
class MockPublisher:
def __init__(self):
self.published_messages = []
def get_published_messages(self):
return self.published_messages
def publish_event(self, message):
self.published_messages.append(message)
class TestReplay(unittest.TestCase):
def setUp(self):
self.mock_device = MockDevice()
self.mock_publisher = MockPublisher()
self.replay = DeviceReplay(self.mock_publisher, 'blah', event_ecode='EV_KEY', device=self.mock_device)
def test_events_get_filtered_and_published(self):
self.replay.run()
self.assertEqual(type(self.mock_publisher.get_published_messages()), list)
self.assertEqual(len(self.mock_publisher.get_published_messages()), 1)
self.assertEqual(type(self.mock_publisher.get_published_messages()[0]), dict)
message = self.mock_publisher.get_published_messages()[0]
self.assertEqual(message['scancode'], 330)
self.assertEqual(message['keystate'], 0)
self.assertEqual(message['keycode'], 'BTN_TOUCH')
if __name__ == '__main__':
import rostest
rostest.rosrun(PKG, NAME, TestReplay)
| apache-2.0 | Python |
25f57a023f978fca94bbeb9655a4d90f0b2d95f0 | Fix typo | martinjrobins/hobo,martinjrobins/hobo,martinjrobins/hobo,martinjrobins/hobo | pints/toy/__init__.py | pints/toy/__init__.py | #
# Root of the toy module.
# Provides a number of toy models and logpdfs for tests of Pints' functions.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
from ._toy_classes import ToyLogPDF, ToyModel, ToyODEModel
from ._annulus import AnnulusLogPDF
from ._beeler_reuter_model import ActionPotentialModel
from ._cone import ConeLogPDF
from ._constant_model import ConstantModel
from ._eight_schools import EightSchoolsLogPDF
from ._fitzhugh_nagumo_model import FitzhughNagumoModel
from ._gaussian import GaussianLogPDF
from ._german_credit import GermanCreditLogPDF
from ._german_credit_hierarchical import GermanCreditHierarchicalLogPDF
from ._goodwin_oscillator_model import GoodwinOscillatorModel
from ._hes1_michaelis_menten import Hes1Model
from ._hh_ik_model import HodgkinHuxleyIKModel
from ._high_dimensional_gaussian import HighDimensionalGaussianLogPDF
from ._logistic_model import LogisticModel
from ._lotka_volterra_model import LotkaVolterraModel
from ._multimodal_gaussian import MultimodalGaussianLogPDF
from ._neals_funnel import NealsFunnelLogPDF
from ._parabola import ParabolicError
from ._repressilator_model import RepressilatorModel
from ._rosenbrock import RosenbrockError, RosenbrockLogPDF
from ._sho_model import SimpleHarmonicOscillatorModel
from ._simple_egg_box import SimpleEggBoxLogPDF
from ._sir_model import SIRModel
from ._twisted_gaussian_banana import TwistedGaussianLogPDF
from ._stochastic_degradation_model import StochasticDegradationModel
from ._stochastic_logistic_model import StochasticLogisticModel
| #
# Root of the toy module.
# Provides a number of toy models and logpdfs for tests of Pints' functions.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
from ._toy_classes import ToyLogPDF, ToyModel, ToyODEModel
from ._annulus import AnnulusLogPDF
from ._beeler_reuter_model import ActionPotentialModel
from ._cone import ConeLogPDF
from ._constant_model import ConstantModel
from ._eight_schools import EightSchoolsLogPDF
from ._fitzhugh_nagumo_model import FitzhughNagumoModel
from ._gaussian import GaussianLogPDF
from ._german_credit import GermanCreditLogPDF
from ._german_credit_hierarchical import GermanCreditHierarchicalLogPDF
from ._goodwin_oscillator_model import GoodwinOscillatorModel
from ._hes1_michaelis_menten import Hes1Model
from ._hh_ik_model import HodgkinHuxleyIKModel
from ._high_dimensional_gaussian import HighDimensionalGaussianLogPDF
from ._logistic_model import LogisticModel
from ._lotka_volterra_model import LotkaVolterraModel
from ._multimodal_gaussian import MultimodalGaussianLogPDF
from ._neals_funnel import NealsFunnelLogPDF
from ._parabola import ParabolicError
from ._repressilator_model import RepressilatorModel
from ._rosenbrock import RosenbrockError, RosenbrockLogPDF
from ._sho_model import SimpleHarmonicOscillatorModel
from ._simple_egg_box import SimpleEggBoxLogPDF
from ._sir_model import SIRModel
from ._twisted_gaussian_banana import TwistedGaussianLogPDF
from ._stochastic_degradation_model import StochasticDegradationModel
from ._stochastic_logistic model import StochasticLogisticModel
| bsd-3-clause | Python |
5b120e5b89c06a0a5c01f8c710f85a4a179f56f7 | Change HTML theme to match BIND ARM, add copyright, EPUB info | isc-projects/isc-dnssec-guide | doc/conf.py | doc/conf.py | ############################################################################
# Copyright (C) Internet Systems Consortium, Inc. ("ISC")
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at https://mozilla.org/MPL/2.0/.
#
# See the COPYRIGHT file distributed with this work for additional
# information regarding copyright ownership.
############################################################################
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'ISC DNSSEC Guide'
copyright = '2020, Internet Systems Consortium'
author = 'Internet Systems Consortium'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The master toctree document.
master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for EPUB output -------------------------------------------------
epub_basename = 'DNSSECGuide'
| # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'ISC DNSSEC Guide'
copyright = '2020, Internet Systems Consortium'
author = 'Internet Systems Consortium'
# The full version, including alpha/beta/rc tags
release = '2020'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The master toctree document.
master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
| isc | Python |
6366fe6da78cd0e910b52352b918ff18d89f25c4 | update tests_forms | dubirajara/django_my_ideas_wall,dubirajara/django_my_ideas_wall,dubirajara/django_my_ideas_wall,dubirajara/django_my_ideas_wall | myideas/core/tests/test_forms.py | myideas/core/tests/test_forms.py | from django.test import TestCase
from django.shortcuts import resolve_url as r
from registration.forms import RegistrationForm
from myideas.core.forms import IdeasForm, IdeasFormUpdate
class IdeasFormTest(TestCase):
def setUp(self):
self.form = IdeasForm()
def test_form_has_fields(self):
"""IdeasForm must have 3 fields"""
expected = ('title', 'description', 'tags')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Ideasform field is required."""
form = IdeasForm({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Ideasform field is not present."""
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('user'))
class IdeasFormUpdateTest(TestCase):
def setUp(self):
self.form = IdeasFormUpdate()
def test_form_has_fields(self):
"""UpdateForm must have 2 fields"""
expected = ('title', 'description')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Updateform field is required."""
form = IdeasFormUpdate({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Updateform field is not present."""
self.assertFalse(self.form.fields.get('user'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('tags'))
class RegisterIdea(TestCase):
def test_registration_get(self):
resp = self.client.get(r('registration_register'))
self.failUnless(isinstance(resp.context['form'],
RegistrationForm))
| from django.test import TestCase
from django.shortcuts import resolve_url as r
from registration.forms import RegistrationForm
from myideas.core.forms import IdeasForm, IdeasFormUpdate
class IdeasFormTest(TestCase):
def setUp(self):
self.form = IdeasForm()
def test_form_has_fields(self):
"""IdeasForm must have 3 fields"""
expected = ('title', 'description', 'tags')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Ideasform field is required."""
form = IdeasForm({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Ideasform field is not present."""
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('user'))
class IdeasFormUpdateTest(TestCase):
def setUp(self):
self.form = IdeasFormUpdate()
def test_form_has_fields(self):
"""UpdateForm must have 2 fields"""
expected = ('title', 'description')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Updateform field is required."""
form = IdeasFormUpdate({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Updateform field is not present."""
self.assertFalse(self.form.fields.get('user'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('tags'))
class RegisterIdea(TestCase):
def test_registration_get(self):
resp = self.client.get(r('registration_register'))
self.failUnless(isinstance(resp.context['form'],
RegistrationForm))
| agpl-3.0 | Python |
b3dfb211d0d81210dcaa317a0d6f79b6ad249816 | Update netlogo_example.py | quaquel/EMAworkbench,quaquel/EMAworkbench | ema_workbench/examples/netlogo_example.py | ema_workbench/examples/netlogo_example.py | """
This example is a proof of principle for how NetLogo models can be
controlled using pyNetLogo and the ema_workbench. Note that this
example uses the NetLogo 6 version of the predator prey model that
comes with NetLogo. If you are using NetLogo 5, replace the model file
with the one that comes with NetLogo.
"""
import numpy as np
from ema_workbench import (RealParameter, ema_logging, ScalarOutcome,
TimeSeriesOutcome, MultiprocessingEvaluator)
from ema_workbench.connectors.netlogo import NetLogoModel
# Created on 20 mrt. 2013
#
# .. codeauthor:: jhkwakkel
if __name__ == '__main__':
# turn on logging
ema_logging.log_to_stderr(ema_logging.INFO)
model = NetLogoModel('predprey',
wd="./models/predatorPreyNetlogo",
model_file="Wolf Sheep Predation.nlogo")
model.run_length = 100
model.replications = 10
model.uncertainties = [RealParameter("grass-regrowth-time", 1, 99),
RealParameter("initial-number-sheep", 50, 100),
RealParameter("initial-number-wolves", 50, 100),
RealParameter("sheep-reproduce", 5, 10),
RealParameter("wolf-reproduce", 5, 10),
]
model.outcomes = [ScalarOutcome('sheep', variable_name='count sheep',
function=np.mean),
TimeSeriesOutcome('wolves'),
TimeSeriesOutcome('grass')]
# perform experiments
n = 10
with MultiprocessingEvaluator(model, n_processes=2,
maxtasksperchild=4) as evaluator:
results = evaluator.perform_experiments(n)
print() | '''
This example is a proof of principle for how NetLogo models can be
controlled using pyNetLogo and the ema_workbench. Note that this
example uses the NetLogo 6 version of the predator prey model that
comes with NetLogo. If you are using NetLogo 5, replace the model file
with the one that comes with NetLogo.
'''
from ema_workbench import (RealParameter, ema_logging,
TimeSeriesOutcome, MultiprocessingEvaluator)
from ema_workbench.connectors.netlogo import NetLogoModel
# Created on 20 mrt. 2013
#
# .. codeauthor:: jhkwakkel
if __name__ == '__main__':
# turn on logging
ema_logging.log_to_stderr(ema_logging.INFO)
model = NetLogoModel('predprey',
wd="./models/predatorPreyNetlogo",
model_file="Wolf Sheep Predation.nlogo")
model.run_length = 100
model.replications = 10
model.uncertainties = [RealParameter("grass-regrowth-time", 1, 99),
RealParameter("initial-number-sheep", 50, 100),
RealParameter("initial-number-wolves", 50, 100),
RealParameter("sheep-reproduce", 5, 10),
RealParameter("wolf-reproduce", 5, 10),
]
model.outcomes = [TimeSeriesOutcome('sheep'),
TimeSeriesOutcome('wolves'),
TimeSeriesOutcome('grass')]
# perform experiments
n = 10
with MultiprocessingEvaluator(model, n_processes=2,
maxtasksperchild=4) as evaluator:
results = evaluator.perform_experiments(n)
| bsd-3-clause | Python |
8c6f178782b6470b98536a2384391970e0cbafb9 | Update config file | edwinksl/edwinksl.github.io,edwinksl/edwinksl.github.io,edwinksl/edwinksl.github.io,edwinksl/edwinksl.github.io | pelicanconf.py | pelicanconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Edwin Khoo'
SITENAME = 'Edwin Khoo'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = None
# Social widget
SOCIAL = (('GitHub', 'https://github.com/edwinksl'),
('Bitbucket', 'https://bitbucket.org/edwinksl'),
('Facebook', 'https://www.facebook.com/edwinksl'),
('Twitter', 'https://twitter.com/edwinksl'),
('LinkedIn', 'https://www.linkedin.com/in/edwinksl'))
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
THEME = '/home/edwinksl/Git/pelican-bootstrap3'
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Edwin Khoo'
SITENAME = 'Edwin Khoo'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = None
# Social widget
SOCIAL = (('Twitter', 'https://twitter.com/edwinksl'),
('GitHub', 'https://github.com/edwinksl'))
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
THEME = '/home/edwinksl/Git/pelican-bootstrap3'
| mit | Python |
5cf5c6028bd7007a867691af966f89574f02de1f | clean up setup | sxend/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,grob/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zapov/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,denkab/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,actframework/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sgml/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Verber/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zhuochenKIDD/FrameworkB
enchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,herloct/FrameworkBenchmarks,methane/FrameworkBenchmarks,herloct/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,herloct/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Verber/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,methane/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,methane/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Verber/FrameworkBenchmarks,youprofit/FrameworkBen
chmarks,victorbriz/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,actframework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,doom369/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,joshk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sgml/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,testn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kellabyte/Framework
Benchmarks,s-ludwig/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,actframework/FrameworkBenchmarks,herloct/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,denkab/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,valyala/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,actframework/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sgml/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,denkab/FrameworkBenchmarks,herloct/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,herloct/FrameworkBenc
hmarks,alubbe/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Verber/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,herloct/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,grob/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,joshk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jebbstewart/FrameworkB
enchmarks,alubbe/FrameworkBenchmarks,methane/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Verber/FrameworkBenchmarks,actframework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,methane/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,doom369/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zloster/FrameworkBenchmarks,torhve/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,valyala/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,methane/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,herloct/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchma
rks,greg-hellings/FrameworkBenchmarks,doom369/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jamming/FrameworkBenchmarks,grob/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zloster/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,doom369/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,herloct/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zloster/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zloster/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sxend/FrameworkBenchmarks,l
cp0578/FrameworkBenchmarks,grob/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Verber/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,grob/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,doom369/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sxend/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zapov/FrameworkBenchmarks,joshk/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,testn/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jamming/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,RockinRoel/FrameworkBenchmar
ks,xitrum-framework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zapov/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,joshk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,actframework/FrameworkBenchmarks,testn/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,khellang/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,valyala/FrameworkBenc
hmarks,torhve/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sxend/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,denkab/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,denkab/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,testn/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jamming/FrameworkBenchmarks,actframework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sxend/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,hamiltont/Fram
eworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jamming/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sgml/FrameworkBenchmarks,herloct/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jamming/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,doom369/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,denkab/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,khellang/FrameworkBenchmarks,denkab/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,valyala/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,herloct/FrameworkBenchmarks,doom369/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,grob/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,testn/FrameworkBenchmarks,test
n/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,denkab/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zloster/FrameworkBenchmarks,torhve/FrameworkBenchmarks,joshk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,khellang/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,testn/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,methane/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Verber/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,methane/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zloster/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,valyala/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,testn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,d
enkab/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,methane/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,torhve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,actframework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,grob/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,khellang/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,methane/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,khellang/FrameworkBenchmarks,grob/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,actframework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,denkab/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,joshk/FrameworkBenchmar
ks,raziel057/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,testn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sgml/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,joshk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,testn/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,methane/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Verber/Fra
meworkBenchmarks,knewmanTE/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,khellang/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,joshk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,doom369/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,denkab/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,testn/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,torhve/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenc
hmarks,joshk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Verber/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,valyala/FrameworkBenchmarks,grob/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,grob/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,testn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,valyala/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,torhve/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,methane/FrameworkBenchmarks,sxend/FrameworkBenchmarks,khellang/FrameworkBenchmarks,ashawnbandy-te-tfb/Framew
orkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks | mojolicious/setup.py | mojolicious/setup.py | import subprocess
import sys
import json
from os.path import expanduser
import os
import getpass
home = expanduser("~")
def start(args, logfile, errfile):
conf = {
'database_host' : args.database_host,
'workers' : args.max_threads,
}
with open('mojolicious/app.conf', 'w') as f:
f.write(json.dumps(conf))
try:
# os.environ["MOJO_MODE"] = "production"
subprocess.Popen("hypnotoad ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
return 0
except subprocess.CalledProcessError:
return 1
def stop(logfile, errfile):
try:
subprocess.call("hypnotoad -s ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hypnotoad' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
except subprocess.CalledProcessError:
return 1
| import subprocess
import sys
#import setup_util
import json
from os.path import expanduser
import os
import getpass
home = expanduser("~")
def start(args, logfile, errfile):
# setup_util.replace_text("mojolicious/app.pl", "localhost", ""+ args.database_host +"")
# str(args.max_threads)
conf = {
'database_host': args.database_host,
'workers': args.max_threads,
}
with open('mojolicious/app.conf', 'w') as f:
f.write(json.dumps(conf))
try:
# os.environ["MOJO_MODE"] = "production"
subprocess.Popen("hypnotoad ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
return 0
except subprocess.CalledProcessError:
return 1
def stop(logfile, errfile):
try:
subprocess.call("hypnotoad -s ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hypnotoad' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
except subprocess.CalledProcessError:
return 1
| bsd-3-clause | Python |
8d935a2141b8f5c080d922189df7d79bb838b3a0 | Use default router implementation | dz0ny/mopidy-lux,dz0ny/mopidy-lux | mopidy_lux/router.py | mopidy_lux/router.py | import os
from tinydb import TinyDB
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware
import tornado.web
from mopidy import http
class LuxRouter(http.Router):
name = 'lux'
def setup_routes(self):
db = TinyDB(
self.config['lux']['db_file'],
storage=CachingMiddleware(JSONStorage)
)
args = dict(
config=self.config,
db=db
)
return [
(r"/%s/(.*)" % self.name, http.StaticFileHandler, {
'path': os.path.join(os.path.dirname(__file__), 'static'),
'default_filename': 'index.html'
}),
(r"/%s/playlist" % self.name, Playlists, args),
(r"/%s/loved" % self.name, Loved, args),
(r"/%s/discover" % self.name, EchoNestsDiscover, args),
]
class Playlists(tornado.web.RequestHandler):
"""
Permanent storage for playlists
"""
pass
class Loved(tornado.web.RequestHandler):
"""
Permanent storage for loved songs
"""
pass
class EchoNestsDiscover(tornado.web.RequestHandler):
"""
Discover tracks based on mood or similarity
"""
pass
| import os
from tinydb import TinyDB
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware
import tornado.web
class LuxRouter(object):
def __init__(self, _config):
self.config = _config
self._db = TinyDB(
self.config['lux']['db_file'],
storage=CachingMiddleware(JSONStorage)
)
def setup_routes(self):
args = dict(
config=self.config,
db=self._db
)
return [
(r"/lux/(.*)", tornado.web.StaticFileHandler, {
'path': os.path.join(os.path.dirname(__file__), 'static'),
'default_filename': 'index.html'
}),
(r"/lux/playlist", Playlists, args),
(r"/lux/loved", Loved, args),
(r"/lux/discover", EchoNestsDiscover, args),
]
class Playlists(tornado.web.RequestHandler):
"""
Permanent storage for playlists
"""
pass
class Loved(tornado.web.RequestHandler):
"""
Permanent storage for loved songs
"""
pass
class EchoNestsDiscover(tornado.web.RequestHandler):
"""
Discover tracks based on mood or similarity
"""
pass
| mit | Python |
32481a906e00a1c5d301e6227ab43cf8feba31e0 | fix double-import trap | dpranke/pyjson5 | json5/__init__.py | json5/__init__.py | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A pure Python implementation of the JSON5 configuration language."""
from .lib import load, loads, dump, dumps
from .version import VERSION
__all__ = [
'VERSION',
'dump',
'dumps',
'load',
'loads',
]
| # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A pure Python implementation of the JSON5 configuration language."""
from . import tool
from .lib import load, loads, dump, dumps
from .version import VERSION
__all__ = [
'VERSION',
'dump',
'dumps',
'load',
'loads',
'tool',
]
| apache-2.0 | Python |
95bde4f783a4d11627d8bc64e24b383e945bdf01 | Revert local CDN location set by Jodok | crate/crate-web,jomolinare/crate-web,crate/crate-web,crate/crate-web,jomolinare/crate-web,jomolinare/crate-web | src/web/tags.py | src/web/tags.py | # -*- coding: utf-8 -*-
# vim: set fileencodings=utf-8
__docformat__ = "reStructuredText"
import json
import datetime
from django.template.base import Library
from django.utils.safestring import mark_safe
register = Library()
CDN_URL = 'https://cdn.crate.io'
def media(context, media_url):
"""
Get the path for a media file.
"""
if media_url.startswith('http://') or media_url.startswith('https://'):
url = media_url
elif media_url.startswith('/'):
url = u'{0}{1}'.format(CDN_URL, media_url)
else:
url = u'{0}/media/{1}'.format(CDN_URL, media_url)
return url
register.simple_tag(takes_context=True)(media)
| # -*- coding: utf-8 -*-
# vim: set fileencodings=utf-8
__docformat__ = "reStructuredText"
import json
import datetime
from django.template.base import Library
from django.utils.safestring import mark_safe
register = Library()
#CDN_URL = 'https://cdn.crate.io'
CDN_URL = 'http://localhost:8001'
def media(context, media_url):
"""
Get the path for a media file.
"""
if media_url.startswith('http://') or media_url.startswith('https://'):
url = media_url
elif media_url.startswith('/'):
url = u'{0}{1}'.format(CDN_URL, media_url)
else:
url = u'{0}/media/{1}'.format(CDN_URL, media_url)
return url
register.simple_tag(takes_context=True)(media)
| apache-2.0 | Python |
06f328b5843d83946b353697745ec82c7741ee3e | Allow colons in record label URLs (for timestamps such as '2013-02-13_08:42:00'). | babsey/sumatra,apdavison/sumatra,maxalbert/sumatra,babsey/sumatra,open-research/sumatra,dpad/sumatra,maxalbert/sumatra,open-research/sumatra,dpad/sumatra,babsey/sumatra,babsey/sumatra,maxalbert/sumatra,open-research/sumatra,dpad/sumatra,dpad/sumatra,apdavison/sumatra,maxalbert/sumatra,open-research/sumatra,apdavison/sumatra,maxalbert/sumatra,babsey/sumatra,apdavison/sumatra,dpad/sumatra,open-research/sumatra,apdavison/sumatra | src/web/urls.py | src/web/urls.py | """
Define URL dispatching for the Sumatra web interface.
"""
from django.conf.urls.defaults import *
from django.views.generic import list_detail
from django.conf import settings
from sumatra.web.views import Timeline
P = {
'project': r'(?P<project>\w+[\w ]*)',
'label': r'(?P<label>\w+[\w|\-\.:]*)',
}
urlpatterns = patterns('sumatra.web.views',
(r'^$', 'list_projects'),
(r'^%(project)s/$' % P, 'list_records'),
(r'^%(project)s/about/$' % P, 'show_project'),
(r'^%(project)s/delete/$' % P, 'delete_records'),
(r'^%(project)s/tag/$' % P, 'list_tags'),
(r'^%(project)s/%(label)s/$' % P, 'record_detail'),
(r'^%(project)s/%(label)s/datafile$' % P, 'show_file'),
(r'^%(project)s/%(label)s/download$' % P, 'download_file'),
(r'^%(project)s/%(label)s/image$' % P, 'show_image'),
(r'^%(project)s/%(label)s/diff/(?P<package>[\w_]+)*$' % P, 'show_diff'),
(r'^%(project)s/simulation$' % P, 'run_sim'),
(r'^%(project)s/settings$' % P, 'settings'),
(r'^%(project)s/search$' % P, 'search'),
(r'^%(project)s/settags$' % P, 'set_tags'),
)
urlpatterns += patterns('',
(r'^timeline/(?P<user>\w+[\w ]*)/', Timeline.as_view()),
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
)
| """
Define URL dispatching for the Sumatra web interface.
"""
from django.conf.urls.defaults import *
from django.views.generic import list_detail
from django.conf import settings
from sumatra.web.views import Timeline
P = {
'project': r'(?P<project>\w+[\w ]*)',
'label': r'(?P<label>\w+[\w|\-\.]*)',
}
urlpatterns = patterns('sumatra.web.views',
(r'^$', 'list_projects'),
(r'^%(project)s/$' % P, 'list_records'),
(r'^%(project)s/about/$' % P, 'show_project'),
(r'^%(project)s/delete/$' % P, 'delete_records'),
(r'^%(project)s/tag/$' % P, 'list_tags'),
(r'^%(project)s/%(label)s/$' % P, 'record_detail'),
(r'^%(project)s/%(label)s/datafile$' % P, 'show_file'),
(r'^%(project)s/%(label)s/download$' % P, 'download_file'),
(r'^%(project)s/%(label)s/image$' % P, 'show_image'),
(r'^%(project)s/%(label)s/diff/(?P<package>[\w_]+)*$' % P, 'show_diff'),
(r'^%(project)s/simulation$' % P, 'run_sim'),
(r'^%(project)s/settings$' % P, 'settings'),
(r'^%(project)s/search$' % P, 'search'),
(r'^%(project)s/settags$' % P, 'set_tags'),
)
urlpatterns += patterns('',
(r'^timeline/(?P<user>\w+[\w ]*)/', Timeline.as_view()),
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) | bsd-2-clause | Python |
a1cbeb7f7a03d0618ec9f60f65308168e521af18 | Add encodings for imul instructions to RISC-V. | stoklund/cretonne,sunfishcode/cretonne,stoklund/cretonne,sunfishcode/cretonne,stoklund/cretonne,sunfishcode/cretonne | meta/isa/riscv/encodings.py | meta/isa/riscv/encodings.py | """
RISC-V Encodings.
"""
from __future__ import absolute_import
from cretonne import base
from .defs import RV32, RV64
from .recipes import OPIMM, OPIMM32, OP, OP32, R, Rshamt, I
from .settings import use_m
# Basic arithmetic binary instructions are encoded in an R-type instruction.
for inst, inst_imm, f3, f7 in [
(base.iadd, base.iadd_imm, 0b000, 0b0000000),
(base.isub, None, 0b000, 0b0100000),
(base.bxor, base.bxor_imm, 0b100, 0b0000000),
(base.bor, base.bor_imm, 0b110, 0b0000000),
(base.band, base.band_imm, 0b111, 0b0000000)
]:
RV32.enc(inst.i32, R, OP(f3, f7))
RV64.enc(inst.i64, R, OP(f3, f7))
# Immediate versions for add/xor/or/and.
if inst_imm:
RV32.enc(inst_imm.i32, I, OPIMM(f3))
RV64.enc(inst_imm.i64, I, OPIMM(f3))
# 32-bit ops in RV64.
RV64.enc(base.iadd.i32, R, OP32(0b000, 0b0000000))
RV64.enc(base.isub.i32, R, OP32(0b000, 0b0100000))
# There are no andiw/oriw/xoriw variations.
RV64.enc(base.iadd_imm.i32, I, OPIMM32(0b000))
# Dynamic shifts have the same masking semantics as the cton base instructions.
for inst, inst_imm, f3, f7 in [
(base.ishl, base.ishl_imm, 0b001, 0b0000000),
(base.ushr, base.ushr_imm, 0b101, 0b0000000),
(base.sshr, base.sshr_imm, 0b101, 0b0100000),
]:
RV32.enc(inst.i32.i32, R, OP(f3, f7))
RV64.enc(inst.i64.i64, R, OP(f3, f7))
RV64.enc(inst.i32.i32, R, OP32(f3, f7))
# Allow i32 shift amounts in 64-bit shifts.
RV64.enc(inst.i64.i32, R, OP(f3, f7))
RV64.enc(inst.i32.i64, R, OP32(f3, f7))
# Immediate shifts.
RV32.enc(inst_imm.i32, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i64, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i32, Rshamt, OPIMM32(f3, f7))
# "M" Standard Extension for Integer Multiplication and Division.
# Gated by the `use_m` flag.
RV32.enc(base.imul.i32, R, OP(0b000, 0b0000001), isap=use_m)
RV64.enc(base.imul.i64, R, OP(0b000, 0b0000001), isap=use_m)
RV64.enc(base.imul.i32, R, OP32(0b000, 0b0000001), isap=use_m)
| """
RISC-V Encodings.
"""
from __future__ import absolute_import
from cretonne import base
from .defs import RV32, RV64
from .recipes import OPIMM, OPIMM32, OP, OP32, R, Rshamt, I
# Basic arithmetic binary instructions are encoded in an R-type instruction.
for inst, inst_imm, f3, f7 in [
(base.iadd, base.iadd_imm, 0b000, 0b0000000),
(base.isub, None, 0b000, 0b0100000),
(base.bxor, base.bxor_imm, 0b100, 0b0000000),
(base.bor, base.bor_imm, 0b110, 0b0000000),
(base.band, base.band_imm, 0b111, 0b0000000)
]:
RV32.enc(inst.i32, R, OP(f3, f7))
RV64.enc(inst.i64, R, OP(f3, f7))
# Immediate versions for add/xor/or/and.
if inst_imm:
RV32.enc(inst_imm.i32, I, OPIMM(f3))
RV64.enc(inst_imm.i64, I, OPIMM(f3))
# 32-bit ops in RV64.
RV64.enc(base.iadd.i32, R, OP32(0b000, 0b0000000))
RV64.enc(base.isub.i32, R, OP32(0b000, 0b0100000))
# There are no andiw/oriw/xoriw variations.
RV64.enc(base.iadd_imm.i32, I, OPIMM32(0b000))
# Dynamic shifts have the same masking semantics as the cton base instructions.
for inst, inst_imm, f3, f7 in [
(base.ishl, base.ishl_imm, 0b001, 0b0000000),
(base.ushr, base.ushr_imm, 0b101, 0b0000000),
(base.sshr, base.sshr_imm, 0b101, 0b0100000),
]:
RV32.enc(inst.i32.i32, R, OP(f3, f7))
RV64.enc(inst.i64.i64, R, OP(f3, f7))
RV64.enc(inst.i32.i32, R, OP32(f3, f7))
# Allow i32 shift amounts in 64-bit shifts.
RV64.enc(inst.i64.i32, R, OP(f3, f7))
RV64.enc(inst.i32.i64, R, OP32(f3, f7))
# Immediate shifts.
RV32.enc(inst_imm.i32, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i64, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i32, Rshamt, OPIMM32(f3, f7))
| apache-2.0 | Python |
d017c2a2e09d043caecd555217a399453c7e60b8 | fix migration imports | praekeltfoundation/ndoh-hub,praekeltfoundation/ndoh-hub,praekeltfoundation/ndoh-hub | eventstore/migrations/0050_askfeedback.py | eventstore/migrations/0050_askfeedback.py | # Generated by Django 2.2.24 on 2021-12-07 06:26
import uuid
import django.contrib.postgres.fields.jsonb
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("eventstore", "0049_auto_20211202_1220")]
operations = [
migrations.CreateModel(
name="AskFeedback",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("contact_id", models.UUIDField()),
("question_answered", models.BooleanField(default=False)),
("timestamp", models.DateTimeField(default=django.utils.timezone.now)),
(
"created_by",
models.CharField(blank=True, default="", max_length=255),
),
(
"data",
django.contrib.postgres.fields.jsonb.JSONField(
blank=True, default=dict, null=True
),
),
],
)
]
| # Generated by Django 2.2.24 on 2021-12-07 06:26
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
dependencies = [("eventstore", "0049_auto_20211202_1220")]
operations = [
migrations.CreateModel(
name="AskFeedback",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("contact_id", models.UUIDField()),
("question_answered", models.BooleanField(default=False)),
("timestamp", models.DateTimeField(default=django.utils.timezone.now)),
(
"created_by",
models.CharField(blank=True, default="", max_length=255),
),
(
"data",
django.contrib.postgres.fields.jsonb.JSONField(
blank=True, default=dict, null=True
),
),
],
)
]
| bsd-3-clause | Python |
5763c341a1660e13b70780a37d822eed65b00255 | refactor example fit_text_path_into_box.py | mozman/ezdxf,mozman/ezdxf,mozman/ezdxf,mozman/ezdxf,mozman/ezdxf | examples/addons/fit_text_path_into_box.py | examples/addons/fit_text_path_into_box.py | # Copyright (c) 2021-2022, Manfred Moitzi
# License: MIT License
import pathlib
import ezdxf
from ezdxf import path, zoom
from ezdxf.math import Matrix44
from ezdxf.tools import fonts
from ezdxf.addons import text2path
CWD = pathlib.Path("~/Desktop/Outbox").expanduser()
if not CWD.exists():
CWD = pathlib.Path(".")
# ------------------------------------------------------------------------------
# This example shows how create outline paths from a text and fit them into a
# specified rectangle.
# ------------------------------------------------------------------------------
def main():
doc = ezdxf.new()
msp = doc.modelspace()
ff = fonts.FontFace(family="Arial")
box_width, box_height = 4, 2
# Draw the target box:
msp.add_lwpolyline(
[(0, 0), (box_width, 0), (box_width, box_height), (0, box_height)],
close=True,
dxfattribs={"color": 1},
)
# Convert text string into path objects:
text_as_paths = text2path.make_paths_from_str("Squeeze Me", ff)
# Fit text paths into a given box size by scaling, does not move the path
# objects:
# - uniform=True, keeps the text aspect ratio
# - uniform=False, scales the text to touch all 4 sides of the box
final_paths = path.fit_paths_into_box(
text_as_paths, size=(box_width, box_height, 0), uniform=False
)
# Mirror text about the x-axis
final_paths = path.transform_paths(final_paths, Matrix44.scale(-1, 1, 1))
# Move bottom/left corner to (0, 0) if required:
bbox = path.bbox(final_paths)
dx, dy, dz = -bbox.extmin
final_paths = path.transform_paths(
final_paths, Matrix44.translate(dx, dy, dz)
)
path.render_lwpolylines(
msp, final_paths, distance=0.01, dxfattribs={"color": 2}
)
zoom.extents(msp)
doc.saveas(CWD / "SqueezeMe.dxf")
if __name__ == "__main__":
main()
| # Copyright (c) 2021, Manfred Moitzi
# License: MIT License
from pathlib import Path
import ezdxf
from ezdxf import path, zoom
from ezdxf.math import Matrix44
from ezdxf.tools import fonts
from ezdxf.addons import text2path
DIR = Path("~/Desktop/Outbox").expanduser()
fonts.load()
doc = ezdxf.new()
doc.layers.new("OUTLINE")
doc.layers.new("FILLING")
msp = doc.modelspace()
attr = {"color": 2}
ff = fonts.FontFace(family="Arial")
sx, sy = 4, 2
# create the target box:
msp.add_lwpolyline(
[(0, 0), (sx, 0), (sx, sy), (0, sy)], close=True, dxfattribs={"color": 1}
)
# convert text string into path objects:
text_as_paths = text2path.make_paths_from_str("Squeeze Me", ff)
# fit text paths into a given box size by scaling, does not move the path objects:
# uniform=True, keeps the text aspect ratio
# uniform=False, scales the text to touch all 4 sides of the box
final_paths = path.fit_paths_into_box(
text_as_paths, size=(sx, sy, 0), uniform=False
)
# mirror text along x-axis
final_paths = path.transform_paths(final_paths, Matrix44.scale(-1, 1, 1))
# move bottom/left corner to (0, 0) if required:
bbox = path.bbox(final_paths)
dx, dy, dz = -bbox.extmin
final_paths = path.transform_paths(final_paths, Matrix44.translate(dx, dy, dz))
path.render_lwpolylines(
msp, final_paths, distance=0.01, dxfattribs={"color": 2}
)
zoom.extents(msp)
doc.saveas(DIR / "SqeezeMe.dxf")
| mit | Python |
daed646ff987bc86b333a995bac1283360a583ef | bump up version to 0.1.2 | mogproject/javactl,mogproject/javactl | src/javactl/__init__.py | src/javactl/__init__.py | __version__ = '0.1.2'
| __version__ = '0.1.1'
| apache-2.0 | Python |
c6453752f9630a760cd2b2508d9ba39413871d86 | Update SensorMotorTest.py | WeirdCoder/LilyPadOS,WeirdCoder/LilyPadOS,WeirdCoder/LilyPadOS,WeirdCoder/LilyPadOS | 04Dan/SensorMotorTest.py | 04Dan/SensorMotorTest.py | import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
##GPIO.setup(18, GPIO.OUT) servo
##GPIO.setup(22, GPIO.OUT) motor
GPIO.setup(16, GPIO.IN) ##button
try:
while True:
i = GPIO.input(16)
print(i)
delay(1000)
except Keyboardinterrupt:
GPIO.cleanup()
| import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
##GPIO.setup(18, GPIO.OUT) servo
##GPIO.setup(22, GPIO.OUT) motor
GPIO.setup(16, GPIO.IN) ##button
try:
while True:
i = GPIO.input(16)
print(i)
delay(1000)
except Keyboardinterupt:
GPIO.cleanup()
| mit | Python |
c3ecc4a06a212da11f52c9c0cd5c7b5c8d500516 | Support -h/--help on createdb.py | jeremycline/fmn,jeremycline/fmn,jeremycline/fmn | createdb.py | createdb.py | #!/usr/bin/env python
import sys
import fedmsg.config
import fmn.lib.models
config = fedmsg.config.load_config()
uri = config.get('fmn.sqlalchemy.uri')
if not uri:
raise ValueError("fmn.sqlalchemy.uri must be present")
if '-h' in sys.argv or '--help'in sys.argv:
print "createdb.py [--with-dev-data]"
sys.exit(0)
session = fmn.lib.models.init(uri, debug=True, create=True)
if '--with-dev-data' in sys.argv:
user1 = fmn.lib.models.User.get_or_create(session, username="ralph")
user2 = fmn.lib.models.User.get_or_create(session, username="toshio")
user3 = fmn.lib.models.User.get_or_create(session, username="toshio")
context1 = fmn.lib.models.Context.create(
session, name="irc", description="Internet Relay Chat",
detail_name="irc nick", icon="user",
placeholder="z3r0_c00l",
)
context2 = fmn.lib.models.Context.create(
session, name="email", description="Electronic Mail",
detail_name="email address", icon="envelope",
placeholder="jane@fedoraproject.org",
)
context3 = fmn.lib.models.Context.create(
session, name="gcm", description="Google Cloud Messaging",
detail_name="registration id", icon="phone",
placeholder="laksdjfasdlfkj183097falkfj109f"
)
prefs1 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context1,
detail_value="threebean",
)
prefs2 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context2,
detail_value="ralph@fedoraproject.org",
)
session.commit()
| #!/usr/bin/env python
import sys
import fedmsg.config
import fmn.lib.models
config = fedmsg.config.load_config()
uri = config.get('fmn.sqlalchemy.uri')
if not uri:
raise ValueError("fmn.sqlalchemy.uri must be present")
session = fmn.lib.models.init(uri, debug=True, create=True)
if '--with-dev-data' in sys.argv:
user1 = fmn.lib.models.User.get_or_create(session, username="ralph")
user2 = fmn.lib.models.User.get_or_create(session, username="toshio")
user3 = fmn.lib.models.User.get_or_create(session, username="toshio")
context1 = fmn.lib.models.Context.create(
session, name="irc", description="Internet Relay Chat",
detail_name="irc nick", icon="user",
placeholder="z3r0_c00l",
)
context2 = fmn.lib.models.Context.create(
session, name="email", description="Electronic Mail",
detail_name="email address", icon="envelope",
placeholder="jane@fedoraproject.org",
)
context3 = fmn.lib.models.Context.create(
session, name="gcm", description="Google Cloud Messaging",
detail_name="registration id", icon="phone",
placeholder="laksdjfasdlfkj183097falkfj109f"
)
prefs1 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context1,
detail_value="threebean",
)
prefs2 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context2,
detail_value="ralph@fedoraproject.org",
)
session.commit()
| lgpl-2.1 | Python |
29205582e07eaa8b28eea4b0691a9556d0999015 | Remove unused LoginForm | keybar/keybar | src/keybar/web/forms.py | src/keybar/web/forms.py | from django.utils.translation import ugettext_lazy as _
from django.contrib import auth
import floppyforms.__future__ as forms
from keybar.models.user import User
class RegisterForm(forms.ModelForm):
name = forms.CharField(label=_('Your name'),
widget=forms.TextInput(
attrs={'placeholder': _('e.g Jorah Mormont')}))
email = forms.EmailField(label=_('Email'))
class Meta:
model = User
fields = ('name', 'email')
| from django.utils.translation import ugettext_lazy as _
from django.contrib import auth
import floppyforms.__future__ as forms
from keybar.models.user import User
class RegisterForm(forms.ModelForm):
name = forms.CharField(label=_('Your name'),
widget=forms.TextInput(
attrs={'placeholder': _('e.g Jorah Mormont')}))
email = forms.EmailField(label=_('Email'))
class Meta:
model = User
fields = ('name', 'email')
class LoginForm(forms.Form):
email = forms.EmailField(label=_('Email'))
password = forms.CharField(label=_('Password'), widget=forms.PasswordInput)
error_messages = {
'invalid_login': _('Please enter a correct email and password. '
'Note that both fields may be case-sensitive.'),
}
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.authenticated_user = None
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
self.authenticated_user = auth.authenticate(email=email, password=password)
if self.authenticated_user is None:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login')
return self.cleaned_data
def get_user(self):
return self.authenticated_user
| bsd-3-clause | Python |
8d8002062a0ecbf3720870d7561670a8c7e98da2 | Fix test for auth tokens store | cgwire/zou | test/stores/test_auth_tokens_store.py | test/stores/test_auth_tokens_store.py | from test.base import ApiTestCase
from zou.app.stores import auth_tokens_store
class CommandsTestCase(ApiTestCase):
def setUp(self):
super(CommandsTestCase, self).setUp()
self.store = auth_tokens_store
self.store.clear()
def tearDown(self):
self.store.clear()
def test_get_and_add(self):
self.assertIsNone(self.store.get("key-1"))
self.store.add("key-1", "true")
self.assertEquals(self.store.get("key-1"), "true")
def test_delete(self):
self.store.add("key-1", "true")
self.store.delete("key-1")
self.assertIsNone(self.store.get("key-1"))
def test_is_revoked(self):
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "true")
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "false")
self.assertFalse(self.store.is_revoked({"jti": "key-1"}))
def test_keys(self):
self.store.add("key-1", "true")
self.store.add("key-2", "true")
self.assertTrue("key-1" in self.store.keys())
self.assertTrue("key-2" in self.store.keys())
| from test.base import ApiTestCase
from zou.app.stores import auth_tokens_store
class CommandsTestCase(ApiTestCase):
def setUp(self):
super(CommandsTestCase, self).setUp()
self.store = auth_tokens_store
self.store.clear()
def tearDown(self):
self.store.clear()
def test_get_and_add(self):
self.assertIsNone(self.store.get("key-1"))
self.store.add("key-1", "true")
self.assertEquals(self.store.get("key-1"), "true")
def test_delete(self):
self.store.add("key-1", "true")
self.store.delete("key-1")
self.assertIsNone(self.store.get("key-1"))
def test_is_revoked(self):
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "true")
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "false")
self.assertFalse(self.store.is_revoked({"jti": "key-1"}))
def test_keys(self):
self.store.add("key-1", "true")
self.store.add("key-2", "true")
self.assertEquals(
self.store.keys(), ["key-1", "key-2"]
)
| agpl-3.0 | Python |
b1244fe396e2ed6acb72b3ccd151fbde24f78198 | Adjust yaml_reader for Waypoint type | CIR-KIT/fifth_robot_pkg,CIR-KIT/fifth_robot_pkg,CIR-KIT/fifth_robot_pkg | src/waypoints_reader/scripts/yaml_reader.py | src/waypoints_reader/scripts/yaml_reader.py | #!/usr/bin/env python
# coding UTF-8
import yaml
import rospy
from goal_sender_msgs.srv import ApplyGoals
from goal_sender_msgs.msg import GoalSequence, Waypoint
from geometry_msgs.msg import Point, Quaternion
def read_yaml(path):
f = open(path, 'r')
waypoints = yaml.load(f)
f.close()
return waypoints
def get_waypoints():
sequence = GoalSequence()
for waypoint_data in read_yaml(rospy.get_param('~path', 'waypoints.yaml')):
waypoint = Waypoint(name = waypoint_data.get('name', ""),
position = Point(x = waypoint_data['x'],
y = waypoint_data['y']),
orientation = Quaternion(w = 1), # not havn't rotation
radius = waypoint_data['radius'], # required
importance = waypoint_data.get('importance', 0),
drag = waypoint_data.get('drag', 0))
sequence.waypoints.append(waypoint)
return sequence
if __name__ == '__main__':
rospy.init_node('yaml_reader', anonymous=True)
goal_sequence = get_waypoints()
rospy.wait_for_service('apply_goals')
try:
apply_goals = rospy.ServiceProxy('apply_goals', ApplyGoals)
resp = apply_goals(goal_sequence)
print resp.message
except rospy.ServiceException, e:
print e
except rospy.ROSInterruptException:
pass
| #!/usr/bin/env python
# coding UTF-8
import yaml
import rospy
from goal_sender_msgs.srv import ApplyGoals
from goal_sender_msgs.msg import GoalSequence
from goal_sender_msgs.msg import Waypoint
def read_yaml(path):
f = open(path, 'r')
waypoints = yaml.load(f)
f.close()
return waypoints
def get_waypoints():
sequence = GoalSequence()
for waypoint_data in read_yaml(rospy.get_param('~path', 'waypoints.yaml')):
waypoint = Waypoint(name = waypoint_data.get('name', ""),
x = waypoint_data['x'], # required
y = waypoint_data['y'], # required
radius = waypoint_data['radius'], # required
importance = waypoint_data.get('importance', 0),
drag = waypoint_data.get('drag', 0))
sequence.waypoints.append(waypoint)
return sequence
if __name__ == '__main__':
rospy.init_node('yaml_reader', anonymous=True)
goal_sequence = get_waypoints()
rospy.wait_for_service('apply_goals')
try:
apply_goals = rospy.ServiceProxy('apply_goals', ApplyGoals)
resp = apply_goals(goal_sequence)
print resp.message
except rospy.ServiceException, e:
print e
except rospy.ROSInterruptException:
pass
| bsd-3-clause | Python |
0f1551db96cd27ed20e62545cac1540a405e8f1a | fix bug | andrewsy97/Treehacks,andrewsy97/Treehacks | FlaskWebProject/views.py | FlaskWebProject/views.py | """
Routes and views for the flask application.
"""
import os
from datetime import datetime
from flask import render_template, request
from FlaskWebProject import app
from generate_summary_json import generate_summary_json
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page',
year=datetime.now().year,
)
@app.route('/summarize', methods=['GET'])
def summarize():
access_token = os.getenv('TEST_TEAM_SLACK_ACCESS_TOKEN')
member_id = request.args.get('user_id')
channel_id = request.args.get('channel_id')
channel_name = request.args.get('channel_name')
num_messages = request.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, access_token)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | """
Routes and views for the flask application.
"""
import os
from datetime import datetime
from flask import render_template, request
from FlaskWebProject import app
from generate_summary_json import generate_summary_json
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page',
year=datetime.now().year,
)
@app.route('/summarize', methods=['GET'])
def summarize():
access_token = os.getenv('TREEHACKS_SLACK_ACCESS_TOKEN')
member_id = request.args.get('user_id')
channel_id = request.args.get('channel_id')
channel_name = request.args.get('channel_name')
num_messages = request.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, TEST_TEAM_SLACK_ACCESS_TOKEN)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | mit | Python |
529987bb17a05c041cdbf3bbe2a98edda72872fc | remove unneeded Todo | SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree | InvenTree/plugin/urls.py | InvenTree/plugin/urls.py | """
URL lookup for plugin app
"""
from django.conf.urls import url, include
from plugin import plugin_reg
PLUGIN_BASE = 'plugin' # Constant for links
def get_plugin_urls():
"""returns a urlpattern that can be integrated into the global urls"""
urls = []
for plugin in plugin_reg.plugins.values():
if plugin.mixin_enabled('urls'):
urls.append(plugin.urlpatterns)
return url(f'^{PLUGIN_BASE}/', include((urls, 'plugin')))
| """
URL lookup for plugin app
"""
from django.conf.urls import url, include
from plugin import plugin_reg
PLUGIN_BASE = 'plugin' # Constant for links
def get_plugin_urls():
"""returns a urlpattern that can be integrated into the global urls"""
urls = []
for plugin in plugin_reg.plugins.values():
if plugin.mixin_enabled('urls'):
urls.append(plugin.urlpatterns)
# TODO wrap everything in plugin_url_wrapper
return url(f'^{PLUGIN_BASE}/', include((urls, 'plugin')))
| mit | Python |
6137a6f00abbeb81b080f534481bb255f950dd83 | access oauth token securely through azure | andrewsy97/Treehacks,andrewsy97/Treehacks | FlaskWebProject/views.py | FlaskWebProject/views.py | """
Routes and views for the Flask application.
"""
import os
from flask import render_template, request
from FlaskWebProject import app
from generate_summary_json import generate_summary_json
ACCESS_TOKEN = os.getenv('TREEHACKS_SLACK_ACCESS_TOKEN')
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page'
)
# text is number of messages
@app.route('/summarize', methods=['GET'])
def summarize(ACCESS_TOKEN):
member_id = requests.args.get('user_id')
channel_id = requests.args.get('channel_id')
channel_name = requests.args.get('channel_name')
num_messages = requests.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, TEST_TEAM_SLACK_ACCESS_TOKEN)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | """
Routes and views for the Flask application.
"""
from flask import render_template, request
from FlaskWebProject import app
from oauth_constants import TEST_TEAM_SLACK_ACCESS_TOKEN
from generate_summary_json import generate_summary_json
global TEST_TEAM_SLACK_ACCESS_TOKEN
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page'
)
# text is number of messages
@app.route('/summarize', methods=['GET'])
def summarize():
member_id = requests.args.get('user_id')
channel_id = requests.args.get('channel_id')
channel_name = requests.args.get('channel_name')
num_messages = requests.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, TEST_TEAM_SLACK_ACCESS_TOKEN)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | mit | Python |
6565e5bd88ebe5fde8d65664041a9e8f571ca7d7 | switch to requests | samcheck/Scripts,samcheck/Scripts | IMGURdl/downloadIMGUR.py | IMGURdl/downloadIMGUR.py | # example from:
# https://www.toptal.com/python/beginners-guide-to-concurrency-and-parallelism-in-python
import json
import logging
import os
from pathlib import Path
from urllib.request import urlopen, Request
import requests
logger = logging.getLogger(__name__)
def get_links(client_id):
headers = {'Authorization': 'Client-ID {}'.format(client_id)}
url = 'https://api.imgur.com/3/gallery/random/random/'
resp = requests.get(url, headers=headers)
resp.raise_for_status()
data = resp.json()
# req = Request('https://api.imgur.com/3/gallery/random/random/', headers=headers, method='GET')
# with urlopen(req) as resp:
# data = json.loads(resp.read().decode('utf-8'))
return map(lambda item: item['link'], data['data'])
def download_link(directory, link):
logger.info('Downloading %s', link)
download_path = directory / os.path.basename(link)
with urlopen(link) as image, download_path.open('wb') as f:
f.write(image.read())
def setup_download_dir():
download_dir = Path('images')
if not download_dir.exists():
download_dir.mkdir()
return download_dir
| # example from:
# https://www.toptal.com/python/beginners-guide-to-concurrency-and-parallelism-in-python
import json
import logging
import os
from pathlib import Path
from urllib.request import urlopen, Request
# import requests
logger = logging.getLogger(__name__)
def get_links(client_id):
headers = {'Authorization': 'Client-ID {}'.format(client_id)}
url = 'https://api.imgur.com/3/gallery/random/random/'
resp = requests.get(url, headers=headers)
resp.raise_for_status()
data = resp.json()
# req = Request('https://api.imgur.com/3/gallery/random/random/', headers=headers, method='GET')
# with urlopen(req) as resp:
# data = json.loads(resp.read().decode('utf-8'))
return map(lambda item: item['link'], data['data'])
def download_link(directory, link):
logger.info('Downloading %s', link)
download_path = directory / os.path.basename(link)
with urlopen(link) as image, download_path.open('wb') as f:
f.write(image.read())
def setup_download_dir():
download_dir = Path('images')
if not download_dir.exists():
download_dir.mkdir()
return download_dir
| mit | Python |
1323154dfbc453959f3d64fef439288004f6461e | add test for SyntaxError on def f(a): global a | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/test/test_compile.py | Lib/test/test_compile.py | from test_support import verbose, TestFailed
if verbose:
print 'Running tests on argument handling'
try:
exec('def f(a, a): pass')
raise TestFailed, "duplicate arguments"
except SyntaxError:
pass
try:
exec('def f(a = 0, a = 1): pass')
raise TestFailed, "duplicate keyword arguments"
except SyntaxError:
pass
try:
exec('def f(a): global a; a = 1')
raise TestFailed, "variable is global and local"
except SyntaxError:
pass
| from test_support import verbose, TestFailed
if verbose:
print 'Running test on duplicate arguments'
try:
exec('def f(a, a): pass')
raise TestFailed, "duplicate arguments"
except SyntaxError:
pass
try:
exec('def f(a = 0, a = 1): pass')
raise TestFailed, "duplicate keyword arguments"
except SyntaxError:
pass
| mit | Python |
69a735cd134723e4d47c02d21f4ff85a65d28148 | enable test_main.py | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/test/test_lib2to3.py | Lib/test/test_lib2to3.py | # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor,
test_parser, test_main as test_main_)
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers, test_pytree,test_util, test_refactor, test_parser,
test_main_):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor,
test_parser)
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers, test_pytree,test_util, test_refactor, test_parser):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| mit | Python |
ae23c81ee18726755ed770d1d3654e50d28fb028 | Update views.py | bardia73/Graph,bardia-heydarinejad/Graph,bardia-heydarinejad/Graph,bardia73/Graph | chat/views.py | chat/views.py | from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.contrib import auth
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.views.decorators.csrf import csrf_exempt
import json
from chat.models import Message, ChatUser
from django.contrib.auth.models import User
import datetime
from django.utils.timezone import now as utcnow
def index(request):
if request.method == 'POST':
print request.POST
logged_users = []
if request.user.username and request.user.profile.is_chat_user:
context = {'logged_users':logged_users}
return render(request, 'djangoChat/index.html', context)
else:
return HttpResponseRedirect(reverse('login'))
def login(request):
if request.user.username and request.user.profile.is_chat_user:
return HttpResponseRedirect(reverse('index'))
context = {'error':''}
if request.method == 'POST':
username = request.POST.get('username','') #retunr '' if no username
password = request.POST.get('password','')
user = auth.authenticate(username=username,password=password)
if user is not None:
auth.login(request,user)
cu = request.user.profile
cu.is_chat_user = True
cu.last_accessed = utcnow()
cu.save()
return HttpResponseRedirect(reverse('index'))
else:
context['error'] = ' wrong credentials try again'
return render(request,'djangoChat/login.html',context)
context.update(csrf(request))
return render(request,'djangoChat/login.html',context)
def logout(request):
cu = request.user.profile
cu.is_chat_user = False
cu.save()
return HttpResponse('succesfully logged out of chat')
@csrf_exempt
def chat_api(request):
if request.method == 'POST':
d = json.loads(request.body)
msg = d.get('msg')
user = request.user.username
gravatar = request.user.profile.gravatar_url
m = Message(user=user,message=msg,gravatar=gravatar)
m.save()
res = {'id':m.id,'msg':m.message,'user':m.user,'time':m.time.strftime('%I:%M:%S %p').lstrip('0'),'gravatar':m.gravatar}
data = json.dumps(res)
return HttpResponse(data,content_type="application/json")
# get request
r = Message.objects.order_by('-time')[:70]
res = []
for msgs in reversed(r) :
res.append({'id':msgs.id,'user':msgs.user,'msg':msgs.message,'time':msgs.time.strftime('%I:%M:%S %p').lstrip('0'),'gravatar':msgs.gravatar})
data = json.dumps(res)
return HttpResponse(data,content_type="application/json")
def logged_chat_users(request):
u = ChatUser.objects.filter(is_chat_user=True)
for j in u:
elapsed = utcnow() - j.last_accessed
if elapsed > datetime.timedelta(seconds=35):
j.is_chat_user = False
j.save()
uu = ChatUser.objects.filter(is_chat_user=True)
d = []
for i in uu:
d.append({'username': i.username,'gravatar':i.gravatar_url,'id':i.userID})
data = json.dumps(d)
return HttpResponse(data,content_type="application/json")
def update_time(request):
if request.user.username:
u = request.user.profile
u.last_accessed = utcnow()
u.is_chat_user = True
u.save()
return HttpResponse('updated')
return HttpResponse('who are you?')
| from django.shortcuts import render
# Create your views here.
| mit | Python |
a6acf8a68ee5b2ef185f279b6169a34c2b70896d | Increase feature version | darashenka/aem-cmd,darashenka/aem-cmd,darashenka/aem-cmd | acmd/__init__.py | acmd/__init__.py | # coding: utf-8
""" aem-cmd main module. """
__version__ = '0.12.0b'
# Standard error codes that can be returned from any tool.
OK = 0
UNCHANGED = 1
USER_ERROR = 4711
CONFIG_ERROR = 4712
SERVER_ERROR = 4713
INTERNAL_ERROR = 4714
import acmd.logger
init_log = acmd.logger.init_log
log = acmd.logger.log
warning = acmd.logger.warning
error = acmd.logger.error
import acmd.server
Server = acmd.server.Server
import acmd.config
read_config = acmd.config.read_config
get_rcfilename = acmd.config.get_rcfilename
import acmd.deploy
setup_rcfile = acmd.deploy.setup_rcfile
deploy_bash_completion = acmd.deploy.deploy_bash_completion
get_current_version = acmd.deploy.get_current_version
import acmd.props
parse_properties = acmd.props.parse_properties
import acmd.repo
tool_repo = acmd.repo.tool_repo
tool = acmd.repo.tool
import_projects = acmd.repo.import_projects
| # coding: utf-8
""" aem-cmd main module. """
__version__ = '0.11.1b'
# Standard error codes that can be returned from any tool.
OK = 0
UNCHANGED = 1
USER_ERROR = 4711
CONFIG_ERROR = 4712
SERVER_ERROR = 4713
INTERNAL_ERROR = 4714
import acmd.logger
init_log = acmd.logger.init_log
log = acmd.logger.log
warning = acmd.logger.warning
error = acmd.logger.error
import acmd.server
Server = acmd.server.Server
import acmd.config
read_config = acmd.config.read_config
get_rcfilename = acmd.config.get_rcfilename
import acmd.deploy
setup_rcfile = acmd.deploy.setup_rcfile
deploy_bash_completion = acmd.deploy.deploy_bash_completion
get_current_version = acmd.deploy.get_current_version
import acmd.props
parse_properties = acmd.props.parse_properties
import acmd.repo
tool_repo = acmd.repo.tool_repo
tool = acmd.repo.tool
import_projects = acmd.repo.import_projects
| mit | Python |
69c590d7cf2d328b9e6ef63ddf49933e67df9614 | fix typo | wolph/python-statsd | statsd/gauge.py | statsd/gauge.py | import statsd
class Gauge(statsd.Client):
'''Class to implement a statsd gauge
'''
def send(self, subname, value):
'''Send the data to statsd via self.connection
:keyword subname: The subname to report the data to (appended to the
client name)
:keyword value: The gauge value to send
'''
name = self._get_name(self.name, subname)
self.logger.info('%s: %d', name, value)
return statsd.Client._send(self, {name: '%d|g' % value})
| import statsd
class Gauge(statsd.Client):
'''Class to implement a statd gauge
'''
def send(self, subname, value):
'''Send the data to statsd via self.connection
:keyword subname: The subname to report the data to (appended to the
client name)
:keyword value: The gauge value to send
'''
name = self._get_name(self.name, subname)
self.logger.info('%s: %d', name, value)
return statsd.Client._send(self, {name: '%d|g' % value})
| bsd-3-clause | Python |
b97edcc911419197099338085f0f2937286dead0 | Bump version | chouseknecht/galaxy,chouseknecht/galaxy,chouseknecht/galaxy,chouseknecht/galaxy | galaxy/__init__.py | galaxy/__init__.py | # (c) 2012-2014, Ansible, Inc. <support@ansible.com>
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os.path
import sys
import warnings
__version__ = '2.0.1'
__all__ = ['__version__']
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands:
commands.append(f[:-4])
except OSError:
pass
return commands
def prepare_env():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings')
local_site_packages = os.path.join(os.path.dirname(__file__), 'lib', 'site-packages')
sys.path.insert(0, local_site_packages)
from django.conf import settings
if not settings.DEBUG:
warnings.simplefilter('ignore', DeprecationWarning)
import django.utils
settings.version = __version__
def manage():
# Prepare the galaxy environment.
prepare_env()
# Now run the command (or display the version).
from django.core.management import execute_from_command_line
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):
sys.stdout.write('galaxy-%s\n' % __version__)
else:
execute_from_command_line(sys.argv)
| # (c) 2012-2014, Ansible, Inc. <support@ansible.com>
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os.path
import sys
import warnings
__version__ = '2.0.0'
__all__ = ['__version__']
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands:
commands.append(f[:-4])
except OSError:
pass
return commands
def prepare_env():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings')
local_site_packages = os.path.join(os.path.dirname(__file__), 'lib', 'site-packages')
sys.path.insert(0, local_site_packages)
from django.conf import settings
if not settings.DEBUG:
warnings.simplefilter('ignore', DeprecationWarning)
import django.utils
settings.version = __version__
def manage():
# Prepare the galaxy environment.
prepare_env()
# Now run the command (or display the version).
from django.core.management import execute_from_command_line
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):
sys.stdout.write('galaxy-%s\n' % __version__)
else:
execute_from_command_line(sys.argv)
| apache-2.0 | Python |
747fa98c7a9ec7906dfba44e4860d300825eee39 | Drop Py2 and six on tests/integration/modules/test_key.py | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/modules/test_key.py | tests/integration/modules/test_key.py | import re
import pytest
from tests.support.case import ModuleCase
from tests.support.helpers import slowTest
@pytest.mark.windows_whitelisted
class KeyModuleTest(ModuleCase):
@slowTest
def test_key_finger(self):
"""
test key.finger to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
@slowTest
def test_key_finger_master(self):
"""
test key.finger_master to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger_master")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import re
import pytest
from tests.support.case import ModuleCase
from tests.support.helpers import slowTest
@pytest.mark.windows_whitelisted
class KeyModuleTest(ModuleCase):
@slowTest
def test_key_finger(self):
"""
test key.finger to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
@slowTest
def test_key_finger_master(self):
"""
test key.finger_master to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger_master")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
| apache-2.0 | Python |
6140507068c7a42a988bad951c1a6f120de741fb | Update cam_timeLapse_Threaded_upload.py | philprobinson84/RPi,philprobinson84/RPi | camera/timelapse/cam_timeLapse_Threaded_upload.py | camera/timelapse/cam_timeLapse_Threaded_upload.py | #!/usr/bin/env python2.7
import time
import os
from subprocess import call
import sys
class Logger(object):
def __init__(self):
self.terminal = sys.stdout
self.log = open("logfile.log", "a")
def write(self, message):
self.terminal.write(message)
self.log.write(message)
sys.stdout = Logger()
UPLOAD_INTERVAL = 60
def upload_file(inpath, outpath):
uploadCmd = "/home/pi/Dropbox-Uploader/dropbox_uploader.sh upload %s %s" % (inpath, outpath)
call ([uploadCmd], shell=True)
while True:
# record start_time
start_time = time.time()
# initiate the upload process
inpath = "/home/pi/timelapse/latest/latest.jpg"
outpath = "latest.jpg"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
inpath = "/home/pi/timelapse/latest/latest.mp4"
outpath = "latest.mp4"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
# record end_time
end_time = time.time()
# determine elapsed time
elapsed_time = end_time - start_time
# determine how long to sleep
sleep_time = UPLOAD_INTERVAL - elapsed_time
# check for negative sleep request!
if (sleep_time < 1):
print "uploadThread: sleep_time < 1!!! (%s)" % sleep_time
sleep_time = 1
# sleep
print "uploadThread: sleeping for %s seconds" % sleep_time
time.sleep(sleep_time)
| #!/usr/bin/env python2.7
import time
import os
from subprocess import call
UPLOAD_INTERVAL = 60
def upload_file(inpath, outpath):
uploadCmd = "/home/pi/Dropbox-Uploader/dropbox_uploader.sh upload %s %s" % (inpath, outpath)
call ([uploadCmd], shell=True)
while True:
# record start_time
start_time = time.time()
# initiate the upload process
inpath = "/home/pi/timelapse/latest/latest.jpg"
outpath = "latest.jpg"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
inpath = "/home/pi/timelapse/latest/latest.mp4"
outpath = "latest.mp4"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
# record end_time
end_time = time.time()
# determine elapsed time
elapsed_time = end_time - start_time
# determine how long to sleep
sleep_time = UPLOAD_INTERVAL - elapsed_time
# check for negative sleep request!
if (sleep_time < 1):
print "uploadThread: sleep_time < 1!!! (%s)" % sleep_time
sleep_time = 1
# sleep
print "uploadThread: sleeping for %s seconds" % sleep_time
time.sleep(sleep_time)
| artistic-2.0 | Python |
16fca36c2032929589a718507a74c87bee52c161 | move planarAxiPotential to top-level | followthesheep/galpy,jobovy/galpy,followthesheep/galpy,followthesheep/galpy,jobovy/galpy,jobovy/galpy,jobovy/galpy,followthesheep/galpy | galpy/potential.py | galpy/potential.py | from galpy.potential_src import Potential
from galpy.potential_src import planarPotential
from galpy.potential_src import linearPotential
from galpy.potential_src import verticalPotential
from galpy.potential_src import MiyamotoNagaiPotential
from galpy.potential_src import LogarithmicHaloPotential
from galpy.potential_src import DoubleExponentialDiskPotential
from galpy.potential_src import PowerSphericalPotential
from galpy.potential_src import TwoPowerSphericalPotential
from galpy.potential_src import plotRotcurve
from galpy.potential_src import plotEscapecurve
from galpy.potential_src import KGPotential
from galpy.potential_src import interpRZPotential
#
# Functions
#
evaluatePotentials= Potential.evaluatePotentials
evaluateDensities= Potential.evaluateDensities
evaluateRforces= Potential.evaluateRforces
evaluatephiforces= Potential.evaluatephiforces
evaluatezforces= Potential.evaluatezforces
RZToplanarPotential= planarPotential.RZToplanarPotential
RZToverticalPotential= verticalPotential.RZToverticalPotential
plotPotentials= Potential.plotPotentials
plotRotcurve= plotRotcurve.plotRotcurve
plotEscapecurve= plotEscapecurve.plotEscapecurve
#
# Classes
#
Potential= Potential.Potential
planarAxiPotential= planarPotential.planarAxiPotential
planarPotential= planarPotential.planarPotential
linearPotential= linearPotential.linearPotential
MiyamotoNagaiPotential= MiyamotoNagaiPotential.MiyamotoNagaiPotential
DoubleExponentialDiskPotential= DoubleExponentialDiskPotential.DoubleExponentialDiskPotential
LogarithmicHaloPotential= LogarithmicHaloPotential.LogarithmicHaloPotential
KeplerPotential= PowerSphericalPotential.KeplerPotential
PowerSphericalPotential= PowerSphericalPotential.PowerSphericalPotential
NFWPotential= TwoPowerSphericalPotential.NFWPotential
JaffePotential= TwoPowerSphericalPotential.JaffePotential
HernquistPotential= TwoPowerSphericalPotential.HernquistPotential
TwoPowerSphericalPotential= TwoPowerSphericalPotential.TwoPowerSphericalPotential
KGPotential= KGPotential.KGPotential
interpRZPotential= interpRZPotential.interpRZPotential
| from galpy.potential_src import Potential
from galpy.potential_src import planarPotential
from galpy.potential_src import linearPotential
from galpy.potential_src import verticalPotential
from galpy.potential_src import MiyamotoNagaiPotential
from galpy.potential_src import LogarithmicHaloPotential
from galpy.potential_src import DoubleExponentialDiskPotential
from galpy.potential_src import PowerSphericalPotential
from galpy.potential_src import TwoPowerSphericalPotential
from galpy.potential_src import plotRotcurve
from galpy.potential_src import plotEscapecurve
from galpy.potential_src import KGPotential
from galpy.potential_src import interpRZPotential
#
# Functions
#
evaluatePotentials= Potential.evaluatePotentials
evaluateDensities= Potential.evaluateDensities
evaluateRforces= Potential.evaluateRforces
evaluatephiforces= Potential.evaluatephiforces
evaluatezforces= Potential.evaluatezforces
RZToplanarPotential= planarPotential.RZToplanarPotential
RZToverticalPotential= verticalPotential.RZToverticalPotential
plotPotentials= Potential.plotPotentials
plotRotcurve= plotRotcurve.plotRotcurve
plotEscapecurve= plotEscapecurve.plotEscapecurve
#
# Classes
#
Potential= Potential.Potential
planarPotential= planarPotential.planarPotential
linearPotential= linearPotential.linearPotential
MiyamotoNagaiPotential= MiyamotoNagaiPotential.MiyamotoNagaiPotential
DoubleExponentialDiskPotential= DoubleExponentialDiskPotential.DoubleExponentialDiskPotential
LogarithmicHaloPotential= LogarithmicHaloPotential.LogarithmicHaloPotential
KeplerPotential= PowerSphericalPotential.KeplerPotential
PowerSphericalPotential= PowerSphericalPotential.PowerSphericalPotential
NFWPotential= TwoPowerSphericalPotential.NFWPotential
JaffePotential= TwoPowerSphericalPotential.JaffePotential
HernquistPotential= TwoPowerSphericalPotential.HernquistPotential
TwoPowerSphericalPotential= TwoPowerSphericalPotential.TwoPowerSphericalPotential
KGPotential= KGPotential.KGPotential
interpRZPotential= interpRZPotential.interpRZPotential
| bsd-3-clause | Python |
4c084313d2e27a620f194e6282a51aa1e94f7a35 | Change chunk so it only takes an int | muddyfish/PYKE,muddyfish/PYKE | node/floor_divide.py | node/floor_divide.py | #!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
@Node.test_func(["134", 1], [["134"]])
@Node.test_func(["1234", 2], [["12", "34"]])
@Node.test_func(["1234", 3], [["1", "2", "34"]])
@Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
def chunk(self, inp:Node.indexable, num:int):
"""Return inp seperated into num groups"""
rtn = []
last = 0
size = len(inp)//num
for i in range(size, len(inp), size):
rtn.append(inp[last:i])
last = i
if len(rtn) != num:
rtn.append(inp[last:])
else:
rtn[-1] += inp[last:]
if len(rtn):
if isinstance(inp, str):
rtn[-1] = "".join(rtn[-1])
else:
rtn[-1] = type(inp)(rtn[-1])
return [rtn] | #!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
@Node.test_func(["134", 1], [["134"]])
@Node.test_func(["1234", 2], [["12", "34"]])
@Node.test_func(["1234", 3], [["1", "2", "34"]])
@Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
def chunk(self, inp:Node.indexable, num:Node.number):
"""Return inp seperated into num groups"""
rtn = []
last = 0
size = len(inp)//num
for i in range(size, len(inp), size):
rtn.append(inp[last:i])
last = i
if len(rtn) != num:
rtn.append(inp[last:])
else:
rtn[-1] += inp[last:]
if len(rtn):
if isinstance(inp, str):
rtn[-1] = "".join(rtn[-1])
else:
rtn[-1] = type(inp)(rtn[-1])
return [rtn] | mit | Python |
942e3b183859623d2f2a6bf874f8d763e960ea5b | Print AST during integration test | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | tests/integration/test_integration.py | tests/integration/test_integration.py | import collections
import io
import json
import os
import pytest
import glob
import subprocess
import thinglang
from thinglang import run, utils
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
SEARCH_PATTERN = os.path.join(BASE_PATH, '**/*.thing')
TestCase = collections.namedtuple('TestCase', ['code', 'metadata', 'name', 'bytecode_target'])
def collect_tests():
for path in glob.glob(SEARCH_PATTERN, recursive=True):
with open(path, 'r') as f:
contents = f.read()
metadata_start = contents.index('/*') + 2
metadata_end = contents.index('*/')
metadata = json.loads(contents[metadata_start:metadata_end])
yield TestCase(
contents[metadata_end + 2:],
metadata,
metadata.get('test_name') or '.'.join(path.replace('.thing', '').split(os.sep)[-2:]),
path + 'c'
)
def split_lines(param):
return param.replace('\r', '').split('\n')
@pytest.mark.parametrize('test_file', collect_tests(), ids=lambda x: x.name)
def test_thing_program(test_file):
expected_output = test_file.metadata['expected_output']
utils.print_header('Parsed AST')
ast = thinglang.compiler(test_file.code)
print(ast.tree())
utils.print_header("Bytecode generation")
bytecode = ast.compile().finalize()
print(bytecode)
utils.print_header('VM execution')
with open(test_file.bytecode_target, 'wb') as f:
f.write(bytecode)
vm = subprocess.Popen(["thinglang", test_file.bytecode_target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = (stream.decode('utf-8').strip() for stream in vm.communicate())
print(stderr)
utils.print_header('VM output')
print(stdout)
local = thinglang.run(test_file.code).output
if not isinstance(expected_output, str):
stdout = split_lines(stdout)
local = split_lines(local)
assert vm.returncode == 0, 'VM process crashed'
assert local == expected_output, 'Execution engine output did not match expected output'
assert stdout == expected_output, 'VM output did not match expected output'
| import collections
import io
import json
import os
import pytest
import glob
import subprocess
import thinglang
from thinglang import run, utils
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
SEARCH_PATTERN = os.path.join(BASE_PATH, '**/*.thing')
TestCase = collections.namedtuple('TestCase', ['code', 'metadata', 'name', 'bytecode_target'])
def collect_tests():
for path in glob.glob(SEARCH_PATTERN, recursive=True):
with open(path, 'r') as f:
contents = f.read()
metadata_start = contents.index('/*') + 2
metadata_end = contents.index('*/')
metadata = json.loads(contents[metadata_start:metadata_end])
yield TestCase(
contents[metadata_end + 2:],
metadata,
metadata.get('test_name') or '.'.join(path.replace('.thing', '').split(os.sep)[-2:]),
path + 'c'
)
def split_lines(param):
return param.replace('\r', '').split('\n')
@pytest.mark.parametrize('test_file', collect_tests(), ids=lambda x: x.name)
def test_thing_program(test_file):
expected_output = test_file.metadata['expected_output']
utils.print_header("Bytecode generation")
bytecode = thinglang.compiler(test_file.code).compile().finalize()
print(bytecode)
utils.print_header('VM execution')
with open(test_file.bytecode_target, 'wb') as f:
f.write(bytecode)
vm = subprocess.Popen(["thinglang", test_file.bytecode_target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = (stream.decode('utf-8').strip() for stream in vm.communicate())
print(stderr)
utils.print_header('VM output')
print(stdout)
local = thinglang.run(test_file.code).output
if not isinstance(expected_output, str):
stdout = split_lines(stdout)
local = split_lines(local)
assert vm.returncode == 0, 'VM process crashed'
assert local == expected_output, 'Execution engine output did not match expected output'
assert stdout == expected_output, 'VM output did not match expected output'
| mit | Python |
0dd41b65aaa0798a7a72a0d61d746bfa29bc3aad | Allow POST of fly and worm donors | 4dn-dcic/fourfront,hms-dbmi/fourfront,ENCODE-DCC/snovault,ClinGen/clincoded,ENCODE-DCC/encoded,ClinGen/clincoded,philiptzou/clincoded,kidaa/encoded,ENCODE-DCC/snovault,ENCODE-DCC/encoded,philiptzou/clincoded,ClinGen/clincoded,4dn-dcic/fourfront,hms-dbmi/fourfront,ClinGen/clincoded,philiptzou/clincoded,kidaa/encoded,ENCODE-DCC/snovault,ENCODE-DCC/snovault,ENCODE-DCC/encoded,T2DREAM/t2dream-portal,ClinGen/clincoded,T2DREAM/t2dream-portal,kidaa/encoded,philiptzou/clincoded,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,philiptzou/clincoded,ENCODE-DCC/encoded,kidaa/encoded,4dn-dcic/fourfront,T2DREAM/t2dream-portal,hms-dbmi/fourfront,hms-dbmi/fourfront,4dn-dcic/fourfront,kidaa/encoded,hms-dbmi/fourfront | src/encoded/types/donor.py | src/encoded/types/donor.py | from ..schema_utils import (
load_schema,
)
from ..contentbase import (
location,
)
from .base import (
ACCESSION_KEYS,
ALIAS_KEYS,
Collection,
paths_filtered_by_status,
)
class DonorItem(Collection.Item):
base_types = ['donor'] + Collection.Item.base_types
embedded = set(['organism'])
name_key = 'accession'
keys = ACCESSION_KEYS + ALIAS_KEYS
rev = {
'characterizations': ('donor_characterization', 'characterizes'),
}
template = {
'characterizations': (
lambda root, characterizations: paths_filtered_by_status(root, characterizations)
),
}
@location('mouse-donors')
class MouseDonor(Collection):
item_type = 'mouse_donor'
schema = load_schema('mouse_donor.json')
__acl__ = []
properties = {
'title': 'Mouse donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
def __ac_local_roles__(self):
# Disallow lab submitter edits
return {}
@location('fly-donors')
class FlyDonor(Collection):
item_type = 'fly_donor'
schema = load_schema('fly_donor.json')
properties = {
'title': 'Fly donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
embedded = set(['organism', 'constructs', 'constructs.target'])
@location('worm-donors')
class WormDonor(Collection):
item_type = 'worm_donor'
schema = load_schema('worm_donor.json')
properties = {
'title': 'Worm donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
embedded = set(['organism', 'constructs', 'constructs.target'])
@location('human-donors')
class HumanDonor(Collection):
item_type = 'human_donor'
schema = load_schema('human_donor.json')
properties = {
'title': 'Human donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
pass
| from ..schema_utils import (
load_schema,
)
from ..contentbase import (
location,
)
from .base import (
ACCESSION_KEYS,
ALIAS_KEYS,
Collection,
paths_filtered_by_status,
)
class DonorItem(Collection.Item):
base_types = ['donor'] + Collection.Item.base_types
embedded = set(['organism'])
name_key = 'accession'
keys = ACCESSION_KEYS + ALIAS_KEYS
rev = {
'characterizations': ('donor_characterization', 'characterizes'),
}
template = {
'characterizations': (
lambda root, characterizations: paths_filtered_by_status(root, characterizations)
),
}
@location('mouse-donors')
class MouseDonor(Collection):
item_type = 'mouse_donor'
schema = load_schema('mouse_donor.json')
__acl__ = []
properties = {
'title': 'Mouse donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
def __ac_local_roles__(self):
# Disallow lab submitter edits
return {}
@location('fly-donors')
class FlyDonor(Collection):
item_type = 'fly_donor'
schema = load_schema('fly_donor.json')
__acl__ = []
properties = {
'title': 'Fly donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
embedded = set(['organism', 'constructs', 'constructs.target'])
@location('worm-donors')
class WormDonor(Collection):
item_type = 'worm_donor'
schema = load_schema('worm_donor.json')
__acl__ = []
properties = {
'title': 'Worm donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
embedded = set(['organism', 'constructs', 'constructs.target'])
@location('human-donors')
class HumanDonor(Collection):
item_type = 'human_donor'
schema = load_schema('human_donor.json')
properties = {
'title': 'Human donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
pass
| mit | Python |
b9133e2fe7444b4449ab67f4d726c20ce5e21cd8 | clean ups in presentation of names | rob-metalinkage/django-gazetteer,rob-metalinkage/django-gazetteer,rob-metalinkage/django-gazetteer | gazetteer/admin.py | gazetteer/admin.py | from django.contrib import admin
from django import forms
from gazetteer.models import *
from skosxl.models import Notation
from .settings import TARGET_NAMESPACE_FT
# Register your models here.
# works for Dango > 1.6
class NameInline(admin.TabularInline):
model = LocationName
readonly_fields = ['nameUsed', 'namespace']
extra = 0
class LocationTypeInlineForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(LocationTypeInlineForm, self).__init__(*args, **kwargs)
self.fields['locationType'].queryset = Notation.objects.filter(concept__scheme__uri = TARGET_NAMESPACE_FT[0:-1] )
class LocationTypeInline(admin.StackedInline) :
model = Notation
form = LocationTypeInlineForm
class LocationAdmin(admin.ModelAdmin):
search_fields = ['locationType__term','locationname__name']
inlines = [
NameInline,
]
class NameFieldConfigInline(admin.TabularInline):
model = NameFieldConfig
extra = 1
class CodeFieldConfigInline(admin.TabularInline):
model = CodeFieldConfig
extra = 1
class LocationTypeFieldInline(admin.TabularInline):
model = LocationTypeField
class GazSourceConfigAdmin(admin.ModelAdmin):
model = GazSourceConfig
inlines = [
LocationTypeFieldInline, NameFieldConfigInline, CodeFieldConfigInline
]
admin.site.register(GazSource);
admin.site.register(GazSourceConfig,GazSourceConfigAdmin);
admin.site.register(Location, LocationAdmin);
admin.site.register(LocationName);
admin.site.register(LinkSet);
| from django.contrib import admin
from django import forms
from gazetteer.models import *
from skosxl.models import Notation
from .settings import TARGET_NAMESPACE_FT
# Register your models here.
# works for Dango > 1.6
class NameInline(admin.TabularInline):
model = LocationName
class LocationTypeInlineForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(LocationTypeInlineForm, self).__init__(*args, **kwargs)
self.fields['locationType'].queryset = Notation.objects.filter(concept__scheme__uri = TARGET_NAMESPACE_FT[0:-1] )
class LocationTypeInline(admin.StackedInline) :
model = Notation
form = LocationTypeInlineForm
class LocationAdmin(admin.ModelAdmin):
search_fields = ['locationType__term','locationname__name']
inlines = [
NameInline,
]
class NameFieldConfigInline(admin.TabularInline):
model = NameFieldConfig
extra = 1
class CodeFieldConfigInline(admin.TabularInline):
model = CodeFieldConfig
extra = 1
class LocationTypeFieldInline(admin.TabularInline):
model = LocationTypeField
class GazSourceConfigAdmin(admin.ModelAdmin):
model = GazSourceConfig
inlines = [
LocationTypeFieldInline, NameFieldConfigInline, CodeFieldConfigInline
]
admin.site.register(GazSource);
admin.site.register(GazSourceConfig,GazSourceConfigAdmin);
admin.site.register(Location, LocationAdmin);
admin.site.register(LocationName);
admin.site.register(LinkSet);
| cc0-1.0 | Python |
4ac7e5d15d3fba11ae37e5826ca6c7181539804b | Disable nested types tests affected by IMPALA-2295 | michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala | tests/query_test/test_nested_types.py | tests/query_test/test_nested_types.py | #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
import pytest
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestNestedTypes(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestNestedTypes, cls).add_test_dimensions()
cls.TestMatrix.add_constraint(lambda v:
v.get_value('table_format').file_format == 'parquet')
def test_scanner_basic(self, vector):
"""Queries that do not materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-basic', vector)
def test_scanner_array_materialization(self, vector):
"""Queries that materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-array-materialization', vector)
def test_scanner_multiple_materialization(self, vector):
"""Queries that materialize the same array multiple times."""
self.run_test_case('QueryTest/nested-types-scanner-multiple-materialization', vector)
def test_scanner_position(self, vector):
"""Queries that materialize the artifical position element."""
self.run_test_case('QueryTest/nested-types-scanner-position', vector)
def test_scanner_map(self, vector):
"""Queries that materialize maps. (Maps looks like arrays of key/value structs, so
most map functionality is already tested by the array tests.)"""
self.run_test_case('QueryTest/nested-types-scanner-maps', vector)
def test_runtime(self, vector):
"""Queries that send collections through the execution runtime."""
pytest.skip("IMPALA-2295")
self.run_test_case('QueryTest/nested-types-runtime', vector)
def test_tpch(self, vector):
"""Queries over the larger nested TPCH dataset."""
pytest.skip("IMPALA-2295")
# This test takes a long time (minutes), only run in exhaustive
if self.exploration_strategy() != 'exhaustive': pytest.skip()
self.run_test_case('QueryTest/nested-types-tpch', vector)
| #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
import pytest
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestNestedTypes(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestNestedTypes, cls).add_test_dimensions()
cls.TestMatrix.add_constraint(lambda v:
v.get_value('table_format').file_format == 'parquet')
def test_scanner_basic(self, vector):
"""Queries that do not materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-basic', vector)
def test_scanner_array_materialization(self, vector):
"""Queries that materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-array-materialization', vector)
def test_scanner_multiple_materialization(self, vector):
"""Queries that materialize the same array multiple times."""
self.run_test_case('QueryTest/nested-types-scanner-multiple-materialization', vector)
def test_scanner_position(self, vector):
"""Queries that materialize the artifical position element."""
self.run_test_case('QueryTest/nested-types-scanner-position', vector)
def test_scanner_map(self, vector):
"""Queries that materialize maps. (Maps looks like arrays of key/value structs, so
most map functionality is already tested by the array tests.)"""
self.run_test_case('QueryTest/nested-types-scanner-maps', vector)
def test_runtime(self, vector):
"""Queries that send collections through the execution runtime."""
self.run_test_case('QueryTest/nested-types-runtime', vector)
def test_tpch(self, vector):
"""Queries over the larger nested TPCH dataset."""
# This test takes a long time (minutes), only run in exhaustive
if self.exploration_strategy() != 'exhaustive': pytest.skip()
self.run_test_case('QueryTest/nested-types-tpch', vector)
| apache-2.0 | Python |
f5f0cc6998f28bee7ccdaf304d3bc5e7e45ab9a6 | save memory allocation using kwarg `out`. | chainer/chainer,niboshi/chainer,okuta/chainer,wkentaro/chainer,okuta/chainer,hvy/chainer,chainer/chainer,niboshi/chainer,hvy/chainer,wkentaro/chainer,hvy/chainer,hvy/chainer,okuta/chainer,chainer/chainer,niboshi/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,pfnet/chainer,wkentaro/chainer,wkentaro/chainer | chainer/optimizer_hooks/gradient_hard_clipping.py | chainer/optimizer_hooks/gradient_hard_clipping.py | import chainer
from chainer import backend
class GradientHardClipping(object):
"""Optimizer/UpdateRule hook function for gradient clipping.
This hook function clips all gradient arrays to be within a lower and upper
bound.
Args:
lower_bound (float): The lower bound of the gradient value.
upper_bound (float): The upper bound of the gradient value.
Attributes:
~optimizer_hooks.GradientHardClipping.lower_bound (float): The
lower bound of the gradient value.
~optimizer_hooks.GradientHardClipping.upper_bound (float): The
upper bound of the gradient value.
~optimizer_hooks.GradientHardClipping.timing (string): Specifies
when this hook should be called by the
Optimizer/UpdateRule. Valid values are 'pre'
(before any updates) and 'post'
(after any updates).
~optimizer_hooks.GradientHardClipping.call_for_each_param (bool): \
Specifies if this hook is called for each parameter
(``True``) or only once (``False``) by an optimizer to
which this hook is registered. This function does
not expect users to switch the value from default one,
which is `True`.
.. versionadded:: 4.0.0
The *timing* parameter.
"""
name = 'GradientHardClipping'
call_for_each_param = True
timing = 'pre'
def __init__(self, lower_bound, upper_bound):
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def __call__(self, rule, param):
grad = param.grad
if grad is None:
return
with chainer.using_device(param.device):
xp = param.device.xp
if xp == backend.chainerx \
or isinstance(param.grad, backend.intel64.mdarray):
param.grad = grad.clip(self.lower_bound, self.upper_bound)
else:
# Save on new object allocation when using numpy and cupy
# using kwarg `out`
xp.clip(grad, self.lower_bound, self.upper_bound, out=grad)
| import chainer
class GradientHardClipping(object):
"""Optimizer/UpdateRule hook function for gradient clipping.
This hook function clips all gradient arrays to be within a lower and upper
bound.
Args:
lower_bound (float): The lower bound of the gradient value.
upper_bound (float): The upper bound of the gradient value.
Attributes:
~optimizer_hooks.GradientHardClipping.lower_bound (float): The
lower bound of the gradient value.
~optimizer_hooks.GradientHardClipping.upper_bound (float): The
upper bound of the gradient value.
~optimizer_hooks.GradientHardClipping.timing (string): Specifies
when this hook should be called by the
Optimizer/UpdateRule. Valid values are 'pre'
(before any updates) and 'post'
(after any updates).
~optimizer_hooks.GradientHardClipping.call_for_each_param (bool): \
Specifies if this hook is called for each parameter
(``True``) or only once (``False``) by an optimizer to
which this hook is registered. This function does
not expect users to switch the value from default one,
which is `True`.
.. versionadded:: 4.0.0
The *timing* parameter.
"""
name = 'GradientHardClipping'
call_for_each_param = True
timing = 'pre'
def __init__(self, lower_bound, upper_bound):
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def __call__(self, rule, param):
grad = param.grad
if grad is None:
return
with chainer.using_device(param.device):
param.grad = param.grad.clip(self.lower_bound, self.upper_bound)
| mit | Python |
8c17d2076d54864094c3cd8ee51d514bc806c913 | bump version | jrversteegh/flexx,zoofIO/flexx,zoofIO/flexx,jrversteegh/flexx | flexx/__init__.py | flexx/__init__.py | """
`Flexx <https://flexx.readthedocs.io>`_ is a pure Python toolkit for
creating graphical user interfaces (GUI's), that uses web technology
for its rendering. Apps are written purely in Python; The
`PScript <https://pscript.readthedocs.io>`_ transpiler generates the
necessary JavaScript on the fly.
You can use Flexx to create (cross platform) desktop applications, web
applications, and export an app to a standalone HTML document. It also
works in the Jupyter notebook.
The docs are on `Readthedocs <http://flexx.readthedocs.io>`_,
the code is on `Github <http://github.com/flexxui/flexx>`_,
and there is a `demo server <http://demo.flexx.app>`_.
Once you've got started, the most important page is probably the
:doc:`Widget reference <ui/api>`.
----
For more information, see http://flexx.readthedocs.io.
"""
# NOTES ON DOCS:
# There are 2 places that define the short summary of Flexx: the
# __init__.py and the README.md. Their summaries should be kept equal.
# The index.rst for the docs uses the summary from __init__.py (the
# part after the "----" is stripped. The long-description for Pypi is
# obtained by converting README.md to RST.
__version__ = '0.8.0'
# Assert compatibility
import sys
if sys.version_info < (3, 5): # pragma: no cover
raise RuntimeError('Flexx needs at least Python 3.5')
# Import config object
from ._config import config # noqa
from .util.logging import set_log_level # noqa
set_log_level(config.log_level)
del sys
| """
`Flexx <https://flexx.readthedocs.io>`_ is a pure Python toolkit for
creating graphical user interfaces (GUI's), that uses web technology
for its rendering. Apps are written purely in Python; The
`PScript <https://pscript.readthedocs.io>`_ transpiler generates the
necessary JavaScript on the fly.
You can use Flexx to create (cross platform) desktop applications, web
applications, and export an app to a standalone HTML document. It also
works in the Jupyter notebook.
The docs are on `Readthedocs <http://flexx.readthedocs.io>`_,
the code is on `Github <http://github.com/flexxui/flexx>`_,
and there is a `demo server <http://demo.flexx.app>`_.
Once you've got started, the most important page is probably the
:doc:`Widget reference <ui/api>`.
----
For more information, see http://flexx.readthedocs.io.
"""
# NOTES ON DOCS:
# There are 2 places that define the short summary of Flexx: the
# __init__.py and the README.md. Their summaries should be kept equal.
# The index.rst for the docs uses the summary from __init__.py (the
# part after the "----" is stripped. The long-description for Pypi is
# obtained by converting README.md to RST.
__version__ = '0.7.1'
# Assert compatibility
import sys
if sys.version_info < (3, 5): # pragma: no cover
raise RuntimeError('Flexx needs at least Python 3.5')
# Import config object
from ._config import config # noqa
from .util.logging import set_log_level # noqa
set_log_level(config.log_level)
del sys
| bsd-2-clause | Python |
7ba77209687ae1bb1344cc09e3539f7e21bfe599 | Improve test of csvstack --filenames. | unpingco/csvkit,snuggles08/csvkit,doganmeh/csvkit,aequitas/csvkit,themiurgo/csvkit,bradparks/csvkit__query_join_filter_CSV_cli,matterker/csvkit,archaeogeek/csvkit,metasoarous/csvkit,jpalvarezf/csvkit,kyeoh/csvkit,nriyer/csvkit,gepuro/csvkit,bmispelon/csvkit,KarrieK/csvkit,Tabea-K/csvkit,moradology/csvkit,tlevine/csvkit,haginara/csvkit,cypreess/csvkit,barentsen/csvkit,reubano/csvkit,wjr1985/csvkit,dannguyen/csvkit,onyxfish/csvkit,wireservice/csvkit,arowla/csvkit,elcritch/csvkit,Jobava/csvkit | tests/test_utilities/test_csvstack.py | tests/test_utilities/test_csvstack.py | #!/usr/bin/env python
import sys
import StringIO
import unittest
from csvkit import CSVKitReader
from csvkit.utilities.stack import CSVStack
class TestCSVStack(unittest.TestCase):
def test_explicit_grouping(self):
# stack two CSV files
args = ["--groups", "asd,sdf", "-n", "foo", "examples/dummy.csv", "examples/dummy2.csv"]
output_file = StringIO.StringIO()
utility = CSVStack(args, output_file)
utility.main()
# verify the stacked file's contents
input_file = StringIO.StringIO(output_file.getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ["foo", "a", "b", "c"])
self.assertEqual(reader.next()[0], "asd")
self.assertEqual(reader.next()[0], "sdf")
def test_filenames_grouping(self):
# stack two CSV files
args = ["--filenames", "-n", "path", "examples/dummy.csv", "examples/dummy2.csv"]
output_file = StringIO.StringIO()
utility = CSVStack(args, output_file)
utility.main()
# verify the stacked file's contents
input_file = StringIO.StringIO(output_file.getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ["path", "a", "b", "c"])
self.assertEqual(reader.next()[0], "dummy.csv")
self.assertEqual(reader.next()[0], "dummy2.csv")
| #!/usr/bin/env python
import sys
import StringIO
import unittest
from csvkit import CSVKitReader
from csvkit.utilities.stack import CSVStack
class TestCSVStack(unittest.TestCase):
def test_explicit_grouping(self):
# stack two CSV files
args = ["--groups", "asd,sdf", "-n", "foo", "examples/dummy.csv", "examples/dummy2.csv"]
output_file = StringIO.StringIO()
utility = CSVStack(args, output_file)
utility.main()
# verify the stacked file's contents
input_file = StringIO.StringIO(output_file.getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ["foo", "a", "b", "c"])
self.assertEqual(reader.next()[0], "asd")
self.assertEqual(reader.next()[0], "sdf")
def test_filenames_grouping(self):
# stack two CSV files
args = ["--filenames", "-n", "path", "examples/dummy.csv", "examples/dummy2.csv"]
output_file = StringIO.StringIO()
utility = CSVStack(args, output_file)
utility.main()
# verify the stacked file's contents
input_file = StringIO.StringIO(output_file.getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ["foo", "a", "b", "c"])
self.assertEqual(reader.next()[0], "asd")
self.assertEqual(reader.next()[0], "sdf")
| mit | Python |
2f2114b47618ef6435543c05d941d3191ef44d5c | refactor Valuation functions | MBALearnsToCode/FinSymPy,MBALearnsToCode/CorpFin,MBALearnsToCode/CorpFin,MBALearnsToCode/FinSymPy | FinSymPy/Valuation.py | FinSymPy/Valuation.py |
def terminal_value(
terminal_cash_flow=0.,
long_term_discount_rate=.01,
long_term_growth_rate=0.):
return (1 + long_term_growth_rate) * terminal_cash_flow / (long_term_discount_rate - long_term_growth_rate)
def present_value(amount=0., discount_rate=0., nb_periods=0.):
return amount / ((1 + discount_rate) ** nb_periods)
def net_present_value(
cash_flows=(0,),
discount_rate=0.):
return reduce(
lambda x, y: x + y,
[cash_flows[i] / ((1 + discount_rate) ** i)
for i in range(len(cash_flows))])
| from sympy.matrices import Determinant, Matrix
def terminal_value(
cash_flows=Matrix([0.]),
long_term_discount_rate=0.,
long_term_growth_rate=0.):
m, n = cash_flows.shape
if m == 1:
filter_vector = Matrix((n - 1) * [0] + [1])
tv = Determinant(cash_flows * filter_vector)
elif n == 1:
filter_vector = Matrix([(m - 1) * [0] + [1]])
tv = Determinant(filter_vector * cash_flows)
return (1 + long_term_growth_rate) * tv / (long_term_discount_rate - long_term_growth_rate)
def present_value(amount=0., discount_rate=0., nb_periods=0.):
return amount / ((1 + discount_rate) ** nb_periods)
def net_present_value(
cash_flows=Matrix([0.]),
discount_rate=0.):
m, n = cash_flows.shape
discount_rate_plus_1 = discount_rate + 1
if m == 1:
discount_vector = Matrix([discount_rate_plus_1 ** -i for i in range(n)])
return Determinant(cash_flows * discount_vector)
elif n == 1:
discount_vector = Matrix([[discount_rate_plus_1 ** -i for i in range(m)]])
return Determinant(discount_vector * cash_flows)
| mit | Python |
6ff7389f85485b8aa2848aa0e7420569c0c06f37 | Update pluginLoader. | splice/gofer,jortel/gofer,jortel/gofer,kgiusti/gofer,credativ/gofer,credativ/gofer,kgiusti/gofer,splice/gofer,splice/gofer | src/gopher/agent/plugin.py | src/gopher/agent/plugin.py | #
# Copyright (c) 2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import os
import sys
from logging import getLogger
log = getLogger(__name__)
class PluginLoader:
"""
Agent plugins loader.
"""
ROOT = '/var/lib/gopher'
PLUGINS = 'plugins'
@classmethod
def abspath(cls):
return os.path.join(cls.ROOT, cls.PLUGINS)
def __init__(self):
path = self.abspath()
if not os.path.exists(path):
os.makedirs(path)
fn = os.path.join(path, '__init__.py')
f = open(fn, 'w')
f.close()
def load(self):
"""
Load the plugins.
"""
sys.path.append(self.ROOT)
path = self.abspath()
for fn in os.listdir(path):
if fn.startswith('__'):
continue
if not fn.endswith('.py'):
continue
self.__import(fn)
def __import(self, fn):
"""
Import a module by file name.
@param fn: The module file name.
@type fn: str
"""
mod = fn.rsplit('.', 1)[0]
imp = '%s.%s' % (self.PLUGINS, mod)
try:
__import__(imp)
log.info('plugin "%s", imported', imp)
except:
log.error('plugin "%s", import failed', imp, exc_info=True)
| #
# Copyright (c) 2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import os
import sys
from logging import getLogger
log = getLogger(__name__)
class PluginLoader:
"""
Agent plugins loader.
"""
ROOT = '/var/lib/gopher'
PLUGINS = 'gopherplugins'
@classmethod
def abspath(cls):
return os.path.join(cls.ROOT, cls.PLUGINS)
def __init__(self):
path = self.abspath()
if os.path.exists(path):
return
os.makedirs(path)
pkg = os.path.join(path, '__init__.py')
f = open(pkg, 'w')
f.close()
def load(self):
"""
Load the plugins.
"""
sys.path.append(self.ROOT)
path = self.abspath()
for fn in os.listdir(path):
if fn.startswith('__'):
continue
if not fn.endswith('.py'):
continue
self.__import(fn)
def __import(self, fn):
"""
Import a module by file name.
@param fn: The module file name.
@type fn: str
"""
mod = fn.rsplit('.', 1)[0]
imp = '%s.%s' % (self.PLUGINS, mod)
try:
__import__(imp)
log.info('plugin "%s", imported', imp)
except:
log.error('plugin "%s", import failed', imp, exc_info=True)
| lgpl-2.1 | Python |
60f3c4e1bbd25d781cfba5993aac647d937c64c9 | add BillSource to public interface | opencivicdata/python-opencivicdata,opencivicdata/python-opencivicdata-django,opencivicdata/python-opencivicdata-django,influence-usa/python-opencivicdata-django,opencivicdata/python-opencivicdata-divisions,opencivicdata/python-opencivicdata,rshorey/python-opencivicdata-django,opencivicdata/python-opencivicdata-django,rshorey/python-opencivicdata-django,mileswwatkins/python-opencivicdata-django,mileswwatkins/python-opencivicdata-django,influence-usa/python-opencivicdata-django | opencivicdata/models/__init__.py | opencivicdata/models/__init__.py | # flake8: NOQA
from .jurisdiction import Jurisdiction, JurisdictionSession
from .division import Division
from .people_orgs import (
Organization, OrganizationIdentifier, OrganizationName, OrganizationContactDetail,
OrganizationLink, OrganizationSource,
Person, PersonIdentifier, PersonName, PersonContactDetail, PersonLink, PersonSource,
Post, PostContactDetail, PostLinks,
Membership, MembershipContactDetail, MembershipLink
)
from .bill import (Bill, BillSummary, BillTitle, BillName, RelatedBill, BillSponsor,
BillDocument, BillVersion, BillDocumentLink, BillVersionLink, BillSource)
| # flake8: NOQA
from .jurisdiction import Jurisdiction, JurisdictionSession
from .division import Division
from .people_orgs import (
Organization, OrganizationIdentifier, OrganizationName, OrganizationContactDetail,
OrganizationLink, OrganizationSource,
Person, PersonIdentifier, PersonName, PersonContactDetail, PersonLink, PersonSource,
Post, PostContactDetail, PostLinks,
Membership, MembershipContactDetail, MembershipLink
)
from .bill import (Bill, BillSummary, BillTitle, BillName, RelatedBill, BillSponsor,
BillDocument, BillVersion, BillDocumentLink, BillVersionLink)
| bsd-3-clause | Python |
5232597d574f7089f592aac0a5f25efd1ff7763a | Update test_blt.py. | cjerdonek/open-rcv,cjerdonek/open-rcv | openrcv/test/formats/test_blt.py | openrcv/test/formats/test_blt.py |
from textwrap import dedent
from openrcv.formats.blt import BLTFileWriter
from openrcv.models import BallotsResource, ContestInput
from openrcv.streams import StringResource
from openrcv.utiltest.helpers import UnitCase
class BLTFileWriterTest(UnitCase):

    """Tests for BLTFileWriter writing a contest to a StringResource."""

    def test(self):
        """Serialize a small contest and check the rendered BLT text."""
        contest = ContestInput()
        contest.name = "Foo"
        contest.candidates = ['A', 'B', 'C']
        contest.seat_count = 1
        # Each ballot is a (weight, rankings) pair: two identical ballots
        # ranking candidate 2 then 1, and one ballot ranking only candidate 2.
        ballots = [
            (2, (2, 1)),
            (1, (2, )),
        ]
        contest.ballots_resource = BallotsResource(ballots)
        resource = StringResource()
        writer = BLTFileWriter(resource)
        writer.write_contest(contest)
        # Expected layout: "<candidate count> <seat count>", one line per
        # ballot (weight, rankings, 0 terminator), a lone 0 sentinel, the
        # quoted candidate names, then the quoted contest name.
        expected = dedent("""\
        3 1
        2 2 1 0
        1 2 0
        0
        "A"
        "B"
        "C"
        "Foo\"
        """)
        self.assertEqual(resource.contents, expected)
|
from textwrap import dedent
from openrcv.formats.blt import BLTFileWriter
from openrcv.models import BallotsResource, ContestInput
from openrcv.utils import StringInfo
from openrcv.utiltest.helpers import UnitCase
class BLTFileWriterTest(UnitCase):

    """Tests for BLTFileWriter writing a contest to a StringInfo stream."""

    def test(self):
        """Serialize a small contest and check the rendered BLT text."""
        contest = ContestInput()
        contest.name = "Foo"
        contest.candidates = ['A', 'B', 'C']
        contest.seat_count = 1
        # Each ballot is a (weight, rankings) pair: two identical ballots
        # ranking candidate 2 then 1, and one ballot ranking only candidate 2.
        ballots = [
            (2, (2, 1)),
            (1, (2, )),
        ]
        contest.ballots_resource = BallotsResource(ballots)
        stream_info = StringInfo()
        writer = BLTFileWriter(stream_info)
        writer.write_contest(contest)
        # Expected layout: "<candidate count> <seat count>", one line per
        # ballot (weight, rankings, 0 terminator), a lone 0 sentinel, the
        # quoted candidate names, then the quoted contest name.
        expected = dedent("""\
        3 1
        2 2 1 0
        1 2 0
        0
        "A"
        "B"
        "C"
        "Foo\"
        """)
        self.assertEqual(stream_info.value, expected)
| mit | Python |
ff63f077fe68ae18b409598a3860d0abbc7442e3 | fix num_topics property | cheral/orange3-text,cheral/orange3-text,cheral/orange3-text | orangecontrib/text/topics/hdp.py | orangecontrib/text/topics/hdp.py | from gensim import models
from .topics import GensimWrapper
class HdpModel(models.HdpModel):
def __init__(self, corpus, id2word, **kwargs):
# disable fitting during initialization
_update = self.update
self.update = lambda x: x
super().__init__(corpus, id2word, **kwargs)
self.update = _update
class HdpWrapper(GensimWrapper):
name = 'Hdp Model'
Model = HdpModel
def __init__(self, **kwargs):
self.kwargs = kwargs
self.model = None
def reset_model(self, corpus):
self.model = self.Model(corpus=corpus,
id2word=corpus.ngrams_dictionary, **self.kwargs)
@property
def num_topics(self):
return self.model.m_lambda.shape[0] if self.model else 0
| from gensim import models
from .topics import GensimWrapper
class HdpModel(models.HdpModel):
def __init__(self, corpus, id2word, **kwargs):
# disable fitting during initialization
_update = self.update
self.update = lambda x: x
super().__init__(corpus, id2word, **kwargs)
self.update = _update
class HdpWrapper(GensimWrapper):
name = 'Hdp Model'
Model = HdpModel
def __init__(self, **kwargs):
self.kwargs = kwargs
def reset_model(self, corpus):
self.model = self.Model(corpus=corpus,
id2word=corpus.ngrams_dictionary, **self.kwargs)
@property
def num_topics(self):
return self.model.m_lambda.shape[0]
| bsd-2-clause | Python |
3a247b72ba39bb2f49099905c435127aea424fe0 | Remove unused variable | La0/mozilla-relengapi,mozilla-releng/services,garbas/mozilla-releng-services,garbas/mozilla-releng-services,srfraser/services,mozilla-releng/services,srfraser/services,lundjordan/services,mozilla-releng/services,garbas/mozilla-releng-services,srfraser/services,lundjordan/services,mozilla-releng/services,La0/mozilla-relengapi,srfraser/services,La0/mozilla-relengapi,lundjordan/services,La0/mozilla-relengapi,garbas/mozilla-releng-services,lundjordan/services | lib/backend_common/tests/conftest.py | lib/backend_common/tests/conftest.py | """Configure a mock application to run queries against"""
import pytest
from flask_login import current_user
from flask import jsonify
from backend_common import create_app, auth, auth0, mocks
from os.path import join, dirname
@pytest.fixture(scope='module')
def app():
"""
Build an app with an authenticated dummy api
"""
# Use unique auth instance
config = {
'DEBUG': True,
'OIDC_CLIENT_SECRETS': join(dirname(__file__), 'client_secrets.json'),
'OIDC_RESOURCE_SERVER_ONLY': True
}
app = create_app('test', extensions=[auth, auth0], config=config)
@app.route('/')
def index():
return app.response_class('OK')
@app.route('/test-auth-login')
@auth.auth.require_login
def logged_in():
data = {
'auth': True,
'user': current_user.get_id(),
# permissions is a set, not serializable
'scopes': list(current_user.permissions),
}
return jsonify(data)
@app.route('/test-auth-scopes')
@auth.auth.require_scopes([
['project/test/A', 'project/test/B'],
['project/test-admin/*'],
])
def scopes():
return app.response_class('Your scopes are ok.')
@app.route('/test-auth0-userinfo')
@auth0.accept_token()
def auth0_token():
return app.response_class('OK')
# Add fake swagger url, used by redirect
app.api.swagger_url = '/'
return app
@pytest.yield_fixture(scope='module')
def client(app):
"""
A Flask test client.
"""
with app.test_client() as client:
with mocks.apply_mockups():
yield client
| """Configure a mock application to run queries against"""
import pytest
from flask_login import current_user
from flask import jsonify
from backend_common import create_app, auth, auth0, mocks
from os.path import join, dirname
FAKE_CLIENT_SECRETS = """
{
"web": {
"auth_uri": "https://auth.mozilla.auth0.com/authorize",
"issuer": "https://auth.mozilla.auth0.com/",
"client_id": "some-id-string",
"client_secret": "my-super-secret",
"redirect_uris": [
"https://signoff.shipit.mozilla.com/oidc_callback"
],
"token_uri": "https://auth.mozilla.auth0.com/oauth/token",
"token_introspection_uri": "https://test/oauth/token",
"userinfo_uri": "https://auth.mozilla.auth0.com/userinfo"
}
}
"""
@pytest.fixture(scope='module')
def app():
"""
Build an app with an authenticated dummy api
"""
# Use unique auth instance
config = {
'DEBUG': True,
'OIDC_CLIENT_SECRETS': join(dirname(__file__), 'client_secrets.json'),
'OIDC_RESOURCE_SERVER_ONLY': True
}
app = create_app('test', extensions=[auth, auth0], config=config)
@app.route('/')
def index():
return app.response_class('OK')
@app.route('/test-auth-login')
@auth.auth.require_login
def logged_in():
data = {
'auth': True,
'user': current_user.get_id(),
# permissions is a set, not serializable
'scopes': list(current_user.permissions),
}
return jsonify(data)
@app.route('/test-auth-scopes')
@auth.auth.require_scopes([
['project/test/A', 'project/test/B'],
['project/test-admin/*'],
])
def scopes():
return app.response_class('Your scopes are ok.')
@app.route('/test-auth0-userinfo')
@auth0.accept_token()
def auth0_token():
return app.response_class('OK')
# Add fake swagger url, used by redirect
app.api.swagger_url = '/'
return app
@pytest.yield_fixture(scope='module')
def client(app):
"""
A Flask test client.
"""
with app.test_client() as client:
with mocks.apply_mockups():
yield client
| mpl-2.0 | Python |
6cd9af9d1c2f6b7e366c4bcc0b7c7422d4f776be | Add device events hook to app engine app. | tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation | src/appengine/main.py | src/appengine/main.py | import json
import logging
import os
import random
import string
import sys
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp.util import login_required
from google.appengine.ext import db
from google.appengine.ext.db import polymodel
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../third_party'))
import flask
from flask import request
import accounts
import model
import pusher
from common import creds
def StaticDir():
    """Return the normalized path of the ``static`` directory that lives
    one level above this module's directory."""
    here = os.path.dirname(__file__)
    return os.path.normpath(os.path.join(here, '..', 'static'))
app = flask.Flask('domics', static_folder=StaticDir())
app.debug = True
@app.route('/')
def Root():
    # Serve the single-page-app entry point from the static directory.
    return flask.send_from_directory(StaticDir(), 'index.html')
@app.before_request
def BeforeRequest():
    """Require a logged-in user for every endpoint except the index.

    Anonymous visitors are redirected to the login page; authenticated
    visitors get a Person record created on first sight.
    """
    user = users.get_current_user()
    if not user:
        if request.endpoint not in {'/'}:
            # Fixed: was ``flaskredirect`` — a NameError at runtime.
            return flask.redirect(users.create_login_url(request.endpoint))
        # Fixed: the original fell through and crashed on
        # ``user.user_id()`` with ``user`` being None.
        return
    logging.info(user)
    model.Person.get_or_insert(key_name=user.user_id())
def GetUser():
    """Return the Person entity for the current GAE user (must be logged in)."""
    user = users.get_current_user()
    assert user is not None
    return model.Person.get_or_insert(key_name=user.user_id())
@app.route('/api/user', methods=['GET'])
def GetUserRequest():
    # Serialize the current user's datastore entity as JSON.
    user = GetUser()
    return flask.jsonify(id=user.key().id_or_name(), **db.to_dict(user))
@app.route('/api/channel')
def post(chan_name):
event = json.loads(self.request.body)
print event
p = pusher.Pusher(app_id=creds.pusher_app_id,
key=creds.pusher_key, secret=creds.pusher_secret)
p[chan_name].trigger('event', event)
@app.route('/api/device/events', methods=['POST'])
def DeviceEvents():
    # Webhook endpoint: currently only parses and logs the posted JSON.
    body = json.loads(flask.request.data)
    logging.info(body)
@app.route('/api/device/<int:device_id>', methods=['POST'])
def CreateUpdateDevice(device_id):
    """Create a device from the posted JSON, or update an existing one."""
    body = json.loads(flask.request.data)
    device = model.Device.get_by_id(device_id)
    if not device:
        # NOTE(review): ``Devices`` is not defined in this module, so this
        # branch raises NameError — should this be a model.Device factory?
        device = Devices.CreateDevice(body)
    else:
        device.update(body)
    device.put()
@app.route('/api/device/<int:device_id>', methods=['GET'])
def GetDevice(device_id):
    """Return the device entity as JSON, or 404 if it does not exist."""
    device = model.Device.get_by_id(device_id)
    if not device:
        flask.abort(404)
    return flask.jsonify(**db.to_dict(device))
@app.route('/api/device/<int:device_id>/event')
def DeviceEvent(device_id):
    """Dispatch a JSON event from the request body to the device.

    NOTE(review): this reads a request body but the route does not declare
    methods=['POST'] — confirm GET is really intended here.
    """
    device = model.Device.get_by_id(device_id)
    if not device:
        flask.abort(404)
    event = json.loads(flask.request.data)
    device.Event(event)
| import json
import logging
import os
import random
import string
import sys
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp.util import login_required
from google.appengine.ext import db
from google.appengine.ext.db import polymodel
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../third_party'))
import flask
from flask import request
import accounts
import model
import pusher
from common import creds
def StaticDir():
    # Normalized path of the ``static`` directory one level above this file.
    return os.path.normpath(os.path.join(os.path.dirname(__file__), '../static'))
app = flask.Flask('domics', static_folder=StaticDir())
app.debug = True
@app.route('/')
def Root():
    # Serve the single-page-app entry point from the static directory.
    return flask.send_from_directory(StaticDir(), 'index.html')
@app.before_request
def BeforeRequest():
    """Require a logged-in user for every endpoint except the index.

    Anonymous visitors are redirected to the login page; authenticated
    visitors get a Person record created on first sight.
    """
    user = users.get_current_user()
    if not user:
        if request.endpoint not in {'/'}:
            # Fixed: was ``flaskredirect`` — a NameError at runtime.
            return flask.redirect(users.create_login_url(request.endpoint))
        # Fixed: the original fell through and crashed on
        # ``user.user_id()`` with ``user`` being None.
        return
    logging.info(user)
    model.Person.get_or_insert(key_name=user.user_id())
def GetUser():
    """Return the Person entity for the current GAE user (must be logged in)."""
    user = users.get_current_user()
    assert user is not None
    return model.Person.get_or_insert(key_name=user.user_id())
@app.route('/api/user', methods=['GET'])
def GetUserRequest():
    # Serialize the current user's datastore entity as JSON.
    user = GetUser()
    return flask.jsonify(id=user.key().id_or_name(), **db.to_dict(user))
@app.route('/api/channel')
def post(chan_name):
event = json.loads(self.request.body)
print event
p = pusher.Pusher(app_id=creds.pusher_app_id,
key=creds.pusher_key, secret=creds.pusher_secret)
p[chan_name].trigger('event', event)
@app.route('/api/device/<int:device_id>', methods=['POST'])
def CreateUpdateDevice(device_id):
    """Create a device from the posted JSON, or update an existing one."""
    body = json.loads(flask.request.data)
    device = model.Device.get_by_id(device_id)
    if not device:
        # NOTE(review): ``Devices`` is not defined in this module, so this
        # branch raises NameError — should this be a model.Device factory?
        device = Devices.CreateDevice(body)
    else:
        device.update(body)
    device.put()
@app.route('/api/device/<int:device_id>', methods=['GET'])
def GetDevice(device_id):
    """Return the device entity as JSON, or 404 if it does not exist."""
    device = model.Device.get_by_id(device_id)
    if not device:
        flask.abort(404)
    return flask.jsonify(**db.to_dict(device))
@app.route('/api/device/<int:device_id>/event')
def DeviceEvent(device_id):
    """Dispatch a JSON event from the request body to the device.

    NOTE(review): this reads a request body but the route does not declare
    methods=['POST'] — confirm GET is really intended here.
    """
    device = model.Device.get_by_id(device_id)
    if not device:
        flask.abort(404)
    event = json.loads(flask.request.data)
    device.Event(event)
| mit | Python |
2c6a495351de52fe1de0b36d73f22e777ef3d08c | fix sqlalchemy url with sqlite prefix | saschagottfried/OpenShift-ToDoPyramid,saschagottfried/OpenShift-ToDoPyramid,saschagottfried/OpenShift-ToDoPyramid | wsgi/todopyramid/todopyramid/__init__.py | wsgi/todopyramid/todopyramid/__init__.py | import os
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
DBSession,
Base,
)
from .views import get_user
def get_db_session(request):
"""return thread-local DB session"""
return DBSession
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
# Persona DNS
settings['persona.audiences'] = '%(OPENSHIFT_APP_DNS)s' % os.environ
# OpenShift Settings
settings['sqlalchemy.url'] = 'sqlite:///%(OPENSHIFT_DATA_DIR)s/todopyramid.sqlite' % os.environ
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(
settings=settings,
root_factory='todopyramid.models.RootFactory',
)
includeme(config)
# scan modules for config descriptors
config.scan()
return config.make_wsgi_app()
def includeme(config):
"""we use this concept to include routes and configuration setup in test cases
http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/testing.html#creating-integration-tests
"""
config.add_static_view('static', 'static', cache_max_age=3600)
# Misc. views
config.add_route('home', '/')
config.add_route('about', '/about')
# Users
config.add_route('account', '/account')
# Viewing todo lists
config.add_route('todos', '/todos')
config.add_route('tags', '/tags')
config.add_route('taglist', '/tags/{tag_name}')
# AJAX
config.add_route('todo', '/todos/{todo_id}')
config.add_route('delete.task', '/delete.task/{todo_id}')
config.add_route('tags.autocomplete', '/tags.autocomplete')
# make DB session a request attribute
# http://blog.safaribooksonline.com/2014/01/07/building-pyramid-applications/
config.add_request_method(get_db_session, 'db', reify=True)
# Making A User Object Available as a Request Attribute
# http://docs.pylonsproject.org/projects/pyramid_cookbook/en/latest/auth/user_object.html
config.add_request_method(get_user, 'user', reify=True)
| import os
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
DBSession,
Base,
)
from .views import get_user
def get_db_session(request):
"""return thread-local DB session"""
return DBSession
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
# Persona DNS
settings['persona.audiences'] = '%(OPENSHIFT_APP_DNS)s' % os.environ
# OpenShift Settings
settings['sqlalchemy.url'] = '%(OPENSHIFT_DATA_DIR)s/todopyramid.sqlite' % os.environ
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(
settings=settings,
root_factory='todopyramid.models.RootFactory',
)
includeme(config)
# scan modules for config descriptors
config.scan()
return config.make_wsgi_app()
def includeme(config):
"""we use this concept to include routes and configuration setup in test cases
http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/testing.html#creating-integration-tests
"""
config.add_static_view('static', 'static', cache_max_age=3600)
# Misc. views
config.add_route('home', '/')
config.add_route('about', '/about')
# Users
config.add_route('account', '/account')
# Viewing todo lists
config.add_route('todos', '/todos')
config.add_route('tags', '/tags')
config.add_route('taglist', '/tags/{tag_name}')
# AJAX
config.add_route('todo', '/todos/{todo_id}')
config.add_route('delete.task', '/delete.task/{todo_id}')
config.add_route('tags.autocomplete', '/tags.autocomplete')
# make DB session a request attribute
# http://blog.safaribooksonline.com/2014/01/07/building-pyramid-applications/
config.add_request_method(get_db_session, 'db', reify=True)
# Making A User Object Available as a Request Attribute
# http://docs.pylonsproject.org/projects/pyramid_cookbook/en/latest/auth/user_object.html
config.add_request_method(get_user, 'user', reify=True)
| mit | Python |
926df1bc4dee9fc613f0fb31bb8c579943008645 | Update plot_label_propagation_digits.py (#22725) | TomDLT/scikit-learn,espg/scikit-learn,anntzer/scikit-learn,scikit-learn/scikit-learn,jakirkham/scikit-learn,betatim/scikit-learn,lesteve/scikit-learn,manhhomienbienthuy/scikit-learn,lesteve/scikit-learn,jakirkham/scikit-learn,vinayak-mehta/scikit-learn,ivannz/scikit-learn,scikit-learn/scikit-learn,TomDLT/scikit-learn,saiwing-yeung/scikit-learn,betatim/scikit-learn,anntzer/scikit-learn,jakirkham/scikit-learn,AlexandreAbraham/scikit-learn,jakirkham/scikit-learn,TomDLT/scikit-learn,scikit-learn/scikit-learn,vinayak-mehta/scikit-learn,AlexandreAbraham/scikit-learn,manhhomienbienthuy/scikit-learn,saiwing-yeung/scikit-learn,saiwing-yeung/scikit-learn,betatim/scikit-learn,betatim/scikit-learn,espg/scikit-learn,AlexandreAbraham/scikit-learn,TomDLT/scikit-learn,manhhomienbienthuy/scikit-learn,vinayak-mehta/scikit-learn,saiwing-yeung/scikit-learn,lesteve/scikit-learn,anntzer/scikit-learn,espg/scikit-learn,vinayak-mehta/scikit-learn,lesteve/scikit-learn,scikit-learn/scikit-learn,manhhomienbienthuy/scikit-learn,ivannz/scikit-learn,ivannz/scikit-learn,espg/scikit-learn,ivannz/scikit-learn,anntzer/scikit-learn,AlexandreAbraham/scikit-learn | examples/semi_supervised/plot_label_propagation_digits.py | examples/semi_supervised/plot_label_propagation_digits.py | """
===================================================
Label Propagation digits: Demonstrating performance
===================================================
This example demonstrates the power of semisupervised learning by
training a Label Spreading model to classify handwritten digits
with sets of very few labels.
The handwritten digit dataset has 1797 total points. The model will
be trained using all points, but only 30 will be labeled. Results
in the form of a confusion matrix and a series of metrics over each
class will be very good.
At the end, the top 10 most uncertain predictions will be shown.
"""
# Authors: Clay Woolam <clay@woolam.org>
# License: BSD
# %%
# Data generation
# ---------------
#
# We use the digits dataset. We only use a subset of randomly selected samples.
from sklearn import datasets
import numpy as np
digits = datasets.load_digits()
rng = np.random.RandomState(2)
indices = np.arange(len(digits.data))
rng.shuffle(indices)
# %%
#
# We selected 340 samples of which only 40 will be associated with a known label.
# Therefore, we store the indices of the 300 other samples for which we are not
# supposed to know their labels.
X = digits.data[indices[:340]]
y = digits.target[indices[:340]]
images = digits.images[indices[:340]]
n_total_samples = len(y)
n_labeled_points = 40
indices = np.arange(n_total_samples)
unlabeled_set = indices[n_labeled_points:]
# %%
# Shuffle everything around
y_train = np.copy(y)
y_train[unlabeled_set] = -1
# %%
# Semi-supervised learning
# ------------------------
#
# We fit a :class:`~sklearn.semi_supervised.LabelSpreading` and use it to predict
# the unknown labels.
from sklearn.semi_supervised import LabelSpreading
from sklearn.metrics import classification_report
lp_model = LabelSpreading(gamma=0.25, max_iter=20)
lp_model.fit(X, y_train)
predicted_labels = lp_model.transduction_[unlabeled_set]
true_labels = y[unlabeled_set]
print(
"Label Spreading model: %d labeled & %d unlabeled points (%d total)"
% (n_labeled_points, n_total_samples - n_labeled_points, n_total_samples)
)
# %%
# Classification report
print(classification_report(true_labels, predicted_labels))
# %%
# Confusion matrix
from sklearn.metrics import ConfusionMatrixDisplay
ConfusionMatrixDisplay.from_predictions(
true_labels, predicted_labels, labels=lp_model.classes_
)
# %%
# Plot the most uncertain predictions
# -----------------------------------
#
# Here, we will pick and show the 10 most uncertain predictions.
from scipy import stats
pred_entropies = stats.distributions.entropy(lp_model.label_distributions_.T)
# %%
# Pick the top 10 most uncertain labels
uncertainty_index = np.argsort(pred_entropies)[-10:]
# %%
# Plot
import matplotlib.pyplot as plt
f = plt.figure(figsize=(7, 5))
for index, image_index in enumerate(uncertainty_index):
image = images[image_index]
sub = f.add_subplot(2, 5, index + 1)
sub.imshow(image, cmap=plt.cm.gray_r)
plt.xticks([])
plt.yticks([])
sub.set_title(
"predict: %i\ntrue: %i" % (lp_model.transduction_[image_index], y[image_index])
)
f.suptitle("Learning with small amount of labeled data")
plt.show()
| """
===================================================
Label Propagation digits: Demonstrating performance
===================================================
This example demonstrates the power of semisupervised learning by
training a Label Spreading model to classify handwritten digits
with sets of very few labels.
The handwritten digit dataset has 1797 total points. The model will
be trained using all points, but only 30 will be labeled. Results
in the form of a confusion matrix and a series of metrics over each
class will be very good.
At the end, the top 10 most uncertain predictions will be shown.
"""
# Authors: Clay Woolam <clay@woolam.org>
# License: BSD
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from sklearn import datasets
from sklearn.semi_supervised import LabelSpreading
from sklearn.metrics import confusion_matrix, classification_report
digits = datasets.load_digits()
rng = np.random.RandomState(2)
indices = np.arange(len(digits.data))
rng.shuffle(indices)
X = digits.data[indices[:340]]
y = digits.target[indices[:340]]
images = digits.images[indices[:340]]
n_total_samples = len(y)
n_labeled_points = 40
indices = np.arange(n_total_samples)
unlabeled_set = indices[n_labeled_points:]
# #############################################################################
# Shuffle everything around
y_train = np.copy(y)
y_train[unlabeled_set] = -1
# #############################################################################
# Learn with LabelSpreading
lp_model = LabelSpreading(gamma=0.25, max_iter=20)
lp_model.fit(X, y_train)
predicted_labels = lp_model.transduction_[unlabeled_set]
true_labels = y[unlabeled_set]
cm = confusion_matrix(true_labels, predicted_labels, labels=lp_model.classes_)
print(
"Label Spreading model: %d labeled & %d unlabeled points (%d total)"
% (n_labeled_points, n_total_samples - n_labeled_points, n_total_samples)
)
print(classification_report(true_labels, predicted_labels))
print("Confusion matrix")
print(cm)
# #############################################################################
# Calculate uncertainty values for each transduced distribution
pred_entropies = stats.distributions.entropy(lp_model.label_distributions_.T)
# #############################################################################
# Pick the top 10 most uncertain labels
uncertainty_index = np.argsort(pred_entropies)[-10:]
# #############################################################################
# Plot
f = plt.figure(figsize=(7, 5))
for index, image_index in enumerate(uncertainty_index):
image = images[image_index]
sub = f.add_subplot(2, 5, index + 1)
sub.imshow(image, cmap=plt.cm.gray_r)
plt.xticks([])
plt.yticks([])
sub.set_title(
"predict: %i\ntrue: %i" % (lp_model.transduction_[image_index], y[image_index])
)
f.suptitle("Learning with small amount of labeled data")
plt.show()
| bsd-3-clause | Python |
4b7e6d7df8a447873bc57adfedfb6013b915190c | Fix Node.namespace_uri for py3 | 5monkeys/content-io | cio/node.py | cio/node.py | # coding=utf-8
from __future__ import unicode_literals
from .environment import env
from .utils.formatters import ContentFormatter
from .utils.uri import URI
import six
empty = object()
class Node(object):
_formatter = ContentFormatter()
def __init__(self, uri, content=None, **meta):
self.env = env.state
self._uri = [uri, URI(uri)]
self._content = [content]
self.meta = meta
def __repr__(self):
return '<Node: %s>' % self.uri
def __bytes__(self):
content = self.render()
if isinstance(content, six.text_type):
content = content.encode('utf-8')
return content or b''
def __unicode__(self):
return self.render() or ''
__str__ = __bytes__ if six.PY2 else __unicode__
def render(self, **context):
if self.content is not None:
if context:
return self._formatter.format(self.content, **context)
else:
return self.content
def get_uri(self):
return self._uri[-1]
def set_uri(self, uri):
if uri != self.get_uri():
self._uri.append(URI(uri))
uri = property(get_uri, set_uri)
def get_content(self):
return self._content[-1]
def set_content(self, content):
if content != self.get_content():
self._content.append(content)
content = property(get_content, set_content)
@property
def initial(self):
return self._content[0]
@property
def initial_uri(self):
return self._uri[0]
@property
def namespace_uri(self):
"""
Finds and returns first applied URI of this node that has a namespace.
:return str: uri
"""
try:
return next(
iter(filter(lambda uri: URI(uri).namespace, self._uri))
)
except StopIteration:
return None
def for_json(self):
return {
'uri': six.text_type(self.uri),
'content': self.content,
'meta': self.meta if self.meta is not None else {}
}
| # coding=utf-8
from __future__ import unicode_literals
from .environment import env
from .utils.formatters import ContentFormatter
from .utils.uri import URI
import six
empty = object()
class Node(object):
_formatter = ContentFormatter()
def __init__(self, uri, content=None, **meta):
self.env = env.state
self._uri = [uri, URI(uri)]
self._content = [content]
self.meta = meta
def __repr__(self):
return '<Node: %s>' % self.uri
def __bytes__(self):
content = self.render()
if isinstance(content, six.text_type):
content = content.encode('utf-8')
return content or b''
def __unicode__(self):
return self.render() or ''
__str__ = __bytes__ if six.PY2 else __unicode__
def render(self, **context):
if self.content is not None:
if context:
return self._formatter.format(self.content, **context)
else:
return self.content
def get_uri(self):
return self._uri[-1]
def set_uri(self, uri):
if uri != self.get_uri():
self._uri.append(URI(uri))
uri = property(get_uri, set_uri)
def get_content(self):
return self._content[-1]
def set_content(self, content):
if content != self.get_content():
self._content.append(content)
content = property(get_content, set_content)
@property
def initial(self):
return self._content[0]
@property
def initial_uri(self):
return self._uri[0]
@property
def namespace_uri(self):
"""
Finds and returns first applied URI of this node that has a namespace.
:return str: uri
"""
try:
return iter(
filter(lambda uri: URI(uri).namespace, self._uri)
).next()
except StopIteration:
return None
def for_json(self):
return {
'uri': six.text_type(self.uri),
'content': self.content,
'meta': self.meta if self.meta is not None else {}
}
| bsd-3-clause | Python |
3a42b4458f85d8f2640c34fce79c9a99a79f5323 | Revert "add second db connection to coastdat" | openego/data_processing | calc_renpass_gis/scenario_reader/db.py | calc_renpass_gis/scenario_reader/db.py | # -*- coding: utf-8 -*-
from sqlalchemy import (Column, Float, ForeignKey, Integer, MetaData, String,
Table, join, create_engine, ForeignKeyConstraint,
Boolean, DateTime, Sequence)
from sqlalchemy.orm import sessionmaker, relationship, configure_mappers
# from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.automap import automap_base
# from geoalchemy2 import Geometry, shape
import configparser as cp
# from sqlalchemy.sql import func
# from sqlalchemy.dialects import postgresql
import os.path as path
# read configuration file
FILENAME = 'config.ini'
FILE = path.join(path.expanduser("~"), '.open_eGo', FILENAME)
cfg = cp.ConfigParser()
cfg.read(FILE)
# establish db connection
section = 'Connection'
conn = create_engine(
"postgresql+psycopg2://{user}:{password}@{host}:{port}/{db}".format(
user=cfg.get(section, 'username'),
password=cfg.get(section, 'password'),
host=cfg.get(section, 'host'),
port=cfg.get(section, 'port'),
db=cfg.get(section, 'db')))
print("Connected to database.")
# map schema
session = sessionmaker(bind=conn)()
meta = MetaData()
meta.bind = conn
meta.reflect(bind=conn, schema='calc_renpass_gis',
only=['renpass_gis_scenario',
'renpass_gis_linear_transformer',
'renpass_gis_source',
'renpass_gis_sink',
'renpass_gis_storage'])
# map to classes
Base = automap_base(metadata=meta)
Base.prepare()
Scenario, LinearTransformer, Source, Sink, Storage = \
Base.classes.renpass_gis_scenario,\
Base.classes.renpass_gis_linear_transformer,\
Base.classes.renpass_gis_source,\
Base.classes.renpass_gis_sink,\
Base.classes.renpass_gis_storage
| # -*- coding: utf-8 -*-
from sqlalchemy import (Column, Float, ForeignKey, Integer, MetaData, String,
Table, join, create_engine, ForeignKeyConstraint,
Boolean, DateTime, Sequence)
from sqlalchemy.orm import sessionmaker, relationship, configure_mappers
# from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.automap import automap_base
# from geoalchemy2 import Geometry, shape
import configparser as cp
# from sqlalchemy.sql import func
# from sqlalchemy.dialects import postgresql
import os.path as path
# read configuration file
FILENAME = 'config.ini'
FILE = path.join(path.expanduser("~"), '.open_eGo', FILENAME)
cfg = cp.ConfigParser()
cfg.read(FILE)
# establish db connection
section = 'Connection'
conn = create_engine(
"postgresql+psycopg2://{user}:{password}@{host}:{port}/{db}".format(
user=cfg.get(section, 'username'),
password=cfg.get(section, 'password'),
host=cfg.get(section, 'host'),
port=cfg.get(section, 'port'),
db=cfg.get(section, 'db')))
print("Connected to database.")
# establish second db connection to
# change of init file in .open_ego and Server connetion via ssh required
section2 = 'Coastdat'
conn2 = create_engine(
"postgresql+psycopg2://{user}:{password}@{host}:{port}/{db}".format(
user=cfg.get(section2, 'username'),
password=cfg.get(section2, 'password'),
host=cfg.get(section2, 'host'),
port=cfg.get(section2, 'port'),
db=cfg.get(section2, 'db')))
print("Connected to database 2.")
# map schema
session = sessionmaker(bind=conn)()
meta = MetaData()
meta.bind = conn
meta.reflect(bind=conn, schema='calc_renpass_gis',
only=['renpass_gis_scenario',
'renpass_gis_linear_transformer',
'renpass_gis_source',
'renpass_gis_sink',
'renpass_gis_storage'])
# map to classes
Base = automap_base(metadata=meta)
Base.prepare()
Scenario, LinearTransformer, Source, Sink, Storage = \
Base.classes.renpass_gis_scenario,\
Base.classes.renpass_gis_linear_transformer,\
Base.classes.renpass_gis_source,\
Base.classes.renpass_gis_sink,\
Base.classes.renpass_gis_storage
# map schema of coastdat-2
session2 = sessionmaker(bind=conn)()
meta2 = MetaData()
meta2.bind = conn2
meta2.reflect(bind=conn2, schema='coastdat',
only=['cosmoclmgrid',
'datatype',
'located',
'projection',
'scheduled',
'spatial',
'timeseries',
'typified',
'year'])
# map to classes of coastdat weather data
Coastdat = automap_base(metadata=meta2)
Coastdat.prepare()
| agpl-3.0 | Python |
49152781ecbfb4f51707e6e54641301038eba80f | set varchar length | nebgnahz/CS268NetworkMeasurement,nebgnahz/CS268NetworkMeasurement,nebgnahz/CS268NetworkMeasurement | king/DataPoint.py | king/DataPoint.py | from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, PickleType, Boolean, String, DateTime
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
engine = create_engine('mysql+pymysql://ucb_268_measure:ucb_268_measure@data.cnobwey0khau.us-west-2.rds.amazonaws.com:3306/mydb', echo=False)
Base = declarative_base(bind=engine)
Session = sessionmaker(bind=engine)
class DataPoint(Base):
__tablename__ = 'data'
id = Column(Integer, primary_key=True)
timestamp = Column(DateTime)
name1 = Column(String(length=200))
name2 = Column(String(length=200))
target1 = Column(PickleType)
target2 = Column(PickleType)
start = Column(DateTime)
end = Column(DateTime)
pings = Column(PickleType)
address = Column(PickleType)
test_point = Column(String(length=200))
success = Column(Boolean)
def __init__(self, name1, name2, target1, target2, start, end,
pings, address, test_point, success):
self.timestamp = datetime.now()
self.name1 = name1
self.name2 = name2
self.target1 = target1
self.target2 = target2
self.start = start
self.end = end
self.pings = pings
self.address = address
self.test_point = test_point
self.success = success
Base.metadata.create_all()
| from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, PickleType, Boolean, String, DateTime
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
engine = create_engine('mysql+pymysql://ucb_268_measure:ucb_268_measure@data.cnobwey0khau.us-west-2.rds.amazonaws.com:3306/mydb', echo=False)
Base = declarative_base(bind=engine)
Session = sessionmaker(bind=engine)
class DataPoint(Base):
__tablename__ = 'data'
id = Column(Integer, primary_key=True)
timestamp = Column(DateTime)
name1 = Column(String)
name2 = Column(String)
target1 = Column(PickleType)
target2 = Column(PickleType)
start = Column(DateTime)
end = Column(DateTime)
pings = Column(PickleType)
address = Column(PickleType)
test_point = Column(String)
success = Column(Boolean)
def __init__(self, name1, name2, target1, target2, start, end,
pings, address, test_point, success):
self.timestamp = datetime.now()
self.name1 = name1
self.name2 = name2
self.target1 = target1
self.target2 = target2
self.start = start
self.end = end
self.pings = pings
self.address = address
self.test_point = test_point
self.success = success
Base.metadata.create_all()
| bsd-2-clause | Python |
93db3543a576ccde905fc77d7c3ad825f6a100a1 | change threshold | derwind/fontUtils,derwind/fontUtils,derwind/fontUtils,derwind/fontUtils,derwind/otfparser,derwind/otfparser | misc_scripts/compare_bounds.py | misc_scripts/compare_bounds.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys, re
from fontTools.ttLib import TTFont
from fontTools.pens.boundsPen import BoundsPen, ControlBoundsPen
class ConcordanceInfo(object):
def __init__(self):
self.glyphs = 0
self.concordant_glyphs = 0
self.maxdiff = 0
self.maxdiff_gname = None
def update(self, diff, gname):
self.glyphs += 1
if diff <= 1:
self.concordant_glyphs += 1
elif diff > self.maxdiff:
self.maxdiff = round(diff, 2)
self.maxdiff_gname = gname
def calc_bounds(font, gname, penclass):
gs = font.getGlyphSet()
g = gs[gname]
pen = penclass(gs)
g.draw(pen)
return [round(v, 2) for v in pen.bounds] if pen.bounds is not None else None
def bounds_differ(bounds1, bounds2):
for v1, v2 in zip(bounds1, bounds2):
if abs(v1 - v2) >= 1:
return True
return False
def compare_bounds():
font1_path = sys.argv[1]
font2_path = sys.argv[2]
font1 = TTFont(font1_path, fontNumber=0)
font2 = TTFont(font2_path, fontNumber=0)
for gname in font1.getGlyphOrder():
bounds1 = calc_bounds(font1, gname, BoundsPen)
bounds2 = calc_bounds(font2, gname, BoundsPen)
if bounds1 is None or bounds2 is None:
if bounds1 is not None or bounds2 is not None:
print "[{}] {} {}".format(gname, bounds1, bounds2)
elif bounds_differ(bounds1, bounds2):
print "[{}] {} {}".format(gname, bounds1, bounds2)
def main():
compare_bounds()
if __name__ == "__main__":
main()
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys, re
from fontTools.ttLib import TTFont
from fontTools.pens.boundsPen import BoundsPen, ControlBoundsPen
class ConcordanceInfo(object):
def __init__(self):
self.glyphs = 0
self.concordant_glyphs = 0
self.maxdiff = 0
self.maxdiff_gname = None
def update(self, diff, gname):
self.glyphs += 1
if diff <= 1:
self.concordant_glyphs += 1
elif diff > self.maxdiff:
self.maxdiff = round(diff, 2)
self.maxdiff_gname = gname
def calc_bounds(font, gname, penclass):
gs = font.getGlyphSet()
g = gs[gname]
pen = penclass(gs)
g.draw(pen)
return [round(v, 2) for v in pen.bounds] if pen.bounds is not None else None
def bounds_differ(bounds1, bounds2):
for v1, v2 in zip(bounds1, bounds2):
if abs(v1 - v2) > 1:
return True
return False
def compare_bounds():
font1_path = sys.argv[1]
font2_path = sys.argv[2]
font1 = TTFont(font1_path, fontNumber=0)
font2 = TTFont(font2_path, fontNumber=0)
for gname in font1.getGlyphOrder():
bounds1 = calc_bounds(font1, gname, BoundsPen)
bounds2 = calc_bounds(font2, gname, BoundsPen)
if bounds1 is None or bounds2 is None:
if bounds1 is not None or bounds2 is not None:
print "[{}] {} {}".format(gname, bounds1, bounds2)
elif bounds_differ(bounds1, bounds2):
print "[{}] {} {}".format(gname, bounds1, bounds2)
def main():
compare_bounds()
if __name__ == "__main__":
main()
| apache-2.0 | Python |
be0b85f50b8cd4f7323d5c6def5c388c7a8fad36 | fix webhook | Windfarer/episode,Windfarer/episode | webhooks.py | webhooks.py | from http.server import HTTPServer, BaseHTTPRequestHandler
import json
import os
import shutil
from episode import GitRepo, Episode
WORK_DIR = "repo"
class WebHookHandler(BaseHTTPRequestHandler):
def do_POST(self):
event_type = self.headers.get('X-Github-Event')
if event_type != 'push':
return
length = int(self.headers.get('Content-Length'))
http_body = self.rfile.read(length).decode('utf-8')
data = json.loads(http_body)
ref = data.get('ref')
if ref != 'refs/heads/source':
return
# todo: pull repo & branch to source & build & push to master
repo_addr = data.get("repository")['ssh_url']
print('repo', repo_addr)
repo = GitRepo(repo_address=repo_addr, dst=WORK_DIR)
repo.clone()
os.chdir(WORK_DIR)
repo.checkout_or_create("source")
Episode().deploy()
os.chdir("..")
shutil.rmtree(WORK_DIR)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
# self.wfile.write(bytes("Hello World", "utf-8"))
return
if __name__ == "__main__":
port = 8000
Handler = WebHookHandler
httpd = HTTPServer(("0.0.0.0", port), Handler)
print("Serving at http://127.0.0.1:{port}".format(port=port))
httpd.serve_forever() | from http.server import HTTPServer, BaseHTTPRequestHandler
import json
import os
import shutil
from episode import GitRepo, Episode
WORK_DIR = "repo"
class WebHookHandler(BaseHTTPRequestHandler):
def do_POST(self):
event_type = self.headers.get('X-Github-Event')
if event_type != 'push':
return
length = int(self.headers.get('Content-Length'))
http_body = self.rfile.read(length).decode('utf-8')
data = json.loads(http_body)
ref = data.get('ref')
if ref != 'refs/heads/source':
return
# todo: pull repo & branch to source & build & push to master
repo_addr = data.get("repository")['ssh_url']
print('repo', repo_addr)
repo = GitRepo(repo_address=repo_addr, dst=WORK_DIR)
repo.clone()
os.chdir(WORK_DIR)
repo.checkout_or_create("source")
Episode().deploy()
shutil.rmtree(WORK_DIR)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
# self.wfile.write(bytes("Hello World", "utf-8"))
return
if __name__ == "__main__":
port = 8000
Handler = WebHookHandler
httpd = HTTPServer(("0.0.0.0", port), Handler)
print("Serving at http://127.0.0.1:{port}".format(port=port))
httpd.serve_forever() | mit | Python |
4787c9e1b895b5ce0bdd0fedeb537a971fab5933 | add management command to benchmark get_direct_ccz | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/app_manager/management/commands/benchmark_direct_ccz.py | corehq/apps/app_manager/management/commands/benchmark_direct_ccz.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import json
from django.core.management import BaseCommand
from corehq.apps.app_manager.dbaccessors import get_app
from corehq.apps.app_manager.management.commands.benchmark_build_times import Timer
from corehq.apps.app_manager.views.cli import get_direct_ccz
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'domain_app_id_pairs',
help='A JSON list where each element has the format [<domain>, <app_id>]',
type=json.loads,
)
def handle(self, domain_app_id_pairs, **options):
for (domain, app_id) in domain_app_id_pairs:
print("%s: %s" % (domain, app_id))
with Timer():
app = get_app(domain, app_id)
get_direct_ccz(domain, app, None, None)
| bsd-3-clause | Python |
|
585317f3a03f55f6487a98446d4a9279f91714d2 | Add a test of the linearity of scalar multiplication | ppb/ppb-vector,ppb/ppb-vector | tests/test_vector2_scalar_multiplication.py | tests/test_vector2_scalar_multiplication.py | import pytest # type: ignore
from hypothesis import given
from hypothesis.strategies import floats
from utils import vectors
from ppb_vector import Vector2
@pytest.mark.parametrize("x, y, expected", [
(Vector2(6, 1), 0, Vector2(0, 0)),
(Vector2(6, 1), 2, Vector2(12, 2)),
(Vector2(0, 0), 3, Vector2(0, 0)),
(Vector2(-1.5, 2.4), -2, Vector2(3.0, -4.8)),
(Vector2(1, 2), 0.1, Vector2(0.1, 0.2))
])
def test_scalar_multiplication(x, y, expected):
assert x * y == expected
@given(
x=floats(min_value=-1e75, max_value=1e75),
y=floats(min_value=-1e75, max_value=1e75),
v=vectors(max_magnitude=1e150)
)
def test_scalar_associative(x: float, y: float, v: Vector2):
left = (x * y) * v
right = x * (y * v)
assert left.isclose(right)
@given(
l=floats(min_value=-1e150, max_value=1e150),
x=vectors(max_magnitude=1e150),
y=vectors(max_magnitude=1e150),
)
def test_scalar_linear(l: float, x: Vector2, y: Vector2):
assert (l * (x + y)).isclose(l*x + l*y)
| import pytest # type: ignore
from hypothesis import given
from hypothesis.strategies import floats
from utils import vectors
from ppb_vector import Vector2
@pytest.mark.parametrize("x, y, expected", [
(Vector2(6, 1), 0, Vector2(0, 0)),
(Vector2(6, 1), 2, Vector2(12, 2)),
(Vector2(0, 0), 3, Vector2(0, 0)),
(Vector2(-1.5, 2.4), -2, Vector2(3.0, -4.8)),
(Vector2(1, 2), 0.1, Vector2(0.1, 0.2))
])
def test_scalar_multiplication(x, y, expected):
assert x * y == expected
@given(
x=floats(min_value=-1e75, max_value=1e75),
y=floats(min_value=-1e75, max_value=1e75),
v=vectors(max_magnitude=1e150)
)
def test_scalar_associative(x: float, y: float, v: Vector2):
left = (x * y) * v
right = x * (y * v)
assert left.isclose(right)
| artistic-2.0 | Python |
cecbb5951ef806c5b4b7b6894c05e4d086730fb0 | order fy descending (newest on top) | camptocamp/c2c-rd-addons,Endika/c2c-rd-addons,Antiun/c2c-rd-addons,Endika/c2c-rd-addons,Antiun/c2c-rd-addons,Antiun/c2c-rd-addons,camptocamp/c2c-rd-addons,camptocamp/c2c-rd-addons,VitalPet/c2c-rd-addons,Endika/c2c-rd-addons,VitalPet/c2c-rd-addons,VitalPet/c2c-rd-addons | base_ordered/ordered.py | base_ordered/ordered.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 Camptocamp Austria (<http://www.camptocamp.at>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class purchase_order(osv.osv):
_inherit = "purchase.order"
_order = 'date_order desc, id desc'
purchase_order()
class sale_order(osv.osv):
_inherit = "sale.order"
_order = 'date_order desc, id desc'
sale_order()
class stock_picking(osv.osv):
_inherit = "stock.picking"
_order = 'date desc, id desc'
stock_picking()
class stock_move(osv.osv):
_inherit = "stock.move"
_order = 'date desc, id desc'
stock_move()
class account_invoice(osv.osv):
_inherit = "account.invoice"
_order = 'date_invoice desc, id desc'
account_invoice()
class account_fiscalyear(osv.osv):
_inherit = "account.fiscalyear"
_order = 'date_start desc, id desc'
account_fiscalyear()
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 Camptocamp Austria (<http://www.camptocamp.at>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class purchase_order(osv.osv):
_inherit = "purchase.order"
_order = 'date_order desc, id desc'
purchase_order()
class sale_order(osv.osv):
_inherit = "sale.order"
_order = 'date_order desc, id desc'
sale_order()
class stock_picking(osv.osv):
_inherit = "stock.picking"
_order = 'date desc, id desc'
stock_picking()
class stock_move(osv.osv):
_inherit = "stock.move"
_order = 'date desc, id desc'
stock_move()
class account_invoice(osv.osv):
_inherit = "account.invoice"
_order = 'date_invoice desc, id desc'
account_invoice()
| agpl-3.0 | Python |
2fedc43c50bd933924046b6f79633687a452116a | bump version | jklynch/mr-fitty,jklynch/mr-fitty | src/mrfitty/__init__.py | src/mrfitty/__init__.py | __version__ = '0.12.0'
| __version__ = '0.11.0'
| mit | Python |
f5e2e7cbb494fc111efcf4abd5c744091e9ee8aa | Fix function name error | shinken-monitoring/mod-webui,mohierf/mod-webui,rednach/mod-webui,rednach/mod-webui,vizvayu/mod-webui,vizvayu/mod-webui,shinken-monitoring/mod-webui,rednach/mod-webui,mohierf/mod-webui,mohierf/mod-webui,shinken-monitoring/mod-webui | module/submodules/graphs.py | module/submodules/graphs.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
import time
from shinken.log import logger
from .metamodule import MetaModule
class GraphsMetaModule(MetaModule):
_functions = ['get_graph_uris']
_custom_log = "You should configure the module 'graphite' in your broker and the module 'ui-graphite' in webui.cfg file to be able to display graphs."
def get_graph_uris(self, elt, graphstart=None, graphend=None, duration=None, source='detail'):
''' Aggregate the get_graph_uris of all the submodules.
The source parameter defines the source of the calling:
Are we displaying graphs for the element detail page (detail),
or a widget in the dashboard (dashboard) ?
If duration is not None, we consider it as a number of seconds to graph and
we call the module get_relative_graphs_uri
If get_relative_graphs_uri is not a module function we compute graphstart and
graphend and we call we call the module get_graphs_uri
If graphstart and graphend are not None, we call the module get_graphs_uri
'''
uris = []
for mod in self.modules:
if not duration:
uris.extend(mod.get_graph_uris(elt, graphstart, graphend, source))
else:
f = getattr(mod, 'get_relative_graph_uris', None)
if f and callable(f):
uris.extend(f(elt, duration, source))
else:
graphend = time.time()
graphstart = graphend - duration
uris.extend(mod.get_graph_uris(elt, graphstart, graphend, source))
logger.debug("[WebUI] Got graphs: %s", uris)
return uris
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
import time
from shinken.log import logger
from .metamodule import MetaModule
class GraphsMetaModule(MetaModule):
_functions = ['get_graph_uris']
_custom_log = "You should configure the module 'graphite' in your broker and the module 'ui-graphite' in webui.cfg file to be able to display graphs."
def get_graph_uris(self, elt, graphstart=None, graphend=None, duration=None, source='detail'):
''' Aggregate the get_graph_uris of all the submodules.
The source parameter defines the source of the calling:
Are we displaying graphs for the element detail page (detail),
or a widget in the dashboard (dashboard) ?
If duration is not None, we consider it as a number of seconds to graph and
we call the module get_relative_graphs_uri
If get_relative_graphs_uri is not a module function we compute graphstart and
graphend and we call we call the module get_graphs_uri
If graphstart and graphend are not None, we call the module get_graphs_uri
'''
uris = []
for mod in self.modules:
if not duration:
uris.extend(mod.get_graph_uris(elt, graphstart, graphend, source))
else:
f = getattr(mod, 'get_relative_graphs_uri', None)
if f and callable(f):
uris.extend(f(elt, duration, source))
else:
graphend = time.time()
graphstart = graphend - duration
uris.extend(mod.get_graph_uris(elt, graphstart, graphend, source))
logger.debug("[WebUI] Got graphs: %s", uris)
return uris
| agpl-3.0 | Python |
e69542c01959e7cf874c6ca1ae5c94d0c9a0ba1f | Fix tarball URL's for htslib (#5993) | iulian787/spack,EmreAtes/spack,tmerrick1/spack,EmreAtes/spack,krafczyk/spack,iulian787/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,lgarren/spack,tmerrick1/spack,matthiasdiener/spack,skosukhin/spack,mfherbst/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,skosukhin/spack,tmerrick1/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,EmreAtes/spack,lgarren/spack,lgarren/spack,mfherbst/spack,skosukhin/spack,skosukhin/spack,iulian787/spack,iulian787/spack,krafczyk/spack,krafczyk/spack,skosukhin/spack,EmreAtes/spack,matthiasdiener/spack,mfherbst/spack,LLNL/spack,mfherbst/spack,mfherbst/spack,krafczyk/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,LLNL/spack,tmerrick1/spack | var/spack/repos/builtin/packages/htslib/package.py | var/spack/repos/builtin/packages/htslib/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Htslib(AutotoolsPackage):
"""C library for high-throughput sequencing data formats."""
homepage = "https://github.com/samtools/htslib"
version('1.6', 'd6fd14e208aca7e08cbe9072233d0af9')
version('1.4', '2a22ff382654c033c40e4ec3ea880050')
version('1.3.1', '16d78f90b72f29971b042e8da8be6843')
version('1.2', '64026d659c3b062cfb6ddc8a38e9779f')
depends_on('zlib')
depends_on('bzip2', when="@1.4:")
depends_on('xz', when="@1.4:")
depends_on('m4', when="@1.2")
depends_on('autoconf', when="@1.2")
depends_on('automake', when="@1.2")
depends_on('libtool', when="@1.2")
# v1.2 uses the automagically assembled tarball from .../archive/...
# everything else uses the tarballs uploaded to the release
def url_for_version(self, version):
if version.string == '1.2':
return 'https://github.com/samtools/htslib/archive/1.2.tar.gz'
else:
url = "https://github.com/samtools/htslib/releases/download/{0}/htslib-{0}.tar.bz2"
return url.format(version.dotted)
| ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Htslib(AutotoolsPackage):
"""C library for high-throughput sequencing data formats."""
homepage = "https://github.com/samtools/htslib"
url = "https://github.com/samtools/htslib/releases/download/1.3.1/htslib-1.3.1.tar.bz2"
version('1.6', 'd6fd14e208aca7e08cbe9072233d0af9')
version('1.4', '2a22ff382654c033c40e4ec3ea880050')
version('1.3.1', '16d78f90b72f29971b042e8da8be6843')
version('1.2', '64026d659c3b062cfb6ddc8a38e9779f',
url='https://github.com/samtools/htslib/archive/1.2.tar.gz')
depends_on('zlib')
depends_on('bzip2', when="@1.4:")
depends_on('xz', when="@1.4:")
depends_on('m4', when="@1.2")
depends_on('autoconf', when="@1.2")
depends_on('automake', when="@1.2")
depends_on('libtool', when="@1.2")
| lgpl-2.1 | Python |
7f29770766a30bf821689960189e95526eee6bdc | print python version if using file directly, not as import | hchiam/cognateLanguage | getDataRemotely.py | getDataRemotely.py | import sys
from dictAsFile_wrapper import *
def run():
hashtableName = 'hashtable.pkl'
data = {}
# use different import based on python version number:
if (sys.version_info > (3, 0)):
# python 3:
if __name__ == '__main__':
print('python 3')
import urllib.request
url = 'https://raw.githubusercontent.com/hchiam/cognateLanguage/master/hashtable.pkl'
urllib.request.urlretrieve(url) # download file
data = readFileToDict(hashtableName)
# with urllib.request.urlopen('https://raw.githubusercontent.com/hchiam/cognateLanguage/master/output_shortlist.txt') as response:
# line = response.readline().decode('utf-8').replace('\n','')
# while line != '':
# data.append(line)
# line = response.readline().decode('utf-8').replace('\n','')
else:
# python 2:
if __name__ == '__main__':
print('python 2')
import urllib2
url = 'https://raw.githubusercontent.com/hchiam/cognateLanguage/master/hashtable.pkl'
response = urllib2.urlopen(url) # download file
data = readFileToDict(hashtableName)
# response = urllib2.urlopen('https://raw.githubusercontent.com/hchiam/cognateLanguage/master/output_shortlist.txt')
# data = response.read().split('\n')
return data
# this if statement is so that the following code only runs if this .py file is not being imported
if __name__ == '__main__':
data = run()
# debug print out:
print ('debug output: data[\"hi\"] = ' + data["hi"]) | import sys
from dictAsFile_wrapper import *
def run():
hashtableName = 'hashtable.pkl'
data = {}
# use different import based on python version number:
if (sys.version_info > (3, 0)):
# python 3:
print('python 3')
import urllib.request
url = 'https://raw.githubusercontent.com/hchiam/cognateLanguage/master/hashtable.pkl'
urllib.request.urlretrieve(url) # download file
data = readFileToDict(hashtableName)
# with urllib.request.urlopen('https://raw.githubusercontent.com/hchiam/cognateLanguage/master/output_shortlist.txt') as response:
# line = response.readline().decode('utf-8').replace('\n','')
# while line != '':
# data.append(line)
# line = response.readline().decode('utf-8').replace('\n','')
else:
# python 2:
print('python 2')
import urllib2
url = 'https://raw.githubusercontent.com/hchiam/cognateLanguage/master/hashtable.pkl'
response = urllib2.urlopen(url) # download file
data = readFileToDict(hashtableName)
# response = urllib2.urlopen('https://raw.githubusercontent.com/hchiam/cognateLanguage/master/output_shortlist.txt')
# data = response.read().split('\n')
return data
# this if statement is so that the following code only runs if this .py file is not being imported
if __name__ == '__main__':
data = run()
# debug print out:
print ('debug output: data[\"hi\"] = ' + data["hi"]) | mit | Python |
9c218079f00e9b3c7285cd94dcc7836531f722a5 | Install RMPISNOW wrapper in prefix.bin for r-snow (#16479) | LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/r-snow/package.py | var/spack/repos/builtin/packages/r-snow/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RSnow(RPackage):
"""Support for simple parallel computing in R."""
homepage = "https://cloud.r-project.org/package=snow"
url = "https://cloud.r-project.org/src/contrib/snow_0.4-2.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/snow"
version('0.4-3', sha256='8512537daf334ea2b8074dbb80cf5e959a403a78d68bc1e97664e8a4f64576d8')
version('0.4-2', sha256='ee070187aea3607c9ca6235399b3db3e181348692405d038e962e06aefccabd7')
depends_on('r@2.13.1:', type=('build', 'run'))
@run_after('install')
def install_wrapper(self):
mkdir(self.prefix.bin)
install('inst/RMPISNOW', self.prefix.bin)
| # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RSnow(RPackage):
"""Support for simple parallel computing in R."""
homepage = "https://cloud.r-project.org/package=snow"
url = "https://cloud.r-project.org/src/contrib/snow_0.4-2.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/snow"
version('0.4-3', sha256='8512537daf334ea2b8074dbb80cf5e959a403a78d68bc1e97664e8a4f64576d8')
version('0.4-2', sha256='ee070187aea3607c9ca6235399b3db3e181348692405d038e962e06aefccabd7')
depends_on('r@2.13.1:', type=('build', 'run'))
| lgpl-2.1 | Python |
f42744558b989f8122f67d24bf65c8514eb516cb | Use better names for generated IR files. | djc/runa,djc/runa,djc/runa,djc/runa | runac/__init__.py | runac/__init__.py | from . import tokenizer, ast, blocks, ti, specialize, codegen
from util import Error
import sys, os, subprocess, tempfile
BASE = os.path.dirname(__path__[0])
CORE_DIR = os.path.join(BASE, 'core')
TRIPLES = {
'darwin': 'x86_64-apple-darwin11.0.0',
'linux2': 'x86_64-pc-linux-gnu',
}
def tokenize(f):
return tokenizer.tokenize(f)
def parse(tokens):
return ast.parse(tokens)
def module(ast):
mod = blocks.Module(ast)
for fn in os.listdir(CORE_DIR):
if not fn.endswith('.rns'):
continue
with open(os.path.join(CORE_DIR, fn)) as f:
mod.merge(blocks.Module(parse(tokenize(f))))
return mod
def type(mod):
ti.typer(mod)
def spec(mod):
specialize.specialize(mod)
def generate(mod):
triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform]
with open('core/rt.ll') as f:
rt = f.read()
return triple + rt + '\n' + codegen.source(mod)
def compile(ir, outfn):
name = outfn + '.ll'
with open(name, 'wb') as f:
f.write(ir)
try:
subprocess.check_call(('clang', '-o', outfn, name))
except OSError as e:
if e.errno == 2:
print 'error: clang not found'
except subprocess.CalledProcessError:
pass
finally:
os.unlink(name)
def full(fn, outfn):
with open(fn) as f:
mod = module(parse(tokenize(f)))
type(mod)
spec(mod)
compile(generate(mod), outfn)
| from . import tokenizer, ast, blocks, ti, specialize, codegen
from util import Error
import sys, os, subprocess, tempfile
BASE = os.path.dirname(__path__[0])
CORE_DIR = os.path.join(BASE, 'core')
TRIPLES = {
'darwin': 'x86_64-apple-darwin11.0.0',
'linux2': 'x86_64-pc-linux-gnu',
}
def tokenize(f):
return tokenizer.tokenize(f)
def parse(tokens):
return ast.parse(tokens)
def module(ast):
mod = blocks.Module(ast)
for fn in os.listdir(CORE_DIR):
if not fn.endswith('.rns'):
continue
with open(os.path.join(CORE_DIR, fn)) as f:
mod.merge(blocks.Module(parse(tokenize(f))))
return mod
def type(mod):
ti.typer(mod)
def spec(mod):
specialize.specialize(mod)
def generate(mod):
triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform]
with open('core/rt.ll') as f:
rt = f.read()
return triple + rt + '\n' + codegen.source(mod)
def compile(ir, outfn):
fd, name = tempfile.mkstemp('.ll', dir='.')
f = os.fdopen(fd, 'wb')
f.write(ir)
f.close()
try:
subprocess.check_call(('clang', '-o', outfn, name))
except OSError as e:
if e.errno == 2:
print 'error: clang not found'
except subprocess.CalledProcessError:
pass
finally:
os.unlink(name)
def full(fn, outfn):
with open(fn) as f:
mod = module(parse(tokenize(f)))
type(mod)
spec(mod)
compile(generate(mod), outfn)
| mit | Python |
a328a1974b985eda47191748e28a69d1e521f070 | 实现FREEBUF的AJAX页面爬取的几种小爬虫-json库解析-科学方法 | NORTHERNhacker/many-spiders | freebufspider2.py | freebufspider2.py | import requests
from bs4 import BeautifulSoup
import json
for i in range(1, 20):
url = 'http://www.freebuf.com/www.freebuf.com?action=ajax_wenku&year=all&score=all&type=all&tech=0&keyword=&page=' + str(
i)
r = requests.get(url)
data = json.loads(r.text)#使用json库解析,科学的做法
soup = BeautifulSoup(data['cont'])
for i in soup.select('h3 a'):
print(i.getText(), i.get('href'))
| import requests
from bs4 import BeautifulSoup
import json
for i in range(1, 20):
url = 'http://www.freebuf.com/www.freebuf.com?action=ajax_wenku&year=all&score=all&type=all&tech=0&keyword=&page=' + str(
i)
r = requests.get(url)
data = json.loads(r.text)
soup = BeautifulSoup(data['cont'])
for i in soup.select('h3 a'):
print(i.getText(), i.get('href'))
| apache-2.0 | Python |
cd9e8c1595e0e987e2ec0067c9532a9778e64ea3 | Update test_plugin.py | kemiz/cloudify-logstash-plugin,cloudify-cosmo/cloudify-logstash-plugin,EarthmanT/cloudify-logstash-plugin | logstash_plugin/tests/test_plugin.py | logstash_plugin/tests/test_plugin.py | ########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
import subprocess
from logstash_test_utils import (
LogstashTestUtils,
DEFAULT_UBUNTU_UNINSTALL,
DEFAULT_LOGSTASH_STOP,
DEFAULT_LOGSTASH_CONFIG_PATH
)
# from cloudify import ctx
class TestLogstashPlugin(LogstashTestUtils):
def SetUp(self):
super(LogstashTestUtils, self).setUp()
self._set_up()
def test_install_static_clean(self):
inputs = self.get_static_config_inputs()
self._set_up(inputs)
self.addCleanup(subprocess.call, DEFAULT_UBUNTU_UNINSTALL)
self.localenv.execute('install', task_retries=10)
self.addCleanup(subprocess.call, DEFAULT_LOGSTASH_STOP)
logstash_started = subprocess.call(
"sudo service logstash status", shell=True)
self.assertIn('started', logstash_started)
self.addCleanup(os.remove, DEFAULT_LOGSTASH_CONFIG_PATH)
with open(DEFAULT_LOGSTASH_CONFIG_PATH, 'r') as default:
self.assertEqual(default.read(), self.get_config())
def test_uninstall_static_clean(self):
self.addCleanup(subprocess.call, DEFAULT_UBUNTU_UNINSTALL)
self.addCleanup(subprocess.call, DEFAULT_LOGSTASH_STOP)
self.addCleanup(os.remove, DEFAULT_LOGSTASH_CONFIG_PATH)
inputs = self.get_static_config_inputs()
self._set_up(inputs)
self.localenv.execute('install', task_retries=10)
self.localenv.execute('uninstall', task_retries=10)
logstash_stopped = subprocess.call(
"sudo service logstash status", shell=True)
self.assertNotIn('started', logstash_stopped)
| ########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
import subprocess
from logstash_test_utils import (
LogstashTestUtils,
DEFAULT_UBUNTU_UNINSTALL,
DEFAULT_LOGSTASH_STOP,
DEFAULT_LOGSTASH_CONFIG_PATH
)
# from cloudify import ctx
class TestLogstashPlugin(LogstashTestUtils):
def SetUp(self):
super(LogstashTestUtils, self).setUp()
self._set_up()
def test_install_static_clean(self):
inputs = self.get_static_config_inputs()
self._set_up(inputs)
self.addCleanup(subprocess.call, DEFAULT_UBUNTU_UNINSTALL)
self.env.execute('install', task_retries=10)
self.addCleanup(subprocess.call, DEFAULT_LOGSTASH_STOP)
logstash_started = subprocess.call(
"sudo service logstash status", shell=True)
self.assertIn('started', logstash_started)
self.addCleanup(os.remove, DEFAULT_LOGSTASH_CONFIG_PATH)
with open(DEFAULT_LOGSTASH_CONFIG_PATH, 'r') as default:
self.assertEqual(default.read(), self.get_config())
def test_uninstall_static_clean(self):
self.addCleanup(subprocess.call, DEFAULT_UBUNTU_UNINSTALL)
self.addCleanup(subprocess.call, DEFAULT_LOGSTASH_STOP)
self.addCleanup(os.remove, DEFAULT_LOGSTASH_CONFIG_PATH)
inputs = self.get_static_config_inputs()
self._set_up(inputs)
self.env.execute('install', task_retries=10)
self.env.execute('uninstall', task_retries=10)
logstash_stopped = subprocess.call(
"sudo service logstash status", shell=True)
self.assertNotIn('started', logstash_stopped)
| apache-2.0 | Python |
74a182a13bae5dde3e2b4fe604a839e5ec05e771 | load palette hoohah | cooperhewitt/py-cooperhewitt-swatchbook | cooperhewitt/swatchbook/palettes/__init__.py | cooperhewitt/swatchbook/palettes/__init__.py | def palettes():
return [
'css3',
'css4'
]
def load_palette(reference):
if not reference in palettes():
raise Exception, "Invalid palette"
# Please figure out the hoo-hah to make dynamic
# loading work (20140623/straup)
if reference == 'css3':
import css3
return css3.colours()
if __name__ == '__main__':
p = load_palette('css5')
print p
| # I blame, Guido
| bsd-3-clause | Python |
220983a4cf75f4e27f5491812de9ff04f4104510 | fix butter_bandpass | vincentadam87/gatsby-hackathon-seizure,vincentadam87/gatsby-hackathon-seizure | code/python/seizures/preprocessing/preprocessing.py | code/python/seizures/preprocessing/preprocessing.py | import scipy.signal as signal
def preprocess_multichannel_data(matrix,fs):
"""
:param matrix: multichannel EEG data
:param fs: sampling frequency
:return: data without mains, electrical artefacts etc
authors: Lea and Vincent
"""
n_channel,m= matrix.shape
for i in range(n_channel):
preprocess_single_channel(matrix[i,:],fs)
def preprocess_single_channel(x,fs):
x = remove_elec_noise(x,fs)
x = anti_alias_filter(x)
x = remove_dc(x)
return x
def remove_dc(x):
"""
Remove mean of signal: use 0.5Hz cut-off hp filter
:return:
"""
x = signal.medfilt(x)
return x
def remove_elec_noise(x,fs):
"""
Bandpass remove:59-61Hz (US); if data from EU/UK 49-51Hz
:return:
"""
bandstop = 60
lowcut = bandstop-1
highcut = bandstop+1
def butter_bandpass(lowcut, highcut, fs, order=5):
nyq = 0.5 * fs
low = lowcut / nyq
high = highcut / nyq
b, a = signal.butter(order, [low, high], btype='band')
return b, a
def butter_bandpass_filter(x, lowcut, highcut, fs, order=5):
b, a = signal.butter_bandpass(lowcut, highcut, fs, order=order)
y = signal.lfilter(b, a, data)
return y
return butter_bandpass_filter(x,fs)
def anti_alias_filter(x,fs):
"""
Anti_aliasing: use Nyquist frequ cutoff low-pass filter
:return:
"""
numtaps = 1
cutoff = 0.5 * fs
x = signal.firwin(numtaps, cutoff)
return x
| import scipy.signal as signal
def preprocess_multichannel_data(matrix,fs):
"""
:param matrix: multichannel EEG data
:param fs: sampling frequency
:return: data without mains, electrical artefacts etc
authors: Lea and Vincent
"""
n_channel,m= matrix.shape
for i in range(n_channel):
preprocess_single_channel(matrix[i,:],fs)
def preprocess_single_channel(x,fs):
x = remove_elec_noise(x,fs)
x = anti_alias_filter(x)
x = remove_dc(x)
return x
def remove_dc(x):
"""
Remove mean of signal: use 0.5Hz cut-off hp filter
:return:
"""
x = signal.medfilt(x)
return x
def remove_elec_noise(x,fs):
"""
Bandpass remove:59-61Hz (US); if data from EU/UK 49-51Hz
:return:
"""
lowcut = 59
highcut = 61
def butter_bandpass(lowcut, highcut, fs, order=5):
nyq = 0.5 * fs
low = lowcut / nyq
high = highcut / nyq
b, a = signal.butter(order, [low, high], btype='band')
return b, a
def butter_bandpass_filter(data, lowcut, highcut, fs, order=5):
b, a = signal.butter_bandpass(lowcut, highcut, fs, order=order)
y = signal.lfilter(b, a, data)
return y
return butter_bandpass_filter(x,fs)
def anti_alias_filter(x,fs):
"""
Anti_aliasing: use Nyquist frequ cutoff low-pass filter
:return:
"""
numtaps = 1
cutoff = 0.5 * fs
x = signal.firwin(numtaps, cutoff)
return x
| bsd-2-clause | Python |
4a4e56a0909d8e89d82462c846f365b0849b3cb4 | add missing import | simpleton/eclipse2buck | generator/aidl.py | generator/aidl.py | #!/usr/bin/python
from eclipse2buck.generator.base_target import BaseTarget
from eclipse2buck.decorator import target
from eclipse2buck.util import util
from eclipse2buck import config
import os
class AIDL(BaseTarget):
"""
generated all aidl targets
"""
aidl_path_list = []
def __init__(self, root, name):
BaseTarget.__init__(self, root, name, config.aidl_suffix)
self.aidl_path_list = self._find_all_aidls(os.path.join(self.lib_path, 'src'))
for aidl_path in self.aidl_path_list:
name = self.target_name(util.path_get_basename(aidl_path))
self.deps.append(":%s" % name)
def dump_src(self):
for aidl in self.aidl_path_list:
#remove .aild
aidl = aidl[:-5]
print "genfile( '%s.java' )," % aidl
def dump(self):
for aidl_path in self.aidl_path_list:
name = self.target_name(util.path_get_basename(aidl_path))
self._gen_aidl_target(name, aidl_path)
def is_existed_aidl(self):
return len(self.aidl_path_list) > 0
def _find_all_aidls(self, relative_path):
path_list = util.find_all_files_with_suffix(relative_path, "*.aidl")
exclude_aidls = ["src/com/tencent/mm/cache/MCacheItem.aidl",
"src/com/tencent/tmassistantsdk/downloadclient/TMAssistantDownloadTaskInfo.aidl"]
#some aidl file needn't be generated
for exclude_aidl in exclude_aidls:
if exclude_aidl in path_list:
path_list.remove(exclude_aidl)
return path_list
@target("gen_aidl")
def _gen_aidl_target(self, aidl_name, path):
"""
print the aidl target
Returns:
str: the target name which lib target should depend on
"""
print "name = '%s'," % aidl_name
print "aidl = '%s'," % path
print "import_path = '%s/src/'," % self.proj_name
| #!/usr/bin/python
from eclipse2buck.generator.base_target import BaseTarget
from eclipse2buck.decorator import target
from eclipse2buck.util import util
from eclipse2buck import config
class AIDL(BaseTarget):
"""
generated all aidl targets
"""
aidl_path_list = []
def __init__(self, root, name):
BaseTarget.__init__(self, root, name, config.aidl_suffix)
self.aidl_path_list = self._find_all_aidls(os.path.join(self.lib_path, 'src'))
for aidl_path in self.aidl_path_list:
name = self.target_name(util.path_get_basename(aidl_path))
self.deps.append(":%s" % name)
def dump_src(self):
for aidl in self.aidl_path_list:
#remove .aild
aidl = aidl[:-5]
print "genfile( '%s.java' )," % aidl
def dump(self):
for aidl_path in self.aidl_path_list:
name = self.target_name(util.path_get_basename(aidl_path))
self._gen_aidl_target(name, aidl_path)
def is_existed_aidl(self):
return len(self.aidl_path_list) > 0
def _find_all_aidls(self, relative_path):
path_list = util.find_all_files_with_suffix(relative_path, "*.aidl")
exclude_aidls = ["src/com/tencent/mm/cache/MCacheItem.aidl",
"src/com/tencent/tmassistantsdk/downloadclient/TMAssistantDownloadTaskInfo.aidl"]
#some aidl file needn't be generated
for exclude_aidl in exclude_aidls:
if exclude_aidl in path_list:
path_list.remove(exclude_aidl)
return path_list
@target("gen_aidl")
def _gen_aidl_target(self, aidl_name, path):
"""
print the aidl target
Returns:
str: the target name which lib target should depend on
"""
print "name = '%s'," % aidl_name
print "aidl = '%s'," % path
print "import_path = '%s/src/'," % self.proj_name
| mit | Python |
fe007b772f5f2ef50d99ce7967df33752f37a17c | adjust tests for is_advanced | qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/export/tests/test_export_item.py | corehq/apps/export/tests/test_export_item.py | from django.test import SimpleTestCase
from corehq.apps.export.models import (
ExportItem,
ExportColumn,
)
class TestExportItemGeneration(SimpleTestCase):
app_id = '1234'
def setUp(self):
self.item = ExportItem(
path=['data', 'question1'],
label='Question One',
last_occurrences={self.app_id: 3},
)
def test_create_default_from_export_item(self):
column = ExportColumn.create_default_from_export_item([None], self.item, {self.app_id: 3})
self.assertEqual(column.is_advanced, False)
self.assertEqual(column.label, 'Question One')
self.assertEqual(column.selected, True)
def test_create_default_from_export_item_deleted(self):
column = ExportColumn.create_default_from_export_item([None], self.item, {self.app_id: 4})
self.assertEqual(column.is_advanced, True)
self.assertEqual(column.label, 'Question One')
self.assertEqual(column.selected, False)
def test_create_default_from_export_item_not_main_table(self):
column = ExportColumn.create_default_from_export_item(['other_table'], self.item, {self.app_id: 3})
self.assertEqual(column.is_advanced, False)
self.assertEqual(column.label, 'Question One')
self.assertEqual(column.selected, False)
| from django.test import SimpleTestCase
from corehq.apps.export.models import (
ExportItem,
ExportColumn,
)
class TestExportItemGeneration(SimpleTestCase):
app_id = '1234'
def setUp(self):
self.item = ExportItem(
path=['data', 'question1'],
label='Question One',
last_occurrences={self.app_id: 3},
)
def test_create_default_from_export_item(self):
column = ExportColumn.create_default_from_export_item([None], self.item, {self.app_id: 3})
self.assertEqual(column.show, True)
self.assertEqual(column.label, 'Question One')
self.assertEqual(column.selected, True)
def test_create_default_from_export_item_deleted(self):
column = ExportColumn.create_default_from_export_item([None], self.item, {self.app_id: 4})
self.assertEqual(column.show, False)
self.assertEqual(column.label, 'Question One')
self.assertEqual(column.selected, False)
def test_create_default_from_export_item_not_main_table(self):
column = ExportColumn.create_default_from_export_item(['other_table'], self.item, {self.app_id: 3})
self.assertEqual(column.show, True)
self.assertEqual(column.label, 'Question One')
self.assertEqual(column.selected, False)
| bsd-3-clause | Python |
6e7a20675cd66d9ca7d4a286958404198369dece | Validate ReplicationTopology data | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/physical/forms/replication_topology.py | dbaas/physical/forms/replication_topology.py | # -*- coding: utf-8 -*-
from django import forms
from django.forms.widgets import SelectMultiple
#from django.forms.widgets import CheckboxSelectMultiple
from ..models import ReplicationTopology, Parameter, DatabaseInfraParameter
class ReplicationTopologyForm(forms.ModelForm):
class Meta:
model = ReplicationTopology
def __init__(self, *args, **kwargs):
super(ReplicationTopologyForm, self).__init__(*args, **kwargs)
self.fields["parameter"].widget = SelectMultiple()
#self.fields["parameter"].widget = CheckboxSelectMultiple()
self.fields["parameter"].queryset = Parameter.objects.all()
self.fields["parameter"].help_text = 'Select the parameters that can be changed in this topology'
def clean(self):
cleaned_data = super(ReplicationTopologyForm, self).clean()
if self.instance.id and 'parameter' in self.changed_data:
form_parameters = cleaned_data.get("parameter")
topology_parameters = Parameter.objects.filter(
replication_topologies=self.instance
)
for topology_parameter in topology_parameters:
if topology_parameter not in form_parameters:
parametersinfra = DatabaseInfraParameter.objects.filter(
parameter=topology_parameter,
databaseinfra__plan__replication_topology=self.instance
)
if parametersinfra:
parameterinfra = parametersinfra[0]
msg = "The parameter {} can not be deleted. It has been set in the databaseinfra {}.".format(
parameterinfra.parameter, parameterinfra.databaseinfra
)
raise forms.ValidationError(msg)
return cleaned_data
| from django import forms
from django.forms.widgets import SelectMultiple
#from django.forms.widgets import CheckboxSelectMultiple
from ..models import ReplicationTopology, Parameter
class ReplicationTopologyForm(forms.ModelForm):
class Meta:
model = ReplicationTopology
def __init__(self, *args, **kwargs):
super(ReplicationTopologyForm, self).__init__(*args, **kwargs)
self.fields["parameter"].widget = SelectMultiple()
#self.fields["parameter"].widget = CheckboxSelectMultiple()
self.fields["parameter"].queryset = Parameter.objects.all()
self.fields["parameter"].help_text = 'Select the parameters that can be changed in this topology'
| bsd-3-clause | Python |
1820001e6ec6960014b5e9cf23eb7a2f8b90c213 | Remove a broken test case from decorators_test | deepmind/dm_control | dm_control/mujoco/testing/decorators_test.py | dm_control/mujoco/testing/decorators_test.py | # Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests of the decorators module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Internal dependencies.
from absl.testing import absltest
from dm_control.mujoco.testing import decorators
import mock
from six.moves import xrange # pylint: disable=redefined-builtin
class RunThreadedTest(absltest.TestCase):
@mock.patch(decorators.__name__ + ".threading")
def test_number_of_threads(self, mock_threading):
num_threads = 5
mock_threads = [mock.MagicMock() for _ in xrange(num_threads)]
for thread in mock_threads:
thread.start = mock.MagicMock()
thread.join = mock.MagicMock()
mock_threading.Thread = mock.MagicMock(side_effect=mock_threads)
test_decorator = decorators.run_threaded(num_threads=num_threads)
tested_method = mock.MagicMock()
tested_method.__name__ = "foo"
test_runner = test_decorator(tested_method)
test_runner(self)
for thread in mock_threads:
thread.start.assert_called_once()
thread.join.assert_called_once()
def test_number_of_iterations(self):
calls_per_thread = 5
tested_method = mock.MagicMock()
tested_method.__name__ = "foo"
test_decorator = decorators.run_threaded(
num_threads=1, calls_per_thread=calls_per_thread)
test_runner = test_decorator(tested_method)
test_runner(self)
self.assertEqual(calls_per_thread, tested_method.call_count)
if __name__ == "__main__":
absltest.main()
| # Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests of the decorators module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Internal dependencies.
from absl.testing import absltest
from absl.testing import parameterized
from dm_control.mujoco.testing import decorators
import mock
from six.moves import xrange # pylint: disable=redefined-builtin
class RunThreadedTest(absltest.TestCase):
@mock.patch(decorators.__name__ + ".threading")
def test_number_of_threads(self, mock_threading):
num_threads = 5
mock_threads = [mock.MagicMock() for _ in xrange(num_threads)]
for thread in mock_threads:
thread.start = mock.MagicMock()
thread.join = mock.MagicMock()
mock_threading.Thread = mock.MagicMock(side_effect=mock_threads)
test_decorator = decorators.run_threaded(num_threads=num_threads)
tested_method = mock.MagicMock()
tested_method.__name__ = "foo"
test_runner = test_decorator(tested_method)
test_runner(self)
for thread in mock_threads:
thread.start.assert_called_once()
thread.join.assert_called_once()
def test_number_of_iterations(self):
calls_per_thread = 5
tested_method = mock.MagicMock()
tested_method.__name__ = "foo"
test_decorator = decorators.run_threaded(
num_threads=1, calls_per_thread=calls_per_thread)
test_runner = test_decorator(tested_method)
test_runner(self)
self.assertEqual(calls_per_thread, tested_method.call_count)
def test_works_with_named_parameters(self):
func = mock.MagicMock()
names = ["foo", "bar", "baz"]
params = [1, 2, 3]
calls_per_thread = 2
num_threads = 4
class FakeTest(parameterized.TestCase):
@parameterized.named_parameters(zip(names, params))
@decorators.run_threaded(calls_per_thread=calls_per_thread,
num_threads=num_threads)
def test_method(self, param):
func(param)
suite = absltest.TestLoader().loadTestsFromTestCase(FakeTest)
suite.debug() # Run tests without collecting the output.
expected_call_count = len(params) * calls_per_thread * num_threads
self.assertEqual(func.call_count, expected_call_count)
actual_params = {call[0][0] for call in func.call_args_list}
self.assertSetEqual(set(params), actual_params)
if __name__ == "__main__":
absltest.main()
| apache-2.0 | Python |
b58d296373ed4ba75d0e6409e332e70abea76086 | add more axes labels | boada/vpCluster,boada/vpCluster | data/boada/analysis_all/redshifts/redshift_stats.py | data/boada/analysis_all/redshifts/redshift_stats.py | import pandas as pd
import pylab as pyl
from glob import glob
files = glob('*.csv')
for f in files:
results = pd.read_csv(f)
# good redshifts
try:
q0 = pyl.append(q0, results[results.Q == 0].r.values)
q1 = pyl.append(q1, results[results.Q == 1].r.values)
x = ~pyl.isnan(results.fiber) & pyl.isnan(results.Q)
q2 = pyl.append(q2, results.r.values[x.values])
except NameError:
q0 = results[results.Q==0].r.values
q1 = results[results.Q==1].r.values
x = ~pyl.isnan(results.fiber) & pyl.isnan(results.Q)
q2 = results.r.values[x.values]
# make a figure
f = pyl.figure(1,figsize=(5,5*(pyl.sqrt(5.)-1.0)/2.0))
ax = f.add_subplot(111)
bins = pyl.linspace(14,22,15)
ax.hist(q2, weights=pyl.zeros_like(q2)+1./q2.size, histtype='step', bins=bins,
lw=2, label='Q=2')
ax.hist(q1, weights=pyl.zeros_like(q1)+1./q1.size, histtype='step', bins=bins,
lw=2, label='Q=1')
q0 = q0[~pyl.isnan(q0)]
ax.hist(q0, weights=pyl.zeros_like(q0)+1./q0.size, histtype='step', bins=bins,
lw=2, label='Q=0')
ax.legend(loc='upper right')
ax.invert_xaxis()
ax.set_ylim(0,0.5)
ax.set_xlabel('$m_r$')
ax.set_ylabel('Fraction of Total')
pyl.show()
| import pandas as pd
import pylab as pyl
from glob import glob
files = glob('*.csv')
for f in files:
results = pd.read_csv(f)
# good redshifts
try:
q0 = pyl.append(q0, results[results.Q == 0].r.values)
q1 = pyl.append(q1, results[results.Q == 1].r.values)
x = ~pyl.isnan(results.fiber) & pyl.isnan(results.Q)
q2 = pyl.append(q2, results.r.values[x.values])
except NameError:
q0 = results[results.Q==0].r.values
q1 = results[results.Q==1].r.values
x = ~pyl.isnan(results.fiber) & pyl.isnan(results.Q)
q2 = results.r.values[x.values]
bins = pyl.linspace(14,22,15)
pyl.hist(q2, weights=pyl.zeros_like(q2)+1./q2.size, histtype='step', bins=bins,
lw=2, label='Q=2')
pyl.hist(q1, weights=pyl.zeros_like(q1)+1./q1.size, histtype='step', bins=bins,
lw=2, label='Q=1')
q0 = q0[~pyl.isnan(q0)]
pyl.hist(q0, weights=pyl.zeros_like(q0)+1./q0.size, histtype='step', bins=bins,
lw=2, label='Q=0')
pyl.legend(loc='upper right')
pyl.gca().invert_xaxis()
pyl.ylim(0,0.5)
pyl.xlabel('$m_r$')
| mit | Python |
b8399e48872271ccac6431d9f875238ff509a03a | Increment number of JS files in test_js_load | SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree | InvenTree/InvenTree/test_views.py | InvenTree/InvenTree/test_views.py | """
Unit tests for the main web views
"""
import re
import os
from django.test import TestCase
from django.urls import reverse
from django.contrib.auth import get_user_model
class ViewTests(TestCase):
""" Tests for various top-level views """
username = 'test_user'
password = 'test_pass'
def setUp(self):
# Create a user
self.user = get_user_model().objects.create_user(self.username, 'user@email.com', self.password)
self.user.set_password(self.password)
self.user.save()
result = self.client.login(username=self.username, password=self.password)
self.assertEqual(result, True)
def test_api_doc(self):
""" Test that the api-doc view works """
api_url = os.path.join(reverse('index'), 'api-doc') + '/'
response = self.client.get(api_url)
self.assertEqual(response.status_code, 200)
def test_index_redirect(self):
"""
top-level URL should redirect to "index" page
"""
response = self.client.get("/")
self.assertEqual(response.status_code, 302)
def get_index_page(self):
"""
Retrieve the index page (used for subsequent unit tests)
"""
response = self.client.get("/index/")
self.assertEqual(response.status_code, 200)
return str(response.content.decode())
def test_panels(self):
"""
Test that the required 'panels' are present
"""
content = self.get_index_page()
self.assertIn("<div id='detail-panels'>", content)
# TODO: In future, run the javascript and ensure that the panels get created!
def test_js_load(self):
"""
Test that the required javascript files are loaded correctly
"""
# Change this number as more javascript files are added to the index page
N_SCRIPT_FILES = 40
content = self.get_index_page()
# Extract all required javascript files from the index page content
script_files = re.findall("<script type='text\\/javascript' src=\"([^\"]*)\"><\\/script>", content)
self.assertEqual(len(script_files), N_SCRIPT_FILES)
# TODO: Request the javascript files from the server, and ensure they are correcty loaded
| """
Unit tests for the main web views
"""
import re
import os
from django.test import TestCase
from django.urls import reverse
from django.contrib.auth import get_user_model
class ViewTests(TestCase):
""" Tests for various top-level views """
username = 'test_user'
password = 'test_pass'
def setUp(self):
# Create a user
self.user = get_user_model().objects.create_user(self.username, 'user@email.com', self.password)
self.user.set_password(self.password)
self.user.save()
result = self.client.login(username=self.username, password=self.password)
self.assertEqual(result, True)
def test_api_doc(self):
""" Test that the api-doc view works """
api_url = os.path.join(reverse('index'), 'api-doc') + '/'
response = self.client.get(api_url)
self.assertEqual(response.status_code, 200)
def test_index_redirect(self):
"""
top-level URL should redirect to "index" page
"""
response = self.client.get("/")
self.assertEqual(response.status_code, 302)
def get_index_page(self):
"""
Retrieve the index page (used for subsequent unit tests)
"""
response = self.client.get("/index/")
self.assertEqual(response.status_code, 200)
return str(response.content.decode())
def test_panels(self):
"""
Test that the required 'panels' are present
"""
content = self.get_index_page()
self.assertIn("<div id='detail-panels'>", content)
# TODO: In future, run the javascript and ensure that the panels get created!
def test_js_load(self):
"""
Test that the required javascript files are loaded correctly
"""
# Change this number as more javascript files are added to the index page
N_SCRIPT_FILES = 39
content = self.get_index_page()
# Extract all required javascript files from the index page content
script_files = re.findall("<script type='text\\/javascript' src=\"([^\"]*)\"><\\/script>", content)
self.assertEqual(len(script_files), N_SCRIPT_FILES)
# TODO: Request the javascript files from the server, and ensure they are correcty loaded
| mit | Python |
83b6e177fccaef7d62682c25a0e82f29bcba01e6 | Remove autofilling "GSSAPI" mechanism in hue.ini | mapr/hue,mapr/hue,mapr/hue,mapr/hue,mapr/hue,mapr/hue,mapr/hue | desktop/core/src/desktop/lib/mapr_config_changer.py | desktop/core/src/desktop/lib/mapr_config_changer.py | import re
import os
MAPR_SECURITY = "MAPR-SECURITY"
SECURE = "secure"
SECURITY_ENABLED = 'security_enabled'
MECHANISM = 'mechanism'
MAPR_CLUSTERS_CONF_PATH = "/opt/mapr/conf/mapr-clusters.conf"
templates = {
MECHANISM: 'none',
SECURITY_ENABLED: 'false'
}
def read_values_from_mapr_clusters_conf():
if not os.path.exists(MAPR_CLUSTERS_CONF_PATH):
return
mapr_clusters_conf = open(MAPR_CLUSTERS_CONF_PATH, "r").read()
cluster_props = dict(re.findall(r'(\S+)=(".*?"|\S+)', mapr_clusters_conf))
templates[SECURITY_ENABLED] = cluster_props[SECURE] if SECURE in cluster_props else "false"
if templates[SECURITY_ENABLED] == "true":
templates[MECHANISM] = MAPR_SECURITY
templateRegEx = re.compile(r'^\${(.+?)}')
def change_config(config):
for key in config:
if isinstance(config[key], dict):
change_config(config[key])
elif type(config[key]) == str:
match = templateRegEx.search(config[key])
if (match != None) and (match.group(1) in templates):
config[key] = templates[match.group(1)]
return config
def fill_templates(config):
read_values_from_mapr_clusters_conf()
change_config(config)
return config
| import re
import os
GSSAPI = "GSSAPI"
MAPR_SECURITY = "MAPR-SECURITY"
KERBEROS_ENABLE = "kerberosEnable"
SECURE = "secure"
SECURITY_ENABLED = 'security_enabled'
MECHANISM = 'mechanism'
MAPR_CLUSTERS_CONF_PATH = "/opt/mapr/conf/mapr-clusters.conf"
templates = {
MECHANISM: 'none',
SECURITY_ENABLED: 'false'
}
def read_values_from_mapr_clusters_conf():
if not os.path.exists(MAPR_CLUSTERS_CONF_PATH):
return
mapr_clusters_conf = open(MAPR_CLUSTERS_CONF_PATH, "r").read()
cluster_props = dict(re.findall(r'(\S+)=(".*?"|\S+)', mapr_clusters_conf))
templates[SECURITY_ENABLED] = cluster_props[SECURE] if SECURE in cluster_props else "false"
if templates[SECURITY_ENABLED] == "true":
templates[MECHANISM] = MAPR_SECURITY
if (KERBEROS_ENABLE in cluster_props) and (cluster_props[KERBEROS_ENABLE] == "true"):
templates[MECHANISM] = GSSAPI
templateRegEx = re.compile(r'^\${(.+?)}')
def change_config(config):
for key in config:
if isinstance(config[key], dict):
change_config(config[key])
elif type(config[key]) == str:
match = templateRegEx.search(config[key])
if (match != None) and (match.group(1) in templates):
config[key] = templates[match.group(1)]
return config
def fill_templates(config):
read_values_from_mapr_clusters_conf()
change_config(config)
return config
| apache-2.0 | Python |
06277ea30094ff6669537f2365b6ad9f5a19642b | Update laundry.py | JamesKBowler/fxcmminer | fxcmminer_v1.1/fxcmminer/cleaning/laundry.py | fxcmminer_v1.1/fxcmminer/cleaning/laundry.py | from event import CleanedDataEvent
class DataCleaner(object):
"""
The DataCleaner class is the process of correcting
(or removing) corrupt or inaccurate records from a record set
and refers to identifying incomplete, incorrect, inaccurate
or irrelevant parts of the data and then replacing,
modifying, or deleting the dirty or coarse data.
Most of the above is not implemented in the code below.
"""
def __init__(self, events_queue):
""" Initialize varables """
self.events_queue = events_queue
def _remove_duplicates(self, data):
""" Drop any duplicates in the Datetime Index """
return data.reset_index().drop_duplicates(
subset='date', keep='last').set_index('date')
def _remove_not_a_number(self, data):
""" Drop any rows that contain NaN values """
return data.dropna()
def _remove_incorrect_values(
self, data, ao='askopen',ah='askhigh', al='asklow',
ac='askclose', bo='bidopen',bh='bidhigh', bl='bidlow',
bc='bidclose', v='volume'
):
""" Removes errors from the open high low close values """
data = data.loc[data[ac] <= data[ah]]
data = data.loc[data[ac] >= data[al]]
data = data.loc[data[ao] <= data[ah]]
data = data.loc[data[ao] >= data[al]]
data = data.loc[data[ah] >= data[al]]
data = data.loc[data[bc] <= data[bh]]
data = data.loc[data[bc] >= data[bl]]
data = data.loc[data[bo] <= data[bh]]
data = data.loc[data[bo] >= data[bl]]
data = data.loc[data[bh] >= data[bl]]
data = data.loc[data[v] >= 0]
return data
def clean_data(self, event):
""" Encapsulates the above cleaning processes """
data = self._remove_not_a_number(event.data)
data = self._remove_incorrect_values(data)
data = self._remove_duplicates(data)
self.events_queue.put(CleanedDataEvent(
data, event.instrument, event.time_frame))
| from event import CleanedDataEvent
class DataCleaner(object):
"""
Basic data cleaning
"""
def __init__(self, events_queue):
"""
"""
self.events_queue = events_queue
def _remove_duplicates(self, data):
"""
Drop any duplicates in the Datetime Index
"""
return data.reset_index().drop_duplicates(
subset='date', keep='last').set_index('date')
def _remove_not_a_number(self, data):
"""
Drop any rows that contain NaN values.
"""
return data.dropna()
def _remove_incorrect_values(
self, data, ao='askopen',ah='askhigh', al='asklow',
ac='askclose', bo='bidopen',bh='bidhigh', bl='bidlow',
bc='bidclose', v='volume'
):
"""
Removes errors from the open high low close values.
"""
data = data.loc[data[ac] <= data[ah]]
data = data.loc[data[ac] >= data[al]]
data = data.loc[data[ao] <= data[ah]]
data = data.loc[data[ao] >= data[al]]
data = data.loc[data[ah] >= data[al]]
data = data.loc[data[bc] <= data[bh]]
data = data.loc[data[bc] >= data[bl]]
data = data.loc[data[bo] <= data[bh]]
data = data.loc[data[bo] >= data[bl]]
data = data.loc[data[bh] >= data[bl]]
data = data.loc[data[v] >= 0]
return data
def clean_data(self, event):
data = self._remove_not_a_number(event.data)
data = self._remove_incorrect_values(data)
data = self._remove_duplicates(data)
self.events_queue.put(CleanedDataEvent(
data, event.instrument, event.time_frame))
| mit | Python |
4844ba065d86fdce3f01b7b191ecc6a4ef43661e | Add autoclass directives to Visualization/__init__.py. This will enable Visualization module methods to appear in function reference. | amdouglas/OpenPNM,PMEAL/OpenPNM,TomTranter/OpenPNM,stadelmanma/OpenPNM,amdouglas/OpenPNM | OpenPNM/Visualization/__init__.py | OpenPNM/Visualization/__init__.py | r"""
*******************************************************************************
:mod:`OpenPNM.Visualization`: Network Visualization
*******************************************************************************
.. module:: OpenPNM.Visualization
Contents
--------
tbd
.. note::
n/a
Classes
-------
.. autoclass:: GenericVisualization
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: VTK
:members:
:undoc-members:
:show-inheritance:
"""
from __GenericVisualization__ import GenericVisualization
from __VTK__ import VTK
| r"""
*******************************************************************************
:mod:`OpenPNM.Visualization`: Network Visualization
*******************************************************************************
.. module:: OpenPNM.Visualization
Contents
--------
tbd
.. note::
n/a
Import
------
>>> import OpenPNM as PNM
>>> tmp=PNM.Visualization ....
Submodules
----------
::
None --- No subpackages at the moment
"""
from __GenericVisualization__ import GenericVisualization
from __VTK__ import VTK
| mit | Python |
6e6ccc8566fe90323d900fd0ebd38f45ad4d0b63 | Update TipCalculator.py | ZEUSOFCS/Python | PracticePrograms/TipCalculator.py | PracticePrograms/TipCalculator.py | '''
Author : DORIAN JAVA BROWN
Version : N/A
Copyright : All Rights Reserve; You may use, distribute and modify this code.
Description : This program provides the user with options on how much tip the customer should leave the waiter/waitress
'''
import os
total = 21.49
def cls():
os.system('cls' if os.name=='nt' else 'clear')
# menu
print('\n\n\t\t JUICEY BURGER\n\n')
print('')
print('1 Juicey Burger $ 18.99')
print('1 Orange Drink $ 1.00')
print('-------------------------------------------')
print('')
print('Sub Total: $ 19.99')
print('Local Tax: $ 1.50')
print('Bill Total: $ 21.49')
print('\n\n')
answer = raw_input('Correct ? ')
if answer == 'YES' or answer == 'Yes' or answer == 'yes' :
cls()
# tip suggestion list
print('\n\n\t Tip Suggestions')
print('----------------------------------')
print('A) %%20 $ %0.3f' %((total * .20)))
print('B) %%15 $ %0.3f' %((total * .15)))
print('C) %%10 $ %0.3f' %((total * .10)))
print('D) %%5 $ %0.3f' %((total * .05)))
elif answer == 'NO' or answer == 'No' or answer == 'no' :
print ('\n\n\t\t please wait one moment for assistance...\n\n')
else:
print('\n\n\t\t error:. invaild value \n\n')
#https://www.youtube.com/watch?annotation_id=annotation_3770292585&feature=iv&src_vid=bguKhMnvmb8&v=LtGEp9c6Z-U
| '''
Author : DORIAN JAVA BROWN
Version : N/A
Copyright : All Rights Reserve; You may use, distribute and modify this code.
Description : This program provides the user with options on how much tip the customer should leave the waiter/waitress
'''
import os
total = 21.49
def cls():
os.system('cls' if os.name=='nt' else 'clear')
# menu
print('\n\n\t\t JUICEY BURGER\n\n')
print('')
print('1 Juicey Burger $ 18.99')
print('1 Orange Drink $ 1.00')
print('-------------------------------------------')
print('')
print('Sub Total: $ 19.99')
print('Local Tax: $ 1.50')
print('Bill Total: $ 21.49')
print('\n\n')
answer = raw_input('Correct ? ')
if answer == 'YES' or answer == 'Yes' or answer == 'yes' :
cls()
# tip suggestion list
print('\n\n\t Tip Suggestions')
print('----------------------------------')
print('A) %%20 $ %0.3f' %((total * .20)))
print('B) %%20 $ %0.3f' %((total * .20)))
print('C) %%20 $ %0.3f' %((total * .20)))
print('D) %%20 $ %0.3f' %((total * .20)))
elif answer == 'NO' or answer == 'No' or answer == 'no' :
print ('\n\n\t\t please wait one moment for assistance...\n\n')
else:
print('\n\n\t\t error:. invaild value \n\n')
#https://www.youtube.com/watch?annotation_id=annotation_3770292585&feature=iv&src_vid=bguKhMnvmb8&v=LtGEp9c6Z-U
| mit | Python |
953ce15f2a3b2ffdc0e27d95afbe4f8cda2cdbfd | set default behavior to add datacenters | softlayer/softlayer-python,allmightyspiff/softlayer-python | SoftLayer/CLI/image/datacenter.py | SoftLayer/CLI/image/datacenter.py | """Edit details of an image."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import helpers
@click.command()
@click.argument('identifier')
@click.option('--add/--remove', default=True,
help="To add or remove Datacenter")
@click.argument('locations', nargs=-1, required=True)
@environment.pass_env
def cli(env, identifier, add, locations):
"""Add/Remove datacenter of an image."""
image_mgr = SoftLayer.ImageManager(env.client)
image_id = helpers.resolve_id(image_mgr.resolve_ids, identifier, 'image')
if add:
result = image_mgr.add_locations(image_id, locations)
else:
result = image_mgr.remove_locations(image_id, locations)
env.fout(result)
| """Edit details of an image."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import helpers
@click.command()
@click.argument('identifier')
@click.option('--add/--remove',
default=False,
help="To add or remove Datacenter")
@click.argument('locations', nargs=-1, required=True)
@environment.pass_env
def cli(env, identifier, add, locations):
"""Add/Remove datacenter of an image."""
image_mgr = SoftLayer.ImageManager(env.client)
image_id = helpers.resolve_id(image_mgr.resolve_ids, identifier, 'image')
if add:
result = image_mgr.add_locations(image_id, locations)
else:
result = image_mgr.remove_locations(image_id, locations)
env.fout(result)
| mit | Python |
cdd28cba2c6299e18b5d5221f8d10b8649c1faed | Use numpy | chainer/chainer,kikusu/chainer,jnishi/chainer,jnishi/chainer,ktnyt/chainer,cupy/cupy,pfnet/chainer,okuta/chainer,wkentaro/chainer,kiyukuta/chainer,jnishi/chainer,keisuke-umezawa/chainer,aonotas/chainer,keisuke-umezawa/chainer,AlpacaDB/chainer,niboshi/chainer,cupy/cupy,kashif/chainer,tkerola/chainer,cupy/cupy,hvy/chainer,chainer/chainer,cupy/cupy,benob/chainer,wkentaro/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer,kikusu/chainer,niboshi/chainer,ronekko/chainer,hvy/chainer,ktnyt/chainer,hvy/chainer,okuta/chainer,delta2323/chainer,anaruse/chainer,niboshi/chainer,ysekky/chainer,hvy/chainer,ktnyt/chainer,jnishi/chainer,sinhrks/chainer,keisuke-umezawa/chainer,sinhrks/chainer,chainer/chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,rezoo/chainer,cemoody/chainer,niboshi/chainer,AlpacaDB/chainer,okuta/chainer,benob/chainer | tests/chainer_tests/functions_tests/array_tests/test_expand_dims.py | tests/chainer_tests/functions_tests/array_tests/test_expand_dims.py | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
@testing.parameterize(
{'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': 0},
{'in_shape': (3, 2), 'out_shape': (3, 1, 2), 'axis': 1},
{'in_shape': (3, 2), 'out_shape': (3, 2, 1), 'axis': 2},
{'in_shape': (3, 2), 'out_shape': (3, 2, 1), 'axis': -1},
{'in_shape': (3, 2), 'out_shape': (3, 1, 2), 'axis': -2},
{'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': -3},
)
class TestExpandDims(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, self.in_shape) \
.astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, self.out_shape) \
.astype(numpy.float32)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
y = functions.expand_dims(x, self.axis)
self.assertEqual(y.data.shape, self.out_shape)
y_expect = numpy.expand_dims(cuda.to_cpu(x_data), self.axis)
numpy.testing.assert_array_equal(cuda.to_cpu(y.data), y_expect)
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = functions.expand_dims(x, self.axis)
y.grad = y_grad
y.backward()
func = y.creator
f = lambda: func.forward((x_data,))
gx, = gradient_check.numerical_grad(f, (x_data,), (y_grad,))
gradient_check.assert_allclose(cuda.to_cpu(x.grad),
cuda.to_cpu(gx))
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward(self):
self.check_forward(cuda.to_gpu(self.x))
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_backward(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
| import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
@testing.parameterize(
{'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': 0},
{'in_shape': (3, 2), 'out_shape': (3, 1, 2), 'axis': 1},
{'in_shape': (3, 2), 'out_shape': (3, 2, 1), 'axis': 2},
{'in_shape': (3, 2), 'out_shape': (3, 2, 1), 'axis': -1},
{'in_shape': (3, 2), 'out_shape': (3, 1, 2), 'axis': -2},
{'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': -3},
)
class TestExpandDims(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, self.in_shape) \
.astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, self.out_shape) \
.astype(numpy.float32)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
y = functions.expand_dims(x, self.axis)
self.assertEqual(y.data.shape, self.out_shape)
y_expect = numpy.expand_dims(cuda.to_cpu(x_data), self.axis)
cuda.cupy.testing.assert_array_equal(y.data, y_expect)
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = functions.expand_dims(x, self.axis)
y.grad = y_grad
y.backward()
func = y.creator
f = lambda: func.forward((x_data,))
gx, = gradient_check.numerical_grad(f, (x_data,), (y_grad,))
gradient_check.assert_allclose(cuda.to_cpu(x.grad),
cuda.to_cpu(gx))
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward(self):
self.check_forward(cuda.to_gpu(self.x))
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_backward(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
| mit | Python |
d1fa13bdf3ca7d1c4eabdaace5758d6b031ef909 | Set up the download url, which I forgot about. | Rhombik/rhombik-object-repository,Rhombik/rhombik-object-repository,Rhombik/rhombik-object-repository,Rhombik/rhombik-object-repository,Rhombik/rhombik-object-repository | exampleSettings/urls.py | exampleSettings/urls.py | from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'post.views.list', name='home'),
(r'^post/(.*)/$', 'post.views.post'),
# Examples:
# url(r'^$', 'exampleSettings.views.home', name='home'),
# url(r'^exampleSettings/', include('exampleSettings.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^edit/(.*)/$', 'post.views.edit'),
(r'^create/$', 'post.views.create'),
(r'^tag/(.*)/$', 'post.views.tag'),
(r'^tagcloud/$', 'post.views.tagcloud' ),
#Search urls
(r'^search/', include('haystack.urls')),
#captcha urls
url(r'^captcha/', include('captcha.urls')),
(r'^register/$', 'userProfile.views.register'),
(r'^login/$', 'userProfile.views.login_user'),
(r'^logout/$', 'userProfile.views.logout_user'),
(r'^editProfile/$', 'userProfile.views.edit'),
#preview pages for the STL files.
(r'^thumbs/jsc3d/(.*)', 'thumbnailer.views.stlthumb'),
(r'^preview/jsc3d/(.*)', 'thumbnailer.views.stlview'),
url(r'', include('multiuploader.urls')),
(r'^taggit_autosuggest/', include('taggit_autosuggest.urls')),
#user profile pages
url(r'^userProfile/', include('userProfile.urls')),
(r'^editUser/', 'userProfile.views.edit'),
(r'^download/(.*)/$', 'filemanager.views.download'),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'post.views.list', name='home'),
(r'^post/(.*)/$', 'post.views.post'),
# Examples:
# url(r'^$', 'exampleSettings.views.home', name='home'),
# url(r'^exampleSettings/', include('exampleSettings.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^edit/(.*)/$', 'post.views.edit'),
(r'^create/$', 'post.views.create'),
(r'^tag/(.*)/$', 'post.views.tag'),
(r'^tagcloud/$', 'post.views.tagcloud' ),
(r'^editorg/(.*)/$', 'organization.views.orgedit'),
#Search urls
(r'^search/', include('haystack.urls')),
#captcha urls
url(r'^captcha/', include('captcha.urls')),
(r'^register/$', 'userProfile.views.register'),
(r'^login/$', 'userProfile.views.login_user'),
(r'^logout/$', 'userProfile.views.logout_user'),
(r'^editProfile/$', 'userProfile.views.edit'),
#preview pages for the STL files.
(r'^thumbs/jsc3d/(.*)', 'thumbnailer.views.stlthumb'),
(r'^preview/jsc3d/(.*)', 'thumbnailer.views.stlview'),
url(r'', include('multiuploader.urls')),
(r'^taggit_autosuggest/', include('taggit_autosuggest.urls')),
#user profile pages
url(r'^userProfile/', include('userProfile.urls')),
(r'^editUser/', 'userProfile.views.edit'),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| agpl-3.0 | Python |
5b1eefb315cd9094de8c8827e0f3a8c0eeefe95a | delete view: make sure item is closed before it is removed from storage | bepasty/bepasty-server,makefu/bepasty-server,bepasty/bepasty-server,bepasty/bepasty-server,makefu/bepasty-server,bepasty/bepasty-server,makefu/bepasty-server | bepasty/views/delete.py | bepasty/views/delete.py | # Copyright: 2014 Dennis Schmalacker <github@progde.de>
# License: BSD 2-clause, see LICENSE for details.
import errno
from flask import current_app, redirect, url_for, render_template, abort
from flask.views import MethodView
from werkzeug.exceptions import NotFound
from . import blueprint
from ..utils.permissions import *
class DeleteView(MethodView):
def get(self, name):
if not may(DELETE):
abort(403)
try:
with current_app.storage.open(name) as item:
if item.meta.get('locked'):
error = 'File locked.'
elif not item.meta.get('complete'):
error = 'Upload incomplete. Try again later.'
else:
error = None
if error:
return render_template('display_error.html', name=name, item=item, error=error), 409
current_app.storage.remove(name)
except (OSError, IOError) as e:
if e.errno == errno.ENOENT:
return render_template('file_not_found.html'), 404
raise
return redirect(url_for('bepasty.display', name=name))
blueprint.add_url_rule('/<itemname:name>/+delete', view_func=DeleteView.as_view('delete'))
| # Copyright: 2014 Dennis Schmalacker <github@progde.de>
# License: BSD 2-clause, see LICENSE for details.
import errno
from flask import current_app, redirect, url_for, render_template, abort
from flask.views import MethodView
from werkzeug.exceptions import NotFound
from . import blueprint
from ..utils.permissions import *
class DeleteView(MethodView):
def get(self, name):
if not may(DELETE):
abort(403)
try:
item = current_app.storage.open(name)
except (OSError, IOError) as e:
if e.errno == errno.ENOENT:
return render_template('file_not_found.html'), 404
raise
if item.meta.get('locked'):
error = 'File locked.'
elif not item.meta.get('complete'):
error = 'Upload incomplete. Try again later.'
else:
error = None
if error:
try:
return render_template('display_error.html', name=name, item=item, error=error), 409
finally:
item.close()
try:
item = current_app.storage.remove(name)
except (OSError, IOError) as e:
if e.errno == errno.ENOENT:
return render_template('file_not_found.html'), 404
return redirect(url_for('bepasty.display', name=name))
blueprint.add_url_rule('/<itemname:name>/+delete', view_func=DeleteView.as_view('delete'))
| bsd-2-clause | Python |
aa5259efac8f7fbe8e2afd263198feaaa45fc4c3 | Change test for running on Tingbot | furbrain/tingbot-python | tingbot/platform_specific/__init__.py | tingbot/platform_specific/__init__.py | import platform, os
def is_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
if platform.system() == 'Darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_tingbot():
from pi import fixup_env, create_main_surface, register_button_callback
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
| import platform
def is_tingbot():
"""return True if running as a tingbot. We can update this function to be more smart in future"""
return platform.machine().startswith('armv71')
if platform.system() == 'Darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_tingbot():
from pi import fixup_env, create_main_surface, register_button_callback
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
| bsd-2-clause | Python |
6f0676877f5c23c0e6d04422cb8365e16958eb82 | document potential for streaming | benbroce3/PiCamServer,benbroce3/PiCamServer,benbroce3/PiCamServer,benbroce3/PiCamServer | camerav4.py | camerav4.py | import picamera
from picamera import PiCamera
import time
from datetime import datetime
import os.path
from subprocess32 import Popen
print "\nSecurity Camera Logger v3 | Ben Broce & William Hampton\n\n"
print "Streams video to vids/vidstream.h264 | Captures to pics/[timestamp].jpg"
print "Ctrl-C quits.\n\n"
stream = raw_input("Should I stream video (y/n)? ")
length = float(raw_input("How long should I run (in minutes): "))*60
interval = float(raw_input("How often should I take a picture (in seconds): "))
print "Running..."
#http://www.raspberry-projects.com/pi/pi-hardware/raspberry-pi-camera/streaming-video-using-vlc-player
#http://www.diveintopython.net/scripts_and_streams/stdin_stdout_stderr.html
#Ouput video (record) => stream => stdout => | => cvlc livestream => browser
if stream == "y":
Popen(["./livestream.sh"])
camera = PiCamera()
camera.annotate_background = picamera.Color('black')
camera.rotation = 180
camera.resolution = (640, 480)
counter = 0
try:
camera.start_preview()
while (counter <= length):
timestamp = datetime.now().strftime("%m-%d-%Y_%H:%M:%S")
camera.annotate_text = timestamp
path = '/var/www/PiCamServer/pics/' + timestamp + '.jpg'
camera.capture(path, use_video_port=True)
time.sleep(interval)
counter += interval
finally:
print "Exiting..."
camera.stop_preview()
| import picamera
from picamera import PiCamera
import time
from datetime import datetime
import os.path
from subprocess32 import Popen
print "\nSecurity Camera Logger v3 | Ben Broce & William Hampton\n\n"
print "Streams video to vids/vidstream.h264 | Captures to pics/[timestamp].jpg"
print "Ctrl-C quits.\n\n"
stream = raw_input("Should I stream video (y/n)? ")
length = float(raw_input("How long should I run (in minutes): "))*60
interval = float(raw_input("How often should I take a picture (in seconds): "))
print "Running..."
if stream == "y":
Popen(["./livestream.sh"])
camera = PiCamera()
camera.annotate_background = picamera.Color('black')
camera.rotation = 180
camera.resolution = (640, 480)
counter = 0
try:
camera.start_preview()
while (counter <= length):
timestamp = datetime.now().strftime("%m-%d-%Y_%H:%M:%S")
camera.annotate_text = timestamp
path = '/var/www/PiCamServer/pics/' + timestamp + '.jpg'
camera.capture(path, use_video_port=True)
time.sleep(interval)
counter += interval
finally:
print "Exiting..."
camera.stop_preview()
| mit | Python |
48a30aade7e606e671db44e8ee69092c0e67b363 | Complete lc051_n_queens.py | bowen0701/algorithms_data_structures | lc051_n_queens.py | lc051_n_queens.py | """Leetcode 51. N-Queens.
Hard.
URL: https://leetcode.com/problems/n-queens/
The n-queens puzzle is the problem of placing n queens on an nxn chessboard
such that no two queens attack each other.
Given an integer n, return all distinct solutions to the n-queens puzzle.
Each solution contains a distinct board configuration of the n-queens' placement,
where 'Q' and '.' both indicate a queen and an empty space respectively.
Example:
Input: 4
Output: [
[".Q..", // Solution 1
"...Q",
"Q...",
"..Q."],
["..Q.", // Solution 2
"Q...",
"...Q",
".Q.."]
]
Explanation: There exist two distinct solutions to the 4-queens puzzle
as shown above.
"""
class Solution(object):
def _is_valid(self, queens):
"""Check current queen position is valid among previous queens."""
current_row, current_col = len(queens) - 1, queens[-1]
# Check any queens can attack the current queen.
for row, col in enumerate(queens[:-1]):
col_diff = abs(current_col - col)
row_diff = abs(current_row - row)
if col_diff == 0 or col_diff == row_diff:
return False
return True
def _dfs(self, n, res=[], queens=[]):
"""DFS for putting queens in suitable position."""
if n == len(queens):
res.append(queens[:])
return None
for col in range(n):
# Append current queen's column id.
queens.append(col)
if self._is_valid(queens):
# If current queen's position is valid, search the next level.
self._dfs(n, res, queens)
# Backtrack by poping out current queen.
queens.pop()
def solveNQueens(self, n):
"""
:type n: int
:rtype: List[List[str]]
Time complexity: O(n!).
Space complexity: O(n).
"""
# res to collect multiple solutions for n_queens.
res = []
# queens is an 1-d array to store the column ids of queens.
queens = []
self._dfs(n, res, queens)
# Make solution configs.
sol = [['.'*j + 'Q' + '.'*(n - j - 1) for j in queens]
for queens in res]
return sol
def main():
n = 4
print Solution().solveNQueens(n)
if __name__ == '__main__':
main()
| """Leetcode 51. N-Queens.
Hard.
URL: https://leetcode.com/problems/n-queens/
The n-queens puzzle is the problem of placing n queens on an nxn chessboard
such that no two queens attack each other.
Given an integer n, return all distinct solutions to the n-queens puzzle.
Each solution contains a distinct board configuration of the n-queens' placement,
where 'Q' and '.' both indicate a queen and an empty space respectively.
Example:
Input: 4
Output: [
[".Q..", // Solution 1
"...Q",
"Q...",
"..Q."],
["..Q.", // Solution 2
"Q...",
"...Q",
".Q.."]
]
Explanation: There exist two distinct solutions to the 4-queens puzzle
as shown above.
"""
class Solution(object):
def _is_valid(self, queens):
"""Check current queen position is valid."""
current_row, current_col = len(queens) - 1, queens[-1]
# Check any queens can attack the current queen.
for row, col in enumerate(queens[:-1]):
col_diff = abs(current_col - col)
row_diff = abs(current_row - row)
if col_diff == 0 or col_diff == row_diff:
return False
return True
def solveNQueens(self, n, res, queens):
"""
:type n: int
:rtype: List[List[str]]
"""
# queens is an 1-d array to store the column ids of queens.
if n == len(queens):
res.append(queens[:])
return res
for col in range(n):
# Append current queen's column id.
queens.append(col)
if self._is_valid(queens):
# If current queen's position is valid, search the next level.
self.solveNQueens(n, res, queens)
# Backtrack by poping out current queens.
queens.pop()
return res
def main():
n = 4
print Solution().solveNQueens(n, [], [])
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
16c1ae09e0288036aae87eb4337c24b23b1e6638 | Clean up some unused imports and comments | timvandermeij/sentiment-analysis,timvandermeij/sentiment-analysis | classify.py | classify.py | from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.ensemble import RandomForestRegressor
from sklearn.pipeline import Pipeline
import numpy as np
import json
import sys
from analyze import Analyzer # for some train data labelling
def main(argv):
group = argv[0] if len(argv) > 0 else "id"
train_size = int(argv[1]) if len(argv) > 1 else 1000
train_data = []
train_labels = []
analyzer = Analyzer(group)
for message, _ in analyzer.read_json(sys.stdin):
label = analyzer.analyze(message)[0]
train_data.append(message)
train_labels.append(label)
if len(train_data) >= train_size:
break
regressor = Pipeline([
('tfidf', TfidfVectorizer(input='content')),
('clf', RandomForestRegressor())
])
regressor.fit(train_data, train_labels)
for message, group in analyzer.read_json(sys.stdin):
# Call predict for every message which might be slow in practice but
# avoids memory hog due to not being able to use iterators if done in
# one batch.
prediction = regressor.predict([message])[0]
if analyzer.display:
# Take the color for this group of predictions
c = cmp(prediction, 0)
message = analyzer.colors[c] + message + analyzer.END_COLOR
analyzer.output(group, message, prediction, "")
if __name__ == "__main__":
main(sys.argv[1:])
| from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.neighbors import KNeighborsRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.pipeline import Pipeline
import numpy as np
import json
import sys
import time
from analyze import Analyzer # for some train data labelling
def main(argv):
group = argv[0] if len(argv) > 0 else "id"
train_size = int(argv[1]) if len(argv) > 1 else 1000
train_data = []
train_labels = []
analyzer = Analyzer(group)
for message, _ in analyzer.read_json(sys.stdin):
label = analyzer.analyze(message)[0]
train_data.append(message)
train_labels.append(label)
if len(train_data) >= train_size:
break
regressor = Pipeline([
('tfidf', TfidfVectorizer(input='content')),
('clf', RandomForestRegressor())
#('clf', KNeighborsRegressor())
])
regressor.fit(train_data, train_labels)
for message, group in analyzer.read_json(sys.stdin):
# Call predict for every message which might be slow in practice but
# avoids memory hog due to not being able to use iterators if done in
# one batch.
prediction = regressor.predict([message])[0]
if analyzer.display:
# Take the color for this group of predictions
c = cmp(prediction, 0)
message = analyzer.colors[c] + message + analyzer.END_COLOR
analyzer.output(group, message, prediction, "")
if __name__ == "__main__":
main(sys.argv[1:])
| mit | Python |
4f3d1e90ec4af618ada415f53ddd9eec42bafb38 | Indent with 4 spaces, not 3 | CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer | wafer/talks/tests/test_wafer_basic_talks.py | wafer/talks/tests/test_wafer_basic_talks.py | # This tests the very basic talk stuff, to ensure some levels of sanity
def test_add_talk():
"""Create a user and add a talk to it"""
from django.contrib.auth.models import User
from wafer.talks.models import Talks
user = User.objects.create_user('john', 'best@wafer.test', 'johnpassword')
talk = Talks.objects.create(title="This is a test talk",
abstract="This should be a long and interesting abstract, but isn't",
corresponding_author_id=user.id)
assert user.contact_talks.count() == 1
| # This tests the very basic talk stuff, to ensure some levels of sanity
def test_add_talk():
"""Create a user and add a talk to it"""
from django.contrib.auth.models import User
from wafer.talks.models import Talks
user = User.objects.create_user('john', 'best@wafer.test', 'johnpassword')
talk = Talks.objects.create(title="This is a test talk",
abstract="This should be a long and interesting abstract, but isn't",
corresponding_author_id=user.id)
assert user.contact_talks.count() == 1
| isc | Python |
7b0a6d27389f8e4abde77b2ed76dac795c33cfab | Use url_for | uranusjr/diffhtml | demo/app.py | demo/app.py | import flask
from flask import request
from markupsafe import Markup
import diffhtml
app = flask.Flask('Diff-HTML Demo')
DEFAULT_A = """
I am the very model of a modern Major-General,
I've information vegetable, animal, and mineral,
I know the kings of England, and I quote the fights historical,
From Marathon to Waterloo, in order categorical.
"""
DEFAULT_B = """
I am the very model of a cartoon individual,
My animation's comical, unusual, and whimsical,
I'm quite adept at funny gags, comedic theory I have read,
From wicked puns and stupid jokes to anvils that drop on your head.
"""
@app.route('/ndiff', methods=['GET', 'POST'])
def ndiff():
a = request.form.get('a', DEFAULT_A)
b = request.form.get('b', DEFAULT_B)
try:
cutoff = float(request.form.get('cutoff', 0.6))
except ValueError:
cutoff = 0.6
context = {
'result': None,
'cutoff': cutoff,
'input': {'a': a, 'b': b},
}
if request.method == 'POST':
context['result'] = Markup('<br>').join(diffhtml.ndiff(
a.splitlines(), b.splitlines(), cutoff=cutoff,
))
return flask.render_template('ndiff.html', **context)
@app.route('/')
def home():
return flask.redirect(flask.url_for('ndiff'))
if __name__ == '__main__':
app.run()
| import flask
from flask import request
from markupsafe import Markup
import diffhtml
app = flask.Flask('Diff-HTML Demo')
DEFAULT_A = """
I am the very model of a modern Major-General,
I've information vegetable, animal, and mineral,
I know the kings of England, and I quote the fights historical,
From Marathon to Waterloo, in order categorical.
"""
DEFAULT_B = """
I am the very model of a cartoon individual,
My animation's comical, unusual, and whimsical,
I'm quite adept at funny gags, comedic theory I have read,
From wicked puns and stupid jokes to anvils that drop on your head.
"""
@app.route('/ndiff', methods=['GET', 'POST'])
def ndiff():
a = request.form.get('a', DEFAULT_A)
b = request.form.get('b', DEFAULT_B)
try:
cutoff = float(request.form.get('cutoff', 0.6))
except ValueError:
cutoff = 0.6
context = {
'result': None,
'cutoff': cutoff,
'input': {'a': a, 'b': b},
}
if request.method == 'POST':
context['result'] = Markup('<br>').join(diffhtml.ndiff(
a.splitlines(), b.splitlines(), cutoff=cutoff,
))
return flask.render_template('ndiff.html', **context)
@app.route('/')
def home():
return flask.redirect('/ndiff')
if __name__ == '__main__':
app.run()
| isc | Python |
68b2e1cb5a914d408761229bd27677e80967f5ff | Remove unused import. | potatolondon/django-hashbrown | hashbrown/management/commands/switches.py | hashbrown/management/commands/switches.py | from django.core.management.base import BaseCommand
from django.utils.six.moves import input
from hashbrown.models import Switch
from hashbrown.utils import SETTINGS_KEY, is_active, get_defaults
class Command(BaseCommand):
help = 'Creates / deletes feature switches in the database'
def add_arguments(self, parser):
parser.add_argument(
'--delete',
action='store_true',
default=False,
help='Delete switches in the database that are not in ' + SETTINGS_KEY,
)
parser.add_argument(
'--force',
action='store_true',
default=False,
help='Delete switches without confirmation (implies --delete)',
)
def handle(self, *args, **kwargs):
if kwargs['delete'] or kwargs['force']:
self._delete_switches(force=kwargs['force'])
self._create_switches()
self.stderr.write('All switches up-to-date.')
def _create_switches(self):
create_switches(self.stderr)
def _delete_switches(self, force=False):
delete_switches(self.stderr, force=force)
def create_switches(stderr):
"""Create switches listed in HASHBROWN_SWITCH_DEFAULTS which aren't in
the database yet.
"""
defaults = get_defaults()
installed_switches = set(Switch.objects.values_list('label', flat=True))
missing_switches = set(defaults) - installed_switches
for label in sorted(missing_switches):
is_active(label)
stderr.write('Created switch %r.' % label)
return missing_switches
def delete_switches(stderr, force=False):
defaults = get_defaults()
installed_switches = set(Switch.objects.values_list('label', flat=True))
unknown_switches = sorted(installed_switches - set(defaults))
if not unknown_switches:
return
permission_granted = force or ask_permission(stderr, unknown_switches)
if permission_granted:
Switch.objects.filter(label__in=unknown_switches).delete()
for label in unknown_switches:
stderr.write('Deleted switch %r.' % label)
def ask_permission(stderr, switches):
stderr.write('The following switches are in the database but not in %s:' % SETTINGS_KEY)
for label in switches:
stderr.write(label)
response = input('Delete switches? [y/N]: ')
return response.lower().strip() in ('y', 'yes')
| from optparse import make_option
from django.core.management.base import BaseCommand
from django.utils.six.moves import input
from hashbrown.models import Switch
from hashbrown.utils import SETTINGS_KEY, is_active, get_defaults
class Command(BaseCommand):
help = 'Creates / deletes feature switches in the database'
def add_arguments(self, parser):
parser.add_argument(
'--delete',
action='store_true',
default=False,
help='Delete switches in the database that are not in ' + SETTINGS_KEY,
)
parser.add_argument(
'--force',
action='store_true',
default=False,
help='Delete switches without confirmation (implies --delete)',
)
def handle(self, *args, **kwargs):
if kwargs['delete'] or kwargs['force']:
self._delete_switches(force=kwargs['force'])
self._create_switches()
self.stderr.write('All switches up-to-date.')
def _create_switches(self):
create_switches(self.stderr)
def _delete_switches(self, force=False):
delete_switches(self.stderr, force=force)
def create_switches(stderr):
"""Create switches listed in HASHBROWN_SWITCH_DEFAULTS which aren't in
the database yet.
"""
defaults = get_defaults()
installed_switches = set(Switch.objects.values_list('label', flat=True))
missing_switches = set(defaults) - installed_switches
for label in sorted(missing_switches):
is_active(label)
stderr.write('Created switch %r.' % label)
return missing_switches
def delete_switches(stderr, force=False):
defaults = get_defaults()
installed_switches = set(Switch.objects.values_list('label', flat=True))
unknown_switches = sorted(installed_switches - set(defaults))
if not unknown_switches:
return
permission_granted = force or ask_permission(stderr, unknown_switches)
if permission_granted:
Switch.objects.filter(label__in=unknown_switches).delete()
for label in unknown_switches:
stderr.write('Deleted switch %r.' % label)
def ask_permission(stderr, switches):
stderr.write('The following switches are in the database but not in %s:' % SETTINGS_KEY)
for label in switches:
stderr.write(label)
response = input('Delete switches? [y/N]: ')
return response.lower().strip() in ('y', 'yes')
| bsd-2-clause | Python |
114d7bc6b45d18f528a7ed5c12e1938e35efb93c | Update hash_db_password.py | dpgaspar/Flask-AppBuilder,qpxu007/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,dpgaspar/Flask-AppBuilder,qpxu007/Flask-AppBuilder,rpiotti/Flask-AppBuilder,dpgaspar/Flask-AppBuilder,qpxu007/Flask-AppBuilder,dpgaspar/Flask-AppBuilder,qpxu007/Flask-AppBuilder,rpiotti/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,rpiotti/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,rpiotti/Flask-AppBuilder | bin/hash_db_password.py | bin/hash_db_password.py | import sys
from werkzeug.security import generate_password_hash
from flask_appbuilder.security.models import User
try:
from app import app, db
except:
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
if len(sys.argv) < 2:
print "Without typical app structure use parameter to config"
print "Use example: python hash_db_password.py sqlite:////home/user/application/app.db"
exit()
con_str = sys.argv[1]
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = con_str
db = SQLAlchemy(app)
try:
print "using connection string: {}".format(app.config['SQLALCHEMY_DATABASE_URI'])
users = db.session.query(User).all()
except Exception as e:
print "Query, connection error {}".format(e)
print "Config db key {}".format(app.config['SQLALCHEMY_DATABASE_URI'])
exit()
for user in users:
print "Hashing password for {}".format(user.full_name)
user.password = generate_password_hash(user.password)
try:
db.session.merge(user)
db.session.commit()
except:
print "Error updating password for {}".format(user.full_name)
| import sys
from werkzeug.security import generate_password_hash
from flask_appbuilder.security.models import User
try:
from app import app, db
except:
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
if len(sys.argv) < 2:
print "Without typical app structure use parameter to config"
print "Use example: python hash_db_password.py sqlite:////home/user/application/app.db"
exit()
con_str = sys.argv[1]
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = con_str
db = SQLAlchemy(app)
try:
print "using connection string: {}".format(app.config['SQLALCHEMY_DATABASE_URI'])
users = db.session.query(User).all()
except Exception as e:
print "Query, connection error {}".format(e.message)
print "Config db key {}".format(app.config['SQLALCHEMY_DATABASE_URI'])
exit()
for user in users:
print "Hashing password for {}".format(user.full_name)
user.password = generate_password_hash(user.password)
try:
db.session.merge(user)
db.session.commit()
except:
print "Error updating password for {}".format(user.full_name)
| bsd-3-clause | Python |
3224a95d79f6e3166e235f4cfc857a48d1b17c52 | Revise docstring: memoization | bowen0701/algorithms_data_structures | alg_fibonacci.py | alg_fibonacci.py | """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def fibonacci_recur(n):
"""Get the nth number of Fibonacci series, Fn, by recursion.
- Time complexity: 2Fn - 1 = O(Fn); too fast.
- Space complexity: O(n).
"""
if n <= 1:
return n
else:
return fibonacci_recur(n - 1) + fibonacci_recur(n - 2)
def fibonacci_memo(n):
"""Get the nth number of Fibonacci series, Fn, by memoization.
- Time complexity: O(n).
- Space complexity: O(n).
"""
fn_d = {}
fn_d[0] = 0
fn_d[1] = 1
for n in range(2, n + 1):
fn_d[n] = fn_d[n - 1] + fn_d[n - 2]
return fn_d[n]
def fibonacci_dp(n):
"""Get the nth number of Fibonacci series by dynamic programming.
- Time complexity is still O(n), like fibonacci_memo().
- Space complexity is O(1), improving a lot.
"""
a, b = 0, 1
for _ in range(n):
a, b = a + b, a
return a
def fibonacci_gen(n):
"""Get the nth number of Fibonacci series by generator."""
a, b = 0, 1
for _ in range(n):
a, b = a + b, a
yield a
def main():
import time
n = 30
print('{}th number of Fibonacci series:'.format(n))
start_time = time.time()
print('By recursion: {}'.format(fibonacci_recur(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By memorization: {}'.format(fibonacci_memo(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By dynamic programming: {}'.format(fibonacci_dp(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By generator: {}'.format(list(fibonacci_gen(n))[-1]))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
| """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def fibonacci_recur(n):
"""Get the nth number of Fibonacci series, Fn, by recursion.
- Time complexity: 2Fn - 1 = O(Fn); too fast.
- Space complexity: O(n).
"""
if n <= 1:
return n
else:
return fibonacci_recur(n - 1) + fibonacci_recur(n - 2)
def fibonacci_memo(n):
"""Get the nth number of Fibonacci series, Fn, by memorization.
- Time complexity: O(n).
- Space complexity: O(n).
"""
fn_d = {}
fn_d[0] = 0
fn_d[1] = 1
for n in range(2, n + 1):
fn_d[n] = fn_d[n - 1] + fn_d[n - 2]
return fn_d[n]
def fibonacci_dp(n):
"""Get the nth number of Fibonacci series by dynamic programming.
- Time complexity is still O(n), like fibonacci_memo().
- Space complexity is O(1), improving a lot.
"""
a, b = 0, 1
for _ in range(n):
a, b = a + b, a
return a
def fibonacci_gen(n):
"""Get the nth number of Fibonacci series by generator."""
a, b = 0, 1
for _ in range(n):
a, b = a + b, a
yield a
def main():
import time
n = 30
print('{}th number of Fibonacci series:'.format(n))
start_time = time.time()
print('By recursion: {}'.format(fibonacci_recur(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By memorization: {}'.format(fibonacci_memo(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By dynamic programming: {}'.format(fibonacci_dp(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By generator: {}'.format(list(fibonacci_gen(n))[-1]))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
858e84f336f76a1e65b730834ad8ffb346ee6b0f | fix logger singleton to work with pyjd | gpitel/pyjs,spaceone/pyjs,pombredanne/pyjs,minghuascode/pyj,spaceone/pyjs,pombredanne/pyjs,Hasimir/pyjs,Hasimir/pyjs,Hasimir/pyjs,minghuascode/pyj,anandology/pyjamas,pyjs/pyjs,pombredanne/pyjs,spaceone/pyjs,anandology/pyjamas,gpitel/pyjs,anandology/pyjamas,minghuascode/pyj,pyjs/pyjs,lancezlin/pyjs,minghuascode/pyj,pyjs/pyjs,spaceone/pyjs,lancezlin/pyjs,gpitel/pyjs,gpitel/pyjs,lancezlin/pyjs,anandology/pyjamas,pombredanne/pyjs,pyjs/pyjs,Hasimir/pyjs,lancezlin/pyjs | examples/mail/Logger.py | examples/mail/Logger.py | from pyjamas.ui.Grid import Grid
_logger = None
class LoggerCls(Grid):
def __init__(self):
Grid.__init__(self)
self.targets=[]
self.targets.append("app")
#self.targets.append("ui")
self.resize(len(self.targets)+1, 2)
self.setBorderWidth("1px")
self.counter=0
self.setHTML(0, 0, "<b>Log</b>")
self.setText(1, 0, "app")
for i in range(len(self.targets)):
target=self.targets[i]
self.setText(i+1, 0, target)
def addTarget(self, target):
self.targets.append(target)
self.resize(len(self.targets)+1, 2)
self.setText(len(self.targets), 0, target)
return self.targets.index(target)
def write(self, target, message):
self.counter+=1
if target=='':
target='app'
try:
target_idx=self.targets.index(target)
except ValueError:
target_idx = -1
# add new target
if target_idx<0:
target_idx=self.addTarget(target)
target_row=target_idx+1
old_text=self.getHTML(target_row, 1)
log_line="%d: " % self.counter + message
if old_text==' ':
new_text=log_line
else:
new_text=old_text + "<br>" + log_line
self.setHTML(target_row, 1, new_text)
def Logger(target="", message=""):
global _logger
# make sure there is only one instance of this class
if not _logger:
_logger = LoggerCls()
_logger.write(target, message)
return _logger
| from pyjamas.ui.Grid import Grid
_logger = None
class Logger(Grid):
def __new__(cls):
global _logger
# make sure there is only one instance of this class
if _logger:
return _logger
_logger = Grid.__new__(cls)
return _logger
def __init__(self, target="", message=""):
#global _logger
if message:
return Logger().write(target, message)
# make sure there is only one instance of this class
if hasattr(self, target): return None
self.setSingleton()
Grid.__init__(self)
self.targets=[]
self.targets.append("app")
#self.targets.append("ui")
self.resize(len(self.targets)+1, 2)
self.setBorderWidth("1px")
self.counter=0
self.setHTML(0, 0, "<b>Log</b>")
self.setText(1, 0, "app")
for i in range(len(self.targets)):
target=self.targets[i]
self.setText(i+1, 0, target)
def setSingleton(self):
global _logger
_logger = self
def addTarget(self, target):
self.targets.append(target)
self.resize(len(self.targets)+1, 2)
self.setText(len(self.targets), 0, target)
return self.targets.index(target)
def write(self, target, message):
self.counter+=1
if target=='':
target='app'
try:
target_idx=self.targets.index(target)
except ValueError:
target_idx = -1
# add new target
if target_idx<0:
target_idx=self.addTarget(target)
target_row=target_idx+1
old_text=self.getHTML(target_row, 1)
log_line="%d: " % self.counter + message
if old_text==' ':
new_text=log_line
else:
new_text=old_text + "<br>" + log_line
self.setHTML(target_row, 1, new_text)
| apache-2.0 | Python |
f10dcd822f72e86d0eb0071acf7d38e81cfe32da | Add a blank line | raviqqe/tensorflow-qnd,raviqqe/tensorflow-qnd | examples/mnist/mnist.py | examples/mnist/mnist.py | import logging
import qnd
import tensorflow as tf
logging.getLogger().setLevel(logging.INFO)
qnd.add_flag("use_eval_input_fn", action="store_true")
qnd.add_flag("use_model_fn_ops", action="store_true")
def read_file(filename_queue):
_, serialized = tf.TFRecordReader().read(filename_queue)
scalar_feature = lambda dtype: tf.FixedLenFeature([], dtype)
features = tf.parse_single_example(serialized, {
"image_raw": scalar_feature(tf.string),
"label": scalar_feature(tf.int64),
})
image = tf.decode_raw(features["image_raw"], tf.uint8)
image.set_shape([28**2])
return tf.to_float(image) / 255 - 0.5, features["label"]
def minimize(loss):
return tf.contrib.layers.optimize_loss(
loss,
tf.contrib.framework.get_global_step(),
0.01,
"Adam")
def mnist_model(image, number):
h = tf.contrib.layers.fully_connected(image, 200)
h = tf.contrib.layers.fully_connected(h, 10, activation_fn=None)
loss = tf.reduce_mean(
tf.nn.sparse_softmax_cross_entropy_with_logits(h, number))
predictions = tf.argmax(h, axis=1)
train_op = minimize(loss)
eval_metrics = {
"accuracy": tf.reduce_mean(tf.to_float(tf.equal(predictions, number)))
}
if qnd.FLAGS.use_model_fn_ops:
return tf.contrib.learn.estimators.model_fn.ModelFnOps(
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metrics=eval_metrics)
return predictions, loss, train_op, eval_metrics
run = qnd.def_run()
def main():
run(mnist_model,
read_file,
read_file if qnd.FLAGS.use_eval_input_fn else None)
if __name__ == "__main__":
main()
| import logging
import qnd
import tensorflow as tf
logging.getLogger().setLevel(logging.INFO)
qnd.add_flag("use_eval_input_fn", action="store_true")
qnd.add_flag("use_model_fn_ops", action="store_true")
def read_file(filename_queue):
_, serialized = tf.TFRecordReader().read(filename_queue)
scalar_feature = lambda dtype: tf.FixedLenFeature([], dtype)
features = tf.parse_single_example(serialized, {
"image_raw": scalar_feature(tf.string),
"label": scalar_feature(tf.int64),
})
image = tf.decode_raw(features["image_raw"], tf.uint8)
image.set_shape([28**2])
return tf.to_float(image) / 255 - 0.5, features["label"]
def minimize(loss):
return tf.contrib.layers.optimize_loss(
loss,
tf.contrib.framework.get_global_step(),
0.01,
"Adam")
def mnist_model(image, number):
h = tf.contrib.layers.fully_connected(image, 200)
h = tf.contrib.layers.fully_connected(h, 10, activation_fn=None)
loss = tf.reduce_mean(
tf.nn.sparse_softmax_cross_entropy_with_logits(h, number))
predictions = tf.argmax(h, axis=1)
train_op = minimize(loss)
eval_metrics = {
"accuracy": tf.reduce_mean(tf.to_float(tf.equal(predictions, number)))
}
if qnd.FLAGS.use_model_fn_ops:
return tf.contrib.learn.estimators.model_fn.ModelFnOps(
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metrics=eval_metrics)
return predictions, loss, train_op, eval_metrics
run = qnd.def_run()
def main():
run(mnist_model,
read_file,
read_file if qnd.FLAGS.use_eval_input_fn else None)
if __name__ == "__main__":
main()
| unlicense | Python |