commit: stringlengths (40 to 40)
subject: stringlengths (4 to 1.73k)
repos: stringlengths (5 to 127k)
old_file: stringlengths (2 to 751)
new_file: stringlengths (2 to 751)
new_contents: stringlengths (1 to 8.98k)
old_contents: stringlengths (0 to 6.59k)
license: stringclasses (13 values)
lang: stringclasses (23 values)
35d6d780bddf72ab5ff216a7603bd89f980c8deb
Bump version
bright-sparks/grab-site,UncleStranger/grab-site,bright-sparks/grab-site,UncleStranger/grab-site
libgrabsite/__init__.py
libgrabsite/__init__.py
__version__ = '0.4.1'
__version__ = '0.4.0'
mit
Python
e511da2cb7b73891f26b93e684d9fba80042f3cd
fix syntax
madcore-ai/core,madcore-ai/core
bin/redis_app_update.py
bin/redis_app_update.py
import json
import sys

import redis

r_server = redis.StrictRedis('127.0.0.1', db=2)
app_key = "apps"
app_info = json.loads(r_server.get(app_key))
app_name = sys.argv[1]
app_port = sys.argv[2]
app_namespace = sys.argv[3]
app_service_name = sys.argv[4]

check = False
for app in app_info:
    if app["name"] == app_name:
        check = True
        break

if not check:
    element = '[{"name":"%s", "namespace":"%s", "service_name":"%s" "port":"%s"}]' % (app_name, app_namespace, app_service_name, app_port)
    el = json.loads(element)
    app_info.extend(el)
    app_data = json.dumps(app_info)
    r_server.set(app_key, app_data)
    r_server.set("need_CSR", "1")
    r_server.bgsave()
else:
    r_server.set("need_CSR", "0")
    r_server.bgsave()

import json
import sys

import redis

r_server = redis.StrictRedis('127.0.0.1', db=2)
app_key = "apps"
app_info = json.loads(r_server.get(app_key))
app_name = sys.argv[1]
app_port = sys.argv[2]
app_namespace = sys.argv[3]
app_service_name = sys.argv[4]

check = False
for app in app_info:
    if app["name"] == app_name:
        check = True
        break

if not check:
    element = '[{"name":"%s", "namespace":"%s", "service_name":"%s" "port":"%s"}]' % (app_name, app_namespace, app_service_name app_port)
    el = json.loads(element)
    app_info.extend(el)
    app_data = json.dumps(app_info)
    r_server.set(app_key, app_data)
    r_server.set("need_CSR", "1")
    r_server.bgsave()
else:
    r_server.set("need_CSR", "0")
    r_server.bgsave()
mit
Python
0d68fbaef300c53db407f6296c00e493e4b040bf
use xdg-email by default, fallback to xdg-open + mailto uri
cleett/plyer,kivy/plyer,kostyll/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer,johnbolia/plyer,kived/plyer,kived/plyer,johnbolia/plyer,KeyWeeUsr/plyer,kostyll/plyer,cleett/plyer
plyer/platforms/linux/email.py
plyer/platforms/linux/email.py
import subprocess
from urllib import quote

from plyer.facades import Email


class LinuxEmail(Email):
    def _send(self, **kwargs):
        recipient = kwargs.get('recipient')
        subject = kwargs.get('subject')
        text = kwargs.get('text')
        create_chooser = kwargs.get('create_chooser')

        uri = "mailto:"
        args = ["xdg-email"]
        if recipient:
            uri += str(recipient)
            args += [str(recipient)]
        if subject:
            uri += "?" if not "?" in uri else "&"
            uri += "subject="
            uri += quote(str(subject))
            args += ["--subject", str(subject)]
        if text:
            uri += "?" if not "?" in uri else "&"
            uri += "body="
            uri += quote(str(text))
            args += ["--body", str(text)]

        try:
            subprocess.Popen(args)
        except OSError:
            try:
                subprocess.Popen(["xdg-open", uri])
            except OSError:
                print "Warning: unable to start an email client. Make sure xdg-open is installed."


def instance():
    return LinuxEmail()

import subprocess
from urllib import quote

from plyer.facades import Email


class LinuxEmail(Email):
    def _send(self, **kwargs):
        recipient = kwargs.get('recipient')
        subject = kwargs.get('subject')
        text = kwargs.get('text')
        create_chooser = kwargs.get('create_chooser')

        uri = "mailto:"
        if recipient:
            uri += str(recipient)
        if subject:
            uri += "?" if not "?" in uri else "&"
            uri += "subject="
            uri += quote(str(subject))
        if text:
            uri += "?" if not "?" in uri else "&"
            uri += "body="
            uri += quote(str(text))

        subprocess.Popen(["xdg-open", uri])


def instance():
    return LinuxEmail()
mit
Python
8fb201b866c0eabc99c370cf3ccc993d2de06264
Update version 0.10.1
MSeifert04/iteration_utilities,MSeifert04/iteration_utilities,MSeifert04/iteration_utilities,MSeifert04/iteration_utilities
src/iteration_utilities/__init__.py
src/iteration_utilities/__init__.py
# Licensed under Apache License Version 2.0 - see LICENSE

"""Utilities based on Pythons iterators and generators."""

from ._iteration_utilities import *
from ._convenience import *
from ._recipes import *
from ._additional_recipes import *
from ._classes import *

__version__ = '0.10.1'

# Licensed under Apache License Version 2.0 - see LICENSE

"""Utilities based on Pythons iterators and generators."""

from ._iteration_utilities import *
from ._convenience import *
from ._recipes import *
from ._additional_recipes import *
from ._classes import *

__version__ = '0.10.0'
apache-2.0
Python
ffa551d8e4519005791f42bb2862f0411c54ced3
Update projectfiles_unchanged script
kullo/smartsqlite,kullo/smartsqlite,kullo/smartsqlite
projectfiles_unchanged.py
projectfiles_unchanged.py
#!/usr/bin/env python3
#
# This script is used on Linux, OS X and Windows.
# Python 3 required.
# Returns 0 if project files are unchanged and 1 else.
#
# Script version: 3

import os
import glob
import hashlib
import sys

matches = []
tmp_file = "projectfiles.md5.tmp"
exlude_dirs = set(['.git', 'docs'])


def get_subdirs(path):
    return set([name for name in os.listdir(path)
                if os.path.isdir(os.path.join(path, name))])


def find_in(path):
    # print(path)
    out = []
    out += glob.glob(path + "/*.pro")
    out += glob.glob(path + "/CMakeLists.txt")
    out += glob.glob(path + "/Info.plist")
    subs = get_subdirs(path) - exlude_dirs
    for s in subs:
        out += find_in(os.path.join(path, s))
    out.sort()
    return out

pros = find_in(".")
# print(pros)

hasher = hashlib.md5()
for pro in pros:
    with open(pro) as f:
        s = f.read()
        hasher.update(s.encode('utf8'))

current = hasher.hexdigest()

if os.path.isfile(tmp_file):
    with open(tmp_file) as f:
        old = f.read()
else:
    old = ""

if current.strip() == old.strip():
    sys.exit(0)
else:
    with open(tmp_file, "w") as f:
        print(current, file=f)
    sys.exit(1)

# version: 2

import os
import glob
import hashlib
import sys

matches = []
exlude_dirs = set(['.git', 'docs'])


def get_subdirs(path):
    return set([name for name in os.listdir(path)
                if os.path.isdir(os.path.join(path, name))])


def find_in(path):
    # print(path)
    out = []
    out += glob.glob(path + "/*.pro")
    out += glob.glob(path + "/CMakeLists.txt")
    out += glob.glob(path + "/Info.plist")
    subs = get_subdirs(path) - exlude_dirs
    for s in subs:
        out += find_in(os.path.join(path, s))
    out.sort()
    return out

pros = find_in(".")
# print(pros)

hasher = hashlib.md5()
for pro in pros:
    with open(pro) as f:
        s = f.read()
        hasher.update(s.encode('utf8'))

current = hasher.hexdigest()

if os.path.isfile("projectfiles.md5.tmp"):
    with open("projectfiles.md5.tmp") as f:
        old = f.read()
else:
    old = ""

if current.strip() == old.strip():
    sys.exit(0)
else:
    with open("projectfiles.md5.tmp", "w") as f:
        print(current, file=f)
    sys.exit(1)
bsd-3-clause
Python
4e62d7d9514449be5afc5a27b15726a254077e89
Remove dangling argument
fieldOfView/Cura,ynotstartups/Wanhao,totalretribution/Cura,hmflash/Cura,totalretribution/Cura,ynotstartups/Wanhao,fieldOfView/Cura,hmflash/Cura,Curahelper/Cura,senttech/Cura,senttech/Cura,Curahelper/Cura
MachineSettingsAction.py
MachineSettingsAction.py
from cura.MachineAction import MachineAction

import cura.Settings.CuraContainerRegistry

from UM.i18n import i18nCatalog
from UM.Settings.DefinitionContainer import DefinitionContainer
from UM.Application import Application

from PyQt5.QtCore import pyqtSlot, QObject

catalog = i18nCatalog("cura")


class MachineSettingsAction(MachineAction, QObject):
    def __init__(self, parent = None):
        MachineAction.__init__(self, "MachineSettingsAction", catalog.i18nc("@action", "Machine Settings"))
        self._qml_url = "MachineSettingsAction.qml"

        cura.Settings.CuraContainerRegistry.getInstance().containerAdded.connect(self._onContainerAdded)

    def _execute(self):
        pass

    def _reset(self):
        pass

    def _onContainerAdded(self, container):
        # Add this action as a supported action to all machine definitions
        if isinstance(container, DefinitionContainer) and container.getMetaDataEntry("type") == "machine":
            Application.getInstance().getMachineActionManager().addSupportedAction(container.getId(), self.getKey())

    @pyqtSlot()
    def forceUpdate(self):
        # Force rebuilding the build volume by reloading the global container stack.
        # This is a bit of a hack, but it seems quick enough.
        Application.getInstance().globalContainerStackChanged.emit()

from cura.MachineAction import MachineAction

import cura.Settings.CuraContainerRegistry

from UM.i18n import i18nCatalog
from UM.Settings.DefinitionContainer import DefinitionContainer
from UM.Application import Application

from PyQt5.QtCore import pyqtSlot, QObject

catalog = i18nCatalog("cura")


class MachineSettingsAction(MachineAction, QObject, ):
    def __init__(self, parent = None):
        MachineAction.__init__(self, "MachineSettingsAction", catalog.i18nc("@action", "Machine Settings"))
        self._qml_url = "MachineSettingsAction.qml"

        cura.Settings.CuraContainerRegistry.getInstance().containerAdded.connect(self._onContainerAdded)

    def _execute(self):
        pass

    def _reset(self):
        pass

    def _onContainerAdded(self, container):
        # Add this action as a supported action to all machine definitions
        if isinstance(container, DefinitionContainer) and container.getMetaDataEntry("type") == "machine":
            Application.getInstance().getMachineActionManager().addSupportedAction(container.getId(), self.getKey())

    @pyqtSlot()
    def forceUpdate(self):
        # Force rebuilding the build volume by reloading the global container stack.
        # This is a bit of a hack, but it seems quick enough.
        Application.getInstance().globalContainerStackChanged.emit()
agpl-3.0
Python
2e071c0e37fac657955de70fb7193b3e46ba2aef
Update subscribe_speakers_to_talks.py
pythonitalia/pycon_site,pythonitalia/pycon_site,leriomaggio/pycon_site,pythonitalia/pycon_site,pythonitalia/pycon_site,leriomaggio/pycon_site,leriomaggio/pycon_site,pythonitalia/pycon_site,leriomaggio/pycon_site,leriomaggio/pycon_site
p3/management/commands/subscribe_speakers_to_talks.py
p3/management/commands/subscribe_speakers_to_talks.py
# -*- coding: UTF-8 -*-
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth import get_user_model
from conference import models as cmodels
from hcomments import models as hmodels

info = get_user_model().objects.get(email='info@pycon.it')

class Command(BaseCommand):
    def handle(self, *args, **options):
        try:
            conf = args[0]
        except IndexError:
            raise CommandError('conference missing')

        qs = cmodels.TalkSpeaker.objects\
            .filter(talk__conference=conf)\
            .select_related('talk', 'speaker__user')
        for row in qs:
            u = row.speaker.user
            t = row.talk
            print '%s %s -> %s' % (u.first_name, u.last_name, t.title)
            hmodels.ThreadSubscription.objects.subscribe(t, u)
            hmodels.ThreadSubscription.objects.subscribe(t, info)

# -*- coding: UTF-8 -*-
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
from hcomments import models as hmodels

class Command(BaseCommand):
    def handle(self, *args, **options):
        try:
            conf = args[0]
        except IndexError:
            raise CommandError('conference missing')

        qs = cmodels.TalkSpeaker.objects\
            .filter(talk__conference=conf)\
            .select_related('talk', 'speaker__user')
        for row in qs:
            u = row.speaker.user
            t = row.talk
            print '%s %s -> %s' % (u.first_name, u.last_name, t.title)
            hmodels.ThreadSubscription.objects.subscribe(t, u)
bsd-2-clause
Python
78dd9bb220a8e1a03b51b801e023e4401a351892
Support animated pngs
stevearc/gif-split,stevearc/gif-split
gif_split/views.py
gif_split/views.py
import os
import posixpath
from cStringIO import StringIO

import logging
import requests
from PIL import Image, ImageSequence
from paste.httpheaders import CONTENT_DISPOSITION
from pyramid.response import FileIter, FileResponse
from pyramid.view import view_config
from pyramid_duh import argify

LOG = logging.getLogger(__name__)


@view_config(
    route_name='root',
    renderer='index.jinja2')
@argify
def index_view(request, url=None):
    """ Root view '/' """
    if url:
        filename, ext = posixpath.splitext(posixpath.basename(url))
        ext = ext.lower()
        filename = filename + "_sprite" + ext
        if ext == '.gif':
            img_format = 'GIF'
        elif ext == '.png':
            img_format = 'PNG'
        else:
            img_format = None
        stream = download_gif(url)
        sprite = convert_gif(stream)
        data = StringIO()
        sprite.save(data, format=img_format)
        data.seek(0)
        disp = CONTENT_DISPOSITION.tuples(filename=filename)
        request.response.headers.update(disp)
        request.response.app_iter = FileIter(data)
        return request.response
    else:
        return {}


def download_gif(url):
    return StringIO(requests.get(url).content)


def convert_gif(stream):
    image = Image.open(stream)
    frames = ImageSequence.Iterator(image)
    frame_width, frame_height = 0, 0
    frame_width, frame_height = frames[0].size
    width = frame_width*len(list(frames))
    height = frame_height
    out = Image.new('RGBA', (width, height))
    stream.seek(0)
    image = Image.open(stream)
    for i, frame in enumerate(ImageSequence.Iterator(image)):
        out.paste(frame, (frame_width*i, 0))
    return out

import os
import posixpath
from cStringIO import StringIO

import logging
import requests
from PIL import Image, ImageSequence
from paste.httpheaders import CONTENT_DISPOSITION
from pyramid.response import FileIter, FileResponse
from pyramid.view import view_config
from pyramid_duh import argify

LOG = logging.getLogger(__name__)


@view_config(
    route_name='root',
    renderer='index.jinja2')
@argify
def index_view(request, url=None):
    """ Root view '/' """
    if url is not None:
        filename = posixpath.basename(url).replace('.gif', '')
        filename = filename + "_sprite.gif"
        stream = download_gif(url)
        sprite = convert_gif(stream)
        data = StringIO()
        sprite.save(data, format='GIF')
        data.seek(0)
        disp = CONTENT_DISPOSITION.tuples(filename=filename)
        request.response.headers.update(disp)
        request.response.app_iter = FileIter(data)
        return request.response
    else:
        return {}


def download_gif(url):
    return StringIO(requests.get(url).content)


def convert_gif(stream):
    image = Image.open(stream)
    frames = ImageSequence.Iterator(image)
    frame_width, frame_height = 0, 0
    frame_width, frame_height = frames[0].size
    width = frame_width*len(list(frames))
    height = frame_height
    out = Image.new('RGBA', (width, height))
    stream.seek(0)
    image = Image.open(stream)
    for i, frame in enumerate(ImageSequence.Iterator(image)):
        out.paste(frame, (frame_width*i, 0))
    return out
mit
Python
d00d809735210f53c3da71195107f1991814eb52
fix minor bug most likely due to merge error
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
labonneboite/common/models/user_favorite_offices.py
labonneboite/common/models/user_favorite_offices.py
# coding: utf8
import datetime

from sqlalchemy import Column, ForeignKey, UniqueConstraint
from sqlalchemy import desc
from sqlalchemy import Integer, String, DateTime
from sqlalchemy.orm import relationship

from labonneboite.common.database import Base
from labonneboite.common.database import db_session
from labonneboite.common.models.base import CRUDMixin
from labonneboite.conf import get_current_env, ENV_LBBDEV


class UserFavoriteOffice(CRUDMixin, Base):
    """
    Stores the favorites offices of a user.

    Important:
    This model has a relation to the `etablissements` model via the `office_siret` field.
    But the `etablissements` table is dropped and recreated during the offices import process
    (remember that `etablissements` is currently excluded from the migration system).
    Some entries in `etablissements` may disappear during this process.
    Therefore the `office_siret` foreign key integrity may be broken.
    So the foreign key integrity must be enforced by the script of the data deployment process.
    """
    __tablename__ = 'user_favorite_offices'
    __table_args__ = (
        UniqueConstraint('user_id', 'office_siret', name='_user_fav_office'),
    )

    id = Column(Integer, primary_key=True)
    # Set `ondelete` to `CASCADE`: when a `user` is deleted, all his `favorites` are deleted too.
    user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'), nullable=False)
    # Set `ondelete` to `CASCADE`: when an `office` is deleted, all related `favorites` are deleted too.
    office_siret = Column(String(191), ForeignKey('etablissements.siret', ondelete='CASCADE'), nullable=True)
    date_created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)

    user = relationship('User')
    if get_current_env() == ENV_LBBDEV:
        # disable relationship which mysteriously breaks on lbbdev only, not needed there anyway.
        pass
    else:
        office = relationship('Office', lazy='joined')

    __mapper_args__ = {
        'order_by': desc(date_created),  # Default order_by for all queries.
    }

    @classmethod
    def user_favs_as_sirets(cls, user):
        """
        Returns the favorites offices of a user as a list of sirets.
        Useful to check if an office is already in the favorites of a user.
        """
        if user.is_anonymous:
            return []
        sirets = [fav.office_siret for fav in db_session.query(cls).filter_by(user_id=user.id)]
        return sirets

# coding: utf8
import datetime

from sqlalchemy import Column, ForeignKey, UniqueConstraint
from sqlalchemy import desc
from sqlalchemy import Integer, String, DateTime
from sqlalchemy.orm import relationship

from labonneboite.common.database import Base
from labonneboite.common.database import db_session
from labonneboite.common.models.base import CRUDMixin
from labonneboite.common import util
from labonneboite.conf import get_current_env, ENV_LBBDEV


class UserFavoriteOffice(CRUDMixin, Base):
    """
    Stores the favorites offices of a user.

    Important:
    This model has a relation to the `etablissements` model via the `office_siret` field.
    But the `etablissements` table is dropped and recreated during the offices import process
    (remember that `etablissements` is currently excluded from the migration system).
    Some entries in `etablissements` may disappear during this process.
    Therefore the `office_siret` foreign key integrity may be broken.
    So the foreign key integrity must be enforced by the script of the data deployment process.
    """
    __tablename__ = 'user_favorite_offices'
    __table_args__ = (
        UniqueConstraint('user_id', 'office_siret', name='_user_fav_office'),
    )

    id = Column(Integer, primary_key=True)
    # Set `ondelete` to `CASCADE`: when a `user` is deleted, all his `favorites` are deleted too.
    user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'), nullable=False)
    # Set `ondelete` to `CASCADE`: when an `office` is deleted, all related `favorites` are deleted too.
    office_siret = Column(String(191), ForeignKey('etablissements.siret', ondelete='CASCADE'), nullable=True)
    date_created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)

    user = relationship('User')
    if get_current_env() == ENV_LBBDEV:
        # disable relationship which mysteriously breaks on lbbdev only, not needed there anyway.
        pass
    else:
        office = relationship('Office', lazy='joined')

    __mapper_args__ = {
        'order_by': desc(date_created),  # Default order_by for all queries.
    }

    @classmethod
    def user_favs_as_sirets(cls, user):
        """
        Returns the favorites offices of a user as a list of sirets.
        Useful to check if an office is already in the favorites of a user.
        """
        if user.is_anonymous:
            return []
        sirets = [fav.office_siret for fav in db_session.query(cls).filter_by(user_id=user.id)]
        return sirets
agpl-3.0
Python
02d6e904fe02a4c53b1878a3f6c44c074de47d79
Add __str__ to Decorator
schwa-lab/libschwa,schwa-lab/libschwa,schwa-lab/libschwa,schwa-lab/libschwa
api/python/schwa/dr/decoration.py
api/python/schwa/dr/decoration.py
""" Utilities for managing document decoration by marking the document with the set of decorations that have been applied to it. """ from functools import wraps, partial def decorator(key=None): """ Wraps a docrep decorator, ensuring it is only executed once per document. Duplication is checked using the given key or the function object. """ def dec(fn): @wraps(fn) def wrapper(doc): try: if key in doc._decorated_by: return except AttributeError: doc._decorated_by = set() doc._decorated_by.add(key) fn(doc) return wrapper if callable(key): return dec(key) return dec class Decorator(object): """ An abstract document decorator, which wraps its decorate method to ensure it is only executed once per document. """ def __init__(self, key): # NOTE: wrapping __call__ like this didn't seem to work self.decorate = decorator(key)(self.decorate) self._key = key @classmethod def _build_key(cls, *args): return '{}-{}'.format(cls.__name__, '-'.join(repr(arg) for arg in args)) def __call__(self, doc): self.decorate(doc) def decorate(self, doc): raise NotImplementedError() def __str__(self): return self._key def requires_decoration(*decorators, **kwargs): """ Marks the document decoration dependencies for a function, where the document is found in the doc_arg positional argument (default 0) or doc_kwarg keyword argument (default 'doc'). """ doc_arg = kwargs.pop('doc_arg', 0) doc_kwarg = kwargs.pop('doc_kwarg', 'doc') if kwargs: raise ValueError("Got unexpected keyword arguments: {}".format(kwargs.keys())) def dec(fn): @wraps(fn) def wrapper(*args, **kwargs): try: doc = args[doc_arg] except IndexError: doc = kwargs[doc_kwarg] for decorate in decorators: decorate(doc) return fn(*args, **kwargs) return wrapper return dec method_requires_decoration = partial(requires_decoration, doc_arg=1)
""" Utilities for managing document decoration by marking the document with the set of decorations that have been applied to it. """ from functools import wraps, partial def decorator(key=None): """ Wraps a docrep decorator, ensuring it is only executed once per document. Duplication is checked using the given key or the function object. """ def dec(fn): @wraps(fn) def wrapper(doc): try: if key in doc._decorated_by: return except AttributeError: doc._decorated_by = set() doc._decorated_by.add(key) fn(doc) return wrapper if callable(key): return dec(key) return dec class Decorator(object): """ An abstract document decorator, which wraps its decorate method to ensure it is only executed once per document. """ def __init__(self, key): # NOTE: wrapping __call__ like this didn't seem to work self.decorate = decorator(key)(self.decorate) @classmethod def _build_key(cls, *args): return '{}-{}'.format(cls.__name__, '-'.join(repr(arg) for arg in args)) def __call__(self, doc): self.decorate(doc) def decorate(self, doc): raise NotImplementedError() def requires_decoration(*decorators, **kwargs): """ Marks the document decoration dependencies for a function, where the document is found in the doc_arg positional argument (default 0) or doc_kwarg keyword argument (default 'doc'). """ doc_arg = kwargs.pop('doc_arg', 0) doc_kwarg = kwargs.pop('doc_kwarg', 'doc') if kwargs: raise ValueError("Got unexpected keyword arguments: {}".format(kwargs.keys())) def dec(fn): @wraps(fn) def wrapper(*args, **kwargs): try: doc = args[doc_arg] except IndexError: doc = kwargs[doc_kwarg] for decorate in decorators: decorate(doc) return fn(*args, **kwargs) return wrapper return dec method_requires_decoration = partial(requires_decoration, doc_arg=1)
mit
Python
d7e6db61a0100e69b9a18c17a906e094e91ce7b3
fix wrong keyword param (passws) to MySQLdb.connect
kavinyao/SKBPR,kavinyao/SKBPR
database.py
database.py
""" Database Manager. """ import MySQLdb import MySQLdb.cursors class DatabaseManager(object): def __init__(self, host, user, passwd, database, charset='utf8', large_scale=False): """Be careful using large_scale=True, SSDictCursor seems not reliable.""" self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd, db=database, charset=charset) self.large_scale = large_scale def close(self): self.conn.close() # put here for better understandability cursor_types = { True: { True: MySQLdb.cursors.SSDictCursor, False: MySQLdb.cursors.SSCursor, }, False: { True: MySQLdb.cursors.DictCursor, False: MySQLdb.cursors.Cursor, }, } def __get_cursor_type(self, use_dict): return self.cursor_types[self.large_scale][use_dict] def __query(self, sql, values=(), use_dict=True): """Execute any SQL. You can use %s placeholder in sql and fill with values. return cursor""" cursor = self.conn.cursor(self.__get_cursor_type(use_dict)) cursor.execute(sql, values) return cursor def query(self, sql, values=()): """Execute any SQL and return affected rows.""" cursor = self.__query(sql, values) return cursor.rowcount def insert(self, sql, values=()): """Insert a row and return insert id.""" cursor = self.__query(sql, values) return cursor.lastrowid def get_rows(self, sql, values=()): """[Generator]Get rows of SELECT query.""" cursor = self.__query(sql, values) for i in xrange(cursor.rowcount): yield cursor.fetchone() def get_value(self, sql, idx=0): """Get value of the first row. This is handy if you want to retrive COUNT(*).""" cursor = self.__query(sql, use_dict=False) row = cursor.fetchone() return row[idx]
""" Database Manager. """ import MySQLdb import MySQLdb.cursors class DatabaseManager(object): def __init__(self, host, user, passwd, database, charset='utf8', large_scale=False): """Be careful using large_scale=True, SSDictCursor seems not reliable.""" self.conn = MySQLdb.connect(host=host, user=user, passws=passwd, db=database, charset=charset) self.large_scale = large_scale def close(self): self.conn.close() # put here for better understandability cursor_types = { True: { True: MySQLdb.cursors.SSDictCursor, False: MySQLdb.cursors.SSCursor, }, False: { True: MySQLdb.cursors.DictCursor, False: MySQLdb.cursors.Cursor, }, } def __get_cursor_type(self, use_dict): return self.cursor_types[self.large_scale][use_dict] def __query(self, sql, values=(), use_dict=True): """Execute any SQL. You can use %s placeholder in sql and fill with values. return cursor""" cursor = self.conn.cursor(self.__get_cursor_type(use_dict)) cursor.execute(sql, values) return cursor def query(self, sql, values=()): """Execute any SQL and return affected rows.""" cursor = self.__query(sql, values) return cursor.rowcount def insert(self, sql, values=()): """Insert a row and return insert id.""" cursor = self.__query(sql, values) return cursor.lastrowid def get_rows(self, sql, values=()): """[Generator]Get rows of SELECT query.""" cursor = self.__query(sql, values) for i in xrange(cursor.rowcount): yield cursor.fetchone() def get_value(self, sql, idx=0): """Get value of the first row. This is handy if you want to retrive COUNT(*).""" cursor = self.__query(sql, use_dict=False) row = cursor.fetchone() return row[idx]
mit
Python
2e164c5fe2e3a208dbdcbc51f287a9e5b7cc34a8
Add package_data entry in setup.py
pmorissette/klink,pmorissette/klink,dfroger/klink,dfroger/klink
setup.py
setup.py
from setuptools import setup
from klink import __version__

setup(
    name='klink',
    version=__version__,
    url='https://github.com/pmorissette/klink',
    description='Klink is a simple and clean theme for creating Sphinx docs, inspired by jrnl',
    license='MIT',
    author='Philippe Morissette',
    author_email='morissette.philippe@gmail.com',
    packages=['klink'],
    package_data = {'klink': [
        'theme.conf',
        'layout.html',
        'static/css/klink.css',
        'static/fonts/*.*',
    ]},
)

from setuptools import setup
from klink import __version__

setup(
    name='klink',
    version=__version__,
    url='https://github.com/pmorissette/klink',
    description='Klink is a simple and clean theme for creating Sphinx docs, inspired by jrnl',
    license='MIT',
    author='Philippe Morissette',
    author_email='morissette.philippe@gmail.com',
    packages=['klink']
)
mit
Python
64c50a273c3e113affdb700f137bda78fd1a684d
update examples/progressbar.by
TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl
examples/progressbar.py
examples/progressbar.py
#!/usr/bin/env python
# Tai Sakuma <sakuma@fnal.gov>
from AlphaTwirl.ProgressBar import ProgressBar, ProgressBar2, MPProgressMonitor, ProgressReport
from AlphaTwirl.EventReader import MPEventLoopRunner
import time, random

##____________________________________________________________________________||
class EventLoop(object):
    def __init__(self, name):
        self.name = name
        self.readers = [ ]

    def __call__(self, progressReporter = None):
        n = random.randint(5, 50)
        time.sleep(random.randint(0, 3))
        for i in xrange(n):
            time.sleep(0.1)
            report = ProgressReport(name = self.name, done = i + 1, total = n)
            progressReporter.report(report)
        return self.readers

##____________________________________________________________________________||
progressBar = ProgressBar()
progressMonitor = MPProgressMonitor(presentation = progressBar)
runner = MPEventLoopRunner(progressMonitor = progressMonitor)
runner.begin()
runner.run(EventLoop("loop"))
runner.run(EventLoop("another loop"))
runner.run(EventLoop("more loop"))
runner.run(EventLoop("loop loop loop"))
runner.run(EventLoop("l"))
runner.run(EventLoop("loop6"))
runner.run(EventLoop("loop7"))
runner.run(EventLoop("loop8"))
runner.end()

##____________________________________________________________________________||

#!/usr/bin/env python
# Tai Sakuma <sakuma@fnal.gov>
from AlphaTwirl.ProgressBar import ProgressBar, MPProgressMonitor, ProgressReport
from AlphaTwirl.EventReader import MPEventLoopRunner
import time, random

##____________________________________________________________________________||
class EventLoop(object):
    def __init__(self, name):
        self.name = name
        self.readers = [ ]

    def __call__(self, progressReporter = None):
        n = random.randint(5, 50)
        for i in xrange(n):
            time.sleep(0.1)
            report = ProgressReport(name = self.name, done = i + 1, total = n)
            progressReporter.report(report)
        return self.readers

##____________________________________________________________________________||
progressBar = ProgressBar()
progressMonitor = MPProgressMonitor(presentation = progressBar)
runner = MPEventLoopRunner(progressMonitor = progressMonitor)
runner.begin()
runner.run(EventLoop("loop1"))
runner.run(EventLoop("loop2"))
runner.run(EventLoop("loop3"))
runner.run(EventLoop("loop4"))
runner.run(EventLoop("loop5"))
runner.run(EventLoop("loop6"))
runner.run(EventLoop("loop7"))
runner.run(EventLoop("loop8"))
runner.end()

##____________________________________________________________________________||
bsd-3-clause
Python
f8bd4073beb50f9fb750170e79804d13ea50db0b
update example
ericdill/bluesky,ericdill/bluesky
examples/raster_mesh.py
examples/raster_mesh.py
from bluesky.examples import Mover, SynGauss, Syn2DGauss
import bluesky.plans as bp
import bluesky.spec_api as bsa
import bluesky.callbacks
from bluesky.standard_config import gs
import bluesky.qt_kicker
bluesky.qt_kicker.install_qt_kicker()

# motors
theta = Mover('theta', ['theta'])
gamma = Mover('gamma', ['gamma'])

# synthetic detectors coupled to one motor
theta_det = SynGauss('theta_det', theta, 'theta', center=0, Imax=1, sigma=1)
gamma_det = SynGauss('gamma_det', gamma, 'gamma', center=0, Imax=1, sigma=1)

# synthetic detector coupled to two detectors
tgd = Syn2DGauss('theta_gamma_det', theta, 'theta', gamma, 'gamma',
                 center=(0, 0), Imax=1)

# set up the default detectors
gs.DETS = [theta_det, gamma_det, tgd]

ysteps = 25
xsteps = 20

# hook up the live raster callback
cb = bluesky.callbacks.LiveRaster((ysteps, xsteps),
                                  'theta_gamma_det', clim=[0, 1])
lt = bluesky.callbacks.LiveTable([theta, gamma, tgd])
gs.MASTER_DET_FIELD = 'theta_gamma_det'

mesha = bp.OuterProductAbsScanPlan(gs.DETS,
                                   theta, -2.5, 2.5, ysteps,
                                   gamma, -2, 2, xsteps, True)
gs.RE(mesha, [cb, lt])

from bluesky.examples import Mover, SynGauss, Syn2DGauss
import bluesky.simple_scans as bss
import bluesky.spec_api as bsa
import bluesky.callbacks
from bluesky.standard_config import gs
import bluesky.qt_kicker

# motors
theta = Mover('theta', ['theta'])
gamma = Mover('gamma', ['gamma'])

# synthetic detectors coupled to one motor
theta_det = SynGauss('theta_det', theta, 'theta', center=0, Imax=1, sigma=1)
gamma_det = SynGauss('gamma_det', gamma, 'gamma', center=0, Imax=1, sigma=1)

# synthetic detector coupled to two detectors
tgd = Syn2DGauss('theta_gamma_det', theta, 'theta', gamma, 'gamma',
                 center=(0, 0), Imax=1)

# set up the default detectors
gs.DETS = [theta_det, gamma_det, tgd]

ysteps = 25
xsteps = 20

# hook up the live raster callback
#cb = bluesky.callbacks.LiveRaster((ysteps + 1, xsteps + 1),
#                                  'theta_gamma_det', clim=[0, 1])

mesha = bss.OuterProductAbsScanPlan()

# run a mesh scan
gs.MASTER_DET_FIELD = 'theta_gamma_det'
bsa.mesh(theta, -2.5, 2.5, ysteps, gamma, -2, 2, xsteps, False)
bsd-3-clause
Python
f8bb295bf1d10410d36a8a8880ff96303bbda451
Update announcements.py
Boijangle/GroupMe-Message-Bot
announcements.py
announcements.py
import sys
import icalendar
import requests
import pytz
from datetime import datetime, timedelta
from libs import post_text
from icalendar import Calendar
from database import find_bot_nname
import re

r = requests.get(sys.argv[2])
icsData = r.text

cal = Calendar.from_ical(icsData)

for evt in cal.subcomponents:
    print(evt.items())
    print(evt.subcomponents)
    start = evt.decoded('DTSTART')
    now = datetime.now(tz=pytz.utc)
    time_left = start - now
    if timedelta(minutes=0) < time_left < timedelta(minutes=10):
        raw_text = str(evt.decoded('SUMMARY'))
        search = re.search(r"([^ ]+)\s(.+)", raw_text)
        (nname, message) = search.groups('1')
        nname = nname[2:]
        message = message[:-1]
        print(nname)
        print(message)
        bot_id = find_bot_nname(nname)
        if not bot_id:
            bot_id = sys.argv[1]
            post_text("I was supposed to post '" + message + "' to " + nname, bot_id)
        else:
            bot_id = bot_id[0][0]
            post_text(message, bot_id)

import sys
import icalendar
import requests
import pytz
from datetime import datetime, timedelta
from libs import post_text
from icalendar import Calendar
from database import find_bot_nname
import re

r = requests.get(sys.argv[2])
icsData = r.text

cal = Calendar.from_ical(icsData)

for evt in cal.subcomponents:
    print(evt.items())
    print(evt.subcomponents
    start = evt.decoded('DTSTART')
    now = datetime.now(tz=pytz.utc)
    time_left = start - now
    if timedelta(minutes=0) < time_left < timedelta(minutes=10):
        raw_text = str(evt.decoded('SUMMARY'))
        search = re.search(r"([^ ]+)\s(.+)", raw_text)
        (nname, message) = search.groups('1')
        nname = nname[2:]
        message = message[:-1]
        print(nname)
        print(message)
        bot_id = find_bot_nname(nname)
        if not bot_id:
            bot_id = sys.argv[1]
            post_text("I was supposed to post '" + message + "' to " + nname, bot_id)
        else:
            bot_id = bot_id[0][0]
            post_text(message, bot_id)
mit
Python
c5e13436d7d453bd851e39591f82e2ef0d740d92
Fix typo
pyfarm/pyfarm-master,pyfarm/pyfarm-master,pyfarm/pyfarm-master
pyfarm/scheduler/celery_app.py
pyfarm/scheduler/celery_app.py
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import timedelta

from pyfarm.core.config import read_env_int

from celery import Celery

celery_app = Celery('pyfarm.tasks',
                    broker='redis://',
                    include=['pyfarm.scheduler.tasks'])

celery_app.conf.CELERYBEAT_SCHEDULE = {
    "periodically_poll_agents": {
        "task": "pyfarm.scheduler.tasks.poll_agents",
        "schedule": timedelta(
            seconds=read_env_int("AGENTS_POLL_INTERVAL", 30))},
    "periodical_scheduler": {
        "task": "pyfarm.scheduler.tasks.assign_tasks",
        "schedule": timedelta(seconds=read_env_int("SCHEDULER_INTERVAL", 30))}}

if __name__ == '__main__':
    celery_app.start()

# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import timedelta

from pyfarm.core.config import read_env_int

from celery import Celery

celery_app = Celery('pyfarm.tasks',
                    broker='redis://',
                    include=['pyfarm.scheduler.tasks'])

celery_app.conf.CELERYBEAT_SCHEDULE = {
    "periodically_poll_agents": {
        "task": "pyfarm.scheduler.tasks.poll_agents",
        "schedule": timedelta(
            seconds=read_env_int("AGENTS_POLL_INTERVALE", 30))},
    "periodical_scheduler": {
        "task": "pyfarm.scheduler.tasks.assign_tasks",
        "schedule": timedelta(seconds=read_env_int("SCHEDULER_INTERVAL", 30))}}

if __name__ == '__main__':
    celery_app.start()
apache-2.0
Python
f755f9857020cfceaeb3cf9607e96cef66ccb048
update dev version after 0.21.1 tag [skip ci]
desihub/desitarget,desihub/desitarget
py/desitarget/_version.py
py/desitarget/_version.py
__version__ = '0.21.1.dev2037'
__version__ = '0.21.1'
bsd-3-clause
Python
47527996fe967d8ef713ff8814f71d49ab539fd8
update version
albertfxwang/grizli
grizli/version.py
grizli/version.py
# git describe --tags
__version__ = "0.6.0-109-g647e4b4"

# git describe --tags
__version__ = "0.6.0-86-g140db75"
mit
Python
b493082352de19ed8d3d52c8eda838064957bbc2
bump version to 1.2-BETA2
mirek2580/namebench
libnamebench/version.py
libnamebench/version.py
VERSION = '1.2-BETA2'
VERSION = '1.2-BETA1'
apache-2.0
Python
19cfe70c69b026429454fb8361ec3e8d6f1a0505
add show/hide requested signals
zwadar/pyqode.core,pyQode/pyqode.core,pyQode/pyqode.core
pyqode/core/widgets/preview.py
pyqode/core/widgets/preview.py
""" This module contains a widget that can show the html preview of an editor. """ from weakref import proxy from pyqode.qt import QtCore, QtWebWidgets from pyqode.core.api import DelayJobRunner class HtmlPreviewWidget(QtWebWidgets.QWebView): hide_requested = QtCore.Signal() show_requested = QtCore.Signal() def __init__(self, parent=None): super(HtmlPreviewWidget, self).__init__(parent) self._editor = None self._timer = DelayJobRunner(delay=1000) try: # prevent opening internal links when using QtWebKit self.page().setLinkDelegationPolicy( QtWebWidgets.QWebPage.DelegateAllLinks) except (TypeError, AttributeError): # no needed with QtWebEngine, internal links are properly handled # by the default implementation pass def set_editor(self, editor): url = QtCore.QUrl('') if editor is not None: url = QtCore.QUrl.fromLocalFile(editor.file.path) try: self.setHtml(editor.to_html(), url) except (TypeError, AttributeError): self.setHtml('<center>No preview available...</center>', url) self._editor = None self.hide_requested.emit() else: if self._editor is not None and editor != self._editor: try: self._editor.textChanged.disconnect(self._on_text_changed) except TypeError: pass editor.textChanged.connect(self._on_text_changed) self._editor = proxy(editor) self.show_requested.emit() def _on_text_changed(self, *_): self._timer.request_job(self._update_preview) def _update_preview(self): url = QtCore.QUrl('') if self._editor is not None: url = QtCore.QUrl.fromLocalFile(self._editor.file.path) try: pos = self.page().mainFrame().scrollBarValue(QtCore.Qt.Vertical) self.setHtml(self._editor.to_html(), url) self.page().mainFrame().setScrollBarValue(QtCore.Qt.Vertical, pos) except AttributeError: # Not possible with QtWebEngine??? # self._scroll_pos = self.page().mainFrame().scrollBarValue( # QtCore.Qt.Vertical) self.setHtml(self._editor.to_html(), url)
""" This module contains a widget that can show the html preview of an editor. """ from weakref import proxy from pyqode.qt import QtCore, QtWebWidgets from pyqode.core.api import DelayJobRunner class HtmlPreviewWidget(QtWebWidgets.QWebView): def __init__(self, parent=None): super(HtmlPreviewWidget, self).__init__(parent) self._editor = None self._timer = DelayJobRunner(delay=1000) try: # prevent opening internal links when using QtWebKit self.page().setLinkDelegationPolicy( QtWebWidgets.QWebPage.DelegateAllLinks) except (TypeError, AttributeError): # no needed with QtWebEngine, internal links are properly handled # by the default implementation pass def set_editor(self, editor): try: self.setHtml(editor.to_html()) except (TypeError, AttributeError): self.setHtml('<center>No preview available...</center>') self._editor = None else: if self._editor is not None and editor != self._editor: try: self._editor.textChanged.disconnect(self._on_text_changed) except TypeError: pass editor.textChanged.connect(self._on_text_changed) self._editor = proxy(editor) def _on_text_changed(self, *_): self._timer.request_job(self._update_preview) def _update_preview(self): try: pos = self.page().mainFrame().scrollBarValue(QtCore.Qt.Vertical) self.setHtml(self._editor.to_html()) self.page().mainFrame().setScrollBarValue(QtCore.Qt.Vertical, pos) except AttributeError: # Not possible with QtWebEngine??? # self._scroll_pos = self.page().mainFrame().scrollBarValue( # QtCore.Qt.Vertical) self.setHtml(self._editor.to_html())
mit
Python
d428bb582c6fe71e39bdedfbed1b355421f48139
Fix that
MPjct/PyMP
src/mysql_proto/com/stmt/prepare.py
src/mysql_proto/com/stmt/prepare.py
#!/usr/bin/env python
# coding=utf-8

from packet import Packet
from proto import Proto
from flags import Flags


class Prepare(Packet):
    query = ""

    def getPayload(self):
        payload = bytearray()

        payload.extend(Proto.build_byte(Flags.COM_STMT_PREPARE))
        payload.extend(Proto.build_eop_str(self.query))

        return payload

    @staticmethod
    def loadFromPacket(packet):
        obj = Prepare()
        proto = Proto(packet, 3)

        obj.sequenceId = proto.get_fixed_int(1)
        proto.get_filler(1)
        obj.query = proto.get_eop_str()

        return obj

if __name__ == "__main__":
    import doctest
    doctest.testmod()

#!/usr/bin/env python
# coding=utf-8

from packet import Packet
from proto import Proto
from flags import Flags


class Prepare(Packet):
    query = ""

    def getPayload(self):
        payload = bytearray()

        payload.extend(Proto.build_byte(Flags.COM_STMT_PREPARE))
        payload.extend(Proto.build_eop_str(self.query))

        return payload

    @staticmethod
    def loadFromPacket(packet):
        obj = Statistics()
        proto = Proto(packet, 3)

        obj.sequenceId = proto.get_fixed_int(1)
        proto.get_filler(1)
        obj.query = proto.get_eop_str()

        return obj

if __name__ == "__main__":
    import doctest
    doctest.testmod()
mit
Python
76611b7e6e97089b93626b472f91c04f16644034
Fix up some comments
raphael-boucher/channels,andrewgodwin/channels,raiderrobert/channels,andrewgodwin/django-channels,django/channels,Coread/channels,Krukov/channels,Coread/channels,linuxlewis/channels,Krukov/channels
channels/management/commands/runserver.py
channels/management/commands/runserver.py
import threading

from django.core.management.commands.runserver import \
    Command as RunserverCommand

from channels import DEFAULT_CHANNEL_LAYER, channel_layers
from channels.handler import ViewConsumer
from channels.log import setup_logger
from channels.worker import Worker


class Command(RunserverCommand):

    def handle(self, *args, **options):
        self.verbosity = options.get("verbosity", 1)
        self.logger = setup_logger('django.channels', self.verbosity)
        super(Command, self).handle(*args, **options)

    def inner_run(self, *args, **options):
        # Check a handler is registered for http reqs; if not, add default one
        self.channel_layer = channel_layers[DEFAULT_CHANNEL_LAYER]
        if not self.channel_layer.registry.consumer_for_channel("http.request"):
            self.channel_layer.registry.add_consumer(ViewConsumer(), ["http.request"])
        # Launch worker as subthread
        worker = WorkerThread(self.channel_layer, self.logger)
        worker.daemon = True
        worker.start()
        # Launch server in 'main' thread. Signals are disabled as it's still
        # actually a subthread under the autoreloader.
        self.logger.info("Daphne running, listening on %s:%s", self.addr, self.port)
        from daphne.server import Server
        Server(
            channel_layer=self.channel_layer,
            host=self.addr,
            port=int(self.port),
            signal_handlers=False,
        ).run()


class WorkerThread(threading.Thread):
    """
    Class that runs a worker
    """

    def __init__(self, channel_layer, logger):
        super(WorkerThread, self).__init__()
        self.channel_layer = channel_layer
        self.logger = logger

    def run(self):
        self.logger.info("Worker thread running")
        worker = Worker(channel_layer=self.channel_layer)
        worker.run()

import threading

from django.core.management.commands.runserver import \
    Command as RunserverCommand

from channels import DEFAULT_CHANNEL_LAYER, channel_layers
from channels.handler import ViewConsumer
from channels.log import setup_logger
from channels.worker import Worker


class Command(RunserverCommand):

    def handle(self, *args, **options):
        self.verbosity = options.get("verbosity", 1)
        self.logger = setup_logger('django.channels', self.verbosity)
        super(Command, self).handle(*args, **options)

    def inner_run(self, *args, **options):
        # Check a handler is registered for http reqs; if not, add default one
        self.channel_layer = channel_layers[DEFAULT_CHANNEL_LAYER]
        if not self.channel_layer.registry.consumer_for_channel("http.request"):
            self.channel_layer.registry.add_consumer(ViewConsumer(), ["http.request"])
        # Report starting up
        # Launch worker as subthread (including autoreload logic)
        worker = WorkerThread(self.channel_layer, self.logger)
        worker.daemon = True
        worker.start()
        # Launch server in main thread (Twisted doesn't like being in a
        # subthread, and it doesn't need to autoreload as there's no user code)
        self.logger.info("Daphne running, listening on %s:%s", self.addr, self.port)
        from daphne.server import Server
        Server(
            channel_layer=self.channel_layer,
            host=self.addr,
            port=int(self.port),
            signal_handlers=False,
        ).run()


class WorkerThread(threading.Thread):
    """
    Class that runs a worker
    """

    def __init__(self, channel_layer, logger):
        super(WorkerThread, self).__init__()
        self.channel_layer = channel_layer
        self.logger = logger

    def run(self):
        self.logger.info("Worker thread running")
        worker = Worker(channel_layer=self.channel_layer)
        worker.run()
bsd-3-clause
Python
e451ea4d698450813bd11fed6b501b839cd477a6
Reformat runworker a bit
raiderrobert/channels,raphael-boucher/channels,Coread/channels,Krukov/channels,andrewgodwin/django-channels,Krukov/channels,linuxlewis/channels,django/channels,Coread/channels,andrewgodwin/channels
channels/management/commands/runworker.py
channels/management/commands/runworker.py
from __future__ import unicode_literals

from django.core.management import BaseCommand, CommandError

from channels import DEFAULT_CHANNEL_LAYER, channel_layers
from channels.log import setup_logger
from channels.worker import Worker


class Command(BaseCommand):

    leave_locale_alone = True

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            '--layer',
            action='store',
            dest='layer',
            default=DEFAULT_CHANNEL_LAYER,
            help='Channel layer alias to use, if not the default.',
        )
        parser.add_argument(
            '--only-channels',
            action='append',
            dest='only_channels',
            help='Limits this worker to only listening on the provided channels (supports globbing).',
        )
        parser.add_argument(
            '--exclude-channels',
            action='append',
            dest='exclude_channels',
            help='Prevents this worker from listening on the provided channels (supports globbing).',
        )

    def handle(self, *args, **options):
        # Get the backend to use
        self.verbosity = options.get("verbosity", 1)
        self.logger = setup_logger('django.channels', self.verbosity)
        self.channel_layer = channel_layers[options.get("layer", DEFAULT_CHANNEL_LAYER)]
        # Check that handler isn't inmemory
        if self.channel_layer.local_only():
            raise CommandError(
                "You cannot span multiple processes with the in-memory layer. " +
                "Change your settings to use a cross-process channel layer."
            )
        # Check a handler is registered for http reqs
        self.channel_layer.router.check_default()
        # Launch a worker
        self.logger.info("Running worker against channel layer %s", self.channel_layer)
        # Optionally provide an output callback
        callback = None
        if self.verbosity > 1:
            callback = self.consumer_called
        # Run the worker
        try:
            Worker(
                channel_layer=self.channel_layer,
                callback=callback,
                only_channels=options.get("only_channels", None),
                exclude_channels=options.get("exclude_channels", None),
            ).run()
        except KeyboardInterrupt:
            pass

    def consumer_called(self, channel, message):
        self.logger.debug("%s", channel)

from __future__ import unicode_literals

from django.core.management import BaseCommand, CommandError

from channels import DEFAULT_CHANNEL_LAYER, channel_layers
from channels.log import setup_logger
from channels.worker import Worker


class Command(BaseCommand):

    leave_locale_alone = True

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        parser.add_argument('--layer', action='store', dest='layer', default=DEFAULT_CHANNEL_LAYER,
            help='Channel layer alias to use, if not the default.')
        parser.add_argument('--only-channels', action='append', dest='only_channels',
            help='Limits this worker to only listening on the provided channels (supports globbing).')
        parser.add_argument('--exclude-channels', action='append', dest='exclude_channels',
            help='Prevents this worker from listening on the provided channels (supports globbing).')

    def handle(self, *args, **options):
        # Get the backend to use
        self.verbosity = options.get("verbosity", 1)
        self.logger = setup_logger('django.channels', self.verbosity)
        self.channel_layer = channel_layers[options.get("layer", DEFAULT_CHANNEL_LAYER)]
        # Check that handler isn't inmemory
        if self.channel_layer.local_only():
            raise CommandError(
                "You cannot span multiple processes with the in-memory layer. " +
                "Change your settings to use a cross-process channel layer."
            )
        # Check a handler is registered for http reqs
        self.channel_layer.router.check_default()
        # Launch a worker
        self.logger.info("Running worker against channel layer %s", self.channel_layer)
        # Optionally provide an output callback
        callback = None
        if self.verbosity > 1:
            callback = self.consumer_called
        # Run the worker
        try:
            Worker(
                channel_layer=self.channel_layer,
                callback=callback,
                only_channels=options.get("only_channels", None),
                exclude_channels=options.get("exclude_channels", None),
            ).run()
        except KeyboardInterrupt:
            pass

    def consumer_called(self, channel, message):
        self.logger.debug("%s", channel)
bsd-3-clause
Python
596f9752a7956c259217b0528bed924812d0631f
Add admin filter to filter attendees with children.
pysv/djep,EuroPython/djep,pysv/djep,zerok/pyconde-website-mirror,zerok/pyconde-website-mirror,zerok/pyconde-website-mirror,EuroPython/djep,pysv/djep,EuroPython/djep,pysv/djep,EuroPython/djep,pysv/djep
pyconde/accounts/admin.py
pyconde/accounts/admin.py
from django.contrib import admin
from django.contrib.admin import SimpleListFilter

from . import models


class WithChildrenFilter(SimpleListFilter):
    title = 'Anzahl Kinder'
    parameter_name = 'children'

    def lookups(self, request, model_admin):
        return (('y', 'mit Kindern'),
                ('n', 'ohne Kinder'))

    def queryset(self, request, queryset):
        if self.value() == 'y':
            queryset = queryset.filter(num_accompanying_children__gt=0)
        elif self.value() == 'n':
            queryset = queryset.filter(num_accompanying_children=0)
        return queryset


class ProfileAdmin(admin.ModelAdmin):
    list_display = ('pk', 'user', 'num_accompanying_children')
    list_display_links = ('pk', 'user')
    list_filter = (WithChildrenFilter,)

admin.site.register(models.Profile, ProfileAdmin)

from django.contrib import admin

from . import models

admin.site.register(models.Profile,
                    list_display=['user'])
bsd-3-clause
Python
1b5b43542fe3ba8f85076c6b6cb1e98a4614a0c6
reformat JobGroup to match other tables
pyfarm/pyfarm-master,pyfarm/pyfarm-master,pyfarm/pyfarm-master
pyfarm/models/jobgroup.py
pyfarm/models/jobgroup.py
# No shebang line, this module is meant to be imported
#
# Copyright 2015 Ambient Entertainment GmbH & Co. KG
# Copyright 2015 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Job Group Model
===============

Model for job groups
"""

from pyfarm.master.application import db
from pyfarm.models.core.cfg import (
    TABLE_JOB_GROUP, TABLE_JOB_TYPE, TABLE_USER, MAX_JOBGROUP_NAME_LENGTH)
from pyfarm.models.core.mixins import UtilityMixins
from pyfarm.models.core.types import id_column, IDTypeWork


class JobGroup(db.Model, UtilityMixins):
    """
    Used to group jobs together for better presentation in the UI
    """
    __tablename__ = TABLE_JOB_GROUP

    id = id_column(IDTypeWork)

    title = db.Column(
        db.String(MAX_JOBGROUP_NAME_LENGTH),
        nullable=False,
        doc="The title of the job group's name"
    )

    main_jobtype_id = db.Column(
        IDTypeWork,
        db.ForeignKey("%s.id" % TABLE_JOB_TYPE),
        nullable=False,
        doc="ID of the jobtype of the main job in this "
            "group. Purely for display and filtering.")

    user_id = db.Column(
        db.Integer,
        db.ForeignKey("%s.id" % TABLE_USER),
        doc="The id of the user who owns these jobs"
    )

    #
    # Relationships
    #
    main_jobtype = db.relationship(
        "JobType",
        backref=db.backref("jobgroups", lazy="dynamic"),
        doc="The jobtype of the main job in this group")

    user = db.relationship(
        "User",
        backref=db.backref("jobgroups", lazy="dynamic"),
        doc="The user who owns these jobs"
    )

# No shebang line, this module is meant to be imported
#
# Copyright 2015 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Job Group Model
===============

Model for job groups
"""

from pyfarm.master.application import db
from pyfarm.models.core.cfg import (
    TABLE_JOB_GROUP, TABLE_JOB_TYPE, TABLE_USER, MAX_JOBGROUP_NAME_LENGTH)
from pyfarm.models.core.mixins import UtilityMixins
from pyfarm.models.core.types import id_column, IDTypeWork


class JobGroup(db.Model, UtilityMixins):
    """
    Used to group jobs together for better presentation in the UI
    """
    __tablename__ = TABLE_JOB_GROUP

    id = id_column(IDTypeWork)
    title = db.Column(db.String(MAX_JOBGROUP_NAME_LENGTH), nullable=False)
    main_jobtype_id = db.Column(IDTypeWork,
                                db.ForeignKey("%s.id" % TABLE_JOB_TYPE),
                                nullable=False,
                                doc="ID of the jobtype of the main job in this "
                                    "group. Purely for display and "
                                    "filtering.")
    user_id = db.Column(db.Integer,
                        db.ForeignKey("%s.id" % TABLE_USER),
                        doc="The id of the user who owns these jobs")

    main_jobtype = db.relationship("JobType",
                                   backref=db.backref("jobgroups",
                                                      lazy="dynamic"),
                                   doc="The jobtype of the main job in this "
                                       "group")
    user = db.relationship("User",
                           backref=db.backref("jobgroups", lazy="dynamic"),
                           doc="The user who owns these jobs")
apache-2.0
Python
f4106e3025c5dbb3136db94081b9998a052c8e70
Bump version to 2.0.0-alpha2
pyQode/pyqode.python,pyQode/pyqode.python,zwadar/pyqode.python,mmolero/pyqode.python
pyqode/python/__init__.py
pyqode/python/__init__.py
# -*- coding: utf-8 -*-
"""
pyqode.python is an extension of pyqode.core that brings support
for the python programming language. It does so by providing a set
of additional modes and panels for the frontend and by supplying
dedicated workers for the backend.
"""
__version__ = "2.0.0-alpha2"

# -*- coding: utf-8 -*-
"""
pyqode.python is an extension of pyqode.core that brings support
for the python programming language. It does so by providing a set
of additional modes and panels for the frontend and by supplying
dedicated workers for the backend.
"""
__version__ = "2.0.0-alpha1"
mit
Python
d96aac74b32a166ec724234540dc93a8ea526a3f
fix test error in windows
PyThaiNLP/pythainlp
pythainlp/tag/__init__.py
pythainlp/tag/__init__.py
# -*- coding: utf-8 -*- # TODO ปรับ API ให้เหมือน nltk from __future__ import absolute_import,division,print_function,unicode_literals import sys def pos_tag(text,engine='old'): """ ระบบ postaggers pos_tag(text,engine='old') engine ที่รองรับ * old เป็น UnigramTagger * artagger เป็น RDR POS Tagger """ if engine=='old': from .old import tag elif engine=='artagger': if sys.version_info < (3,4): sys.exit('Sorry, Python < 3.4 is not supported') def tag(text1): try: from artagger import Tagger except ImportError: import pip pip.main(['install','https://github.com/wannaphongcom/artagger/archive/master.zip']) try: from artagger import Tagger except ImportError: print("Error ! using 'pip install https://github.com/wannaphongcom/artagger/archive/master.zip'") sys.exit(0) tagger = Tagger() words = tagger.tag(' '.join(text1)) totag=[] for word in words: totag.append((word.word, word.tag)) return totag return tag(text)
# -*- coding: utf-8 -*-
# TODO: make the API match nltk's
from __future__ import absolute_import, division, print_function, unicode_literals
import sys


def pos_tag(text, engine='old'):
    """
    POS taggers

    pos_tag(text, engine='old')
    Supported engines:
    * old - UnigramTagger
    * artagger - RDR POS Tagger
    """
    if engine == 'old':
        from .old import tag
    elif engine == 'artagger':
        if sys.version_info < (3, 4):
            sys.exit('Sorry, Python < 3.4 is not supported')

        def tag(text1):
            try:
                from artagger import Tagger
            except ImportError:
                import pip
                pip.main(['install', 'https://github.com/franziz/artagger/archive/master.zip'])
                try:
                    from artagger import Tagger
                except ImportError:
                    print("Error! Install it with 'pip install https://github.com/franziz/artagger/archive/master.zip'")
                    sys.exit(0)
            tagger = Tagger()
            words = tagger.tag(' '.join(text1))
            totag = []
            for word in words:
                totag.append((word.word, word.tag))
            return totag
    return tag(text)
apache-2.0
Python
adb0c2bd97c6c4ca7272d764b669cef90f81a5bb
Allow non-dev logins to dev builds
mrozekma/Sprint,mrozekma/Sprint,mrozekma/Sprint
handlers/login.py
handlers/login.py
from rorn.Box import LoginBox, ErrorBox, WarningBox, SuccessBox
from rorn.Session import delay

from User import User
from Button import Button
from LoadValues import isDevMode
from Event import Event
from utils import *

@get('login')
def login(handler, request):
    handler.title('Login')

    if handler.session['user']:
        print WarningBox('Logged In', 'You are already logged in as %s' % handler.session['user'])
    else:
        print LoginBox()

@post('login')
def loginPost(handler, request, p_username, p_password):
    handler.title('Login')
    user = User.load(username = p_username, password = User.crypt(p_username, p_password))
    if user:
        if not user.hasPrivilege('User'):
            Event.login(handler, user, False, "Account disabled")
            delay(handler, ErrorBox("Login Failed", "Your account has been disabled"))
            redirect('/')
        if user.resetkey:
            user.resetkey = None
            user.save()
        handler.session['user'] = user
        Event.login(handler, user, True)
        delay(handler, SuccessBox("Login Complete", "Logged in as %s" % user, close = True))
        redirect('/')
    else:
        Event.login(handler, None, False, "Failed login for %s" % p_username)
        delay(handler, ErrorBox("Login Failed", "Invalid username/password combination"))
        redirect('/')

@get('logout')
def logout(handler, request):
    print "<form method=\"post\" action=\"/logout\">"
    print Button('Logout', type = 'submit').negative()
    print "</form>"

@post('logout')
def logoutPost(handler, request):
    if handler.session['user']:
        del handler.session['user']
        if 'impersonator' in handler.session:
            del handler.session['impersonator']
        redirect('/')
    else:
        print ErrorBox("Logout Failed", "You are not logged in")
from rorn.Box import LoginBox, ErrorBox, WarningBox, SuccessBox
from rorn.Session import delay

from User import User
from Button import Button
from LoadValues import isDevMode
from Event import Event
from utils import *

@get('login')
def login(handler, request):
    handler.title('Login')

    if handler.session['user']:
        print WarningBox('Logged In', 'You are already logged in as %s' % handler.session['user'])
    else:
        print LoginBox()

@post('login')
def loginPost(handler, request, p_username, p_password):
    handler.title('Login')
    user = User.load(username = p_username, password = User.crypt(p_username, p_password))
    if user:
        if not user.hasPrivilege('User'):
            Event.login(handler, user, False, "Account disabled")
            delay(handler, ErrorBox("Login Failed", "Your account has been disabled"))
            redirect('/')
        elif isDevMode() and not user.hasPrivilege('Dev'):
            Event.login(handler, user, False, "Non-dev login blocked")
            delay(handler, ErrorBox("Login Failed", "This is a development build"))
            redirect('/')
        if user.resetkey:
            user.resetkey = None
            user.save()
        handler.session['user'] = user
        Event.login(handler, user, True)
        delay(handler, SuccessBox("Login Complete", "Logged in as %s" % user, close = True))
        redirect('/')
    else:
        Event.login(handler, None, False, "Failed login for %s" % p_username)
        delay(handler, ErrorBox("Login Failed", "Invalid username/password combination"))
        redirect('/')

@get('logout')
def logout(handler, request):
    print "<form method=\"post\" action=\"/logout\">"
    print Button('Logout', type = 'submit').negative()
    print "</form>"

@post('logout')
def logoutPost(handler, request):
    if handler.session['user']:
        del handler.session['user']
        if 'impersonator' in handler.session:
            del handler.session['impersonator']
        redirect('/')
    else:
        print ErrorBox("Logout Failed", "You are not logged in")
mit
Python
64713296cf4f4f3772a1ac23248d4fb930ee23ff
Bump to 0.3
cogniteev/python-gdrive
python_gdrive/__init__.py
python_gdrive/__init__.py
from client import GoogleDrive

__version__ = '0.3'
from client import GoogleDrive

__version__ = '0.3-dev'
apache-2.0
Python
3475aee89ef5b22a92a674400ea37430f8255924
handle Appengine Datastore Key Type
hudora/huTools
huTools/hujson.py
huTools/hujson.py
#!/usr/bin/env python
# encoding: utf-8
"""
hujson.py - extended json - tries to be compatible with simplejson

hujson can encode additional types like decimal and datetime into
valid json. All the heavy lifting is done by John Millikin's `jsonlib`,
see https://launchpad.net/jsonlib

Created by Maximillian Dornseif on 2010-09-10.
Copyright (c) 2010 HUDORA. All rights reserved.
"""

from _jsonlib import UnknownSerializerError
import _jsonlib
import datetime


def _unknown_handler(value):
    if isinstance(value, datetime.date):
        return str(value)
    elif isinstance(value, datetime.datetime):
        return value.isoformat() + 'Z'
    elif hasattr(value, 'properties'):
        return dict([(key, getattr(value, key)) for key in value.properties().keys()])
    elif 'google.appengine.api.users.User' in str(type(value)):
        return "%s/%s" % (value.user_id(), value.email())
    elif 'google.appengine.api.datastore_types.Key' in str(type(value)):
        return str(value)
    raise UnknownSerializerError("%s(%s)" % (type(value), value))


def dumps(val):
    return _jsonlib.write(val, on_unknown=_unknown_handler, indent=' ')


def loads(data):
    return _jsonlib.read(data)
#!/usr/bin/env python
# encoding: utf-8
"""
hujson.py - extended json - tries to be compatible with simplejson

hujson can encode additional types like decimal and datetime into
valid json. All the heavy lifting is done by John Millikin's `jsonlib`,
see https://launchpad.net/jsonlib

Created by Maximillian Dornseif on 2010-09-10.
Copyright (c) 2010 HUDORA. All rights reserved.
"""

from _jsonlib import UnknownSerializerError
import _jsonlib
import datetime


def _unknown_handler(value):
    if isinstance(value, datetime.date):
        return str(value)
    elif isinstance(value, datetime.datetime):
        return value.isoformat() + 'Z'
    elif hasattr(value, 'properties'):
        return dict([(key, getattr(value, key)) for key in value.properties().keys()])
    elif 'google.appengine.api.users.User' in str(type(value)):
        return "%s/%s" % (value.user_id(), value.email())
    raise UnknownSerializerError("%s(%s)" % (type(value), value))


def dumps(val):
    return _jsonlib.write(val, on_unknown=_unknown_handler, indent=' ')


def loads(data):
    return _jsonlib.read(data)
bsd-3-clause
Python
530844a16a573ab49850a22631f97d8ad89465c9
Clean Up NLU state
WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors
sara_flexbe_states/src/sara_flexbe_states/sara_nlu_spr.py
sara_flexbe_states/src/sara_flexbe_states/sara_nlu_spr.py
#!/usr/bin/env python
# encoding=utf8
from __future__ import print_function

from flexbe_core import EventState, Logger
import rospy
from wm_nlu.srv import AnswerQuestion
from std_msgs.msg import String


class SaraNLUspr(EventState):
    '''
    Use wm_nlu to parse a sentence and return the answer.

    ># sentence     string      sentence to parse
    #> answer       string      answer

    <= understood       Finished job.
    <= not_understood   Finished job but no commands detected.
    <= fail             service unavailable.
    '''

    def __init__(self):
        # See example_state.py for basic explanations.
        super(SaraNLUspr, self).__init__(outcomes=['understood', 'not_understood', 'fail'],
                                         input_keys=['sentence'], output_keys=['answer'])

        serviceName = "/answer_question"
        Logger.loginfo("waiting for service: " + serviceName)
        rospy.wait_for_service(serviceName)
        self.service = rospy.ServiceProxy(serviceName, AnswerQuestion)

    def execute(self, userdata):
        # Call the NLU service
        response = self.service(String(userdata.sentence))

        # Checking the validity of the response
        if response.str.data == "":
            userdata.answer = response.str.data
            return "fail"

        userdata.answer = response.str.data
        return "understood"
#!/usr/bin/env python
# encoding=utf8
from __future__ import print_function

from flexbe_core import EventState, Logger
import rospy
import re
from wm_nlu.srv import AnswerQuestion
from std_msgs.msg import String


class SaraNLUspr(EventState):
    '''
    Use wm_nlu to parse a sentence and return the detected actions in a standard format (ActionForm)

    ># sentence     string      sentence to parse
    #> ActionForms  string[]    list of ActionForms

    <= understood       Finished job.
    <= not_understood   Finished job but no commands detected.
    <= fail             service unavailable.
    '''

    def __init__(self):
        # See example_state.py for basic explanations.
        super(SaraNLUspr, self).__init__(outcomes=['understood', 'not_understood', 'fail'],
                                         input_keys=['sentence'], output_keys=['answer'])
        self.RecurentSubject = None
        self.Person = None
        self.serviceName = "/answer_question"
        Logger.loginfo("waiting for service: " + self.serviceName)
        rospy.wait_for_service(self.serviceName)

    def execute(self, userdata):
        # Call the NLU service
        serv = rospy.ServiceProxy(self.serviceName, AnswerQuestion)
        Resp = serv(String(userdata.sentence))

        # Checking the validity of the response
        if Resp.str.data == "":
            userdata.answer = Resp.str
            return "fail"

        userdata.answer = Resp.str
        return "understood"

    def on_enter(self, userdata):
        Logger.loginfo('Enter SaraNLU')
bsd-3-clause
Python
142d3ebf66e31aad2363fc0c421dc573dc9b1157
Simplify current_service() function
scikit-build/scikit-ci
ci/utils.py
ci/utils.py
# -*- coding: utf-8 -*-
"""This module defines functions generally useful in scikit-ci."""

import os

try:
    from .constants import SERVICES, SERVICES_ENV_VAR
except (SystemError, ValueError):
    from constants import SERVICES, SERVICES_ENV_VAR


def current_service():
    for service, env_var in SERVICES_ENV_VAR.items():
        if os.environ.get(env_var, 'false').lower() == 'true':
            return service
    raise LookupError(
        "unknown service: None of the environment variables {} are set "
        "to 'true' or 'True'".format(", ".join(SERVICES_ENV_VAR.values()))
    )


def current_operating_system(service):
    return os.environ[SERVICES[service]] if SERVICES[service] else None
# -*- coding: utf-8 -*-
"""This module defines functions generally useful in scikit-ci."""

import os

try:
    from .constants import SERVICES, SERVICES_ENV_VAR
except (SystemError, ValueError):
    from constants import SERVICES, SERVICES_ENV_VAR


def current_service():
    for service in SERVICES.keys():
        if os.environ.get(
                SERVICES_ENV_VAR[service], 'false').lower() == 'true':
            return service
    raise LookupError(
        "unknown service: None of the environment variables {} are set "
        "to 'true' or 'True'".format(", ".join(SERVICES_ENV_VAR.values()))
    )


def current_operating_system(service):
    return os.environ[SERVICES[service]] if SERVICES[service] else None
apache-2.0
Python
89766874e7ef17bdce4cfa7cae9898336928c19e
Remove satellites from JSON
BrodaNoel/bropy,BrodaNoel/bropy
modules/gy-gps6mv1/core/get.py
modules/gy-gps6mv1/core/get.py
#! /usr/bin/python
# Written by Dan Mandle http://dan.mandle.me September 2012
# Modified by Broda Noel @brodanoel (in all social networks)
# License: GPL 2.0

from gps import *
from time import *
import time
import threading
import sys

gpsd = None  # setting the global variable

class GpsPoller(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        global gpsd  # bring it in scope
        gpsd = gps(mode=WATCH_ENABLE)  # starting the stream of info
        self.current_value = None
        self.running = True  # setting the thread running to true

    def run(self):
        global gpsd
        while gpsp.running:
            gpsd.next()  # this will continue to loop and grab EACH set of gpsd info to clear the buffer

if __name__ == '__main__':
    gpsp = GpsPoller()  # create the thread
    try:
        gpsp.start()  # start it up

        attempts = 0
        gotData = False
        while gotData == False and attempts < 3:  # It may take a second or two to get good data
            if gpsd.fix.latitude != 0 or gpsd.fix.longitude != 0:
                gotData = True
                attempts += 1
                print '{'
                print 'latitude:', gpsd.fix.latitude, ','
                print 'longitude:', gpsd.fix.longitude, ','
                print 'time:', gpsd.fix.time, ','
                print 'utcTime:', gpsd.utc, ','
                print 'altitude:', gpsd.fix.altitude, ','
                print 'eps:', gpsd.fix.eps, ','
                print 'epx:', gpsd.fix.epx, ','
                print 'epv:', gpsd.fix.epv, ','
                print 'ept:', gpsd.fix.ept, ','
                print 'speed:', gpsd.fix.speed, ','
                print 'climb:', gpsd.fix.climb, ','
                print 'track:', gpsd.fix.track, ','
                print 'mode:', gpsd.fix.mode, ','
                #print 'satellites:', gpsd.satellites
                print '}'
                sys.exit()
            else:
                time.sleep(1)  # set to whatever
    except (KeyboardInterrupt, SystemExit):  # when you press ctrl+c
        gpsp.running = False
        gpsp.join()  # wait for the thread to finish what it's doing
#! /usr/bin/python
# Written by Dan Mandle http://dan.mandle.me September 2012
# Modified by Broda Noel @brodanoel (in all social networks)
# License: GPL 2.0

from gps import *
from time import *
import time
import threading
import sys

gpsd = None  # setting the global variable

class GpsPoller(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        global gpsd  # bring it in scope
        gpsd = gps(mode=WATCH_ENABLE)  # starting the stream of info
        self.current_value = None
        self.running = True  # setting the thread running to true

    def run(self):
        global gpsd
        while gpsp.running:
            gpsd.next()  # this will continue to loop and grab EACH set of gpsd info to clear the buffer

if __name__ == '__main__':
    gpsp = GpsPoller()  # create the thread
    try:
        gpsp.start()  # start it up

        attempts = 0
        gotData = False
        while gotData == False and attempts < 3:  # It may take a second or two to get good data
            if gpsd.fix.latitude != 0 or gpsd.fix.longitude != 0:
                gotData = True
                attempts += 1
                print '{'
                print 'latitude:', gpsd.fix.latitude, ','
                print 'longitude:', gpsd.fix.longitude, ','
                print 'time:', gpsd.fix.time, ','
                print 'utcTime:', gpsd.utc, ','
                print 'altitude:', gpsd.fix.altitude, ','
                print 'eps:', gpsd.fix.eps, ','
                print 'epx:', gpsd.fix.epx, ','
                print 'epv:', gpsd.fix.epv, ','
                print 'ept:', gpsd.fix.ept, ','
                print 'speed:', gpsd.fix.speed, ','
                print 'climb:', gpsd.fix.climb, ','
                print 'track:', gpsd.fix.track, ','
                print 'mode:', gpsd.fix.mode, ','
                print 'satellites:', gpsd.satellites
                print '}'
                sys.exit()
            else:
                time.sleep(1)  # set to whatever
    except (KeyboardInterrupt, SystemExit):  # when you press ctrl+c
        gpsp.running = False
        gpsp.join()  # wait for the thread to finish what it's doing
mit
Python
3685715cd260f4f5ca392caddf7fb0c01af9ebcc
Add in comments for orgs and places too, remove limit
mysociety/pombola,Hutspace/odekro,ken-muturi/pombola,mysociety/pombola,Hutspace/odekro,patricmutwiri/pombola,geoffkilpin/pombola,patricmutwiri/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,geoffkilpin/pombola,geoffkilpin/pombola,Hutspace/odekro,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,geoffkilpin/pombola,hzj123/56th,Hutspace/odekro,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,hzj123/56th,ken-muturi/pombola,geoffkilpin/pombola,Hutspace/odekro,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th
mzalendo/comments2/feeds.py
mzalendo/comments2/feeds.py
from disqus.wxr_feed import ContribCommentsWxrFeed

# from comments2.models import Comment
from core.models import Person, Place, Organisation

# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format

class CommentWxrFeed(ContribCommentsWxrFeed):
    link = "/"

    def items(self):
        # use a name that doesn't shadow the builtin `list`
        items = []
        items.extend(Person.objects.all())
        items.extend(Organisation.objects.all())
        items.extend(Place.objects.all())
        return items

    def item_pubdate(self, item):
        return item.created

    def item_description(self, item):
        return str(item)

    def item_guid(self, item):
        # set to none so that the output dsq:thread_identifier is empty
        return None

    def item_comments(self, item):
        return item.comments.all()

    def comment_user_name(self, comment):
        return str(comment.user)

    def comment_user_email(self, comment):
        return comment.user.email or str(comment.id) + '@bogus-email-address.com'

    def comment_user_url(self, comment):
        return None

    def comment_is_approved(self, comment):
        return 1
from disqus.wxr_feed import ContribCommentsWxrFeed

# from comments2.models import Comment
from core.models import Person

# http://help.disqus.com/customer/portal/articles/472150-custom-xml-import-format

class CommentWxrFeed(ContribCommentsWxrFeed):
    link = "/"

    def items(self):
        return Person.objects.all()[:5]  # remove [:5] before generating full dump

    def item_pubdate(self, item):
        return item.created

    def item_description(self, item):
        return str(item)

    def item_guid(self, item):
        # set to none so that the output dsq:thread_identifier is empty
        return None

    def item_comments(self, item):
        return item.comments.all()

    def comment_user_name(self, comment):
        return str(comment.user)

    def comment_user_email(self, comment):
        return comment.user.email or str(comment.id) + '@bogus-email-address.com'

    def comment_user_url(self, comment):
        return None

    def comment_is_approved(self, comment):
        return 1
agpl-3.0
Python
fe998a48be769f6a957611584145706b71385cc9
Fix airflow jobs check cmd for TriggererJob (#19185)
nathanielvarona/airflow,lyft/incubator-airflow,cfei18/incubator-airflow,apache/incubator-airflow,danielvdende/incubator-airflow,bolkedebruin/airflow,bolkedebruin/airflow,nathanielvarona/airflow,apache/airflow,Acehaidrey/incubator-airflow,bolkedebruin/airflow,Acehaidrey/incubator-airflow,Acehaidrey/incubator-airflow,apache/incubator-airflow,Acehaidrey/incubator-airflow,danielvdende/incubator-airflow,cfei18/incubator-airflow,apache/airflow,nathanielvarona/airflow,lyft/incubator-airflow,danielvdende/incubator-airflow,lyft/incubator-airflow,mistercrunch/airflow,danielvdende/incubator-airflow,mistercrunch/airflow,cfei18/incubator-airflow,nathanielvarona/airflow,mistercrunch/airflow,nathanielvarona/airflow,danielvdende/incubator-airflow,apache/airflow,nathanielvarona/airflow,apache/airflow,apache/incubator-airflow,cfei18/incubator-airflow,bolkedebruin/airflow,apache/airflow,lyft/incubator-airflow,danielvdende/incubator-airflow,apache/airflow,cfei18/incubator-airflow,bolkedebruin/airflow,mistercrunch/airflow,Acehaidrey/incubator-airflow,Acehaidrey/incubator-airflow,apache/incubator-airflow,cfei18/incubator-airflow
airflow/jobs/__init__.py
airflow/jobs/__init__.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import airflow.jobs.backfill_job
import airflow.jobs.base_job
import airflow.jobs.local_task_job
import airflow.jobs.scheduler_job
import airflow.jobs.triggerer_job  # noqa
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import airflow.jobs.backfill_job
import airflow.jobs.base_job
import airflow.jobs.local_task_job
import airflow.jobs.scheduler_job  # noqa
apache-2.0
Python
d266de64cbcc7ed8672e9bb61cdb966870fccfdc
Use random.choice() & reduce len() duplication
bowen0701/algorithms_data_structures
alg_percentile_select.py
alg_percentile_select.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import random


def percentile_select(ls, k):
    """Kth percentile selection algorithm.

    Just select the kth element, without caring about
    the relative ordering of the rest of them.

    The algorithm performs in place without allocating
    new memory for the three sublists using three pointers.

    Time complexity: O(n).
    """
    v = random.choice(ls)
    idx_eq_v = [i for i, a in enumerate(ls) if a == v]
    idx_le_v = [i for i, a in enumerate(ls) if a < v]
    idx_ge_v = [i for i, a in enumerate(ls) if a > v]
    n_le = len(idx_le_v)
    n_eq = len(idx_eq_v)

    if k <= n_le:
        le_v_ls = [ls[idx] for idx in idx_le_v]
        return percentile_select(le_v_ls, k)
    elif n_le < k <= n_le + n_eq:
        return v
    elif k > n_le + n_eq:
        ge_v_ls = [ls[idx] for idx in idx_ge_v]
        return percentile_select(ge_v_ls, k - n_le - n_eq)


def main():
    n = 100
    ls = range(n)
    random.shuffle(ls)

    print('Get median by selection:')
    print(percentile_select(ls, n // 2))

    print('Get min by selection:')
    print(percentile_select(ls, 1))

    print('Get max by selection:')
    print(percentile_select(ls, n))


if __name__ == '__main__':
    main()
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import random


def percentile_select(ls, k):
    """Kth percentile selection algorithm.

    Just select the kth element, without caring about
    the relative ordering of the rest of them.

    The algorithm performs in place without allocating
    new memory for the three sublists using three pointers.

    Time complexity: O(n).
    """
    v = random.sample(ls, 1)[0]
    idx_eq_v = [i for i, a in enumerate(ls) if a == v]
    idx_le_v = [i for i, a in enumerate(ls) if a < v]
    idx_ge_v = [i for i, a in enumerate(ls) if a > v]

    if k <= len(idx_le_v):
        le_v_ls = [ls[idx] for idx in idx_le_v]
        return percentile_select(le_v_ls, k)
    elif len(idx_le_v) < k <= len(idx_le_v) + len(idx_eq_v):
        return v
    elif k > len(idx_le_v) + len(idx_eq_v):
        ge_v_ls = [ls[idx] for idx in idx_ge_v]
        return percentile_select(ge_v_ls, k - len(idx_le_v) - len(idx_eq_v))


def main():
    n = 100
    ls = range(n)
    random.shuffle(ls)
    print('List: {}'.format(ls))

    print('Get median by selection:')
    print(percentile_select(ls, n // 2))

    print('Get min by selection:')
    print(percentile_select(ls, 1))

    print('Get max by selection:')
    print(percentile_select(ls, n))


if __name__ == '__main__':
    main()
bsd-2-clause
Python
d1c16f90ca86bc1bd11a81f021d8317a82902a69
print annotation
varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor
ui/app/models.py
ui/app/models.py
from . import db


class Spans(db.Model):
    __tablename__ = 'zipkin_spans'

    span_id = db.Column(db.Integer)
    parent_id = db.Column(db.Integer)
    trace_id = db.Column(db.Integer)
    span_name = db.Column(db.String(255))
    debug = db.Column(db.Integer)
    duration = db.Column(db.Integer)
    created_ts = db.Column(db.Integer)

    def __repr__(self):
        return '<Span %r>' % self.span_name


class Annotations(db.Model):
    __tablename__ = 'zipkin_annotations'

    span_id = db.Column(db.Integer)
    trace_id = db.Column(db.Integer)
    span_name = db.Column(db.String(255))
    service_name = db.Column(db.String(255))
    value = db.Column(db.Text)
    ipv4 = db.Column(db.Integer)
    port = db.Column(db.Integer)
    a_timestamp = db.Column(db.Integer)
    duration = db.Column(db.Integer)

    def __repr__(self):
        return '<Annotation %r - %r>' % (self.span_name, self.service_name)
from . import db


class Spans(db.Model):
    __tablename__ = 'zipkin_spans'

    span_id = db.Column(db.Integer)
    parent_id = db.Column(db.Integer)
    trace_id = db.Column(db.Integer)
    span_name = db.Column(db.String(255))
    debug = db.Column(db.Integer)
    duration = db.Column(db.Integer)
    created_ts = db.Column(db.Integer)

    def __repr__(self):
        return '<Span %r>' % self.span_name


class Annotations(db.Model):
    __tablename__ = 'zipkin_annotations'

    span_id = db.Column(db.Integer)
    trace_id = db.Column(db.Integer)
    span_name = db.Column(db.String(255))
    service_name = db.Column(db.String(255))
    value = db.Column(db.Text)
    ipv4 = db.Column(db.Integer)
    port = db.Column(db.Integer)
    a_timestamp = db.Column(db.Integer)
    duration = db.Column(db.Integer)
bsd-2-clause
Python
b38555ff465f59333f32c2bb556f6b7a236e288b
disable traceview for now
rsalmond/seabus,rsalmond/seabus,rsalmond/seabus,rsalmond/seabus,rsalmond/seabus
seabus/web/web.py
seabus/web/web.py
from flask import Flask
import oboe
from oboeware import OboeMiddleware

from seabus.web.blueprint import blueprint
from seabus.common.database import db
from seabus.web.socketio import socketio

def create_app(config=None):
    app = Flask(__name__)

    if config is not None:
        app.config.from_object('seabus.web.config.{}'.format(config))
    else:
        app.config.from_object('seabus.web.config.Dev')

    socketio.init_app(app)
    app.register_blueprint(blueprint)
    db.init_app(app)
    tv_app = OboeMiddleware(app)
    return app
from flask import Flask
import oboe
from oboeware import OboeMiddleware

from seabus.web.blueprint import blueprint
from seabus.common.database import db
from seabus.web.socketio import socketio

def create_app(config=None):
    app = Flask(__name__)

    if config is not None:
        app.config.from_object('seabus.web.config.{}'.format(config))
    else:
        app.config.from_object('seabus.web.config.Dev')

    socketio.init_app(app)
    app.register_blueprint(blueprint)
    db.init_app(app)
    #TODO: tv_app = OboeMiddleware(app)
    return app
mit
Python
b5241e62cb7cc09b5d469f1cf3908fa1d7cedc21
Tweak the settings.
openspending/gobble
gobble/settings.py
gobble/settings.py
"""User configurable settings""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from future import standard_library standard_library.install_aliases() from os import getenv from logging import DEBUG, INFO from os.path import expanduser, join, abspath _home = abspath(join(expanduser('~'))) class Production(object): JSON_INDENT = 4 EXPANDED_LOG_STYLE = True CONSOLE_LOG_LEVEL = DEBUG FILE_LOG_LEVEL = DEBUG FILE_LOG_FORMAT = '[%(asctime)s] [%(module)s] [%(levelname)s] %(message)s' CONSOLE_LOG_FORMAT = '[%(name)s] [%(levelname)s] %(message)s' OS_URL = 'http://next.openspending.org' DATAPACKAGE_DETECTION_THRESHOLD = 1 VALIDATION_FEEDBACK_OPTIONS = ['message'] DATAFILE_HASHING_BLOCK_SIZE = 65536 CONFIG_DIR = join(_home, '.gobble') CONFIG_FILE = join(_home, '.gobble', 'settings.json') TOKEN_FILE = join(_home, '.gobble', 'token.json') LOG_FILE = join(_home, '.gobble', 'user.log') MOCK_REQUESTS = False LOCALHOST = ('127.0.0.1', 8001) class Development(Production): CONSOLE_LOG_LEVEL = DEBUG FILE_LOG_LEVEL = None LOG_FILE = None OS_URL = 'http://dev.openspending.org' CONFIG_DIR = join(_home, '.gobble.dev') CONFIG_FILE = join(_home, '.gobble.dev', 'config.json') TOKEN_FILE = join(_home, '.gobble.dev', 'token.json') MOCK_REQUESTS = bool(getenv('GOBBLE_MOCK_REQUESTS', False)) CONSOLE_LOG_FORMAT = ('[%(name)s] ' '[%(asctime)s] ' '[%(module)s] ' '[%(funcName)s] ' '[%(lineno)d] ' '[%(levelname)s] ' '%(message)s') class Testing(Production): MOCK_REQUESTS = True
"""User configurable settings""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from future import standard_library standard_library.install_aliases() from os import getenv from logging import DEBUG, INFO from os.path import expanduser, join, abspath _home = abspath(join(expanduser('~'))) class Production(object): CONSOLE_LOG_LEVEL = INFO FILE_LOG_LEVEL = DEBUG FILE_LOG_FORMAT = '[%(asctime)s] [%(module)s] [%(levelname)s] %(message)s' CONSOLE_LOG_FORMAT = '[%(name)s] [%(module)s] [%(levelname)s] %(message)s' OS_URL = 'http://next.openspending.org' DATAPACKAGE_DETECTION_THRESHOLD = 1 VALIDATION_FEEDBACK_OPTIONS = ['message'] DATAFILE_HASHING_BLOCK_SIZE = 65536 CONFIG_DIR = join(_home, '.gobble') CONFIG_FILE = join(_home, '.gobble', 'settings.json') TOKEN_FILE = join(_home, '.gobble', 'token.json') LOG_FILE = join(_home, '.gobble', 'user.log') MOCK_REQUESTS = False LOCALHOST = ('127.0.0.1', 8001) class Development(Production): CONSOLE_LOG_LEVEL = DEBUG FILE_LOG_LEVEL = None LOG_FILE = None OS_URL = 'http://dev.openspending.org' CONFIG_DIR = join(_home, '.gobble.dev') CONFIG_FILE = join(_home, '.gobble.dev', 'config.json') TOKEN_FILE = join(_home, '.gobble.dev', 'token.json') MOCK_REQUESTS = bool(getenv('GOBBLE_MOCK_REQUESTS', False)) class Testing(Production): MOCK_REQUESTS = True
mit
Python
2050017ced613f5c0282dcfaf07494b8dbcc8e41
Update ipc_lista2.05.py
any1m1c/ipc20161
lista2/ipc_lista2.05.py
lista2/ipc_lista2.05.py
#ipc_lista2.05
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Write a program that reads a student's two partial grades. The program must compute the student's average and display:
#--the message "Aprovado" (passed) if the average is greater than or equal to seven;
#--the message "Reprovado" (failed) if the average is less than seven;
#--the message "Aprovado com Distincao" (passed with distinction) if the average is equal to ten.

n1 = int(input("Enter the first grade: "))
n2 = int(input("Enter the second grade: "))

media = (n1+n2)/2
#ipc_lista2.05
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Write a program that reads a student's two partial grades. The program must compute the student's average and display:
#--the message "Aprovado" (passed) if the average is greater than or equal to seven;
#--the message "Reprovado" (failed) if the average is less than seven;
#--the message "Aprovado com Distincao" (passed with distinction) if the average is equal to ten.

n1 = int(input("Enter the first grade: "))
n2 = int(input("Enter the second grade: "))

media = (n1+n2)
apache-2.0
Python
f3da1fab9af2279182a09922aae00fcee73a92ee
Fix imports for Django >= 1.6
andialbrecht/django-goog
goog/middleware.py
goog/middleware.py
from django.conf import settings
try:
    from django.conf.urls.defaults import patterns, include
except ImportError:
    # Django >= 1.6
    from django.conf.urls import patterns, include

import goog.urls
from goog import utils


class GoogDevelopmentMiddleware(object):

    def devmode_enabled(self, request):
        """Returns True iff the devmode is enabled."""
        return utils.is_devmode()

    def process_request(self, request):
        # This urlconf patching is inspired by debug_toolbar.
        # https://github.com/robhudson/django-debug-toolbar
        if self.devmode_enabled(request):
            original_urlconf = getattr(request, 'urlconf',
                                       settings.ROOT_URLCONF)
            if original_urlconf != 'goog.urls':
                goog.urls.urlpatterns += patterns(
                    '',
                    ('', include(original_urlconf)),
                )
                request.urlconf = 'goog.urls'
from django.conf import settings
from django.conf.urls.defaults import patterns, include

import goog.urls
from goog import utils


class GoogDevelopmentMiddleware(object):

    def devmode_enabled(self, request):
        """Returns True iff the devmode is enabled."""
        return utils.is_devmode()

    def process_request(self, request):
        # This urlconf patching is inspired by debug_toolbar.
        # https://github.com/robhudson/django-debug-toolbar
        if self.devmode_enabled(request):
            original_urlconf = getattr(request, 'urlconf',
                                       settings.ROOT_URLCONF)
            if original_urlconf != 'goog.urls':
                goog.urls.urlpatterns += patterns(
                    '',
                    ('', include(original_urlconf)),
                )
                request.urlconf = 'goog.urls'
bsd-3-clause
Python
91178909bab31e9db42d86d5783152890f65795d
update cms
hayashizakitakaaki/Introduction_mysite,hayashizakitakaaki/Introduction_mysite,hayashizakitakaaki/Introduction_mysite
cms/urls.py
cms/urls.py
from django.conf.urls import url
from cms import views
from django.contrib.auth import views as auth_views


urlpatterns = [
    # list
    url(r'^dailyreport/$', views.daily_list, name='daily_list'),
    # daily report operations
    url(r'^dailyreport/add/$', views.daily_edit, name='daily_add'),  # create
    url(r'^dailyreport/mod/(?P<daily_id>\d+)/$', views.daily_edit, name='daily_mod'),  # edit
    url(r'^dailyreport/del/(?P<daily_id>\d+)/$', views.daily_del, name='daily_del'),  # delete
    # comment operations
    url(r'^dailyreport/comment/add/(?P<daily_id>\d+)/$', views.comment_edit, name='comment_add'),  # create
    url(r'^dailyreport/comment/mod/(?P<daily_id>\d+)/(?P<comment_id>\d+)/$', views.comment_edit, name='comment_mod'),  # edit
    # detail
    url(r'^dailyreport/detail/(?P<pk>\d+)/$', views.daily_detail.as_view(), name='daily_detail'),
]
from django.conf.urls import url
from cms import views
from django.contrib.auth import views as auth_views


urlpatterns = [
    # list
    url(r'^dailyreport/$', views.daily_list, name='daily_list'),
    # daily report operations
    url(r'^dailyreport/add/$', views.daily_edit, name='daily_add'),  # create
    url(r'^dailyreport/mod/(?P<daily_id>\d+)/$', views.daily_edit, name='daily_mod'),  # edit
    url(r'^dailyreport/del/(?P<daily_id>\d+)/$', views.daily_del, name='daily_del'),  # delete
    # comment operations
    url(r'^dailyreport/comment/add/(?P<daily_id>\d+)/$', views.comment_edit, name='comment_add'),  # create
    url(r'^dailyreport/comment/mod/(?P<daily_id>\d+)/(?P<impression_id>\d+)/$', views.comment_edit, name='comment_mod'),  # edit
    # detail
    url(r'^dailyreport/detail/(?P<daily_id>\d+)/$', views.daily_detail.as_view, name='daily_detail'),
]
mit
Python
1212966326eb096e10b52277b0c6b53126262e3b
Improve messages in example
tysonholub/twilio-python,twilio/twilio-python
examples/basic_usage.py
examples/basic_usage.py
import os

from twilio.twiml import Response
from twilio.rest import Client

ACCOUNT_SID = os.environ.get('TWILIO_ACCOUNT_SID')
AUTH_TOKEN = os.environ.get('TWILIO_AUTH_TOKEN')


def example():
    """
    Some example usage of different twilio resources.
    """
    client = Client(ACCOUNT_SID, AUTH_TOKEN)

    # Get all messages
    all_messages = client.messages.list()
    print('There are {} messages in your account.'.format(len(all_messages)))

    # Get only last 10 messages...
    some_messages = client.messages.list(limit=10)
    print('Here are the last 10 messages in your account:')
    for m in some_messages:
        print(m)

    # Get messages in smaller pages...
    all_messages = client.messages.list(page_size=10)
    print('There are {} messages in your account.'.format(len(all_messages)))

    print('Sending a message...')
    new_message = client.messages.create(to='XXXX', from_='YYYY', body='Twilio rocks!')

    print('Making a call...')
    new_call = client.calls.create(to='XXXX', from_='YYYY', method='GET')

    print('Serving TwiML')
    twiml_response = Response()
    twiml_response.say('Hello!')
    twiml_response.hangup()
    twiml_xml = twiml_response.toxml()
    print('Generated twiml: {}'.format(twiml_xml))


if __name__ == '__main__':
    example()
import os

from twilio.twiml import Response
from twilio.rest import Client

ACCOUNT_SID = os.environ.get('TWILIO_ACCOUNT_SID')
AUTH_TOKEN = os.environ.get('TWILIO_AUTH_TOKEN')


def example():
    """
    Some example usage of different twilio resources.
    """
    client = Client(ACCOUNT_SID, AUTH_TOKEN)

    print('Get all the messages...')
    all_messages = client.messages.list()
    print('There are {} messages in your account.'.format(len(all_messages)))

    print('Get only last 10 messages...')
    some_messages = client.messages.list(limit=10)

    print('Get messages in smaller pages...')
    some_messages = client.messages.list(page_size=10)

    print('Sending a message...')
    new_message = client.messages.create(to='XXXX', from_='YYYY', body='Twilio rocks!')

    print('Making a call...')
    new_call = client.calls.create(to='XXXX', from_='YYYY', method='GET')

    print('Serving TwiML')
    twiml_response = Response()
    twiml_response.say('Hello!')
    twiml_response.hangup()
    twiml_xml = twiml_response.toxml()
    print('Generated twiml: {}'.format(twiml_xml))


if __name__ == '__main__':
    example()
mit
Python
5ab3f3d06216381b697781d80069354745110de1
make yaml put out unicode
adamgot/python-plexlibrary
plexlibrary/utils.py
plexlibrary/utils.py
# -*- coding: utf-8 -*-
import yaml
from yaml import Loader, SafeLoader


class Colors(object):
    RED = "\033[1;31m"
    BLUE = "\033[1;34m"
    CYAN = "\033[1;36m"
    GREEN = "\033[0;32m"
    RESET = "\033[0;0m"
    BOLD = "\033[;1m"
    REVERSE = "\033[;7m"


class YAMLBase(object):
    def __init__(self, filename):
        # Make sure pyyaml always returns unicode
        def construct_yaml_str(self, node):
            return self.construct_scalar(node)
        Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
        SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)

        with open(filename, 'r') as f:
            try:
                self.data = yaml.safe_load(f)
            except yaml.YAMLError as e:
                raise e

    def __getitem__(self, k):
        return self.data[k]

    def __iter__(self):
        return self.data.itervalues()
# -*- coding: utf-8 -*-
import yaml


class Colors(object):
    RED = "\033[1;31m"
    BLUE = "\033[1;34m"
    CYAN = "\033[1;36m"
    GREEN = "\033[0;32m"
    RESET = "\033[0;0m"
    BOLD = "\033[;1m"
    REVERSE = "\033[;7m"


class YAMLBase(object):
    def __init__(self, filename):
        with open(filename, 'r') as f:
            try:
                self.data = yaml.safe_load(f)
            except yaml.YAMLError as e:
                raise e

    def __getitem__(self, k):
        return self.data[k]

    def __iter__(self):
        return self.data.itervalues()
bsd-3-clause
Python
201a9d75e9c4a2c84372fe58a674977f2435130f
update fastapi example.
honeybadger-io/honeybadger-python,honeybadger-io/honeybadger-python
examples/fastapi/app.py
examples/fastapi/app.py
from fastapi import FastAPI, HTTPException, APIRouter
from honeybadger import honeybadger, contrib
import pydantic

honeybadger.configure(api_key='c10787cf')

app = FastAPI(title="Honeybadger - FastAPI.")
app.add_middleware(contrib.ASGIHoneybadger, params_filters=["client"])


@app.get("/raise_some_error", tags=["Notify"])
def raise_some_error(a: str = "foo"):
    """Raises an error."""
    raise Exception(f"SomeError Occurred (a = {a})")


class DivideRequest(pydantic.BaseModel):
    a: int
    b: int = 0


@app.post("/divide", response_model=int, tags=["Notify"])
def divide(req: DivideRequest):
    """Divides `a` by `b`."""
    return req.a / req.b


@app.post("/raise_status_code", tags=["Don't Notify"])
def raise_status_code(status_code: int = 404, detail: str = "Forced 404."):
    """This exception is raised on purpose, so will not be notified."""
    raise HTTPException(status_code=404, detail=detail)


some_router = APIRouter()


@some_router.get("/some_router/endpoint", tags=["Notify"])
def some_router_endpoint():
    """Try raising an error from a router."""
    raise Exception("Exception Raised by some router endpoint.")


app.include_router(some_router)
from fastapi import FastAPI, HTTPException, APIRouter
from honeybadger import honeybadger, contrib
from honeybadger.contrib import asgi
from honeybadger.contrib import fastapi
import pydantic

honeybadger.configure(api_key='c10787cf')

app = FastAPI()
# contrib.FastAPIHoneybadger(app)
app.add_middleware(asgi.ASGIHoneybadger, params_filters=["user-agent", "host", "url", "query_string", "client"])


@app.get("/raise_some_error")
def raise_some_error(a: str):
    """Raises an error."""
    raise Exception(f"SomeError Occurred (a = {a})")


class DivideRequest(pydantic.BaseModel):
    a: int
    b: int = 0


@app.post("/divide")
def divide(req: DivideRequest):
    """Divides `a` by `b`."""
    return req.a / req.b


@app.post("/raise_404")
def raise_404(req: DivideRequest, a: bool = True):
    raise HTTPException(status_code=404, detail="Raising on purpose.")


some_router = APIRouter()


@some_router.get("/some_router_endpoint")
def some_router_endpoint():
    raise Exception("Exception Raised by some router endpoint.")


app.include_router(some_router)
mit
Python
ba084db6c16e5dee9e9ff06a3bee02f4dbfb5c82
Add environment variable to control use of UNIX socket proxying
Metaswitch/powerstrip,Metaswitch/powerstrip
powerstrip.tac
powerstrip.tac
import os

from twisted.application import service, internet
#from twisted.protocols.policies import TrafficLoggingFactory
from urlparse import urlparse

from powerstrip.powerstrip import ServerProtocolFactory

application = service.Application("Powerstrip")

DOCKER_HOST = os.environ.get('DOCKER_HOST')
ENABLE_UNIX_SOCKET = os.environ.get('POWERSTRIP_UNIX_SOCKET', "")
if DOCKER_HOST is None:
    # Default to assuming we've got a Docker socket bind-mounted into a
    # container we're running in.
    if "YES" in ENABLE_UNIX_SOCKET:
        DOCKER_HOST = "unix:///host-var-run/docker.real.sock"
    else:
        DOCKER_HOST = "unix:///host-var-run/docker.sock"
if "://" not in DOCKER_HOST:
    DOCKER_HOST = "tcp://" + DOCKER_HOST
if DOCKER_HOST.startswith("tcp://"):
    parsed = urlparse(DOCKER_HOST)
    dockerAPI = ServerProtocolFactory(dockerAddr=parsed.hostname,
                                      dockerPort=parsed.port)
elif DOCKER_HOST.startswith("unix://"):
    socketPath = DOCKER_HOST[len("unix://"):]
    dockerAPI = ServerProtocolFactory(dockerSocket=socketPath)
#logged = TrafficLoggingFactory(dockerAPI, "api-")

# Refuse to listen on a TCP port, until
# https://github.com/ClusterHQ/powerstrip/issues/56 is resolved.
# TODO: maybe allow specifying a numeric Docker group (gid) as an environment
# variable, and also (optionally) the name of the socket file it creates...
if "YES" in ENABLE_UNIX_SOCKET:
    dockerServer = internet.UNIXServer("/host-var-run/docker.sock", dockerAPI,
                                       mode=0660)
    dockerServer.setServiceParent(application)
import os

from twisted.application import service, internet
#from twisted.protocols.policies import TrafficLoggingFactory
from urlparse import urlparse

from powerstrip.powerstrip import ServerProtocolFactory

application = service.Application("Powerstrip")

DOCKER_HOST = os.environ.get('DOCKER_HOST')
if DOCKER_HOST is None:
    # Default to assuming we've got a Docker socket bind-mounted into a
    # container we're running in.
    DOCKER_HOST = "unix:///host-var-run/docker.real.sock"
if "://" not in DOCKER_HOST:
    DOCKER_HOST = "tcp://" + DOCKER_HOST
if DOCKER_HOST.startswith("tcp://"):
    parsed = urlparse(DOCKER_HOST)
    dockerAPI = ServerProtocolFactory(dockerAddr=parsed.hostname,
                                      dockerPort=parsed.port)
elif DOCKER_HOST.startswith("unix://"):
    socketPath = DOCKER_HOST[len("unix://"):]
    dockerAPI = ServerProtocolFactory(dockerSocket=socketPath)
#logged = TrafficLoggingFactory(dockerAPI, "api-")

# Refuse to listen on a TCP port, until
# https://github.com/ClusterHQ/powerstrip/issues/56 is resolved.
# TODO: maybe allow specifying a numeric Docker group (gid) as an environment
# variable, and also (optionally) the name of the socket file it creates...
dockerServer = internet.UNIXServer("/host-var-run/docker.sock", dockerAPI,
                                   mode=0660)
dockerServer.setServiceParent(application)
apache-2.0
Python
33e693337ab646eaccb724b9c4b3eb3352c6e412
fix pagination
makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity
mapentity/pagination.py
mapentity/pagination.py
from rest_framework_datatables.pagination import DatatablesPageNumberPagination


class MapentityDatatablePagination(DatatablesPageNumberPagination):
    """ Custom datatable pagination for Mapentity list views. """
    pass

    # def get_count_and_total_count(self, queryset, view):
    #     """ Handle count for all filters """
    #     count, total_count = super().get_count_and_total_count(queryset, view)
    #     count = queryset.count()  # replace count by real count - not only drf-datatables count
    #     return count, total_count
from rest_framework_datatables.pagination import DatatablesPageNumberPagination


class MapentityDatatablePagination(DatatablesPageNumberPagination):
    """ Custom datatable pagination for Mapentity list views. """

    def get_count_and_total_count(self, queryset, view):
        """ Handle count for all filters """
        count, total_count = super().get_count_and_total_count(queryset, view)
        count = queryset.count()  # replace count by real count - not only drf-datatables count
        return count, total_count
bsd-3-clause
Python
c7679393ae11766cc9da4474f4db1d0dbe50ac91
Bump to 0.11.0
mwarkentin/django-watchman,JBKahn/django-watchman,mwarkentin/django-watchman,JBKahn/django-watchman
watchman/__init__.py
watchman/__init__.py
__version__ = '0.11.0'
__version__ = '0.10.1'
bsd-3-clause
Python
815fecf36f9c0114a9aa8594b58226ead223b313
fix type bug
tstringer/pypic,tstringer/pypic
app/app.py
app/app.py
"""Do work""" import argparse import logging import os import sys from cameracontroller.cameracontroller import CameraController from storage.cloudstorage import CloudStorage logger = logging.getLogger('pypic') log_dir = os.path.expanduser('~/log') if not os.path.exists(log_dir): os.makedirs(log_dir) logging.basicConfig( filename=os.path.join(log_dir, 'pypiclog'), format='%(asctime)s :: %(levelname)s :: %(message)s', level=logging.DEBUG ) def exception_handler(exception_type, exception, traceback): logger.error(str(exception)) sys.excepthook = exception_handler def main(): """Main script execution""" parser = argparse.ArgumentParser() parser.add_argument( '-c', '--continuous', action='store_true', help='If set, run the video feed continuously' ) parser.add_argument( '-d', '--duration', default=10, type=float, help='Duration (in seconds) to run the video loop' ) args = parser.parse_args() camera_controller = CameraController( os.path.expanduser('~/pypic_output'), CloudStorage( os.environ.get('AZSTORAGE_ACCOUNT_NAME'), os.environ.get('AZSTORAGE_ACCOUNT_KEY') ) ) camera_controller.record_video( continuous=args.continuous, duration=args.duration ) if __name__ == '__main__': main()
"""Do work""" import argparse import logging import os import sys from cameracontroller.cameracontroller import CameraController from storage.cloudstorage import CloudStorage logger = logging.getLogger('pypic') log_dir = os.path.expanduser('~/log') if not os.path.exists(log_dir): os.makedirs(log_dir) logging.basicConfig( filename=os.path.join(log_dir, 'pypiclog'), format='%(asctime)s :: %(levelname)s :: %(message)s', level=logging.DEBUG ) def exception_handler(exception_type, exception, traceback): logger.error(str(exception)) sys.excepthook = exception_handler def main(): """Main script execution""" parser = argparse.ArgumentParser() parser.add_argument( '-c', '--continuous', action='store_true', help='If set, run the video feed continuously' ) parser.add_argument( '-d', '--duration', default=10, help='Duration (in seconds) to run the video loop' ) args = parser.parse_args() camera_controller = CameraController( os.path.expanduser('~/pypic_output'), CloudStorage( os.environ.get('AZSTORAGE_ACCOUNT_NAME'), os.environ.get('AZSTORAGE_ACCOUNT_KEY') ) ) camera_controller.record_video( continuous=args.continuous, duration=args.duration ) if __name__ == '__main__': main()
mit
Python
b0dd7879fbf2000c86a2f77995495d480c890713
Add search by location
predicthq/sdk-py
usecases/events/search_by_location.py
usecases/events/search_by_location.py
from predicthq import Client

# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://developer.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'

phq = Client(access_token=ACCESS_TOKEN)


# The events endpoint supports three types of search by location:
# - by area
# - by fuzzy location search around
# - by geoname place ID (see places endpoint for more details)

# The within parameter allows you to search for events within
# a specified area. It expects a string in the form
# {radius}{unit}@{latitude},{longitude}
# where the radius unit can be one of: m, km, ft, mi.
# https://developer.predicthq.com/resources/events/#param-within
# Please note that the within parameter uses the lat, lon order
# but the location field in the event response uses the lon, lat GeoJSON order.
for event in phq.events.search(within='10km@-36.844480,174.768368'):
    print(event.rank, event.category, event.title, event.location)


# The fuzzy location search around doesn't restrict search results
# to the specified latitude, longitude and offset.
# In most cases, you only need to use the `origin` key,
# e.g. {'origin': '{lat},{lon}'}
# Please note that this affects the relevance of your search results.
# https://developer.predicthq.com/resources/events/#param-loc-around
for event in phq.events.search(location_around={'origin': '-36.844480,174.768368'}):
    print(event.rank, event.category, event.title, event.location, event.relevance)


# Finally, you can specify a geoname place ID or a list of place IDs or
# airport codes (see https://developer.predicthq.com/csv/airport_codes.csv)
# The scope suffix (includes events having children or parent of the place ID)
# or the exact suffix (only events with the specified place ID) can be used.
# https://developer.predicthq.com/resources/events/#param-place
for event in phq.events.search(place={'scope': '5128638'}):  # place ID
    print(event.rank, event.category, event.title, event.place_hierarchies)

for event in phq.events.search(place={'scope': 'SFO'}):  # airport code
    print(event.rank, event.category, event.title, event.place_hierarchies)
from predicthq import Client

# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://developer.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'

phq = Client(access_token=ACCESS_TOKEN)
mit
Python
997cd53d1d045840118876227b9c5588e153195b
fix not equal override. thanks @hodgestar
universalcore/unicore-cms,universalcore/unicore-cms,universalcore/unicore-cms
cms/models.py
cms/models.py
import re
import unicodedata

RE_NUMERICAL_SUFFIX = re.compile(r'^[\w-]*-(\d+)+$')

from gitmodel import fields, models


class FilterMixin(object):

    @classmethod
    def filter(cls, **fields):
        items = list(cls.all())
        for field, value in fields.items():
            if hasattr(cls, field):
                items = [a for a in items if getattr(a, field) == value]
            else:
                raise Exception('invalid field %s' % field)
        return items


class SlugifyMixin(object):

    def slugify(self, value):
        """
        Normalizes string, converts to lowercase, removes
        non-alpha characters, and converts spaces to hyphens.
        """
        value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
        value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
        return re.sub('[-\s]+', '-', value)

    def generate_slug(self):
        if hasattr(self, 'title') and self.title:
            if hasattr(self, 'slug') and not self.slug:
                self.slug = self.slugify(unicode(self.title))[:40]

    def save(self, *args, **kwargs):
        self.generate_slug()
        return super(SlugifyMixin, self).save(*args, **kwargs)


class Category(FilterMixin, SlugifyMixin, models.GitModel):
    slug = fields.SlugField(required=True, id=True)
    title = fields.CharField(required=True)

    def __eq__(self, other):
        return self.slug == other.slug

    def __ne__(self, other):
        return self.slug != other.slug


class Page(FilterMixin, SlugifyMixin, models.GitModel):
    slug = fields.SlugField(required=True, id=True)
    title = fields.CharField(required=True)
    content = fields.CharField(required=False)
    published = fields.BooleanField(default=True)
    primary_category = fields.RelatedField(Category, required=False)
import re
import unicodedata

RE_NUMERICAL_SUFFIX = re.compile(r'^[\w-]*-(\d+)+$')

from gitmodel import fields, models


class FilterMixin(object):

    @classmethod
    def filter(cls, **fields):
        items = list(cls.all())
        for field, value in fields.items():
            if hasattr(cls, field):
                items = [a for a in items if getattr(a, field) == value]
            else:
                raise Exception('invalid field %s' % field)
        return items


class SlugifyMixin(object):

    def slugify(self, value):
        """
        Normalizes string, converts to lowercase, removes
        non-alpha characters, and converts spaces to hyphens.
        """
        value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
        value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
        return re.sub('[-\s]+', '-', value)

    def generate_slug(self):
        if hasattr(self, 'title') and self.title:
            if hasattr(self, 'slug') and not self.slug:
                self.slug = self.slugify(unicode(self.title))[:40]

    def save(self, *args, **kwargs):
        self.generate_slug()
        return super(SlugifyMixin, self).save(*args, **kwargs)


class Category(FilterMixin, SlugifyMixin, models.GitModel):
    slug = fields.SlugField(required=True, id=True)
    title = fields.CharField(required=True)

    def __eq__(self, other):
        return self.slug == other.slug

    def __ne__(self, other):
        return self.slug == other.slug


class Page(FilterMixin, SlugifyMixin, models.GitModel):
    slug = fields.SlugField(required=True, id=True)
    title = fields.CharField(required=True)
    content = fields.CharField(required=False)
    published = fields.BooleanField(default=True)
    primary_category = fields.RelatedField(Category, required=False)
bsd-2-clause
Python
dbfb095f6b90c2517416652d53b6db6b5ee919a4
Bump version
vmihailenco/fabdeploy
fabdeploy/__init__.py
fabdeploy/__init__.py
VERSION = (0, 3, 4, 'final', 0)


def get_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version = '%s.%s' % (version, VERSION[2])
    if VERSION[3:] == ('alpha', 0):
        version = '%s pre-alpha' % version
    else:
        if VERSION[3] != 'final':
            version = '%s %s %s' % (version, VERSION[3], VERSION[4])
    return version
VERSION = (0, 3, 3, 'final', 0)


def get_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version = '%s.%s' % (version, VERSION[2])
    if VERSION[3:] == ('alpha', 0):
        version = '%s pre-alpha' % version
    else:
        if VERSION[3] != 'final':
            version = '%s %s %s' % (version, VERSION[3], VERSION[4])
    return version
bsd-3-clause
Python
dc49ce292d4e0669598abb7f45ba389efde0dabc
Fix testTeleopPanel
manuelli/director,RobotLocomotion/director,openhumanoids/director,manuelli/director,patmarion/director,patmarion/director,mitdrc/director,mitdrc/director,RobotLocomotion/director,mitdrc/director,mitdrc/director,openhumanoids/director,manuelli/director,patmarion/director,openhumanoids/director,mitdrc/director,RobotLocomotion/director,openhumanoids/director,openhumanoids/director,patmarion/director,RobotLocomotion/director,manuelli/director,patmarion/director,RobotLocomotion/director,manuelli/director
src/python/tests/testTeleopPanel.py
src/python/tests/testTeleopPanel.py
from director import robotsystem
from director.consoleapp import ConsoleApp
from director import transformUtils
from director import visualization as vis
from director import objectmodel as om
from director import teleoppanel
from director import playbackpanel
from director import planningutils

from PythonQt import QtCore, QtGui
import numpy as np


def checkGraspFrame(inputGraspFrame, side):
    '''
    Return True if the given grasp frame matches the grasp frame
    of the teleop robot model's current pose, else False.
    '''
    pose = teleopJointController.q
    teleopGraspFrame = ikPlanner.newGraspToWorldFrame(pose, side,
                                                      ikPlanner.newGraspToHandFrame(side))

    p1, q1 = transformUtils.poseFromTransform(inputGraspFrame)
    p2, q2 = transformUtils.poseFromTransform(teleopGraspFrame)

    try:
        np.testing.assert_allclose(p1, p2, rtol=1e-3)
        np.testing.assert_allclose(q1, q2, rtol=1e-3)
        return True
    except AssertionError:
        return False


def onIkStartup(ikServer, startSuccess):

    side = 'left'
    goalFrame = transformUtils.frameFromPositionAndRPY([0.5, 0.5, 1.2], [0, 90, -90])

    assert not checkGraspFrame(goalFrame, side)
    frame = teleopPanel.endEffectorTeleop.newReachTeleop(goalFrame, side)
    assert checkGraspFrame(goalFrame, side)

    teleopPanel.ui.planButton.click()
    assert playbackPanel.plan is not None

    teleopPanel.ikPlanner.useCollision = True;
    teleopPanel.ui.planButton.click()
    assert playbackPanel.plan is not None

    frame.setProperty('Edit', True)

    app.startTestingModeQuitTimer()


app = ConsoleApp()
app.setupGlobals(globals())
view = app.createView()

robotsystem.create(view, globals())

playbackPanel = playbackpanel.PlaybackPanel(planPlayback, playbackRobotModel, playbackJointController,
                                            robotStateModel, robotStateJointController, manipPlanner)

planningUtils = planningutils.PlanningUtils(robotStateModel, robotStateJointController)

teleopPanel = teleoppanel.TeleopPanel(robotStateModel, robotStateJointController, teleopRobotModel,
                                      teleopJointController, ikPlanner, manipPlanner, affordanceManager,
                                      playbackPanel.setPlan, playbackPanel.hidePlan, planningUtils)

manipPlanner.connectPlanReceived(playbackPanel.setPlan)

ikServer.connectStartupCompleted(onIkStartup)
startIkServer()

w = QtGui.QWidget()
l = QtGui.QGridLayout(w)
l.addWidget(view, 0, 0)
l.addWidget(playbackPanel.widget, 1, 0)
l.addWidget(teleopPanel.widget, 0, 1, 2, 1)
l.setMargin(0)
l.setSpacing(0)
w.show()
w.resize(1600, 900)

app.start(enableAutomaticQuit=False)
from director import robotsystem
from director.consoleapp import ConsoleApp
from director import transformUtils
from director import visualization as vis
from director import objectmodel as om
from director import teleoppanel
from director import playbackpanel

from PythonQt import QtCore, QtGui
import numpy as np


def checkGraspFrame(inputGraspFrame, side):
    '''
    Return True if the given grasp frame matches the grasp frame
    of the teleop robot model's current pose, else False.
    '''
    pose = teleopJointController.q
    teleopGraspFrame = ikPlanner.newGraspToWorldFrame(pose, side,
                                                      ikPlanner.newGraspToHandFrame(side))

    p1, q1 = transformUtils.poseFromTransform(inputGraspFrame)
    p2, q2 = transformUtils.poseFromTransform(teleopGraspFrame)

    try:
        np.testing.assert_allclose(p1, p2, rtol=1e-3)
        np.testing.assert_allclose(q1, q2, rtol=1e-3)
        return True
    except AssertionError:
        return False


def onIkStartup(ikServer, startSuccess):

    side = 'left'
    goalFrame = transformUtils.frameFromPositionAndRPY([0.5, 0.5, 1.2], [0, 90, -90])

    assert not checkGraspFrame(goalFrame, side)
    frame = teleopPanel.endEffectorTeleop.newReachTeleop(goalFrame, side)
    assert checkGraspFrame(goalFrame, side)

    teleopPanel.ui.planButton.click()
    assert playbackPanel.plan is not None

    teleopPanel.ikPlanner.useCollision = True;
    teleopPanel.ui.planButton.click()
    assert playbackPanel.plan is not None

    frame.setProperty('Edit', True)

    app.startTestingModeQuitTimer()


app = ConsoleApp()
app.setupGlobals(globals())
view = app.createView()

robotsystem.create(view, globals())

playbackPanel = playbackpanel.PlaybackPanel(planPlayback, playbackRobotModel, playbackJointController,
                                            robotStateModel, robotStateJointController, manipPlanner)

teleopPanel = teleoppanel.TeleopPanel(robotStateModel, robotStateJointController, teleopRobotModel,
                                      teleopJointController, ikPlanner, manipPlanner, affordanceManager,
                                      playbackPanel.setPlan, playbackPanel.hidePlan)

manipPlanner.connectPlanReceived(playbackPanel.setPlan)

ikServer.connectStartupCompleted(onIkStartup)
startIkServer()

w = QtGui.QWidget()
l = QtGui.QGridLayout(w)
l.addWidget(view, 0, 0)
l.addWidget(playbackPanel.widget, 1, 0)
l.addWidget(teleopPanel.widget, 0, 1, 2, 1)
l.setMargin(0)
l.setSpacing(0)
w.show()
w.resize(1600, 900)

app.start(enableAutomaticQuit=False)
bsd-3-clause
Python
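The checkGraspFrame helper in the record above turns numpy's assert_allclose into a boolean predicate by catching AssertionError. A minimal, runnable sketch of the same pattern using plain (position, quaternion) tuples instead of director's transform objects; poses_match and the sample values are illustrative, not part of director:

import numpy as np

def poses_match(pose_a, pose_b, rtol=1e-3):
    """Return True if two (position, quaternion) pairs agree within rtol."""
    (p1, q1), (p2, q2) = pose_a, pose_b
    try:
        np.testing.assert_allclose(p1, p2, rtol=rtol)
        np.testing.assert_allclose(q1, q2, rtol=rtol)
        return True
    except AssertionError:
        return False

pose = (np.array([0.5, 0.5, 1.2]), np.array([0.0, 0.7071, -0.7071, 0.0]))
print(poses_match(pose, pose))                        # True
print(poses_match(pose, (pose[0] + 1.0, pose[1])))    # False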
0d056e041f141391b115aef1f1cc5aa684876535
save signature saliency
rafaelolg/salienpy
view_saliency.py
view_saliency.py
#!/usr/bin/env python import cv2 import numpy import sys import salienpy.frequency_tuned import salienpy.signature def main(img): cv2.imshow('Original Image', img) ftuned = salienpy.frequency_tuned.frequency_tuned_saliency(img) cv2.imshow('Frequency Tuned', ftuned) signa = salienpy.signature.signature_saliency(img) cv2.imshow('Signature Saliency', signa) cv2.imwrite('signature.png', signa) cv2.waitKey() if __name__ == '__main__': if len(sys.argv) > 1: img = cv2.imread(sys.argv[1]) else: cam = cv2.VideoCapture(0) status, img = cam.read() main(img)
#!/usr/bin/env python import cv2 import numpy import sys import salienpy.frequency_tuned import salienpy.signature def main(img): cv2.imshow('Original Image', img) ftuned = salienpy.frequency_tuned.frequency_tuned_saliency(img) cv2.imshow('Frequency Tuned', ftuned) signa = salienpy.signature.signature_saliency(img) cv2.imshow('Signature Saliency', signa) cv2.waitKey() if __name__ == '__main__': if len(sys.argv) > 1: img = cv2.imread(sys.argv[1]) else: cam = cv2.VideoCapture(0) status, img = cam.read() main(img)
mit
Python
28968ca117fc18dfe513c06ce4ead2295830fd94
remove redundant parentheses
morgenst/pyfluka
plugins/BasePlugin.py
plugins/BasePlugin.py
__author__ = 'marcusmorgenstern' __mail__ = '' from abc import ABCMeta, abstractmethod class BasePlugin: """ Metaclass for guarantee of interface. Each plugin must provide initialisation taking optional configuration and invoke method taking data """ __metaclass__ = ABCMeta def __init__(self, config=None): """ initialisation :param config (dict): configuration params for plugin :return: void """ self.dep = [] @abstractmethod def invoke(self, data): """ Entry for plugin execution :param data (dict): input data :return: void """ pass
__author__ = 'marcusmorgenstern' __mail__ = '' from abc import ABCMeta, abstractmethod class BasePlugin(): """ Metaclass for guarantee of interface. Each plugin must provide initialisation taking optional configuration and invoke method taking data """ __metaclass__ = ABCMeta def __init__(self, config=None): """ initialisation :param config (dict): configuration params for plugin :return: void """ self.dep = [] @abstractmethod def invoke(self, data): """ Entry for plugin execution :param data (dict): input data :return: void """ pass
mit
Python
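The BasePlugin record above uses the Python 2 __metaclass__ spelling; the point of ABCMeta plus @abstractmethod is that a subclass cannot be instantiated until it implements invoke. A small Python 3 sketch of the same contract (DemoPlugin and Counter are illustrative names, not from pyfluka):

from abc import ABC, abstractmethod

class DemoPlugin(ABC):
    def __init__(self, config=None):
        self.dep = []

    @abstractmethod
    def invoke(self, data):
        """Entry point every plugin must provide."""

class Counter(DemoPlugin):
    def invoke(self, data):
        return len(data)

print(Counter().invoke([1, 2, 3]))   # 3
# DemoPlugin() raises TypeError: can't instantiate an abstract class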
79e2044380d2d5a9568b76777bc7b1950dcaaeb8
Bump version to 14.1.0
hhursev/recipe-scraper
recipe_scrapers/__version__.py
recipe_scrapers/__version__.py
__version__ = "14.1.0"
__version__ = "14.0.0"
mit
Python
7009e1f0b316da5f17247786810676f70d282f93
Add assertion.__all__
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
extenteten/assertion.py
extenteten/assertion.py
import collections import numpy import tensorflow as tf from .util import func_scope __all__ = [ 'is_int', 'is_natural_num', 'is_natural_num_sequence', 'is_sequence', 'assert_no_nan', ] def is_int(num): return (isinstance(num, int) or isinstance(num, numpy.integer) or (isinstance(num, numpy.ndarray) and num.ndim == 0 and issubclass(num.dtype.type, numpy.integer))) def is_natural_num(num): return is_int(num) and num > 0 def is_natural_num_sequence(num_list, length=None): return (is_sequence(num_list) and all(is_natural_num(num) for num in num_list) and (length == None or len(num_list) == length)) def is_sequence(obj): return isinstance(obj, collections.Sequence) @func_scope() def assert_no_nan(tensor): return tf.assert_equal(tf.reduce_any(tf.is_nan(tensor)), False)
import collections import numpy import tensorflow as tf from .util import func_scope def is_int(num): return (isinstance(num, int) or isinstance(num, numpy.integer) or (isinstance(num, numpy.ndarray) and num.ndim == 0 and issubclass(num.dtype.type, numpy.integer))) def is_natural_num(num): return is_int(num) and num > 0 def is_natural_num_sequence(num_list, length=None): return (is_sequence(num_list) and all(is_natural_num(num) for num in num_list) and (length == None or len(num_list) == length)) def is_sequence(obj): return isinstance(obj, collections.Sequence) @func_scope() def assert_no_nan(tensor): return tf.assert_equal(tf.reduce_any(tf.is_nan(tensor)), False)
unlicense
Python
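Defining __all__, as the commit above does, only changes star imports: from module import * exports exactly the listed names. A self-contained demonstration that builds a throwaway module at runtime (assertion_demo, is_int, and _helper are made up for the demo):

import sys
import types

mod = types.ModuleType("assertion_demo")
exec(
    "__all__ = ['is_int']\n"
    "def is_int(x): return isinstance(x, int)\n"
    "def _helper(): return 'hidden'\n",
    mod.__dict__,
)
sys.modules["assertion_demo"] = mod

from assertion_demo import *          # star import honors __all__

print(is_int(3))                      # True
print('_helper' in globals())         # False: not exported by the star import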
0f34d5d685c844dbf1fca2cf60b27d75726fa14b
Adjust internal imports
oemof/feedinlib
feedinlib/__init__.py
feedinlib/__init__.py
__copyright__ = "Copyright oemof developer group" __license__ = "MIT" __version__ = '0.1.0rc3' from .powerplants import Photovoltaic, WindPowerPlant from .models import ( Pvlib, WindpowerlibTurbine, WindpowerlibTurbineCluster, get_power_plant_data, ) from . import era5
__copyright__ = "Copyright oemof developer group" __license__ = "MIT" __version__ = '0.1.0rc3' from feedinlib.powerplants import Photovoltaic, WindPowerPlant from feedinlib.models import ( Pvlib, WindpowerlibTurbine, WindpowerlibTurbineCluster, get_power_plant_data, )
mit
Python
3876130a94f3a43a6b34dd3be22ef963238bda3b
fix migration
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
mygpo/usersettings/migrations/0002_move_existing.py
mygpo/usersettings/migrations/0002_move_existing.py
import json from django.db import migrations def move_podcastsettings(apps, schema_editor): PodcastConfig = apps.get_model("subscriptions", "PodcastConfig") UserSettings = apps.get_model("usersettings", "UserSettings") ContentType = apps.get_model('contenttypes', 'ContentType') for cfg in PodcastConfig.objects.all(): if not json.loads(cfg.settings): continue setting, created = UserSettings.objects.update_or_create( user=cfg.user, # we can't get the contenttype from cfg.podcast as it would be a # different model content_type=ContentType.objects.get(app_label='podcasts', model='podcast'), object_id=cfg.podcast.pk, defaults={ 'settings': cfg.settings, } ) def move_usersettings(apps, schema_editor): UserProfile = apps.get_model("users", "UserProfile") UserSettings = apps.get_model("usersettings", "UserSettings") for profile in UserProfile.objects.all(): if not json.loads(profile.settings): continue setting, created = UserSettings.objects.update_or_create( user=profile.user, content_type=None, object_id=None, defaults={ 'settings': profile.settings, } ) class Migration(migrations.Migration): dependencies = [ ('usersettings', '0001_initial'), ('subscriptions', '0002_unique_constraint'), ('users', '0011_syncgroup_blank'), ] operations = [ migrations.RunPython(move_podcastsettings), migrations.RunPython(move_usersettings), ]
import json from django.db import migrations from django.contrib.contenttypes.models import ContentType def move_podcastsettings(apps, schema_editor): PodcastConfig = apps.get_model("subscriptions", "PodcastConfig") UserSettings = apps.get_model("usersettings", "UserSettings") for cfg in PodcastConfig.objects.all(): if not json.loads(cfg.settings): continue setting, created = UserSettings.objects.update_or_create( user=cfg.user, # we can't get the contenttype from cfg.podcast as it would be a # different model content_type=ContentType.objects.filter(app_label='podcasts', model='podcast'), object_id=cfg.podcast.pk, defaults={ 'settings': cfg.settings, } ) def move_usersettings(apps, schema_editor): UserProfile = apps.get_model("users", "UserProfile") UserSettings = apps.get_model("usersettings", "UserSettings") for profile in UserProfile.objects.all(): if not json.loads(profile.settings): continue setting, created = UserSettings.objects.update_or_create( user=profile.user, content_type=None, object_id=None, defaults={ 'settings': profile.settings, } ) class Migration(migrations.Migration): dependencies = [ ('usersettings', '0001_initial'), ('subscriptions', '0002_unique_constraint'), ('users', '0011_syncgroup_blank'), ] operations = [ migrations.RunPython(move_podcastsettings), migrations.RunPython(move_usersettings), ]
agpl-3.0
Python
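The bug this commit fixes is twofold: the migration imported ContentType directly instead of resolving it through the historical app registry, and it passed a queryset (.filter) where a single instance (.get) was required. A schematic RunPython migration showing the registry pattern; this is a sketch that assumes a Django project, and the app and model names are illustrative:

from django.db import migrations

def forwards(apps, schema_editor):
    # Always resolve models via apps.get_model inside a migration, so the
    # code sees the historical model state, not the current models.py.
    ContentType = apps.get_model('contenttypes', 'ContentType')
    ct = ContentType.objects.get(app_label='podcasts', model='podcast')
    # ... use ct as the content_type value when creating rows ...

class Migration(migrations.Migration):
    dependencies = [('contenttypes', '0001_initial')]
    operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]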
b410cbc1d58c5dce85b1bdff85fa881de58bf299
fix BadArgument
Naught0/qtbot
cogs/error.py
cogs/error.py
#!/bin/env python from discord.ext.commands import errors import sys import traceback class ErrorHandler: def __init__(self, bot): self.bot = bot async def on_command_error(self, ctx, error): """ Handle command errors more gracefully """ if isinstance(error, errors.CommandNotFound): return if isinstance(error, errors.NotOwner): return await ctx.send('Sorry, only the owner of qtbot may run this command.') if isinstance(error, errors.CommandOnCooldown): return await ctx.send(f'This command is on cooldown. Please retry in `{error.retry_after:.0f}` second(s).') if isinstance(error, errors.MissingRequiredArgument): return await ctx.send(f'Command missing required argument `{error.param}`.') if isinstance(error, errors.MissingPermissions): return await ctx.send(f'Sorry you need permissions: `{",".join(error.missing_perms)}` to do that.') if isinstance(error, errors.BotMissingPermissions): return await ctx.send(f'Sorry I need permissions: `{",".join(error.missing_perms)}` to do that.') if isinstance(error, errors.BadArgument): return await ctx.send(error.__traceback__) print(f'Ignoring exception in command {ctx.command}:', file=sys.stderr) traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr) def setup(bot): bot.add_cog(ErrorHandler(bot))
#!/bin/env python from discord.ext.commands import errors import sys import traceback class ErrorHandler: def __init__(self, bot): self.bot = bot async def on_command_error(self, ctx, error): """ Handle command errors more gracefully """ if isinstance(error, errors.CommandNotFound): return if isinstance(error, errors.NotOwner): return await ctx.send('Sorry, only the owner of qtbot may run this command.') if isinstance(error, errors.CommandOnCooldown): return await ctx.send(f'This command is on cooldown. Please retry in `{error.retry_after:.0f}` second(s).') if isinstance(error, errors.MissingRequiredArgument): return await ctx.send(f'Command missing required argument `{error.param}`.') if isinstance(error, errors.MissingPermissions): return await ctx.send(f'Sorry you need permissions: `{",".join(error.missing_perms)}` to do that.') if isinstance(error, errors.BotMissingPermissions): return await ctx.send(f'Sorry I need permissions: `{",".join(error.missing_perms)}` to do that.') if isinstance(error, errors.BadArgument): return await ctx.send(f'{error.message}') print(f'Ignoring exception in command {ctx.command}:', file=sys.stderr) traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr) def setup(bot): bot.add_cog(ErrorHandler(bot))
mit
Python
1a83696454d5be09b07d1e1e6a23ea76c77012a9
Fix global imports
jvivian/rnaseq-lib,jvivian/rnaseq-lib
src/rnaseq_lib/__init__.py
src/rnaseq_lib/__init__.py
import rnaseq_lib.civic import rnaseq_lib.data import rnaseq_lib.diff_exp import rnaseq_lib.dim_red import rnaseq_lib.docker import rnaseq_lib.drugs import rnaseq_lib.graphs import rnaseq_lib.gtf import rnaseq_lib.images import rnaseq_lib.plot import rnaseq_lib.plot.dr import rnaseq_lib.plot.hview import rnaseq_lib.tissues import rnaseq_lib.tissues.plots import rnaseq_lib.utils import rnaseq_lib.web import rnaseq_lib.web.openfda import rnaseq_lib.web.synapse
import rnaseq_lib.R import rnaseq_lib.civic import rnaseq_lib.data import rnaseq_lib.de import rnaseq_lib.dim_red import rnaseq_lib.docker import rnaseq_lib.drugs import rnaseq_lib.graphs import rnaseq_lib.gtf import rnaseq_lib.images import rnaseq_lib.plotting import rnaseq_lib.tissues import rnaseq_lib.utils import rnaseq_lib.web import rnaseq_lib.web.openfda import rnaseq_lib.web.synapse
mit
Python
918a168b53e9f026393aaa17347fc855f7e4a70a
add background task, remove extra roles code, use .format
imsky/quickstart,imsky/quickstart
files/devops/fabfile.py
files/devops/fabfile.py
# Fabfile from Quickstart # qkst.io/devops/fabfile from fabric.api import ( task, parallel, roles, run, local, sudo, put, env, settings ) from fabric.contrib.project import rsync_project from fabric.context_managers import cd, prefix from fabric.tasks import execute env.hosts = ['root@localhost:22'] @task def bootstrap(): sudo('apt-get update') sudo('apt-get install -y sysstat wget unzip htop dtach') @task def start(): execute('service', 'cron') @task def service(name, action='start'): sudo('service {0} {1} || true'.format(name, action)) @task def background(process, name='bgprocess'): run('dtach -n `mktemp -u /tmp/{0}.XXXXX` {1}'.format(name, process)) @task def install_deb(url): sudo('wget {0} -O /tmp/download.deb'.format(url)) sudo('dpkg -i /tmp/download.deb && rm /tmp/download.deb') @task def status(): run('service --status-all') run('vmstat') run('df -h') run('iostat') @task def upload(local='./', remote='/tmp'): rsync_project( local_dir=local, remote_dir=remote, exclude=['.git', '*.pyc', '.DS_Store'], extra_opts='-lp' # preserve symlinks and permissions ) @task def put_as_user(file, remote, user): with settings(user=user): put(file, remote)
# Fabfile from Quickstart # qkst.io/devops/fabfile from fabric.api import ( task, parallel, roles, run, local, sudo, put, env, settings ) from fabric.contrib.project import rsync_project from fabric.context_managers import cd, prefix from fabric.tasks import execute env.user = 'root' env.roledefs = { 'local': ['localhost:22'] } env.roledefs['all'] = [host for role in env.roledefs.values() for host in role] @task @roles('local') def setup(): sudo('apt-get update') sudo('apt-get install -y python python-pip python-virtualenv') run('pip install fabric') @task @parallel def install_deb(url): sudo('wget %s -O /tmp/download.deb' % url) sudo('dpkg -i /tmp/download.deb && rm /tmp/download.deb') @task def upload(local='./', remote='/tmp'): rsync_project( local_dir=local, remote_dir=remote, exclude=['.git'], extra_opts='-lp' # preserve symlinks and permissions ) @task def put_as_user(file, remote, user): with settings(user=user): put(file, remote) @task def context_demo(): with cd('/tmp'): run('touch testfile') with prefix('cd /tmp') run('rm testfile')
mit
Python
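The background task above leans on dtach so a process keeps running after the SSH session ends. The standard library offers a local analogue without Fabric or dtach: Popen with start_new_session detaches the child from our process group (POSIX only; the sleep command is just a stand-in):

import subprocess

def background(cmd):
    """Start cmd detached; returns immediately with the Popen handle."""
    return subprocess.Popen(
        cmd,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        start_new_session=True,   # like nohup/dtach: survives our exit
    )

proc = background(['sleep', '60'])
print('started pid', proc.pid)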
a4f69decb2b22822660033265a6517510c8a2eb5
clean up: convert some strings to f-strings, use fewer imports
mralext20/alex-bot
cogs/utils.py
cogs/utils.py
# -*- coding: utf-8 -*- from discord.ext import commands from datetime import datetime from cogs.cog import Cog import discord class Utils(Cog): """The description for Utils goes here.""" @commands.command(name='reload', hidden=True) @commands.is_owner() async def cog_reload(self, ctx, *, cog: str): """Command which Reloads a Module. Remember to use dot path. e.g: cogs.owner""" try: self.bot.unload_extension(cog) self.bot.load_extension(cog) except Exception as e: await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}') else: await ctx.send('**`SUCCESS`**') @commands.command() async def ping(self, ctx): await ctx.send(f"Pong! time is {ctx.bot.latency * 1000:.2f} ms") @commands.command() async def time(self,ctx): time = datetime.now().strftime("%a, %e %b %Y %H:%M:%S (%-I:%M %p)") await ctx.send(f'the time in alaska is {time}') @commands.command() @commands.is_owner() async def upload(self, ctx, file): with open(file, 'rb') as f: try: await ctx.send(file = discord.File(f, file)) except FileNotFoundError: await ctx.send(f"no such file: {file}") def setup(bot): bot.add_cog(Utils(bot))
# -*- coding: utf-8 -*- from discord.ext import commands from datetime import datetime from cogs.cog import Cog import discord class Utils(Cog): """The description for Utils goes here.""" @commands.command(name='reload', hidden=True) @commands.is_owner() async def cog_reload(self, ctx, *, cog: str): """Command which Reloads a Module. Remember to use dot path. e.g: cogs.owner""" try: self.bot.unload_extension(cog) self.bot.load_extension(cog) except Exception as e: await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}') else: await ctx.send('**`SUCCESS`**') @commands.command() async def ping(self, ctx): await ctx.send(f"Pong! time is {ctx.bot.latency * 1000:.2f)} ms") @commands.command() async def time(self,ctx): time = datetime.now().strftime("%a, %e %b %Y %H:%M:%S (%-I:%M %p)") await ctx.send(f'the time in alaska is {time}') @commands.command() @commands.is_owner() async def upload(self, ctx, file): with open(file, 'rb') as f: try: await ctx.send(file = discord.File(f, file)) except FileNotFoundError: await ctx.send(f"no such file: {file}") def setup(bot): bot.add_cog(Utils(bot))
mit
Python
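The one-character bug this commit removes is a ')' inside the f-string format spec: everything between ':' and the closing brace is parsed as the spec, so ':.2f)' is rejected when the float is formatted. A quick standalone demonstration:

latency = 0.123456

print(f"Pong! time is {latency * 1000:.2f} ms")   # Pong! time is 123.46 ms

try:
    print(f"Pong! time is {latency * 1000:.2f)} ms")
except ValueError as exc:
    # the stray ')' makes the format spec invalid at runtime
    print("bad spec:", exc)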
ba81222c33b4b80c5148c21bb30c60412c85847b
Fix search query
syseleven/puppet-base,syseleven/puppet-base,syseleven/puppet-base,syseleven/puppet-base
files/kernel-cleanup.py
files/kernel-cleanup.py
#!/usr/bin/env python2.7 """ kernel-cleanup.py Find all installed kernel-related packages and mark them as automatically installed. Then, purge those of these packages that APT now considers auto-removable. Ubuntu APT has logic that prevents us from removing all kernels this way. As an additional safeguard, we always avoid purging the currently running kernel from this script. """ import apt import os os.environ["PATH"] = "/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin" class SourcePackageFilter(apt.cache.Filter): def __init__(self, source_packages): self.spkgs = source_packages def apply(self, pkg): if pkg.is_installed: if pkg.installed.source_name in self.spkgs: return True return False class SignedKernelFilter(apt.cache.Filter): def apply(self, pkg): return bool(pkg.is_installed and pkg.section in ("kernel", "utils") and pkg.name.startswith("linux-signed")) class KernelCleaner(object): def __init__(self): self.c = apt.cache.Cache() def get_kernels(self): return self.c.get_providing_packages("linux-image") def get_tracks(self): return set([(pkg.installed or pkg.candidate).source_name for pkg in self.get_kernels()]) def get_kernel_packages(self): packages = apt.cache.FilteredCache(self.c) packages.set_filter(SourcePackageFilter(self.get_tracks())) return packages def get_signed_kernel_packages(self): packages = apt.cache.FilteredCache(self.c) packages.set_filter(SignedKernelFilter()) return packages def mark_kernels_auto(self): for pkg in self.get_kernel_packages(): pkg.mark_auto() self.c.commit() def purge_signed_kernels(self): for pkg in self.get_signed_kernel_packages(): pkg.mark_delete(auto_fix=False, purge=True) self.c.commit() def purge_old_kernels(self): release = os.uname()[2] for pkg in self.get_kernel_packages(): if release not in pkg.name: if pkg.is_auto_removable: pkg.mark_delete(auto_fix=False, purge=True) self.c.commit() def main(): kc = KernelCleaner() kc.purge_signed_kernels() kc.mark_kernels_auto() kc.purge_old_kernels() if __name__ == "__main__": main()
#!/usr/bin/env python2.7 """ kernel-cleanup.py Find all installed kernel-related packages and mark them as automatically installed. Then, purge those of these packages that APT now considers auto-removable. Ubuntu APT has logic that prevents us from removing all kernels this way. As an additional safeguard, we always avoid purging the currently running kernel from this script. """ import apt import os os.environ["PATH"] = "/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin" class SourcePackageFilter(apt.cache.Filter): def __init__(self, source_packages): self.spkgs = source_packages def apply(self, pkg): if pkg.is_installed: if pkg.installed.source_name in self.spkgs: return True return False class SignedKernelFilter(apt.cache.Filter): def apply(self, pkg): return bool(pkg.is_installed and pkg.section == "kernel" and pkg.name.startswith("linux-signed")) class KernelCleaner(object): def __init__(self): self.c = apt.cache.Cache() def get_kernels(self): return self.c.get_providing_packages("linux-image") def get_tracks(self): return set([(pkg.installed or pkg.candidate).source_name for pkg in self.get_kernels()]) def get_kernel_packages(self): packages = apt.cache.FilteredCache(self.c) packages.set_filter(SourcePackageFilter(self.get_tracks())) return packages def get_signed_kernel_packages(self): packages = apt.cache.FilteredCache(self.c) packages.set_filter(SignedKernelFilter()) return packages def mark_kernels_auto(self): for pkg in self.get_kernel_packages(): pkg.mark_auto() self.c.commit() def purge_signed_kernels(self): for pkg in self.get_signed_kernel_packages(): pkg.mark_delete(auto_fix=False, purge=True) self.c.commit() def purge_old_kernels(self): release = os.uname()[2] for pkg in self.get_kernel_packages(): if release not in pkg.name: if pkg.is_auto_removable: pkg.mark_delete(auto_fix=False, purge=True) self.c.commit() def main(): kc = KernelCleaner() kc.purge_signed_kernels() kc.mark_kernels_auto() kc.purge_old_kernels() if __name__ == "__main__": main()
apache-2.0
Python
1c17b4b10374129d9e26f7023a93ea587dfe7fc7
update version number to 1.0.10-pre as prep for staging/release
mprefer/findingaids,emory-libraries/findingaids,mprefer/findingaids,emory-libraries/findingaids
findingaids/__init__.py
findingaids/__init__.py
__version_info__ = (1, 0, 10, 'pre') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join(str(i) for i in __version_info__[:-1]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) #THIS IS DUPLICATE CODE FROM DWRANGLER AND SHOULD EVENTUALLY BE MOVED INTO EULCORE # Extends the normal render_to_response to include RequestContext objects. # Taken from http://www.djangosnippets.org/snippets/3/ # Other similar implementations and adaptations http://lincolnloop.com/blog/2008/may/10/getting-requestcontext-your-templates/ # I also added the SCRIPT_NAME to dictionary so it would be available to templates # Since I always uset his for this application it makes sense for this app but # I'm unsure this is the best way overall. # TODO: update to use new render shortcut provided in newer versions of django def render_with_context(req, *args, **kwargs): kwargs['context_instance'] = RequestContext(req, {'script_name': req.META['SCRIPT_NAME']}) # Line below was an attempt to add script name to the context so I could # deal with template paths for the SITE_URL in a way that handled # apps being installed in a site subURL. # args[1]['script_name'] = req.META['SCRIPT_NAME'] return render_to_response(*args, **kwargs)
__version_info__ = (1, 0, 9, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join(str(i) for i in __version_info__[:-1]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) #THIS IS DUPLICATE CODE FROM DWRANGLER AND SHOULD EVENTUALLY BE MOVED INTO EULCORE # Extends the normal render_to_response to include RequestContext objects. # Taken from http://www.djangosnippets.org/snippets/3/ # Other similar implementations and adaptations http://lincolnloop.com/blog/2008/may/10/getting-requestcontext-your-templates/ # I also added the SCRIPT_NAME to dictionary so it would be available to templates # Since I always uset his for this application it makes sense for this app but # I'm unsure this is the best way overall. def render_with_context(req, *args, **kwargs): kwargs['context_instance'] = RequestContext(req, {'script_name': req.META['SCRIPT_NAME']}) # Line below was an attempt to add script name to the context so I could # deal with template paths for the SITE_URL in a way that handled # apps being installed in a site subURL. # args[1]['script_name'] = req.META['SCRIPT_NAME'] return render_to_response(*args, **kwargs)
apache-2.0
Python
61e67ed5740148f74e67aef09afc65ef1c3fd6a8
Handle commands in a very trivial way
bboe/hackday_bot,bboe/hackday_bot
hackday_bot/bot.py
hackday_bot/bot.py
"""hackday_bot.bot module.""" import logging import re import time from prawcore.exceptions import PrawcoreException AVAILABLE_COMMANDS = ('help', 'interested', 'join', 'leave', 'uninterested') COMMAND_RE = re.compile(r'(?:\A|\s)!({})(?=\s|\Z)' .format('|'.join(AVAILABLE_COMMANDS))) logger = logging.getLogger(__package__) class Bot(object): """Bot manages comments made to the specified subreddit.""" def __init__(self, subreddit): """Initialize an instance of Bot. :param subreddit: The subreddit to monitor for new comments. """ self.subreddit = subreddit def _command_help(self, comment): comment.reply('help text will go here') def _command_interested(self, comment): comment.reply('soon I will record your interest') def _command_join(self, comment): comment.reply('soon I will record your sign up') def _command_leave(self, comment): comment.reply('soon I will record your abdication') def _command_uninterested(self, comment): comment.reply('soon I will record your uninterest') def _handle_comment(self, comment): commands = set(COMMAND_RE.findall(comment.body)) if len(commands) > 1: comment.reply('Please provide only a single command.') elif len(commands) == 1: command = commands.pop() getattr(self, '_command_{}'.format(command))(comment) logger.debug('Handled {} by {}'.format(command, comment.author)) def run(self): """Run the bot indefinitely.""" running = True subreddit_url = '{}{}'.format(self.subreddit._reddit.config.reddit_url, self.subreddit.url) logger.info('Watching for comments on: {}'.format(subreddit_url)) while running: try: for comment in self.subreddit.stream.comments(): self._handle_comment(comment) except KeyboardInterrupt: logger.info('Termination received. Goodbye!') running = False except PrawcoreException: logger.exception('run loop') time.sleep(10) return 0
"""hackday_bot.bot module.""" import logging import time from prawcore.exceptions import PrawcoreException logger = logging.getLogger(__package__) class Bot(object): """Bot manages comments made to the specified subreddit.""" def __init__(self, subreddit): """Initialize an instance of Bot. :param subreddit: The subreddit to monitor for new comments. """ self.subreddit = subreddit def _handle_comment(self, comment): logger.info(comment) def run(self): """Run the bot indefinitely.""" running = True subreddit_url = '{}{}'.format(self.subreddit._reddit.config.reddit_url, self.subreddit.url) logger.info('Watching for comments on: {}'.format(subreddit_url)) while running: try: for comment in self.subreddit.stream.comments(): self._handle_comment(comment) except KeyboardInterrupt: logger.info('Termination received. Goodbye!') running = False except PrawcoreException: logger.exception('run loop') time.sleep(10) return 0
bsd-2-clause
Python
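The command dispatch above hinges on COMMAND_RE: a '!' command must be preceded by start-of-string or whitespace and followed by whitespace or end, so commands embedded in other tokens are ignored. The same regex, exercised standalone:

import re

AVAILABLE_COMMANDS = ('help', 'interested', 'join', 'leave', 'uninterested')
COMMAND_RE = re.compile(r'(?:\A|\s)!({})(?=\s|\Z)'
                        .format('|'.join(AVAILABLE_COMMANDS)))

print(COMMAND_RE.findall('!join please, or maybe !help'))   # ['join', 'help']
print(COMMAND_RE.findall('shout!join mid-word'))            # []
print(set(COMMAND_RE.findall('!join !join')))               # {'join'}, deduped as in the bot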
61cf4e2feb3d8920179e28719822c7fb34ea6550
Add defaults to the ibm RNG
JelteF/statistics
3/ibm_rng.py
3/ibm_rng.py
def ibm_rng(x1, a=65539, c=0, m=2**31): x = x1 while True: x = (a * x + c) % m yield x / (m-1) def main(): rng = ibm_rng(1, 65539, 0, 2**31) while True: x = next(rng) print(x) if __name__ == '__main__': main()
def ibm_rng(x1, a, c, m): x = x1 while True: x = (a * x + c) % m yield x / (m-1) def main(): rng = ibm_rng(1, 65539, 0, 2**31) while True: x = next(rng) print(x) if __name__ == '__main__': main()
mit
Python
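With a=65539, c=0, m=2**31, the defaults added above reproduce RANDU, the classic and famously flawed IBM linear congruential generator. Because ibm_rng is an infinite generator, itertools.islice is the natural way to sample it instead of the manual next() loop:

from itertools import islice

def ibm_rng(x1, a=65539, c=0, m=2**31):
    x = x1
    while True:
        x = (a * x + c) % m
        yield x / (m - 1)

print(list(islice(ibm_rng(1), 3)))
# first value is 65539 / (2**31 - 1), roughly 3.05e-05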
776c2992b64911f86740cdf0af4f05c7587430c7
Bump version
beerfactory/hbmqtt
hbmqtt/__init__.py
hbmqtt/__init__.py
# Copyright (c) 2015 Nicolas JOUANIN # # See the file license.txt for copying permission. VERSION = (0, 9, 5, 'alpha', 0)
# Copyright (c) 2015 Nicolas JOUANIN # # See the file license.txt for copying permission. VERSION = (0, 9, 4, 'final', 0)
mit
Python
0202eeed429149cbfafd53d9ba6281a0926ea9df
Add labels to account forms and add a NewUserWithPasswordForm that adds password inputs to the new user form.
fin/froide,okfse/froide,CodeforHawaii/froide,stefanw/froide,stefanw/froide,stefanw/froide,CodeforHawaii/froide,ryankanno/froide,okfse/froide,LilithWittmann/froide,CodeforHawaii/froide,catcosmo/froide,ryankanno/froide,fin/froide,catcosmo/froide,LilithWittmann/froide,okfse/froide,catcosmo/froide,LilithWittmann/froide,fin/froide,fin/froide,ryankanno/froide,catcosmo/froide,okfse/froide,ryankanno/froide,catcosmo/froide,stefanw/froide,CodeforHawaii/froide,LilithWittmann/froide,CodeforHawaii/froide,stefanw/froide,ryankanno/froide,LilithWittmann/froide,okfse/froide
froide/account/forms.py
froide/account/forms.py
from django import forms from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.core.urlresolvers import reverse from django.contrib.auth.models import User from helper.widgets import EmailInput class NewUserForm(forms.Form): first_name = forms.CharField(max_length=30, label=_('First name'), widget=forms.TextInput(attrs={'placeholder': _('First Name'), 'class': 'inline'})) last_name = forms.CharField(max_length=30, label=_('Last name'), widget=forms.TextInput(attrs={'placeholder': _('Last Name'), 'class': 'inline'})) user_email = forms.EmailField(label=_('Email address'), widget=EmailInput(attrs={'placeholder': _('mail@ddress.net')})) def clean_first_name(self): return self.cleaned_data['first_name'].strip() def clean_last_name(self): return self.cleaned_data['last_name'].strip() def clean_user_email(self): email = self.cleaned_data['user_email'] try: User.objects.get(email=email) except User.DoesNotExist: pass else: raise forms.ValidationError(mark_safe( _('This email address already has an account. <a href="%s?simple" class="target-small">Please login using that email address.</a>') % reverse("account-login"))) return email class NewUserWithPasswordForm(NewUserForm): password = forms.CharField(widget=forms.PasswordInput, label=_('Password')) password2 = forms.CharField(widget=forms.PasswordInput, label=_('Password (repeat)')) def clean(self): cleaned = super(NewUserWithPasswordForm, self).clean() if cleaned['password'] != cleaned['password2']: raise forms.ValidationError(_("Passwords do not match!")) return cleaned class UserLoginForm(forms.Form): email = forms.EmailField(widget=EmailInput( attrs={'placeholder': _('mail@ddress.net')}), label=_('Email address')) password = forms.CharField(widget=forms.PasswordInput, label=_('Password'))
from django import forms from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.core.urlresolvers import reverse from django.contrib.auth.models import User from helper.widgets import EmailInput class NewUserForm(forms.Form): first_name = forms.CharField(max_length=30, widget=forms.TextInput(attrs={'placeholder': _('First Name'), 'class': 'inline'})) last_name = forms.CharField(max_length=30, widget=forms.TextInput( attrs={'placeholder': _('Last Name'), 'class': 'inline'})) user_email = forms.EmailField(widget=EmailInput( attrs={'placeholder': _('mail@ddress.net')})) def clean_first_name(self): return self.cleaned_data['first_name'].strip() def clean_last_name(self): return self.cleaned_data['last_name'].strip() def clean_user_email(self): email = self.cleaned_data['user_email'] try: User.objects.get(email=email) except User.DoesNotExist: pass else: raise forms.ValidationError(mark_safe( _('This email address already has an account. <a href="%s?simple" class="target-small">Please login using that email address.</a>') % reverse("account-login"))) return email class UserLoginForm(forms.Form): email = forms.EmailField(widget=EmailInput( attrs={'placeholder': _('mail@ddress.net')})) password = forms.CharField(widget=forms.PasswordInput)
mit
Python
2a5fbcd2e3da01150c2690c145100270d3f0ec81
fix clipnorm
icoxfog417/tying-wv-and-wc
model/lang_model_sgd.py
model/lang_model_sgd.py
import copy import numpy as np import tensorflow as tf from keras import backend as K from keras.optimizers import Optimizer from keras.callbacks import LearningRateScheduler from model.setting import Setting class LangModelSGD(Optimizer): def __init__(self, setting, verbose=True): super(LangModelSGD, self).__init__(clipnorm=setting.norm_clipping) self.iterations = K.variable(0., name="iterations") self.lr = K.variable(1.0, name="lr") self.epoch_interval = K.variable(setting.epoch_interval) self.decay = K.variable(setting.decay) self.verbose = verbose def get_updates(self, params, constraints, loss): grads = self.get_gradients(loss, params) self.updates = [] self.updates.append(K.update_add(self.iterations, 1)) for p, g in zip(params, grads): self.updates.append((p, p - self.lr * g)) return self.updates def get_config(self): config = {"iterations": float(K.get_value(self.iterations)), "lr": float(K.get_value(self.lr)) } base_config = super(LangModelSGD, self).get_config() return dict(list(base_config.items()) + list(config.items())) def get_lr_scheduler(self): def scheduler(epoch): epoch_interval = K.get_value(self.epoch_interval) if epoch != 0 and (epoch + 1) % epoch_interval == 0: lr = K.get_value(self.lr) decay = K.get_value(self.decay) K.set_value(self.lr, lr * decay) if self.verbose: print(self.get_config()) return K.get_value(self.lr) return LearningRateScheduler(scheduler)
import copy import numpy as np import tensorflow as tf from keras import backend as K from keras.optimizers import Optimizer from keras.callbacks import LearningRateScheduler from model.setting import Setting class LangModelSGD(Optimizer): def __init__(self, setting, verbose=True): super(LangModelSGD, self).__init__() self.iterations = K.variable(0., name="iterations") self.lr = K.variable(1.0, name="lr") self.epoch_interval = K.variable(setting.epoch_interval) self.decay = K.variable(setting.decay) self._clipnorm = setting.norm_clipping self.verbose = verbose def get_updates(self, params, constraints, loss): grads = self.get_gradients(loss, params) norm = K.sqrt(sum([K.sum(K.square(g)) for g in grads])) grads = [clip_norm(g, self._clipnorm, norm) for g in grads] self.updates = [] self.updates.append(K.update_add(self.iterations, 1)) for p, g in zip(params, grads): self.updates.append((p, p - self.lr * g)) return self.updates def get_config(self): config = {"iterations": float(K.get_value(self.iterations)), "lr": float(K.get_value(self.lr)) } base_config = super(LangModelSGD, self).get_config() return dict(list(base_config.items()) + list(config.items())) def get_lr_scheduler(self): def scheduler(epoch): epoch_interval = K.get_value(self.epoch_interval) if epoch != 0 and (epoch + 1) % epoch_interval == 0: lr = K.get_value(self.lr) decay = K.get_value(self.decay) K.set_value(self.lr, lr * decay) if self.verbose: print(self.get_config()) return K.get_value(self.lr) return LearningRateScheduler(scheduler)
mit
Python
db3cee63baf64d00b2d2ac4fcf726f287b6d7af2
Update call to proxy fix to use new method signature
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
app/proxy_fix.py
app/proxy_fix.py
from werkzeug.middleware.proxy_fix import ProxyFix class CustomProxyFix(object): def __init__(self, app, forwarded_proto): self.app = ProxyFix(app, x_for=1, x_proto=1, x_host=1, x_port=0, x_prefix=0) self.forwarded_proto = forwarded_proto def __call__(self, environ, start_response): environ.update({ "HTTP_X_FORWARDED_PROTO": self.forwarded_proto }) return self.app(environ, start_response) def init_app(app): app.wsgi_app = CustomProxyFix(app.wsgi_app, app.config.get('HTTP_PROTOCOL', 'http'))
from werkzeug.middleware.proxy_fix import ProxyFix class CustomProxyFix(object): def __init__(self, app, forwarded_proto): self.app = ProxyFix(app) self.forwarded_proto = forwarded_proto def __call__(self, environ, start_response): environ.update({ "HTTP_X_FORWARDED_PROTO": self.forwarded_proto }) return self.app(environ, start_response) def init_app(app): app.wsgi_app = CustomProxyFix(app.wsgi_app, app.config.get('HTTP_PROTOCOL', 'http'))
mit
Python
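CustomProxyFix above is ordinary WSGI middleware: a callable that mutates environ and delegates to the wrapped app. The wrapping idea can be exercised without werkzeug or Flask at all; ForceProto and the toy app below are illustrative stand-ins:

def app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [environ['HTTP_X_FORWARDED_PROTO'].encode()]

class ForceProto(object):
    def __init__(self, app, proto):
        self.app = app
        self.proto = proto

    def __call__(self, environ, start_response):
        environ['HTTP_X_FORWARDED_PROTO'] = self.proto   # override upstream header
        return self.app(environ, start_response)

wrapped = ForceProto(app, 'https')
body = wrapped({'REQUEST_METHOD': 'GET'}, lambda status, headers: None)
print(b''.join(body))   # b'https'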
d56cfbf87c01ac496200341a723ddcee88798a01
Add setup of default translator object so doctests can run when using _. Fixes #509.
Pylons/pylons,Pylons/pylons,Pylons/pylons,moreati/pylons,moreati/pylons,moreati/pylons
pylons/test.py
pylons/test.py
"""Test related functionality Adds a Pylons plugin to `nose <http://www.somethingaboutorange.com/mrl/projects/nose/>`_ that loads the Pylons app *before* scanning for doc tests. This can be configured in the projects :file:`setup.cfg` under a ``[nosetests]`` block: .. code-block:: ini [nosetests] with-pylons=development.ini Alternate ini files may be specified if the app should be loaded using a different configuration. """ import os import sys import nose.plugins import pkg_resources from paste.deploy import loadapp import pylons from pylons.i18n.translation import _get_translator pylonsapp = None class PylonsPlugin(nose.plugins.Plugin): """Nose plugin extension For use with nose to allow a project to be configured before nose proceeds to scan the project for doc tests and unit tests. This prevents modules from being loaded without a configured Pylons environment. """ enabled = False enableOpt = 'pylons_config' name = 'pylons' def add_options(self, parser, env=os.environ): """Add command-line options for this plugin""" env_opt = 'NOSE_WITH_%s' % self.name.upper() env_opt.replace('-', '_') parser.add_option("--with-%s" % self.name, dest=self.enableOpt, type="string", default="", help="Setup Pylons environment with the config file" " specified by ATTR [NOSE_ATTR]") def configure(self, options, conf): """Configure the plugin""" self.config_file = None self.conf = conf if hasattr(options, self.enableOpt): self.enabled = bool(getattr(options, self.enableOpt)) self.config_file = getattr(options, self.enableOpt) def begin(self): """Called before any tests are collected or run Loads the application, and in turn its configuration. """ global pylonsapp path = os.getcwd() sys.path.insert(0, path) pkg_resources.working_set.add_entry(path) self.app = pylonsapp = loadapp('config:' + self.config_file, relative_to=path) # For tests that utilize the i18n _ object, initialize a NullTranslator pylons.translator._push_object(_get_translator(pylons.config.get('lang')))
"""Test related functionality Adds a Pylons plugin to `nose <http://www.somethingaboutorange.com/mrl/projects/nose/>`_ that loads the Pylons app *before* scanning for doc tests. This can be configured in the projects :file:`setup.cfg` under a ``[nosetests]`` block: .. code-block:: ini [nosetests] with-pylons=development.ini Alternate ini files may be specified if the app should be loaded using a different configuration. """ import os import sys import nose.plugins import pkg_resources from paste.deploy import loadapp import pylons from pylons.i18n.translation import _get_translator pylonsapp = None class PylonsPlugin(nose.plugins.Plugin): """Nose plugin extension For use with nose to allow a project to be configured before nose proceeds to scan the project for doc tests and unit tests. This prevents modules from being loaded without a configured Pylons environment. """ enabled = False enableOpt = 'pylons_config' name = 'pylons' def add_options(self, parser, env=os.environ): """Add command-line options for this plugin""" env_opt = 'NOSE_WITH_%s' % self.name.upper() env_opt.replace('-', '_') parser.add_option("--with-%s" % self.name, dest=self.enableOpt, type="string", default="", help="Setup Pylons environment with the config file" " specified by ATTR [NOSE_ATTR]") def configure(self, options, conf): """Configure the plugin""" self.config_file = None self.conf = conf if hasattr(options, self.enableOpt): self.enabled = bool(getattr(options, self.enableOpt)) self.config_file = getattr(options, self.enableOpt) def begin(self): """Called before any tests are collected or run Loads the application, and in turn its configuration. """ global pylonsapp path = os.getcwd() sys.path.insert(0, path) pkg_resources.working_set.add_entry(path) self.app = pylonsapp = loadapp('config:' + self.config_file, relative_to=path)
bsd-3-clause
Python
747d2563fd566a70420a04d3db209fffc813f147
fix docs/hash-tree.py for python 3
dougalsutherland/skl-groups,dougalsutherland/skl-groups
docs/hash-tree.py
docs/hash-tree.py
#!/usr/bin/env python # Write a directory to the Git index. # Prints the directory's SHA-1 to stdout. # # Copyright 2013 Lars Buitinck / University of Amsterdam. # License: MIT (http://opensource.org/licenses/MIT) # Based on: # https://github.com/larsmans/seqlearn/blob/d7a3d82c/doc/hash-tree.py import os from os.path import split from posixpath import join from subprocess import check_output, Popen, PIPE import sys def hash_file(path): """Write file at path to Git index, return its SHA1 as a string.""" return check_output(["git", "hash-object", "-w", "--", path]).decode().strip() def _lstree(files, dirs): """Make git ls-tree like output.""" for f, sha1 in files: yield "100644 blob {}\t{}\0".format(sha1, f) for d, sha1 in dirs: yield "040000 tree {}\t{}\0".format(sha1, d) def _mktree(files, dirs): mkt = Popen(["git", "mktree", "-z"], stdin=PIPE, stdout=PIPE) inp = "".join(_lstree(files, dirs)).encode('ascii') return mkt.communicate(inp)[0].strip().decode() def hash_dir(path): """Write directory at path to Git index, return its SHA1 as a string.""" dir_hash = {} for root, dirs, files in os.walk(path, topdown=False): f_hash = ((f, hash_file(join(root, f))) for f in files) d_hash = ((d, dir_hash[join(root, d)]) for d in dirs) # split+join normalizes paths on Windows (note the imports) dir_hash[join(*split(root))] = _mktree(f_hash, d_hash) return dir_hash[path] if __name__ == "__main__": print(hash_dir(sys.argv[1]))
#!/usr/bin/env python # Write a directory to the Git index. # Prints the directory's SHA-1 to stdout. # # Copyright 2013 Lars Buitinck / University of Amsterdam. # License: MIT (http://opensource.org/licenses/MIT) # https://github.com/larsmans/seqlearn/blob/d7a3d82c/doc/hash-tree.py import os from os.path import split from posixpath import join from subprocess import check_output, Popen, PIPE import sys def hash_file(path): """Write file at path to Git index, return its SHA1 as a string.""" return check_output(["git", "hash-object", "-w", "--", path]).strip() def _lstree(files, dirs): """Make git ls-tree like output.""" for f, sha1 in files: yield "100644 blob {}\t{}\0".format(sha1, f) for d, sha1 in dirs: yield "040000 tree {}\t{}\0".format(sha1, d) def _mktree(files, dirs): mkt = Popen(["git", "mktree", "-z"], stdin=PIPE, stdout=PIPE) return mkt.communicate("".join(_lstree(files, dirs)))[0].strip() def hash_dir(path): """Write directory at path to Git index, return its SHA1 as a string.""" dir_hash = {} for root, dirs, files in os.walk(path, topdown=False): f_hash = ((f, hash_file(join(root, f))) for f in files) d_hash = ((d, dir_hash[join(root, d)]) for d in dirs) # split+join normalizes paths on Windows (note the imports) dir_hash[join(*split(root))] = _mktree(f_hash, d_hash) return dir_hash[path] if __name__ == "__main__": print(hash_dir(sys.argv[1]))
bsd-3-clause
Python
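Every change in that commit is the same Python 3 issue: subprocess pipes carry bytes, not str, so output must be decoded and input encoded. A minimal reproduction, using sys.executable so it runs anywhere Python does:

import sys
from subprocess import check_output

out = check_output([sys.executable, '-c', 'print("deadbeef")'])
print(type(out))              # <class 'bytes'> on Python 3
print(out.decode().strip())   # deadbeef, after the .decode() the fix adds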
34c0c6c73a65da3120aa52600254afc909e9a3bc
Remove unused main and unused imports
gotling/PyTach,gotling/PyTach,gotling/PyTach
pytach/wsgi.py
pytach/wsgi.py
import bottle import config from web import web app = application = bottle.Bottle() app.merge(web.app) config.arguments['--verbose'] = True
import bottle from bottle import route, run from web import web import config app = application = bottle.Bottle() app.merge(web.app) config.arguments['--verbose'] = True if __name__ == '__main__': app.run(host='0.0.0.0', port=8082, debug=True)
mit
Python
bea0ead3dfcc055d219966c64437652c0eb2cf84
Update demo.py
Kaceykaso/design_by_roomba,Kaceykaso/design_by_roomba
python/demo.py
python/demo.py
#! /usr/bin/env python import serial import time # Serial port N = "/dev/ttyUSB0" def ints2str(lst): ''' Taking a list of notes/lengths, convert it to a string ''' s = "" for i in lst: if i < 0 or i > 255: raise Exception s = s + str(chr(i)) return s # do some initialization magic s = serial.Serial(N, 57600, timeout=4) # start code s.write(ints2str([128])) # Full mode s.write(ints2str([132])) # Drive s.write(ints2str([137, 1, 44, 128, 0])) # wait s.write(ints2str([156, 1, 144])) # Turn s.write(ints2str([137, 1, 44, 0, 1])) #wait s.write(ints2str([157, 0, 90])) quit()
#! /usr/bin/env python import serial import time import sys # Serial port N = "/dev/ttyUSB0" def ints2str(lst): ''' Taking a list of notes/lengths, convert it to a string ''' s = "" for i in lst: if i < 0 or i > 255: raise Exception s = s + str(chr(i)) return s # do some initialization magic s = serial.Serial(N, 57600, timeout=4) # start code s.write(ints2str([128])) # Full mode s.write(ints2str([132])) # Drive s.write(ints2str([137, 1, 44, 128, 0])) # wait s.write(ints2str([156, 1, 144])) # Turn s.write(ints2str([137, 1, 44, 0, 1])) #wait s.write(ints2str([157, 0, 90])) sys.exit()
mit
Python
1ca6ccb50992836720e86a7c3c766a5497cf7588
Remove unused import
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
mint/django_rest/rbuilder/querysets/views.py
mint/django_rest/rbuilder/querysets/views.py
#!/usr/bin/python # # Copyright (c) 2011 rPath, Inc. # # All rights reserved. # from mint.django_rest.deco import return_xml, requires from mint.django_rest.rbuilder import service class BaseQuerySetService(service.BaseService): pass class QuerySetService(BaseQuerySetService): @return_xml def rest_GET(self, request, querySetId=None): return self.get(querySetId) def get(self, querySetId): if querySetId: return self.mgr.getQuerySet(querySetId) else: return self.mgr.getQuerySets() @requires('query_set') @return_xml def rest_POST(self, request, query_set): return self.mgr.addQuerySet(query_set)
#!/usr/bin/python # # Copyright (c) 2011 rPath, Inc. # # All rights reserved. # from mint.django_rest.deco import return_xml, requires from mint.django_rest.rbuilder import service from mint.django_rest.rbuilder.querysets import manager class BaseQuerySetService(service.BaseService): pass class QuerySetService(BaseQuerySetService): @return_xml def rest_GET(self, request, querySetId=None): return self.get(querySetId) def get(self, querySetId): if querySetId: return self.mgr.getQuerySet(querySetId) else: return self.mgr.getQuerySets() @requires('query_set') @return_xml def rest_POST(self, request, query_set): return self.mgr.addQuerySet(query_set)
apache-2.0
Python
d86bdec5d7d57fe74cb463e391798bd1e5be87ff
Update Ghana code to match current Pombola
geoffkilpin/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola
pombola/ghana/urls.py
pombola/ghana/urls.py
from django.conf.urls import patterns, url, include from django.views.generic import TemplateView from .views import data_upload, info_page_upload urlpatterns = patterns('', url(r'^intro$', TemplateView.as_view(template_name='intro.html')), url(r'^data/upload/mps/$', data_upload, name='data_upload'), url(r'^data/upload/info-page/$', info_page_upload, name='info_page_upload'), url('', include('django.contrib.auth.urls')), )
from django.conf.urls import patterns, include, url, handler404 from django.views.generic import TemplateView import django.contrib.auth.views from .views import data_upload, info_page_upload urlpatterns = patterns('', url(r'^intro$', TemplateView.as_view(template_name='intro.html')), url(r'^data/upload/mps/$', data_upload, name='data_upload'), url(r'^data/upload/info-page/$', info_page_upload, name='info_page_upload'), #auth views url(r'^accounts/login$', django.contrib.auth.views.login, name='login'), url(r'^accounts/logut$', django.contrib.auth.views.logout, name='logout'), #url(r'^accounts/register$', registration.backends.simple.urls, name='register'), )
agpl-3.0
Python
9b75fd09220e61fd511c99e63f8d2b30e6a0f868
stop using deprecated assertEquals()
rholder/csv2es
test_csv2es.py
test_csv2es.py
## Copyright 2015 Ray Holder ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. import csv2es import unittest class TestDelimiter(unittest.TestCase): def test_sanitize(self): self.assertEqual(None, csv2es.sanitize_delimiter(None, False)) self.assertEqual(str('\t'), csv2es.sanitize_delimiter(None, True)) self.assertEqual(str('|'), csv2es.sanitize_delimiter('|', False)) self.assertEqual(str('|'), csv2es.sanitize_delimiter(u'|', False)) self.assertEqual(str('\t'), csv2es.sanitize_delimiter('|', True)) self.assertEqual(str('\t'), csv2es.sanitize_delimiter('||', True)) self.assertRaises(Exception, csv2es.sanitize_delimiter, '||', False) class TestLoading(unittest.TestCase): def test_csv(self): # TODO fill this in self.assertTrue(True) def test_tsv(self): # TODO fill this in self.assertTrue(True) if __name__ == '__main__': unittest.main()
## Copyright 2015 Ray Holder ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. import csv2es import unittest class TestDelimiter(unittest.TestCase): def test_sanitize(self): self.assertEquals(None, csv2es.sanitize_delimiter(None, False)) self.assertEquals(str('\t'), csv2es.sanitize_delimiter(None, True)) self.assertEquals(str('|'), csv2es.sanitize_delimiter('|', False)) self.assertEquals(str('|'), csv2es.sanitize_delimiter(u'|', False)) self.assertEquals(str('\t'), csv2es.sanitize_delimiter('|', True)) self.assertEquals(str('\t'), csv2es.sanitize_delimiter('||', True)) self.assertRaises(Exception, csv2es.sanitize_delimiter, '||', False) class TestLoading(unittest.TestCase): def test_csv(self): # TODO fill this in self.assertTrue(True) def test_tsv(self): # TODO fill this in self.assertTrue(True) if __name__ == '__main__': unittest.main()
apache-2.0
Python
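assertEquals has been a deprecated alias of assertEqual since Python 2.7/3.2, and Python 3.12 removes the alias entirely, which is why the sweep above is purely mechanical. The modern spelling, runnable as-is:

import unittest

class TestMath(unittest.TestCase):
    def test_add(self):
        self.assertEqual(2 + 2, 4)   # not assertEquals: alias removed in 3.12

unittest.main(argv=['demo'], exit=False, verbosity=0)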
97a8a349d26b364e57aaac6f8d920770810aa8d8
Correct localized strings
looker/sentry,1tush/sentry,wong2/sentry,alexm92/sentry,jean/sentry,pauloschilling/sentry,SilentCircle/sentry,Kryz/sentry,mvaled/sentry,pauloschilling/sentry,BuildingLink/sentry,JTCunning/sentry,JamesMura/sentry,kevinlondon/sentry,alexm92/sentry,korealerts1/sentry,JTCunning/sentry,zenefits/sentry,JamesMura/sentry,mitsuhiko/sentry,drcapulet/sentry,wujuguang/sentry,mvaled/sentry,SilentCircle/sentry,JackDanger/sentry,daevaorn/sentry,mvaled/sentry,BayanGroup/sentry,hongliang5623/sentry,jokey2k/sentry,ewdurbin/sentry,vperron/sentry,kevinastone/sentry,BuildingLink/sentry,llonchj/sentry,TedaLIEz/sentry,felixbuenemann/sentry,drcapulet/sentry,Natim/sentry,fuziontech/sentry,rdio/sentry,jean/sentry,argonemyth/sentry,beni55/sentry,beeftornado/sentry,gencer/sentry,gg7/sentry,rdio/sentry,boneyao/sentry,ngonzalvez/sentry,JTCunning/sentry,imankulov/sentry,alexm92/sentry,fotinakis/sentry,gg7/sentry,zenefits/sentry,camilonova/sentry,kevinlondon/sentry,vperron/sentry,mvaled/sentry,NickPresta/sentry,ngonzalvez/sentry,daevaorn/sentry,beeftornado/sentry,argonemyth/sentry,imankulov/sentry,camilonova/sentry,looker/sentry,jean/sentry,nicholasserra/sentry,felixbuenemann/sentry,BuildingLink/sentry,looker/sentry,jokey2k/sentry,fotinakis/sentry,BayanGroup/sentry,TedaLIEz/sentry,rdio/sentry,daevaorn/sentry,felixbuenemann/sentry,zenefits/sentry,SilentCircle/sentry,argonemyth/sentry,zenefits/sentry,JamesMura/sentry,daevaorn/sentry,Natim/sentry,pauloschilling/sentry,JamesMura/sentry,vperron/sentry,1tush/sentry,ifduyue/sentry,gencer/sentry,kevinastone/sentry,NickPresta/sentry,beni55/sentry,JackDanger/sentry,wujuguang/sentry,ewdurbin/sentry,mvaled/sentry,beni55/sentry,fuziontech/sentry,drcapulet/sentry,nicholasserra/sentry,rdio/sentry,llonchj/sentry,BuildingLink/sentry,hongliang5623/sentry,fuziontech/sentry,jean/sentry,NickPresta/sentry,wong2/sentry,llonchj/sentry,ifduyue/sentry,mitsuhiko/sentry,gg7/sentry,looker/sentry,wong2/sentry,gencer/sentry,boneyao/sentry,songyi199111/sentry,BayanGroup/sentry,gencer/sentry,korealerts1/sentry,korealerts1/sentry,hongliang5623/sentry,Natim/sentry,imankulov/sentry,ifduyue/sentry,mvaled/sentry,beeftornado/sentry,SilentCircle/sentry,JamesMura/sentry,boneyao/sentry,fotinakis/sentry,ifduyue/sentry,ewdurbin/sentry,NickPresta/sentry,fotinakis/sentry,nicholasserra/sentry,kevinastone/sentry,camilonova/sentry,Kryz/sentry,JackDanger/sentry,Kryz/sentry,BuildingLink/sentry,gencer/sentry,zenefits/sentry,1tush/sentry,looker/sentry,jean/sentry,wujuguang/sentry,ifduyue/sentry,jokey2k/sentry,kevinlondon/sentry,songyi199111/sentry,TedaLIEz/sentry,songyi199111/sentry,ngonzalvez/sentry
src/sentry/constants.py
src/sentry/constants.py
""" sentry.constants ~~~~~~~~~~~~~~~~ These settings act as the default (base) settings for the Sentry-provided web-server :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from django.utils.datastructures import SortedDict from django.utils.translation import ugettext_lazy as _ SORT_OPTIONS = SortedDict(( ('priority', _('Priority')), ('date', _('Last Seen')), ('new', _('First Seen')), ('freq', _('Frequency')), ('tottime', _('Total Time Spent')), ('avgtime', _('Average Time Spent')), ('accel_15', _('Trending: %(minutes)d minutes' % {'minutes': 15})), ('accel_60', _('Trending: %(minutes)d minutes' % {'minutes': 60})), )) SORT_CLAUSES = { 'priority': 'sentry_groupedmessage.score', 'date': 'EXTRACT(EPOCH FROM sentry_groupedmessage.last_seen)', 'new': 'EXTRACT(EPOCH FROM sentry_groupedmessage.first_seen)', 'freq': 'sentry_groupedmessage.times_seen', 'tottime': 'sentry_groupedmessage.time_spent_total', 'avgtime': '(sentry_groupedmessage.time_spent_total / sentry_groupedmessage.time_spent_count)', } SCORE_CLAUSES = SORT_CLAUSES.copy() SQLITE_SORT_CLAUSES = SORT_CLAUSES.copy() SQLITE_SORT_CLAUSES.update({ 'date': 'sentry_groupedmessage.last_seen', 'new': 'sentry_groupedmessage.first_seen', }) SQLITE_SCORE_CLAUSES = SQLITE_SORT_CLAUSES.copy() MYSQL_SORT_CLAUSES = SORT_CLAUSES.copy() MYSQL_SORT_CLAUSES.update({ 'date': 'sentry_groupedmessage.last_seen', 'new': 'sentry_groupedmessage.first_seen', }) MYSQL_SCORE_CLAUSES = SCORE_CLAUSES.copy() MYSQL_SCORE_CLAUSES.update({ 'date': 'UNIX_TIMESTAMP(sentry_groupedmessage.last_seen)', 'new': 'UNIX_TIMESTAMP(sentry_groupedmessage.first_seen)', }) SEARCH_SORT_OPTIONS = SortedDict(( ('score', _('Score')), ('date', _('Last Seen')), ('new', _('First Seen')), )) STATUS_UNRESOLVED = 0 STATUS_RESOLVED = 1 STATUS_MUTED = 2 STATUS_LEVELS = ( (STATUS_UNRESOLVED, _('Unresolved')), (STATUS_RESOLVED, _('Resolved')), (STATUS_MUTED, _('Muted')), ) MEMBER_OWNER = 0 MEMBER_USER = 50 MEMBER_SYSTEM = 100 MEMBER_TYPES = ( (MEMBER_OWNER, _('Admin')), (MEMBER_USER, _('User')), (MEMBER_SYSTEM, _('System Agent')), )
""" sentry.constants ~~~~~~~~~~~~~~~~ These settings act as the default (base) settings for the Sentry-provided web-server :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from django.utils.datastructures import SortedDict from django.utils.translation import ugettext_lazy as _ SORT_OPTIONS = SortedDict(( ('priority', _('Priority')), ('date', _('Last Seen')), ('new', _('First Seen')), ('freq', _('Frequency')), ('tottime', _('Total Time Spent')), ('avgtime', _('Average Time Spent')), ('accel_15', _('Trending: %(minutes)d minutes' % {'minutes': 15})), ('accel_60', _('Trending: %(minutes)d minutes' % {'minutes': 60})), )) SORT_CLAUSES = { 'priority': 'sentry_groupedmessage.score', 'date': 'EXTRACT(EPOCH FROM sentry_groupedmessage.last_seen)', 'new': 'EXTRACT(EPOCH FROM sentry_groupedmessage.first_seen)', 'freq': 'sentry_groupedmessage.times_seen', 'tottime': 'sentry_groupedmessage.time_spent_total', 'avgtime': '(sentry_groupedmessage.time_spent_total / sentry_groupedmessage.time_spent_count)', } SCORE_CLAUSES = SORT_CLAUSES.copy() SQLITE_SORT_CLAUSES = SORT_CLAUSES.copy() SQLITE_SORT_CLAUSES.update({ 'date': 'sentry_groupedmessage.last_seen', 'new': 'sentry_groupedmessage.first_seen', }) SQLITE_SCORE_CLAUSES = SQLITE_SORT_CLAUSES.copy() MYSQL_SORT_CLAUSES = SORT_CLAUSES.copy() MYSQL_SORT_CLAUSES.update({ 'date': 'sentry_groupedmessage.last_seen', 'new': 'sentry_groupedmessage.first_seen', }) MYSQL_SCORE_CLAUSES = SCORE_CLAUSES.copy() MYSQL_SCORE_CLAUSES.update({ 'date': 'UNIX_TIMESTAMP(sentry_groupedmessage.last_seen)', 'new': 'UNIX_TIMESTAMP(sentry_groupedmessage.first_seen)', }) SEARCH_SORT_OPTIONS = SortedDict(( ('score', _('Score')), ('date', _('Last Seen')), ('new', _('First Seen')), )) STATUS_UNRESOLVED = 0 STATUS_RESOLVED = 1 STATUS_MUTED = 2 STATUS_LEVELS = ( (STATUS_UNRESOLVED, _('unresolved')), (STATUS_RESOLVED, _('resolved')), (STATUS_MUTED, _('muted')), ) MEMBER_OWNER = 0 MEMBER_USER = 50 MEMBER_SYSTEM = 100 MEMBER_TYPES = ( (MEMBER_OWNER, _('admin')), (MEMBER_USER, _('user')), (MEMBER_SYSTEM, _('system agent')), )
bsd-3-clause
Python
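The record above keeps three parallel clause dictionaries because each database spells epoch extraction differently; the natural companion is a lookup keyed on the engine in use. A minimal sketch of that dispatch, assuming the constants from the record are importable (the engine keys and helper name are illustrative, not part of Sentry):

# Hypothetical dispatch over the clause dicts defined in sentry.constants.
SORT_CLAUSES_BY_ENGINE = {
    'postgresql': SORT_CLAUSES,
    'mysql': MYSQL_SORT_CLAUSES,
    'sqlite': SQLITE_SORT_CLAUSES,
}

def get_sort_clause(engine, sort='date'):
    # Fall back to the PostgreSQL-style clauses for unknown engines.
    clauses = SORT_CLAUSES_BY_ENGINE.get(engine, SORT_CLAUSES)
    return clauses[sort]

assert get_sort_clause('sqlite', 'new') == 'sentry_groupedmessage.first_seen'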
ca2a6d06f09f5f2d511d6cf676fdd9a8f6c411cf
remove cruft, bump heroku
dev-coop/brains,dev-coop/brains
src/settings/production.py
src/settings/production.py
from base import * DEBUG = False ALLOWED_HOSTS = ["*"] SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "afaefawe23af") assert SECRET_KEY, "Set your DJANGO_SECRET_KEY env var" # Celery BROKER_URL = os.environ.get('CLOUDAMQP_URL', None) #assert BROKER_URL, "Celery BROKER_URL env var missing!" # Memcached CACHES = { 'default': { 'BACKEND': 'django_bmemcached.memcached.BMemcached', 'LOCATION': os.environ.get('MEMCACHEDCLOUD_SERVERS', '').split(','), 'OPTIONS': { 'username': os.environ.get('MEMCACHEDCLOUD_USERNAME'), 'password': os.environ.get('MEMCACHEDCLOUD_PASSWORD') } } }
from base import * DEBUG = False ALLOWED_HOSTS = ["*"] SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "afaefawe23af") assert SECRET_KEY, "Set your DJANGO_SECRET_KEY env var" # Celery BROKER_URL = os.environ.get('CLOUDAMQP_URL', None) # BROKER_URL = os.environ.get("RABBITMQ_BIGWIG_URL", None) #assert BROKER_URL, "Celery BROKER_URL env var missing!" # Memcached CACHES = { 'default': { 'BACKEND': 'django_bmemcached.memcached.BMemcached', 'LOCATION': os.environ.get('MEMCACHEDCLOUD_SERVERS', '').split(','), 'OPTIONS': { 'username': os.environ.get('MEMCACHEDCLOUD_USERNAME'), 'password': os.environ.get('MEMCACHEDCLOUD_PASSWORD') } } }
mit
Python
5526f8e3dca2f84fce34df5a134bada8479a2f69
Fix dumpdata ordering for VRFs
digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox
netbox/ipam/models/__init__.py
netbox/ipam/models/__init__.py
# Ensure that VRFs are imported before IPs/prefixes so dumpdata & loaddata work correctly from .fhrp import * from .vrfs import * from .ip import * from .services import * from .vlans import * __all__ = ( 'ASN', 'Aggregate', 'IPAddress', 'IPRange', 'FHRPGroup', 'FHRPGroupAssignment', 'Prefix', 'RIR', 'Role', 'RouteTarget', 'Service', 'ServiceTemplate', 'VLAN', 'VLANGroup', 'VRF', )
from .fhrp import * from .ip import * from .services import * from .vlans import * from .vrfs import * __all__ = ( 'ASN', 'Aggregate', 'IPAddress', 'IPRange', 'FHRPGroup', 'FHRPGroupAssignment', 'Prefix', 'RIR', 'Role', 'RouteTarget', 'Service', 'ServiceTemplate', 'VLAN', 'VLANGroup', 'VRF', )
apache-2.0
Python
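The netbox fix works because the wildcard imports in models/__init__.py fix the order in which Django registers the models, and dumpdata serializes an app's models in registration order; importing vrfs before ip means VRF rows are dumped (and later loaded) before the prefixes and addresses that reference them. The same pattern in miniature (module names are placeholders):

# models/__init__.py -- import the module whose models others point at first,
# so dumpdata emits parents before children and loaddata can restore them.
from .vrfs import *   # VRF must be registered before anything with a FK to it
from .ip import *     # Prefix / IPAddress reference VRF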
fe0691595eea7197db07f3505446e1553df3d188
Bump version number after merging pull request.
cmbruns/pyopenvr,cmbruns/pyopenvr,cmbruns/pyopenvr,cmbruns/pyopenvr
src/openvr/version.py
src/openvr/version.py
# Store the version here so: # 1) we don't load dependencies by storing it in __init__.py # 2) we can import it in setup.py for the same reason # 3) we can import it into your module # http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.0.0602a'
# Store the version here so: # 1) we don't load dependencies by storing it in __init__.py # 2) we can import it in setup.py for the same reason # 3) we can import it into your module # http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.0.0601'
bsd-3-clause
Python
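Keeping __version__ in a tiny standalone module, as the comment in this record explains, lets setup.py read the version without importing the package and thus without pulling in its runtime dependencies. One common way to consume it, sketched under the assumption of the record's src/openvr/version.py path:

# setup.py (sketch)
from distutils.core import setup

scope = {}
with open('src/openvr/version.py') as handle:
    exec(handle.read(), scope)  # defines __version__ without importing openvr

setup(name='openvr', version=scope['__version__'])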
4e74ba40f442dd27ddd29464b518c2a06ad1019a
Bump version
machtfit/django-oscar,machtfit/django-oscar,machtfit/django-oscar
src/oscar/__init__.py
src/oscar/__init__.py
import os # Use 'dev', 'beta', or 'final' as the 4th element to indicate release type. VERSION = (1, 0, 1, 'machtfit', 22) def get_short_version(): return '%s.%s' % (VERSION[0], VERSION[1]) def get_version(): return '{}.{}.{}-{}-{}'.format(*VERSION) # Cheeky setting that allows each template to be accessible by two paths. # Eg: the template 'oscar/templates/oscar/base.html' can be accessed via both # 'base.html' and 'oscar/base.html'. This allows Oscar's templates to be # extended by templates with the same filename OSCAR_MAIN_TEMPLATE_DIR = os.path.join( os.path.dirname(os.path.abspath(__file__)), 'templates/oscar') OSCAR_CORE_APPS = [ 'oscar', 'oscar.apps.analytics', 'oscar.apps.checkout', 'oscar.apps.address', 'oscar.apps.shipping', 'oscar.apps.catalogue', 'oscar.apps.catalogue.reviews', 'oscar.apps.partner', 'oscar.apps.basket', 'oscar.apps.payment', 'oscar.apps.offer', 'oscar.apps.order', 'oscar.apps.customer', 'oscar.apps.promotions', 'oscar.apps.voucher', 'oscar.apps.wishlists', 'oscar.apps.dashboard', 'oscar.apps.dashboard.reports', 'oscar.apps.dashboard.users', 'oscar.apps.dashboard.orders', 'oscar.apps.dashboard.promotions', 'oscar.apps.dashboard.catalogue', 'oscar.apps.dashboard.offers', 'oscar.apps.dashboard.partners', 'oscar.apps.dashboard.pages', 'oscar.apps.dashboard.ranges', 'oscar.apps.dashboard.reviews', 'oscar.apps.dashboard.vouchers', 'oscar.apps.dashboard.communications', # 3rd-party apps that oscar depends on 'treebeard', 'sorl.thumbnail', 'django_tables2', ] def get_core_apps(overrides=None): """ Return a list of oscar's apps amended with any passed overrides """ if not overrides: return OSCAR_CORE_APPS def get_app_label(app_label, overrides): pattern = app_label.replace('oscar.apps.', '') for override in overrides: if override.endswith(pattern): if 'dashboard' in override and 'dashboard' not in pattern: continue return override return app_label apps = [] for app_label in OSCAR_CORE_APPS: apps.append(get_app_label(app_label, overrides)) return apps
import os # Use 'dev', 'beta', or 'final' as the 4th element to indicate release type. VERSION = (1, 0, 1, 'machtfit', 21) def get_short_version(): return '%s.%s' % (VERSION[0], VERSION[1]) def get_version(): return '{}.{}.{}-{}-{}'.format(*VERSION) # Cheeky setting that allows each template to be accessible by two paths. # Eg: the template 'oscar/templates/oscar/base.html' can be accessed via both # 'base.html' and 'oscar/base.html'. This allows Oscar's templates to be # extended by templates with the same filename OSCAR_MAIN_TEMPLATE_DIR = os.path.join( os.path.dirname(os.path.abspath(__file__)), 'templates/oscar') OSCAR_CORE_APPS = [ 'oscar', 'oscar.apps.analytics', 'oscar.apps.checkout', 'oscar.apps.address', 'oscar.apps.shipping', 'oscar.apps.catalogue', 'oscar.apps.catalogue.reviews', 'oscar.apps.partner', 'oscar.apps.basket', 'oscar.apps.payment', 'oscar.apps.offer', 'oscar.apps.order', 'oscar.apps.customer', 'oscar.apps.promotions', 'oscar.apps.voucher', 'oscar.apps.wishlists', 'oscar.apps.dashboard', 'oscar.apps.dashboard.reports', 'oscar.apps.dashboard.users', 'oscar.apps.dashboard.orders', 'oscar.apps.dashboard.promotions', 'oscar.apps.dashboard.catalogue', 'oscar.apps.dashboard.offers', 'oscar.apps.dashboard.partners', 'oscar.apps.dashboard.pages', 'oscar.apps.dashboard.ranges', 'oscar.apps.dashboard.reviews', 'oscar.apps.dashboard.vouchers', 'oscar.apps.dashboard.communications', # 3rd-party apps that oscar depends on 'treebeard', 'sorl.thumbnail', 'django_tables2', ] def get_core_apps(overrides=None): """ Return a list of oscar's apps amended with any passed overrides """ if not overrides: return OSCAR_CORE_APPS def get_app_label(app_label, overrides): pattern = app_label.replace('oscar.apps.', '') for override in overrides: if override.endswith(pattern): if 'dashboard' in override and 'dashboard' not in pattern: continue return override return app_label apps = [] for app_label in OSCAR_CORE_APPS: apps.append(get_app_label(app_label, overrides)) return apps
bsd-3-clause
Python
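Since VERSION here is a five-element tuple, get_version unpacks every field into the format string; a quick self-check of the formatting used in the record:

VERSION = (1, 0, 1, 'machtfit', 22)
assert '%s.%s' % (VERSION[0], VERSION[1]) == '1.0'
assert '{}.{}.{}-{}-{}'.format(*VERSION) == '1.0.1-machtfit-22'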
aa7bbd84fa16105417ceb7f9e06d392a4e54fdc6
Remove unused import
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
salt/beacons/twilio_txt_msg.py
salt/beacons/twilio_txt_msg.py
# -*- coding: utf-8 -*- ''' Beacon to emit Twilio text messages ''' # Import Python libs from __future__ import absolute_import import logging # Import 3rd Party libs try: from twilio.rest import TwilioRestClient HAS_TWILIO = True except ImportError: HAS_TWILIO = False log = logging.getLogger(__name__) __virtualname__ = 'twilio_txt_msg' def __virtual__(): if HAS_TWILIO: return __virtualname__ else: return False def beacon(config): ''' Emit a dict name "texts" whose value is a list of texts. .. code-block:: yaml beacons: twilio_txt_msg: account_sid: "<account sid>" auth_token: "<auth token>" twilio_number: "+15555555555" interval: 10 ''' log.trace('twilio_txt_msg beacon starting') ret = [] if not all([config['account_sid'], config['auth_token'], config['twilio_number']]): return ret output = {} output['texts'] = [] client = TwilioRestClient(config['account_sid'], config['auth_token']) messages = client.messages.list(to=config['twilio_number']) log.trace('Num messages: {0}'.format(len(messages))) if len(messages) < 1: log.trace('Twilio beacon has no texts') return ret for message in messages: item = {} item['id'] = str(message.sid) item['body'] = str(message.body) item['from'] = str(message.from_) item['sent'] = str(message.date_sent) item['images'] = [] if int(message.num_media): media = client.media(message.sid).list() if len(media): for pic in media: item['images'].append(str(pic.uri)) output['texts'].append(item) message.delete() ret.append(output) return ret
# -*- coding: utf-8 -*- ''' Beacon to emit Twilio text messages ''' # Import Python libs from __future__ import absolute_import from datetime import datetime import logging # Import 3rd Party libs try: from twilio.rest import TwilioRestClient HAS_TWILIO = True except ImportError: HAS_TWILIO = False log = logging.getLogger(__name__) __virtualname__ = 'twilio_txt_msg' def __virtual__(): if HAS_TWILIO: return __virtualname__ else: return False def beacon(config): ''' Emit a dict name "texts" whose value is a list of texts. .. code-block:: yaml beacons: twilio_txt_msg: account_sid: "<account sid>" auth_token: "<auth token>" twilio_number: "+15555555555" interval: 10 ''' log.trace('twilio_txt_msg beacon starting') ret = [] if not all([config['account_sid'], config['auth_token'], config['twilio_number']]): return ret output = {} output['texts'] = [] client = TwilioRestClient(config['account_sid'], config['auth_token']) messages = client.messages.list(to=config['twilio_number']) log.trace('Num messages: {0}'.format(len(messages))) if len(messages) < 1: log.trace('Twilio beacon has no texts') return ret for message in messages: item = {} item['id'] = str(message.sid) item['body'] = str(message.body) item['from'] = str(message.from_) item['sent'] = str(message.date_sent) item['images'] = [] if int(message.num_media): media = client.media(message.sid).list() if len(media): for pic in media: item['images'].append(str(pic.uri)) output['texts'].append(item) message.delete() ret.append(output) return ret
apache-2.0
Python
c340c1b92a3d82a25ce2e43b19603ee58de0b146
Improve celery logging
keaneokelley/home,keaneokelley/home,keaneokelley/home
home/core/async.py
home/core/async.py
""" async.py ~~~~~~~~ Handles running of tasks in an asynchronous fashion. Not explicitly tied to Celery. The `run` method simply must exist here and handle the execution of whatever task is passed to it, whether or not it is handled asynchronously. """ from apscheduler.schedulers.background import BackgroundScheduler from celery import Celery from celery.security import setup_security from celery.utils.log import get_task_logger setup_security(allowed_serializers=['pickle', 'json'], serializer='pickle') queue = Celery('home', broker='redis://', backend='redis://', serializer='pickle') queue.conf.update( CELERY_TASK_SERIALIZER='pickle', CELERY_ACCEPT_CONTENT=['pickle', 'json'], ) scheduler = BackgroundScheduler() scheduler.start() logger = get_task_logger(__name__) @queue.task def _run(method, **kwargs) -> None: """ Run the configured actions in multiple processes. """ logger.info('Running {} with config: {}'.format(method.__name__, kwargs)) method(**kwargs) def run(method, delay=0, **kwargs): return _run.apply_async(args=[method], kwargs=kwargs, countdown=float(delay))
""" async.py ~~~~~~~~ Handles running of tasks in an asynchronous fashion. Not explicitly tied to Celery. The `run` method simply must exist here and handle the execution of whatever task is passed to it, whether or not it is handled asynchronously. """ from apscheduler.schedulers.background import BackgroundScheduler from celery import Celery from celery.security import setup_security setup_security(allowed_serializers=['pickle', 'json'], serializer='pickle') queue = Celery('home', broker='redis://', backend='redis://', serializer='pickle') queue.conf.update( CELERY_TASK_SERIALIZER='pickle', CELERY_ACCEPT_CONTENT=['pickle', 'json'], ) scheduler = BackgroundScheduler() scheduler.start() @queue.task def _run(method, **kwargs) -> None: """ Run the configured actions in multiple processes. """ method(**kwargs) def run(method, delay=0, **kwargs): return _run.apply_async(args=[method], kwargs=kwargs, countdown=float(delay))
mit
Python
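get_task_logger returns a logger parented under Celery's task logger, so records emitted inside a task pick up Celery's task-aware formatting (task name and id) instead of the bare root format. A minimal standalone sketch of the pattern the commit adopts:

from celery import Celery
from celery.utils.log import get_task_logger

app = Celery('demo', broker='redis://')
logger = get_task_logger(__name__)

@app.task
def add(x, y):
    # Logged through Celery's task logger, so the record carries task context.
    logger.info('adding %s + %s', x, y)
    return x + y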
9b18db54d64e168231079255334649fb9b503f3e
Add murrine back into monodevelop-mac-dev packages list
bl8/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild,bl8/bockbuild,bl8/bockbuild,mono/bockbuild
profiles/monodevelop-mac-dev/packages.py
profiles/monodevelop-mac-dev/packages.py
import os from bockbuild.darwinprofile import DarwinProfile class MonoDevelopMacDevPackages: def __init__ (self): # Toolchain self.packages.extend ([ 'autoconf.py', 'automake.py', 'libtool.py', 'gettext.py', 'pkg-config.py' ]) # Base Libraries self.packages.extend ([ 'libpng.py', 'libjpeg.py', 'libtiff.py', 'libxml2.py', 'freetype.py', 'fontconfig.py', 'pixman.py', 'cairo.py', 'glib.py', 'pango.py', 'atk.py', 'intltool.py', 'gdk-pixbuf.py', 'gtk+.py', 'libglade.py', ]) # Theme self.packages.extend ([ 'librsvg.py', 'hicolor-icon-theme.py', 'gtk-engines.py', 'murrine.py', 'gtk-quartz-engine.py' ]) # Mono self.packages.extend ([ 'mono.py', 'gtk-sharp.py', 'mono-addins.py', ]) self.packages = [os.path.join ('..', '..', 'packages', p) for p in self.packages]
import os from bockbuild.darwinprofile import DarwinProfile class MonoDevelopMacDevPackages: def __init__ (self): # Toolchain self.packages.extend ([ 'autoconf.py', 'automake.py', 'libtool.py', 'gettext.py', 'pkg-config.py' ]) # Base Libraries self.packages.extend ([ 'libpng.py', 'libjpeg.py', 'libtiff.py', 'libxml2.py', 'freetype.py', 'fontconfig.py', 'pixman.py', 'cairo.py', 'glib.py', 'pango.py', 'atk.py', 'intltool.py', 'gdk-pixbuf.py', 'gtk+.py', 'libglade.py', ]) # Theme self.packages.extend ([ 'librsvg.py', 'hicolor-icon-theme.py', 'gtk-engines.py', 'gtk-quartz-engine.py' ]) # Mono self.packages.extend ([ 'mono.py', 'gtk-sharp.py', 'mono-addins.py', ]) self.packages = [os.path.join ('..', '..', 'packages', p) for p in self.packages]
mit
Python
5ca4e1df8fc67f9b56d5ea55cb4e17e78c5c6ed5
Fix test factory
dbinetti/barberscore,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django
project/apps/smanager/tests/factories.py
project/apps/smanager/tests/factories.py
# Standard Library import datetime import rest_framework_jwt # Third-Party from factory import Faker # post_generation, from factory import Iterator from factory import LazyAttribute from factory import PostGenerationMethodCall from factory import RelatedFactory from factory import Sequence from factory import SubFactory from factory.django import DjangoModelFactory from factory.django import mute_signals from factory.fuzzy import FuzzyInteger # Django from django.db.models.signals import pre_delete from django.db.models.signals import pre_save from django.db.models.signals import m2m_changed # First-Party from apps.smanager.models import Repertory from apps.smanager.models import Assignment from apps.smanager.models import Contest from apps.smanager.models import Entry from apps.smanager.models import Session from rest_framework_jwt.models import User class AssignmentFactory(DjangoModelFactory): # status = Assignment.STATUS.active kind = Assignment.KIND.official # convention = SubFactory('factories.ConventionFactory') # person = SubFactory('factories.PersonFactory') class Meta: model = Assignment class ContestFactory(DjangoModelFactory): # status = Contest.STATUS.included session = SubFactory('apps.smanager.tests.factories.SessionFactory') # award = SubFactory('factories.AwardFactory') class Meta: model = Contest class EntryFactory(DjangoModelFactory): status = Entry.STATUS.new is_evaluation = True is_private = False session = SubFactory('apps.smanager.tests.factories.SessionFactory') # group = SubFactory('factories.GroupFactory') class Meta: model = Entry class RepertoryFactory(DjangoModelFactory): # status = Repertory.STATUS.active # group = SubFactory('factories.GroupFactory') entry = SubFactory('apps.smanager.tests.factories.EntryFactory') class Meta: model = Repertory class SessionFactory(DjangoModelFactory): status = Session.STATUS.new kind = Session.KIND.quartet name = "International Championship" district = Session.DISTRICT.bhs is_invitational = False num_rounds = 2 # convention = SubFactory('factories.ConventionFactory') class Meta: model = Session # @post_generation # def create_rounds(self, create, extracted, **kwargs): # if create: # for i in range(self.num_rounds): # num = i + 1 # kind = self.num_rounds - i # RoundFactory( # session=self, # num=num, # kind=kind, # ) @mute_signals(pre_delete, pre_save, m2m_changed) class UserFactory(DjangoModelFactory): username = Faker('uuid4') password = PostGenerationMethodCall('set_password', 'password') is_staff = False class Meta: model = User
# Standard Library import datetime import rest_framework_jwt # Third-Party from factory import Faker # post_generation, from factory import Iterator from factory import LazyAttribute from factory import PostGenerationMethodCall from factory import RelatedFactory from factory import Sequence from factory import SubFactory from factory.django import DjangoModelFactory from factory.django import mute_signals from factory.fuzzy import FuzzyInteger # Django from django.db.models.signals import pre_delete from django.db.models.signals import pre_save from django.db.models.signals import m2m_changed # First-Party from apps.smanager.models import Repertory from apps.smanager.models import Assignment from apps.smanager.models import Contest from apps.smanager.models import Entry from apps.smanager.models import Session from rest_framework_jwt.models import User class AssignmentFactory(DjangoModelFactory): # status = Assignment.STATUS.active kind = Assignment.KIND.official # convention = SubFactory('factories.ConventionFactory') # person = SubFactory('factories.PersonFactory') class Meta: model = Assignment class ContestFactory(DjangoModelFactory): # status = Contest.STATUS.included session = SubFactory('apps.smanager.tests.factories.SessionFactory') # award = SubFactory('factories.AwardFactory') class Meta: model = Contest class EntryFactory(DjangoModelFactory): status = Entry.STATUS.new is_evaluation = True is_private = False session = SubFactory('apps.smanager.tests.factories.SessionFactory') # group = SubFactory('factories.GroupFactory') class Meta: model = Entry class RepertoryFactory(DjangoModelFactory): # status = Repertory.STATUS.active # group = SubFactory('factories.GroupFactory') entry = SubFactory('apps.smanager.tests.factories.EntryFactory') class Meta: model = Repertory class SessionFactory(DjangoModelFactory): status = Session.STATUS.new kind = Session.KIND.quartet is_invitational = False num_rounds = 2 # convention = SubFactory('factories.ConventionFactory') class Meta: model = Session # @post_generation # def create_rounds(self, create, extracted, **kwargs): # if create: # for i in range(self.num_rounds): # num = i + 1 # kind = self.num_rounds - i # RoundFactory( # session=self, # num=num, # kind=kind, # ) @mute_signals(pre_delete, pre_save, m2m_changed) class UserFactory(DjangoModelFactory): username = Faker('uuid4') password = PostGenerationMethodCall('set_password', 'password') is_staff = False class Meta: model = User
bsd-2-clause
Python
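The SubFactory fields above take dotted-path strings so the factories can reference each other lazily; the commit's actual fix is just adding the name and district defaults the Session model now requires. For readers unfamiliar with factory_boy, here is a self-contained sketch of the declaration style (a plain class stands in for a Django model to stay runnable):

import factory

class Session(object):
    def __init__(self, name, num_rounds):
        self.name = name
        self.num_rounds = num_rounds

class SessionFactory(factory.Factory):
    class Meta:
        model = Session

    name = 'International Championship'
    num_rounds = 2

session = SessionFactory(num_rounds=3)   # override a default per call
assert session.name == 'International Championship'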
f76daf38fb8998bbf5d0b663ff64572fb240fd24
bump Python API version
google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto
src/trace_processor/python/setup.py
src/trace_processor/python/setup.py
from distutils.core import setup setup( name='perfetto', packages=['perfetto', 'perfetto.trace_processor'], package_data={'perfetto.trace_processor': ['*.descriptor']}, include_package_data=True, version='0.3.0', license='apache-2.0', description='Python API for Perfetto\'s Trace Processor', author='Perfetto', author_email='perfetto-pypi@google.com', url='https://perfetto.dev/', download_url='https://github.com/google/perfetto/archive/refs/tags/v20.1.tar.gz', keywords=['trace processor', 'tracing', 'perfetto'], install_requires=[ 'protobuf', ], classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: Apache Software License', "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", ], )
from distutils.core import setup setup( name='perfetto', packages=['perfetto', 'perfetto.trace_processor'], package_data={'perfetto.trace_processor': ['*.descriptor']}, include_package_data=True, version='0.2.9', license='apache-2.0', description='Python API for Perfetto\'s Trace Processor', author='Perfetto', author_email='perfetto-pypi@google.com', url='https://perfetto.dev/', download_url='https://github.com/google/perfetto/archive/v6.0.tar.gz', keywords=['trace processor', 'tracing', 'perfetto'], install_requires=[ 'protobuf', ], classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: Apache Software License', "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", ], )
apache-2.0
Python
3b2730edbbef3f32aef6682d9d446d8416fc7562
add setWindowMinimizeButtonHint() for dialog
SF-Zhou/quite
quite/controller/dialog_ui_controller.py
quite/controller/dialog_ui_controller.py
from . import WidgetUiController from ..gui import Shortcut from PySide.QtCore import Qt class DialogUiController(WidgetUiController): def __init__(self, parent=None, ui_file=None): super().__init__(parent, ui_file) Shortcut('ctrl+w', self.w).excited.connect(self.w.close) def exec(self): return self.w.exec() def setWindowMinimizeButtonHint(self): self.w.setWindowFlags(Qt.WindowMinimizeButtonHint | Qt.WindowMaximizeButtonHint) @classmethod def class_exec(cls, *args, **kwargs): return cls(*args, **kwargs).exec()
from . import WidgetUiController from ..gui import Shortcut class DialogUiController(WidgetUiController): def __init__(self, parent=None, ui_file=None): super().__init__(parent, ui_file) Shortcut('ctrl+w', self.w).excited.connect(self.w.close) def exec(self): return self.w.exec() @classmethod def class_exec(cls, *args, **kwargs): return cls(*args, **kwargs).exec()
mit
Python
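setWindowFlags replaces the dialog's entire flag set rather than adding to it, which is why the record ORs the minimize and maximize hints together; Qt also hides a visible window when its flags change, so show() must be called afterwards. A hedged sketch using the PySide (Qt4-era) imports the record itself uses:

from PySide.QtCore import Qt
from PySide.QtGui import QApplication, QDialog

app = QApplication([])
dialog = QDialog()
# Replaces all flags: keep Qt.Dialog and OR in every hint the window needs.
dialog.setWindowFlags(Qt.Dialog | Qt.WindowMinimizeButtonHint |
                      Qt.WindowMaximizeButtonHint)
dialog.show()
app.exec_()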
a08919c24e1af460ccba8820eb6646492848621e
Bump Version 0.5.4
douban/libmc,mckelvin/libmc,douban/libmc,mckelvin/libmc,mckelvin/libmc,douban/libmc,douban/libmc,douban/libmc,mckelvin/libmc,mckelvin/libmc
libmc/__init__.py
libmc/__init__.py
from ._client import ( PyClient, ThreadUnsafe, encode_value, MC_DEFAULT_EXPTIME, MC_POLL_TIMEOUT, MC_CONNECT_TIMEOUT, MC_RETRY_TIMEOUT, MC_HASH_MD5, MC_HASH_FNV1_32, MC_HASH_FNV1A_32, MC_HASH_CRC_32, MC_RETURN_SEND_ERR, MC_RETURN_RECV_ERR, MC_RETURN_CONN_POLL_ERR, MC_RETURN_POLL_TIMEOUT_ERR, MC_RETURN_POLL_ERR, MC_RETURN_MC_SERVER_ERR, MC_RETURN_PROGRAMMING_ERR, MC_RETURN_INVALID_KEY_ERR, MC_RETURN_INCOMPLETE_BUFFER_ERR, MC_RETURN_OK, ) __VERSION__ = "0.5.4" __version__ = "v0.5.4" __author__ = "mckelvin" __email__ = "mckelvin@users.noreply.github.com" __date__ = "Thu Jul 16 18:20:00 2015 +0800" class Client(PyClient): pass __all__ = [ 'Client', 'ThreadUnsafe', '__VERSION__', 'encode_value', 'MC_DEFAULT_EXPTIME', 'MC_POLL_TIMEOUT', 'MC_CONNECT_TIMEOUT', 'MC_RETRY_TIMEOUT', 'MC_HASH_MD5', 'MC_HASH_FNV1_32', 'MC_HASH_FNV1A_32', 'MC_HASH_CRC_32', 'MC_RETURN_SEND_ERR', 'MC_RETURN_RECV_ERR', 'MC_RETURN_CONN_POLL_ERR', 'MC_RETURN_POLL_TIMEOUT_ERR', 'MC_RETURN_POLL_ERR', 'MC_RETURN_MC_SERVER_ERR', 'MC_RETURN_PROGRAMMING_ERR', 'MC_RETURN_INVALID_KEY_ERR', 'MC_RETURN_INCOMPLETE_BUFFER_ERR', 'MC_RETURN_OK', ]
from ._client import ( PyClient, ThreadUnsafe, encode_value, MC_DEFAULT_EXPTIME, MC_POLL_TIMEOUT, MC_CONNECT_TIMEOUT, MC_RETRY_TIMEOUT, MC_HASH_MD5, MC_HASH_FNV1_32, MC_HASH_FNV1A_32, MC_HASH_CRC_32, MC_RETURN_SEND_ERR, MC_RETURN_RECV_ERR, MC_RETURN_CONN_POLL_ERR, MC_RETURN_POLL_TIMEOUT_ERR, MC_RETURN_POLL_ERR, MC_RETURN_MC_SERVER_ERR, MC_RETURN_PROGRAMMING_ERR, MC_RETURN_INVALID_KEY_ERR, MC_RETURN_INCOMPLETE_BUFFER_ERR, MC_RETURN_OK, ) __VERSION__ = '0.5.3' __version__ = "v0.5.3-3-g3eb1a97" __author__ = "mckelvin" __email__ = "mckelvin@users.noreply.github.com" __date__ = "Sat Jul 11 14:24:54 2015 +0800" class Client(PyClient): pass __all__ = [ 'Client', 'ThreadUnsafe', '__VERSION__', 'encode_value', 'MC_DEFAULT_EXPTIME', 'MC_POLL_TIMEOUT', 'MC_CONNECT_TIMEOUT', 'MC_RETRY_TIMEOUT', 'MC_HASH_MD5', 'MC_HASH_FNV1_32', 'MC_HASH_FNV1A_32', 'MC_HASH_CRC_32', 'MC_RETURN_SEND_ERR', 'MC_RETURN_RECV_ERR', 'MC_RETURN_CONN_POLL_ERR', 'MC_RETURN_POLL_TIMEOUT_ERR', 'MC_RETURN_POLL_ERR', 'MC_RETURN_MC_SERVER_ERR', 'MC_RETURN_PROGRAMMING_ERR', 'MC_RETURN_INVALID_KEY_ERR', 'MC_RETURN_INCOMPLETE_BUFFER_ERR', 'MC_RETURN_OK', ]
bsd-3-clause
Python
778f284c2208438b7bc26226cc295f80de6343e0
Use loop.add_signal_handler for handling SIGWINCH.
jonathanslenders/libpymux
libpymux/utils.py
libpymux/utils.py
import array import asyncio import fcntl import signal import termios def get_size(stdout): # Thanks to fabric (fabfile.org), and # http://sqizit.bartletts.id.au/2011/02/14/pseudo-terminals-in-python/ """ Get the size of this pseudo terminal. :returns: A (rows, cols) tuple. """ #assert stdout.isatty() # Buffer for the C call buf = array.array('h', [0, 0, 0, 0 ]) # Do TIOCGWINSZ (Get) #fcntl.ioctl(stdout.fileno(), termios.TIOCGWINSZ, buf, True) fcntl.ioctl(0, termios.TIOCGWINSZ, buf, True) # Return rows, cols return buf[0], buf[1] def set_size(stdout_fileno, rows, cols): """ Set terminal size. (This is also mainly for internal use. Setting the terminal size automatically happens when the window resizes. However, sometimes the process that created a pseudo terminal, and the process that's attached to the output window are not the same, e.g. in case of a telnet connection, or unix domain socket, and then we have to sync the sizes by hand.) """ # Buffer for the C call buf = array.array('h', [rows, cols, 0, 0 ]) # Do: TIOCSWINSZ (Set) fcntl.ioctl(stdout_fileno, termios.TIOCSWINSZ, buf) def alternate_screen(write): class Context: def __enter__(self): # Enter alternate screen buffer write(b'\033[?1049h') def __exit__(self, *a): # Exit alternate screen buffer and make cursor visible again. write(b'\033[?1049l') write(b'\033[?25h') return Context() def call_on_sigwinch(callback, loop=None): """ Set a function to be called when the SIGWINCH signal is received. (Normally, on terminal resize.) """ if loop is None: loop = asyncio.get_event_loop() def sigwinch_handler(): loop.call_soon(callback) loop.add_signal_handler(signal.SIGWINCH, sigwinch_handler)
import array import asyncio import fcntl import signal import termios def get_size(stdout): # Thanks to fabric (fabfile.org), and # http://sqizit.bartletts.id.au/2011/02/14/pseudo-terminals-in-python/ """ Get the size of this pseudo terminal. :returns: A (rows, cols) tuple. """ #assert stdout.isatty() # Buffer for the C call buf = array.array('h', [0, 0, 0, 0 ]) # Do TIOCGWINSZ (Get) #fcntl.ioctl(stdout.fileno(), termios.TIOCGWINSZ, buf, True) fcntl.ioctl(0, termios.TIOCGWINSZ, buf, True) # Return rows, cols return buf[0], buf[1] def set_size(stdout_fileno, rows, cols): """ Set terminal size. (This is also mainly for internal use. Setting the terminal size automatically happens when the window resizes. However, sometimes the process that created a pseudo terminal, and the process that's attached to the output window are not the same, e.g. in case of a telnet connection, or unix domain socket, and then we have to sync the sizes by hand.) """ # Buffer for the C call buf = array.array('h', [rows, cols, 0, 0 ]) # Do: TIOCSWINSZ (Set) fcntl.ioctl(stdout_fileno, termios.TIOCSWINSZ, buf) def alternate_screen(write): class Context: def __enter__(self): # Enter alternate screen buffer write(b'\033[?1049h') def __exit__(self, *a): # Exit alternate screen buffer and make cursor visible again. write(b'\033[?1049l') write(b'\033[?25h') return Context() def call_on_sigwinch(callback): """ Set a function to be called when the SIGWINCH signal is received. (Normally, on terminal resize.) """ def sigwinch_handler(n, frame): loop = asyncio.get_event_loop() loop.call_soon(callback) signal.signal(signal.SIGWINCH, sigwinch_handler)
bsd-2-clause
Python
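The switch matters because a plain signal.signal handler runs in the interpreter's signal context, where touching the event loop is unsafe; loop.add_signal_handler arranges for the callback to run inside the loop itself. A minimal sketch (Unix-only, since SIGWINCH does not exist on Windows):

import asyncio
import signal

def on_resize():
    print('terminal resized')

loop = asyncio.get_event_loop()
# The callback is invoked in the event loop, so it may safely touch loop state.
loop.add_signal_handler(signal.SIGWINCH, on_resize)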
fa57fa679b575ce871af3c4769828f400e6ab28b
bump version 2.1.3 for issue #70
industrydive/premailer,BlokeOne/premailer-1,lavr/premailer,peterbe/premailer,peterbe/premailer,lavr/premailer,industrydive/premailer,graingert/premailer,ionelmc/premailer,graingert/premailer,ionelmc/premailer,BlokeOne/premailer-1,peterbe/premailer,kengruven/premailer,kengruven/premailer
premailer/__init__.py
premailer/__init__.py
from premailer import Premailer, transform __version__ = '2.1.3'
from premailer import Premailer, transform __version__ = '2.1.2'
bsd-3-clause
Python
0782ba56218e825dea5b76cbf030a522932bcfd6
Remove unnecessary (and debatable) comment.
nathania/networkx,dhimmel/networkx,RMKD/networkx,sharifulgeo/networkx,chrisnatali/networkx,goulu/networkx,farhaanbukhsh/networkx,jni/networkx,jakevdp/networkx,ionanrozenfeld/networkx,debsankha/networkx,nathania/networkx,tmilicic/networkx,kernc/networkx,ltiao/networkx,michaelpacer/networkx,bzero/networkx,dhimmel/networkx,RMKD/networkx,kernc/networkx,bzero/networkx,aureooms/networkx,ionanrozenfeld/networkx,sharifulgeo/networkx,yashu-seth/networkx,bzero/networkx,OrkoHunter/networkx,blublud/networkx,harlowja/networkx,ghdk/networkx,dmoliveira/networkx,dhimmel/networkx,beni55/networkx,sharifulgeo/networkx,blublud/networkx,RMKD/networkx,farhaanbukhsh/networkx,nathania/networkx,jni/networkx,farhaanbukhsh/networkx,aureooms/networkx,jfinkels/networkx,Sixshaman/networkx,harlowja/networkx,debsankha/networkx,blublud/networkx,jakevdp/networkx,andnovar/networkx,dmoliveira/networkx,wasade/networkx,debsankha/networkx,SanketDG/networkx,chrisnatali/networkx,NvanAdrichem/networkx,chrisnatali/networkx,ionanrozenfeld/networkx,dmoliveira/networkx,kernc/networkx,JamesClough/networkx,jcurbelo/networkx,ghdk/networkx,jakevdp/networkx,jni/networkx,aureooms/networkx,cmtm/networkx,harlowja/networkx,ghdk/networkx
networkx/classes/ordered.py
networkx/classes/ordered.py
""" OrderedDict variants of the default base classes. """ try: # Python 2.7+ from collections import OrderedDict except ImportError: # Oython 2.6 try: from ordereddict import OrderedDict except ImportError: OrderedDict = None from .graph import Graph from .multigraph import MultiGraph from .digraph import DiGraph from .multidigraph import MultiDiGraph __all__ = [] if OrderedDict is not None: __all__.extend([ 'OrderedGraph', 'OrderedDiGraph', 'OrderedMultiGraph', 'OrderedMultiDiGraph' ]) class OrderedGraph(Graph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict class OrderedDiGraph(DiGraph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict class OrderedMultiGraph(MultiGraph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict class OrderedMultiDiGraph(MultiDiGraph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict
""" OrderedDict variants of the default base classes. These classes are especially useful for doctests and unit tests. """ try: # Python 2.7+ from collections import OrderedDict except ImportError: # Oython 2.6 try: from ordereddict import OrderedDict except ImportError: OrderedDict = None from .graph import Graph from .multigraph import MultiGraph from .digraph import DiGraph from .multidigraph import MultiDiGraph __all__ = [] if OrderedDict is not None: __all__.extend([ 'OrderedGraph', 'OrderedDiGraph', 'OrderedMultiGraph', 'OrderedMultiDiGraph' ]) class OrderedGraph(Graph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict class OrderedDiGraph(DiGraph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict class OrderedMultiGraph(MultiGraph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict class OrderedMultiDiGraph(MultiDiGraph): node_dict_factory = OrderedDict adjlist_dict_factory = OrderedDict edge_key_dict_factory = OrderedDict edge_attr_dict_factory = OrderedDict
bsd-3-clause
Python
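Because Graph stores nodes and adjacency in pluggable dict factories, swapping in OrderedDict is all it takes to make iteration follow insertion order. A quick check, assuming a networkx version that still ships these Ordered* classes:

import networkx as nx

G = nx.OrderedGraph()
G.add_nodes_from(['c', 'a', 'b'])
assert list(G.nodes()) == ['c', 'a', 'b']  # insertion order preserved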
8f78c04f6e2f21deb02a285fc78c5da907f0287b
Delete extra print()
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
nn/file/cnn_dailymail_rc.py
nn/file/cnn_dailymail_rc.py
import functools import numpy as np import tensorflow as tf from .. import flags from ..flags import FLAGS class _RcFileReader: def __init__(self): # 0 -> null, 1 -> unknown self._word_indices = flags.word_indices def read(self, filename_queue): key, value = tf.WholeFileReader().read(filename_queue) return (key, *self._read_record(value)) def _read_record(self, string): def read_record(string): context, question, answer = string.split("\n\n")[1:4] context = self._map_words_to_indices(context) question = self._map_words_to_indices(question) return (context, question, self._map_word_to_index(answer), len(context), len(question)) context, question, answer, context_length, question_length = tf.py_func( read_record, [string], [tf.int32, tf.int32, tf.int32, tf.int32, tf.int32], name="read_rc_data") context_length.set_shape([]) question_length.set_shape([]) return (tf.reshape(context, [context_length]), tf.reshape(question, [question_length]), tf.reshape(answer, [])) def _map_word_to_index(self, word): return self._word_indices[word] if word in self._word_indices else 1 # unknown def _map_words_to_indices(self, document): return np.array([self._map_word_to_index(word) for word in document.split()], dtype=np.int32) def read_files(filename_queue): tensors = _RcFileReader().read(filename_queue) return tf.contrib.training.bucket_by_sequence_length( tf.shape(tensors[1])[0], list(tensors), FLAGS.batch_size, [int(num) for num in FLAGS.length_boundaries.split(",")], num_threads=FLAGS.num_threads_per_queue, capacity=FLAGS.queue_capacity, dynamic_pad=True, allow_smaller_final_batch=True)[1]
import functools import numpy as np import tensorflow as tf from .. import flags from ..flags import FLAGS class _RcFileReader: def __init__(self): # 0 -> null, 1 -> unknown self._word_indices = flags.word_indices def read(self, filename_queue): key, value = tf.WholeFileReader().read(filename_queue) return (key, *self._read_record(value)) def _read_record(self, string): def read_record(string): context, question, answer = string.split("\n\n")[1:4] context = self._map_words_to_indices(context) question = self._map_words_to_indices(question) return (context, question, self._map_word_to_index(answer), len(context), len(question)) context, question, answer, context_length, question_length = tf.py_func( read_record, [string], [tf.int32, tf.int32, tf.int32, tf.int32, tf.int32], name="read_rc_data") context_length.set_shape([]) question_length.set_shape([]) print(tf.reshape(context, [context_length]).get_shape()) return (tf.reshape(context, [context_length]), tf.reshape(question, [question_length]), tf.reshape(answer, [])) def _map_word_to_index(self, word): return self._word_indices[word] if word in self._word_indices else 1 # unknown def _map_words_to_indices(self, document): return np.array([self._map_word_to_index(word) for word in document.split()], dtype=np.int32) def read_files(filename_queue): tensors = _RcFileReader().read(filename_queue) return tf.contrib.training.bucket_by_sequence_length( tf.shape(tensors[1])[0], list(tensors), FLAGS.batch_size, [int(num) for num in FLAGS.length_boundaries.split(",")], num_threads=FLAGS.num_threads_per_queue, capacity=FLAGS.queue_capacity, dynamic_pad=True, allow_smaller_final_batch=True)[1]
unlicense
Python
16bf079d1b139db08988fdb3cc1ff818cecfc12e
Add ModelTranslationAdminMixin.
ulule/django-linguist
linguist/admin.py
linguist/admin.py
# -*- coding: utf-8 -*- from django.contrib import admin from .models import Translation class ModelTranslationAdminMixin(object): """ Mixin for model admin classes. """ pass class TranslationAdmin(admin.ModelAdmin): """ Translation model admin options. """ pass admin.site.register(Translation, TranslationAdmin)
# -*- coding: utf-8 -*- from django.contrib import admin from .models import Translation class TranslationAdmin(admin.ModelAdmin): pass admin.site.register(Translation, TranslationAdmin)
mit
Python
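A mixin like this is meant to be combined with ModelAdmin in the host project rather than used directly. A hypothetical usage sketch (the Post model is an assumption, not part of django-linguist):

from django.contrib import admin
from linguist.admin import ModelTranslationAdminMixin

from .models import Post  # hypothetical translatable model

class PostAdmin(ModelTranslationAdminMixin, admin.ModelAdmin):
    list_display = ('title',)

admin.site.register(Post, PostAdmin)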
7594763e5e6167c15fa7898b13283e875c13c099
Update BotPMError.py
DecoraterBot-devs/DecoraterBot
resources/Dependencies/DecoraterBotCore/BotPMError.py
resources/Dependencies/DecoraterBotCore/BotPMError.py
# coding=utf-8 """ DecoraterBotCore ~~~~~~~~~~~~~~~~~~~ Core to DecoraterBot :copyright: (c) 2015-2017 Decorater :license: MIT, see LICENSE for more details. """ import discord __all__ = ['BotPMError'] class BotPMError: """ Class for PMing bot errors. """ def __init__(self, bot): self.bot = bot def construct_reply(self, message): """Constructs a bot reply.""" svr_name = message.channel.server.name cnl_name = message.channel.name msginfo = 'Missing the Send Message Permissions in the ' \ '{0} server on the {1} channel.' unabletosendmessageerror = msginfo.format(svr_name, cnl_name) return unabletosendmessageerror async def resolve_send_message_error(self, ctx): """ Resolves errors when sending messages. :param ctx: Message context. :return: Nothing. """ await self.resolve_send_message_error_old( ctx.message) async def resolve_send_message_error_old(self, message): """ Resolves errors when sending messages. :param message: Message. :return: Nothing. """ unabletosendmessageerror = self.construct_reply( message) try: await self.bot.send_message( message.author, content=unabletosendmessageerror) except discord.errors.Forbidden: return
# coding=utf-8 """ DecoraterBotCore ~~~~~~~~~~~~~~~~~~~ Core to DecoraterBot :copyright: (c) 2015-2017 Decorater :license: MIT, see LICENSE for more details. """ import discord __all__ = ['BotPMError'] class BotPMError: """ Class for PMing bot errors. """ def __init__(self, bot): self.bot = bot def construct_reply(self, message): """Constructs a bot reply.""" svr_name = message.channel.server.name cnl_name = message.channel.name msginfo = 'Missing the Send Message Permissions in the ' \ '{0} server on the {1} channel.' unabletosendmessageerror = msginfo.format(svr_name, cnl_name) return unabletosendmessageerror async def resolve_send_message_error(self, ctx): """ Resolves errors when sending messages. :param ctx: Message context. :return: Nothing. """ await self.resolve_send_message_error_old( ctx.message) async def resolve_send_message_error_old(self, message): """ Resolves errors when sending messages. :param message: Message. :return: Nothing. """ unabletosendmessageerror = self.construct_reply( message) try: await bot.send_message( message.author, content=unabletosendmessageerror) except discord.errors.Forbidden: return
mit
Python
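The one-line fix here replaces a bare bot reference, which raises NameError at call time, with the self.bot attribute bound in __init__. The failure mode in miniature:

class Notifier:
    def __init__(self, bot):
        self.bot = bot

    def send_broken(self, text):
        return bot.send(text)  # NameError: 'bot' is only bound on the instance

    def send_fixed(self, text):
        return self.bot.send(text)  # the commit's fix, in miniature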
cb7f6efbbbe640a2c360f7dc93cb2bc87b2e0ab2
fix example
RaymondKlass/entity-extract
entity_extract/examples/pos_extraction.py
entity_extract/examples/pos_extraction.py
#from entity_extract.extractor.extractors import PosExtractor from entity_extract.extractor.utilities import SentSplit, Tokenizer from entity_extract.extractor.extractors import PosExtractor from entity_extract.extractor.pos_tagger import PosTagger # Initialize Services sentSplitter = SentSplit() tokenizer = Tokenizer() tagger = PosTagger() #p = PosExtractor() sents = sentSplitter.split('This is a sentence about the pie in the sky. If would be interesting. If only there was') for sent in sents: tokens = tokenizer.tokenize(sent) tags = tagger.tag(tokens) print tags
#from entity_extract.extractor.extractors import PosExtractor from entity_extract.extractor.utilities import SentSplit, Tokenizer from entity_extract.extractor.extractors import PosExtractor from entity_extract.extractor.pos_tagger import PosTagger #p = PosExtractor() sents = p.SentPlit('This is a sentence about the pie in the sky. If would be interesting. If only there was') for sent in sents: tokens = Tokenizer.tokenize(sent) tags = PosTagger(tokens) print tags
mit
Python
2e6c80717099fb6c6ca59d9d6193807b1aabfa8b
Update docstring
e4r7hbug/git_update
git_update/actions.py
git_update/actions.py
"""Git repo actions.""" import logging import os import pathlib import click from git import InvalidGitRepositoryError, Repo from git.exc import GitCommandError LOG = logging.getLogger(__name__) def crawl(path): """Crawl the path for possible Git directories. Args: path (str): Original path to crawl. """ main_dir = pathlib.Path(path) if not main_dir.is_dir(): main_dir = main_dir.parent main_dir = main_dir.resolve() LOG.info('Finding directories in %s', main_dir) dir_list = [directory for directory in main_dir.iterdir() if directory.is_dir() and directory.parts[-1] != '.git'] LOG.debug('List of directories: %s', dir_list) for directory in dir_list: update_repo(os.path.join(main_dir, directory)) def check_changes(current, fetch_info_list, branch_list): """Check for changes in local branches and remote. Args: current: Dict(reference: commit) from before `git pull` operation. fetch_info_list: List of remote references from `git pull`. branch_list: List of branches in repository. """ log = logging.getLogger(__name__) for fetch_info in fetch_info_list: log.debug('Checking for change in %s', fetch_info.name) try: if current[fetch_info.ref] != fetch_info.commit: log.info('%s has updates, %s..%s', fetch_info.name, current[fetch_info.ref], fetch_info.commit) except KeyError: log.info('New reference %s', fetch_info.name) for branch in branch_list: log.debug('Checking for change in %s', branch.name) if current[branch] != branch.commit: log.info('%s updated, %s..%s', branch.name, current[branch], branch.commit) return True def update_repo(directory): """Update a repository. Returns: False if bad repository. True if everything worked. """ log = logging.getLogger(__name__) try: repo = Repo(directory) current = {ref: ref.commit for ref in repo.refs} click.secho('Updating {0}'.format(repo.git_dir), fg='blue') remote = repo.remote() fetch_info_list = remote.pull() except InvalidGitRepositoryError: log.warning('%s is not a valid repository.', directory) return False except ValueError: log.warning('Check remotes for %s: %s', directory, repo.remotes) return False except GitCommandError as error: log.fatal('Pull failed. %s', error) return False check_changes(current, fetch_info_list, repo.branches) return True
"""Git repo actions.""" import logging import os import pathlib import click from git import InvalidGitRepositoryError, Repo from git.exc import GitCommandError LOG = logging.getLogger(__name__) def crawl(path): """Crawl the path for possible Git directories.""" main_dir = pathlib.Path(path) if not main_dir.is_dir(): main_dir = main_dir.parent main_dir = main_dir.resolve() LOG.info('Finding directories in %s', main_dir) dir_list = [directory for directory in main_dir.iterdir() if directory.is_dir() and directory.parts[-1] != '.git'] LOG.debug('List of directories: %s', dir_list) for directory in dir_list: update_repo(os.path.join(main_dir, directory)) def check_changes(current, fetch_info_list, branch_list): """Check for changes in local branches and remote. Args: current: Dict(reference: commit) from before `git pull` operation. fetch_info_list: List of remote references from `git pull`. branch_list: List of branches in repository. """ log = logging.getLogger(__name__) for fetch_info in fetch_info_list: log.debug('Checking for change in %s', fetch_info.name) try: if current[fetch_info.ref] != fetch_info.commit: log.info('%s has updates, %s..%s', fetch_info.name, current[fetch_info.ref], fetch_info.commit) except KeyError: log.info('New reference %s', fetch_info.name) for branch in branch_list: log.debug('Checking for change in %s', branch.name) if current[branch] != branch.commit: log.info('%s updated, %s..%s', branch.name, current[branch], branch.commit) return True def update_repo(directory): """Update a repository. Returns: False if bad repository. True if everything worked. """ log = logging.getLogger(__name__) try: repo = Repo(directory) current = {ref: ref.commit for ref in repo.refs} click.secho('Updating {0}'.format(repo.git_dir), fg='blue') remote = repo.remote() fetch_info_list = remote.pull() except InvalidGitRepositoryError: log.warning('%s is not a valid repository.', directory) return False except ValueError: log.warning('Check remotes for %s: %s', directory, repo.remotes) return False except GitCommandError as error: log.fatal('Pull failed. %s', error) return False check_changes(current, fetch_info_list, repo.branches) return True
mit
Python
ef1f303072307f259e8555e0148c29677b4f7d6f
Fix approve permissions typing
facebook/FBSimulatorControl,facebook/FBSimulatorControl,facebook/FBSimulatorControl,facebook/FBSimulatorControl,facebook/FBSimulatorControl
idb/ipc/approve.py
idb/ipc/approve.py
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. from typing import Set, Dict, Any from idb.grpc.types import CompanionClient from idb.grpc.idb_pb2 import ApproveRequest MAP: Dict[str, Any] = { "photos": ApproveRequest.PHOTOS, "camera": ApproveRequest.CAMERA, "contacts": ApproveRequest.CONTACTS, } async def client( client: CompanionClient, bundle_id: str, permissions: Set[str] ) -> None: await client.stub.approve( ApproveRequest( bundle_id=bundle_id, permissions=[MAP[permission] for permission in permissions], ) )
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. from typing import Set, Dict # noqa F401 from idb.grpc.types import CompanionClient from idb.grpc.idb_pb2 import ApproveRequest MAP = { # type: Dict[str, ApproveRequest.Permission] "photos": ApproveRequest.PHOTOS, "camera": ApproveRequest.CAMERA, "contacts": ApproveRequest.CONTACTS, } async def client( client: CompanionClient, bundle_id: str, permissions: Set[str] ) -> None: print(f"Sending {[MAP[permission] for permission in permissions]}") await client.stub.approve( ApproveRequest( bundle_id=bundle_id, permissions=[MAP[permission] for permission in permissions], ) )
mit
Python
7cb9703b1af4138e8f1a036245125d723add55a3
Fix error handling to return sensible HTTP error codes.
4bic/grano,CodeForAfrica/grano,4bic-attic/grano,granoproject/grano
grano/views/__init__.py
grano/views/__init__.py
from colander import Invalid from flask import request from werkzeug.exceptions import HTTPException from grano.core import app from grano.lib.serialisation import jsonify from grano.views.base_api import blueprint as base_api from grano.views.entities_api import blueprint as entities_api from grano.views.relations_api import blueprint as relations_api from grano.views.schemata_api import blueprint as schemata_api from grano.views.sessions_api import blueprint as sessions_api from grano.views.projects_api import blueprint as projects_api from grano.views.accounts_api import blueprint as accounts_api from grano.views.files_api import blueprint as files_api from grano.views.imports_api import blueprint as imports_api from grano.views.pipelines_api import blueprint as pipelines_api from grano.views.log_entries_api import blueprint as log_entries_api from grano.views.permissions_api import blueprint as permissions_api from grano.views.auth import check_auth @app.errorhandler(401) @app.errorhandler(403) @app.errorhandler(404) @app.errorhandler(410) @app.errorhandler(500) def handle_exceptions(exc): if isinstance(exc, HTTPException): message = exc.get_description(request.environ) message = message.replace('<p>', '').replace('</p>', '') body = { 'status': exc.code, 'name': exc.name, 'message': message } headers = exc.get_headers(request.environ) else: body = { 'status': 500, 'name': exc.__class__.__name__, 'message': unicode(exc) } headers = {} return jsonify(body, status=body.get('status'), headers=headers) @app.errorhandler(Invalid) def handle_invalid(exc): body = { 'status': 400, 'name': 'Invalid Data', 'message': unicode(exc), 'errors': exc.asdict() } return jsonify(body, status=400) app.register_blueprint(base_api) app.register_blueprint(entities_api) app.register_blueprint(relations_api) app.register_blueprint(schemata_api) app.register_blueprint(sessions_api) app.register_blueprint(projects_api) app.register_blueprint(accounts_api) app.register_blueprint(files_api) app.register_blueprint(permissions_api) app.register_blueprint(imports_api) app.register_blueprint(pipelines_api) app.register_blueprint(log_entries_api)
from colander import Invalid from flask import request from grano.core import app from grano.lib.serialisation import jsonify from grano.views.base_api import blueprint as base_api from grano.views.entities_api import blueprint as entities_api from grano.views.relations_api import blueprint as relations_api from grano.views.schemata_api import blueprint as schemata_api from grano.views.sessions_api import blueprint as sessions_api from grano.views.projects_api import blueprint as projects_api from grano.views.accounts_api import blueprint as accounts_api from grano.views.files_api import blueprint as files_api from grano.views.imports_api import blueprint as imports_api from grano.views.pipelines_api import blueprint as pipelines_api from grano.views.log_entries_api import blueprint as log_entries_api from grano.views.permissions_api import blueprint as permissions_api from grano.views.auth import check_auth @app.errorhandler(401) @app.errorhandler(403) @app.errorhandler(404) @app.errorhandler(410) @app.errorhandler(500) def handle_exceptions(exc): if not hasattr(exc, 'get_description'): message = exc.get_description(request.environ) message = message.replace('<p>', '').replace('</p>', '') body = { 'status': exc.code, 'name': exc.name, 'message': message } headers = exc.get_headers(request.environ) else: body = { 'status': 500, 'name': exc.__class__.__name__, 'message': unicode(exc) } headers = {} return jsonify(body, status=exc.code, headers=headers) @app.errorhandler(Invalid) def handle_invalid(exc): body = { 'status': 400, 'name': 'Invalid Data', 'message': unicode(exc), 'errors': exc.asdict() } return jsonify(body, status=400) app.register_blueprint(base_api) app.register_blueprint(entities_api) app.register_blueprint(relations_api) app.register_blueprint(schemata_api) app.register_blueprint(sessions_api) app.register_blueprint(projects_api) app.register_blueprint(accounts_api) app.register_blueprint(files_api) app.register_blueprint(permissions_api) app.register_blueprint(imports_api) app.register_blueprint(pipelines_api) app.register_blueprint(log_entries_api)
mit
Python
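The old handler's condition was inverted: `if not hasattr(exc, 'get_description')` sent exactly the exceptions lacking get_description down the branch that calls it, and the final jsonify read exc.code even when none existed. The fix tests isinstance against HTTPException and takes the status from the body it just built. The same shape in a minimal Flask handler:

from flask import Flask, jsonify
from werkzeug.exceptions import HTTPException

app = Flask(__name__)

@app.errorhandler(Exception)
def handle(exc):
    if isinstance(exc, HTTPException):  # positive check, not an inverted hasattr
        status, name = exc.code, exc.name
    else:
        status, name = 500, exc.__class__.__name__
    return jsonify(status=status, name=name), status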