# ======================================================================
# iteloo/the_island :: backend/zombie.py  (license: mit)
# ======================================================================
from backend import message
from backend import helpers
from backend import player
import collections.abc
class Zombie(message.MessageDelegate):
_currentId = 0
def __init__(self, *args, **kwargs):
# call super
super().__init__(*args, **kwargs)
# general vars
self.id = self._currentId
Zombie._currentId += 1
def __str__(self):
return "Zombie%d" % self.id
### server-side methods ###
@staticmethod
def echo(callback, *args, **kwargs):
"""Echo args back to callback
For testing purposes only.
"""
callback(*args, **kwargs)
@staticmethod
    def server_info(callback: collections.abc.Callable) -> None:
"""Return information about the current running version of the server"""
from backend.server import RUN_DATE, VERSION
callback(start_date=RUN_DATE, version=VERSION)
def name_entered(self, name: str) -> None:
from backend import game_controller
# attempt to identify player object
p = game_controller.universal_controller.player_with_id(name)
# todo: handle case where player with same id is actually playing
# case 3: if new player
if not p:
# make new player object
p = player.Player(name)
# register player with game controller
game_controller.universal_controller.register_player(p)
# replace zombie by player object, this will properly set the handler on player
self._message_handler.delegate = p
# this zombie should now be living a free life
# case 1: if existing player that is already in game
if p and p.current_game:
p.current_game.unstash_player(p)
# case 2: if existing player not in a game
# case 3: if new player
elif (not p) or (p and not p.current_game):
# display main menu
p.display_main_menu()
### message delegate methods ###
def on_open(self):
pass
def on_close(self):
        pass
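
# Illustrative sketch (not part of the original module): a connection's message
# handler starts out with a Zombie delegate and is upgraded to a Player once a
# name arrives. `FakeHandler` is a hypothetical stand-in for the real handler,
# and the constructor arguments are assumptions.
#
#   handler = FakeHandler()
#   handler.delegate = Zombie(handler)
#   handler.delegate.name_entered("alice")  # swaps the delegate to a Player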
# ======================================================================
# feeilk1991/promenad :: elections/2008/primary/states/super/makemini.py
# (license: apache-2.0)
# ======================================================================
#!/usr/bin/env python
# makemini.py - mini results generator for Super Tuesday
import csv
import os
import re
import time
import urllib
import states
from template import *
import private
from candidates import candidates
parties = {
'dem': { 'name':'Democrats' },
'gop': { 'name':'Republicans' }
}
def fetchData():
    urllib.urlretrieve( private.csvFeedUrl, 'miniresults/text_output_for_mapping.csv' )
def readVotes():
print 'Processing vote data'
#reader = csv.reader( open( 'test.csv', 'rb' ) )
reader = csv.reader( open( 'miniresults/text_output_for_mapping.csv', 'rb' ) )
header = []
while header == []:
header = reader.next()
#print header
for row in reader:
if len(row) < 2: continue
if row[1] != '*': continue
setData( header, row )
def setData( header, row ):
state = states.byAbbr[ row[0] ]
setVotes( state, header, row )
def getPrecincts( row ):
return {
'reporting': int(row[3]),
'total': int(row[2])
}
def setVotes( entity, header, row ):
for col in xrange( 4, len(header) ):
if col >= len(row) or row[col] == '': continue
name = header[col]
if name == 'guiliani': name = 'giuliani'
candidate = candidates['byname'][name]
party = candidate['party']
p = entity['parties'][party]
if 'precincts' not in p: p['precincts'] = getPrecincts( row )
if 'votes' not in p: p['votes'] = {}
p['votes'][name] = int(row[col])
def linkParty( party, match ):
name = parties[party]['name']
if party == match:
return T('''
<span style="font-weight:bold;">
%(name)s
</span>
''', { 'name': name } )
else:
return T('''
<a href="#" onclick="refresh('%(party)s'); return false;">
%(name)s
</a>
''', { 'name': name, 'party': party } )
def makeMini():
    makeMiniVersion( 'short', 'Election Coverage', 'CA NY IL MA' )
    makeMiniVersion( 'long', 'Results', 'AL AK AZ AR CA CO CT DE GA ID IL KS MA MN MO MT NJ NM NY ND OK TN UT WV' )
    makeMiniVersion( 'map', 'Results', 'AL AK AZ AR CA CO CT DE GA ID IL KS MA MN MO MT NJ NM NY ND OK TN UT WV' )
def makeMiniVersion( kind, title, statenames ):
writeMiniParty( kind, title, statenames, 'dem', 'clinton obama' )
    writeMiniParty( kind, title, statenames, 'gop', 'huckabee mccain paul romney' )
def writeMiniParty( kind, title, statenames, partyname, names ):
text = makeMiniParty( kind, title, statenames, partyname, names )
write( 'miniresults/miniresults-%s-%s.html' %( kind, partyname ), text )
def makeMiniParty( kind, title, statenames, partyname, names ):
statelist = statenames.split()
names = names.split()
style = 'font-weight:normal; background-color:#E0E0E0;'
head = [ '<th style="text-align:left; %s">State</th>' % style ]
for name in names:
head.append( T('''
<th style="%(style)s">
%(name)s
</th>
''', {
'name': candidates['byname'][name]['lastName'],
'style': style
} ) )
rows = []
for stateabbr in statelist:
if stateabbr == 'WV' and partyname == 'dem': continue
state = states.byAbbr[stateabbr]
cols = []
winner = { 'name': None, 'votes': 0 }
party = state['parties'][partyname]
if 'votes' not in party: continue
votes = party['votes']
for name in votes:
if name == 'total-d' or name == 'total-r':
total = party['total'] = votes[name]
else:
vote = votes[name]
if vote > winner['votes']:
winner = { 'name': name, 'votes': vote }
precincts = party['precincts']
for name in names:
win = check = ''
if name == winner['name']:
if partyname == 'dem':
win = 'color:white; background-color:#3366CC;'
else:
win = 'color:white; background-color:#AA0031;'
if precincts['reporting'] == precincts['total']:
check = '<img src="http://googlemaps.github.io/js-v2-samples/elections/2008/images/checkmark.gif" style="width:7px; height:6px; margin:0 3px 2px 0" />'
if name in votes and total > 0:
percent = '%d%%' % percentage( float(votes[name]) / float(total) )
else:
percent = '--'
cols.append( T('''
<td style="width:%(width)s%%; text-align:center; %(win)s">
<div>
%(check)s
%(percent)s
</div>
</td>
''', {
'width': 80 / len(names),
'win': win,
'check': check,
'percent': percent
}) )
reporting = percentage( float(precincts['reporting']) / float(precincts['total']) )
rows.append( T('''
<tr style="background-color:#F1EFEF;">
<td style="width:20%%;">
<div>
<span>
%(state)s
</span>
<span style="font-size:11px; color:#666666;">
%(reporting)s%%
</span>
</div>
</td>
%(cols)s
</tr>
''', {
'state': stateabbr,
'reporting': reporting,
'cols': ''.join(cols)
}) )
if kind == 'short':
details = S('''
<a href="http://news.google.com/?ned=us&topic=el" target="_top" style="color:green;">
Full election coverage and results »
</a>
''')
else:
details = ''
if kind == 'map':
follow = '<span id="spanFollow" style="display:none;"><input type="checkbox" checked="checked" id="chkFollow" /><label for="chkFollow">Follow</label></span>'
viewmap = ''
else:
follow = ''
viewmap = S('''
<a href="http://maps.google.com/decision2008" target="_blank" style="color:green;">
View on a map »
</a>
''')
return T('''
<div style="font-family:arial,sans-serif; font-size:13px;">
<div style="margin-bottom:4px;">
<table style="width:100%%;">
<tbody>
<tr style="vertical-align: baseline;">
<td>
<div style="font-size:16px; font-weight:bold;">
%(title)s
</div>
</td>
<td style="text-align:center;">
<div style="font-size:13px;">
%(follow)s
</div>
</td>
<td style="text-align:right;">
<div style="font-size:13px;">
%(dem)s | %(gop)s
</div>
</td>
</tr>
</tbody>
</table>
</div>
<table style="width:100%%; font-size:13px;">
<thead>
%(head)s
</thead>
<tbody>
%(rows)s
</tbody>
</table>
<div>
%(details)s
%(viewmap)s
</div>
</div>
''', {
'title': title + ': ',
'follow': follow,
'dem': linkParty( 'dem', partyname ),
'gop': linkParty( 'gop', partyname ),
'head': ''.join(head),
'rows': ''.join(rows),
'details': details,
'viewmap': viewmap
})
def percentage( n ):
pct = int( round( 100.0 * float(n) ) )
if pct == 100 and n < 1: pct = 99
return pct
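
# Worked examples for percentage() (illustrative, not in the original file):
#   percentage(0.25)  -> 25
#   percentage(0.999) -> 99    # rounds to 100, but is capped at 99 while n < 1
#   percentage(1.0)   -> 100   # the cap only applies when n < 1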
def write( name, text ):
#print 'Writing ' + name
f = open( name, 'w' )
f.write( text )
f.close()
def update():
print 'Retrieving data...'
fetchData()
print 'Parsing data...'
readVotes()
print 'Creating Miniresults HTML...'
makeMini()
print 'Checking in Miniresults HTML...'
os.system( 'svn ci -m "Miniresults update" miniresults/*' )
print 'Done!'
def main():
while 1:
update()
        print 'Waiting 10 minutes...'
time.sleep( 600 )
if __name__ == "__main__":
main()
# ======================================================================
# spacelis/tcrawl :: tcrawl/api/pic_service_api.py  (license: mit)
# ======================================================================
#!python
# -*- coding: utf-8 -*-
"""File: pic_service_api.py
Description:
Online picture services
History:
0.1.2 support tweetpoto.com (plixi.com)
0.1.0 The first version.
"""
__version__ = '0.1.0'
__author__ = 'SpaceLis'
from tcrawl.api import api_call, urlsplit
def get_twit_pic(**kargs):
"""Retrieve the picture from TwitPic"""
twitpage = api_call(*urlsplit(kargs['url'])).read()
anchor = '<img class="photo" id="photo-display" src="'
start = twitpage.index(anchor) + len(anchor)
end = twitpage.index('"', start)
imgurl = twitpage[start:end]
return api_call(*urlsplit(imgurl)).read()
def get_yfrog_pic(**kargs):
"""Retrieve the picture from YFrog
"""
host, path, secure = urlsplit(kargs['url'])
pic = api_call(host, path +':iphone', secure).read()
return pic
def get_twitgoo_pic(**kargs):
"""Retrieve the picture from TwitGoo
"""
host, path, secure = urlsplit(kargs['url'])
pic = api_call(host, path +'/img', secure).read()
return pic
def get_tweetphoto_pic(**kargs):
"""Retrieve the picture from TweetPhoto or Plixi.com
"""
pic_page = api_call(*urlsplit(kargs['url'])).read()
anchor = '" alt="" id="photo"'
end = pic_page.find(anchor)
start = pic_page.rfind('"', 0, end) + 1
imgurl = pic_page[start:end]
return api_call(*urlsplit(imgurl)).read()
# a list of picture services supported by this crawling module
_SERVICEPROVIDERS = {'twitpic.com':get_twit_pic, \
'yfrog.com':get_yfrog_pic, \
'tweetphoto.com': get_tweetphoto_pic, \
'plixi.com': get_tweetphoto_pic}
def get_pic(**kargs):
""" Retrieving Pictures from the right site
"""
urlpart = kargs['url'].split('/')
pic_api = _SERVICEPROVIDERS[urlpart[2]]
return pic_api(**kargs)
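
# Illustrative sketch (not part of the original module): adding support for
# another picture host only requires a fetch function keyed by host name.
# `get_example_pic` and `example.com` are hypothetical placeholders.
#
#   def get_example_pic(**kargs):
#       return api_call(*urlsplit(kargs['url'])).read()
#   _SERVICEPROVIDERS['example.com'] = get_example_pic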
def test():
"""A test
"""
fout = open('test.jpg', 'wb')
#print >> fout, get_twitgoo_pic(url = 'http://twitgoo.com/216kxf')
print >> fout, get_tweetphoto_pic(url = 'http://tweetphoto.com/36367177')
if __name__ == '__main__':
test()
# ======================================================================
# wavefrontHQ/python-client :: wavefront_api_client/api/saved_search_api.py
# (license: apache-2.0)
# ======================================================================
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class SavedSearchApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_saved_search(self, **kwargs): # noqa: E501
"""Create a saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_saved_search(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_saved_search_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_saved_search_with_http_info(**kwargs) # noqa: E501
return data
def create_saved_search_with_http_info(self, **kwargs): # noqa: E501
"""Create a saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_saved_search_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_saved_search" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_saved_search(self, id, **kwargs): # noqa: E501
"""Delete a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_saved_search_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_saved_search_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_saved_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_saved_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_entity_type_saved_searches(self, entitytype, **kwargs): # noqa: E501
"""Get all saved searches for a specific entity type for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_entity_type_saved_searches(entitytype, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entitytype: (required)
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_entity_type_saved_searches_with_http_info(entitytype, **kwargs) # noqa: E501
else:
(data) = self.get_all_entity_type_saved_searches_with_http_info(entitytype, **kwargs) # noqa: E501
return data
def get_all_entity_type_saved_searches_with_http_info(self, entitytype, **kwargs): # noqa: E501
"""Get all saved searches for a specific entity type for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_entity_type_saved_searches_with_http_info(entitytype, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entitytype: (required)
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entitytype', 'offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_entity_type_saved_searches" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entitytype' is set
if ('entitytype' not in params or
params['entitytype'] is None):
raise ValueError("Missing the required parameter `entitytype` when calling `get_all_entity_type_saved_searches`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entitytype' in params:
path_params['entitytype'] = params['entitytype'] # noqa: E501
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/type/{entitytype}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_saved_searches(self, **kwargs): # noqa: E501
"""Get all saved searches for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_saved_searches(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_saved_searches_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_saved_searches_with_http_info(**kwargs) # noqa: E501
return data
def get_all_saved_searches_with_http_info(self, **kwargs): # noqa: E501
"""Get all saved searches for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_saved_searches_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_saved_searches" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_saved_search(self, id, **kwargs): # noqa: E501
"""Get a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_saved_search_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
def get_saved_search_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_saved_search_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_saved_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_saved_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_saved_search(self, id, **kwargs): # noqa: E501
"""Update a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_saved_search_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
def update_saved_search_with_http_info(self, id, **kwargs): # noqa: E501
"""Update a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_saved_search_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SavedSearch body: Example Body: <pre>{ \"query\": { \"foo\": \"{\\\"searchTerms\\\":[{\\\"type\\\":\\\"freetext\\\",\\\"value\\\":\\\"foo\\\"}]}\" }, \"entityType\": \"DASHBOARD\" }</pre>
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_saved_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_saved_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/savedsearch/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSavedSearch', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
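
# Illustrative usage sketch (not part of the generated file). It assumes the
# generated Configuration/ApiClient classes from this package and that the
# 'api_key' security scheme maps to Wavefront's X-AUTH-TOKEN header; adjust
# host and token for your instance.
#
#   import wavefront_api_client
#   config = wavefront_api_client.Configuration()
#   config.host = 'https://YOUR_INSTANCE.wavefront.com'
#   config.api_key['X-AUTH-TOKEN'] = 'YOUR_API_TOKEN'
#   api = wavefront_api_client.SavedSearchApi(wavefront_api_client.ApiClient(config))
#   print(api.get_all_saved_searches(offset=0, limit=100))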
# ======================================================================
# PaulPetring/FritzBox :: FritzBox.py  (license: gpl-3.0)
# ======================================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author Paul Petring
from pprint import pprint
from lxml import etree
import hashlib
import requests
import json
from lxml import html
import datetime
from datetime import datetime, timedelta
import pytz
class FritzBox:
"""
    allows interacting with FritzBox OS 6.30 - 6.60 using HTTP requests
"""
_url="" #URL of the FritzBox
_username="" #username of the FritzBox
_password="" #password of the FritzBox
_sid="" #current session identifier
_last_calls=[] #calls buffer
_last_phonebook_entries=[] #devices buffer
_last_devices=[] #devices buffer
_request_session = requests.Session() #request session object
_request_headers = { #default headers, feel free to modify these
'Referer': 'http://fritz.box/',
'Pragma' : 'no-cache',
'User-Agent': 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36',
'Accept-Language': 'en-US,en;q=0.8,de;q=0.6',
'Accept-Encoding': 'gzip, deflate, sdch',
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
'Connection': 'keep-alive',
'Cache-Control': 'no-cache',
}
def __init__(self,password="",url="http://fritz.box",username="",login=True):
"""
        @param password of your FritzBox
        @param username of your FritzBox (optional, defaults to empty)
@param url of your fritzbox (defaults to "http://fritz.box")
"""
self._url = url
self._password = password
self._username = username
if(login):
self._sid = self.login()
if(self._url!="http://fritz.box"):
self._request_headers["Referer"] = self._url + "/"
def getSID(self):
""" returnes current SID status and challenge
required for logging into the FritzBox
"""
status_url = self._url + "/login_sid.lua"
r = self._request_session.get(status_url)
"""Expected response:
<?xml version="1.0" encoding="utf-8"?>
<SessionInfo>
<SID>0000000000000000</SID>
<Challenge>443a0e07</Challenge>
<BlockTime>0</BlockTime>
<Rights></Rights>
</SessionInfo>
"""
#Parsing XML
parser = etree.XMLParser(recover=True)
root = etree.fromstring(str(r.content), parser=parser)
ret_sid=root.find('SID').text
challenge=root.find('Challenge').text
return (ret_sid,challenge)
def login(self):
"""
        performs a login by fetching the FritzBox session challenge,
        hashing it together with the provided password, and
        returning the newly obtained session identifier
"""
sid_status = self.getSID()
self._sid = sid_status[0]
challenge = sid_status[1]
if(sid_status[0]=="0000000000000000"): # login procedure required
#following the login javascript of OS 6.30+
            # encoding it as utf-16LE does the trick to get the correct hash
cp_str = challenge + "-" + self._password
md5_str = hashlib.md5(cp_str.encode("utf-16LE")).hexdigest()
response = challenge + "-" + md5_str
#preparing POST statement
post = "response="+response+"&lp=&username="+self._username
data = dict(response=response, lp='',username=self._username)
r = self._request_session.post(self._url, data=data, allow_redirects=True,headers=self._request_headers)
#extracting SID from response (mostly in the last few lines of response)
self._sid = r.content[r.content.find('"sid":'):]
self._sid = self._sid[8:self._sid.find("});")-2]
return self._sid;
def get_devices(self,device_type="active"):
"""
returns a list of the current home network devices as FritzBoxDevice objects
@device_type defaults to active devices, else returns inactive (passive) devices of home network
"""
data = dict(xhr=1, sid=self._sid, lang='en',page='netDev',type="cleanup")
r = self._request_session.post(self._url+"/data.lua", data=data, allow_redirects=True,headers=self._request_headers)
#r.content should contain valid json string with active and passive devices
parsed=json.loads(r.content)
ret_list=[]
if(device_type=="active"):
for active in parsed["data"]["active"]:
ret_list.append(FritzBoxDevice(active))
else:
for passive in parsed["data"]["passive"]:
ret_list.append(FritzBoxDevice(passive))
return ret_list
def get_foncalls(self):
"""
returns a list of last 400(?) fon calls as FritzBoxCall objects
"""
data = dict(sid=self._sid)
r = self._request_session.post(self._url+"/fon_num/foncalls_list.lua?sid="+self._sid+"&csv=", data=data, allow_redirects=True,headers=self._request_headers)
#r.content contains semicolon separated values, surrounding head and tail line
ret_list = []
for line in r.content.split('\n')[2:-1]:
ret_list.append(FritzBoxCall(line))
_last_calls = ret_list
return ret_list
def get_fonbook(self):
""" #downloading it from the button ended in timeout
data = dict(sid=self._sid,PhonebookId=0,PhonebookExportName="Telefonbuch",PhonebookExport="")
print data
r = self._request_session.post(self._url+"/cgi-bin/firmwarecfg", data=data, allow_redirects=True,headers=self._request_headers)
print(r.content)
"""
# as a workaround we parse the delivered table
data = dict(sid=self._sid,xhr=1,page="bookLi",no_sidrenew="",lang="en")
r = self._request_session.post(self._url+"/data.lua", data=data, allow_redirects=True,headers=self._request_headers)
tree = html.fromstring(r.content.decode('utf-8'))
tree_names = tree.xpath('//table[@id="uiInnerTable"]/tr')
ret_list = []
for name_row in tree_names[:-1]: #removing the "no entries-entry
entry = FritzBoxFonBookEntry( )
entry.name = ''.join(name_row.xpath('td[@class="tname"]/text()')).encode('utf-8')
entry.numbers = name_row.xpath('td[@class="tnum"]/text()') #string list!
entry.type = ''.join(name_row.xpath('td[@class="ttype"]/text()')).encode('utf-8')
entry.code = ''.join(name_row.xpath('td[@class="tcode"]/text()')).encode('utf-8')
entry.vanity = ''.join(name_row.xpath('td[@class="tvanity"]/text()')).encode('utf-8')
entry.imp = ''.join(name_row.xpath('td[@class="timp"]/text()')).encode('utf-8')
ret_list.append(entry)
self._last_phonebook_entries = ret_list
return ret_list
class FritzBoxFonBookEntry:
name = ""
numbers = []
type = ""
vanity = ""
code = ""
imp = ""
def __init__(self, name="", numbers="",type="",code="",vanity="",imp=""):
self.name=name
self.numbers=numbers
self.type=type
self.code=code
self.vanity=vanity
self.imp=imp
def __repr__(self): #debug purposes
return str(self.name) #+ " " +''.join(str(e) for e in self.numbers)
def __str__(self): #debug purposes
return str(self.name) #+ " " +''.join(str(e) for e in self.numbers)
class FritzBoxCall:
call_type="" #int
date="" #dateTime
caller_name="" #name of the caller set by FritzBox fon book
caller_number="" #number of the caller as string, as it can be anonymous
fon="" #name of the called internal device
number="" #number of the called internal devices
duration="" #duration as python timespan
UID="" #unique identifier of the call
def __init__(self,csv_line):
parts=csv_line.split(';')
self.call_type = int(parts[0])
        tz = pytz.timezone('Europe/Berlin')
        self.date = datetime.strptime(parts[1] + " CET", "%d.%m.%y %H:%M %Z")
tzoffset = tz.utcoffset(self.date)
self.date = self.date-tzoffset
self.caller_name = parts[2]
self.caller_number = parts[3]
self.fon = parts[4]
self.number = parts[5]
t = datetime.strptime(parts[6],"%H:%M")
self.duration = timedelta(hours=t.hour, minutes=t.minute, seconds=t.second)
self.UID = self.get_UID()
def get_UID(self):
return hashlib.md5(self.date.isoformat()+self.caller_number).hexdigest()
def __repr__(self): #debug purposes
return str(self.date) + " " +self.caller_name + " " +self.caller_number + " " +str(self.duration)
def __str__(self): #debug purposes
return str(self.date) + " " +self.caller_name + " " +self.caller_number + " " +str(self.duration)
class FritzBoxDevice:
mac="" #mac adress as string
ipv6="" #ipv6 adress of the device as string
state="" # state as string
name="" # name as string
port="" # port as string
summarypropertie="" # summarypropertie as string (no typo! missing r is real)
classes="" # classes as string
url="" # url as string
type="" # type as string (lan|wlan etc)
ipv4="" # ipv4 as string
UID="" #UID as string
def __init__(self,parsed_json):
"""
expected parsed json and inits values as string
"""
self.mac=parsed_json["mac"]
self.ipv6=parsed_json["ipv6"]
self.UID=parsed_json["UID"]
self.state=parsed_json["state"]
self.port=parsed_json["port"]
self.name=parsed_json["name"]
self.summarypropertie=parsed_json["summarypropertie"]
self.classes=parsed_json["classes"]
self.url=parsed_json["url"]
self.type=parsed_json["type"]
self.ipv4=parsed_json["ipv4"]
self.UID=self.get_UID()
def get_UID(self):
if self.UID:
return self.UID
return str(self) #if vpn UID seems to be empty
def __repr__(self): #debug purposes
return self.UID + " " +self.ipv4 + " " +self.type + " " +self.name
def __str__(self): #debug purposes
return self.UID + " " +self.ipv4 + " " +self.type + " " +self.name
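
# Illustrative usage sketch (not part of the original file); the password is a
# placeholder and the module targets Python 2.
#
#   fb = FritzBox(password="secret")
#   for call in fb.get_foncalls():
#       print call
#   for device in fb.get_devices():
#       print device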
# ======================================================================
# healerkx/PySQLKits :: mysqlvfk/mysqlvfk.py  (license: mit)
# ======================================================================
#
import MySQLdb, os, re, json
from functools import *
from tableinfo import *
from sys import argv
from graph import *
from extra import *
from defines import *
import readline
from optparse import OptionParser
usage = """
Usage:
    python3 mysqlvfk.py --source=<database> [--options]
    <source> format:
        username:password@host[:port]/database
    e.g. python3 mysqlvfk.py --source=root:root@localhost/mydb
"""
def fetch_database_info(extra_info, user, password, server, db):
"""
Fetch database info and mixin extra info from json config
"""
host = server
port = 3306
if ':' in server:
host, port = server.split(':')
port = int(port)
db = MySQLdb.connect(host=host, user=user, passwd=password, db=db, port=port, charset="utf8")
print("#Reading database scheme")
ct = db.cursor()
ct.execute("SHOW TABLES")
table_info_list = []
id_table_map = {} # Stores id-field names => tableInfo mapping
for (table,) in ct.fetchall():
ct.execute("SHOW FULL COLUMNS FROM " + table)
fields = ct.fetchall()
table_info = TableInfo(table, fields, extra_info)
id_fields = table_info.get_id_fields()
for id_field_name in id_fields:
if id_field_name not in id_table_map:
id_table_map[id_field_name] = [table_info]
else:
id_table_map[id_field_name].append(table_info)
table_info_list.append(table_info)
ct.close()
return table_info_list, id_table_map, db
def calc_tables_relations(tables, id_table_map):
"""
Calc the tables' relations
"""
for table in tables:
primary_key = table.primary_key[0]
if primary_key not in id_table_map:
continue
follower_tables = id_table_map[primary_key]
for follower_table in follower_tables:
table.add_follower_table(follower_table)
def update_logic_foreign_key(table_info_list, table_info, uncertain_id, keys, extra):
keys = keys.split(',')
for key in keys:
key = key.strip()
table_name, field_name = key.split(".")
if table_name not in map(lambda x: x.table_name, table_info_list):
raise Exception("Table `%s` not found" % red_text(table_name))
this_table_info = list(filter(lambda x: x.table_name==table_name, table_info_list))[0]
if field_name not in this_table_info.id_fields and field_name != this_table_info.primary_key[0]:
raise Exception("Field `%s`.`%s` not found" % (red_text(table_name), red_text(field_name)))
extra.set_virtual_foreign_key(table_info, uncertain_id, table_name, field_name)
extra.update_table_extra_info()
return True
def query_uncertain_id_fields(table_info_list, extra):
"""
"""
for table_info in table_info_list:
id_fields = table_info.get_id_fields()
depends = table_info.depends
if len(id_fields) == len(depends):
continue
depends_ids = list(map(lambda x: x[0], depends.keys()))
uncertain_ids = list(set(id_fields) - set(depends_ids))
if len(uncertain_ids) == 0:
continue
index = 0
while index < len(uncertain_ids):
uncertain_id = uncertain_ids[index]
try:
print("Could you point out `%s`.`%s` corresponds to which primary key?"
% (green_text(table_info.table_name), green_text(uncertain_id)))
keys = input('')
if len(keys) > 0 and '.' in keys:
if update_logic_foreign_key(table_info_list, table_info, uncertain_id, keys, extra):
index += 1
elif keys == 'i':
# Ignore it this time
index += 1
elif keys == 'n':
# It's not an Id.
index += 1
elif keys == 'e':
# The fields means an id from extra system
extra.set_virtual_foreign_key(table_info, uncertain_id, '', '')
extra.update_table_extra_info()
index += 1
except Exception as e:
print(e)
# show all tables' followers and depends
def print_relations(results):
for table in results:
print(table)
for f in table.followers:
print("\t", f)
# print("\t", '-' * 30)
# for d in table.depends:
# print("\t", d)
print("=" * 40, end='\n\n')
def init_graph_from_relations(results):
graph = Graph()
for table in results:
graph.add_vertex(table.table_name, table)
for table in results:
for follow in table.followers:
graph.add_edge(table.table_name, follow.table_name)
return graph
def plot(graph, filename="social_network.png"):
from igraph import plot
layout = graph.layout("circle")
visual_style = dict()
visual_style["vertex_size"] = 20
visual_style["vertex_label_size"] = 30
visual_style["vertex_label_dist"] = 2
visual_style["vertex_color"] = "white"
visual_style["vertex_label_color"] = "blue"
visual_style["vertex_label"] = graph.vs["name"]
visual_style["edge_width"] = 2
visual_style["layout"] = layout
visual_style["bbox"] = (1200, 1000)
visual_style["margin"] = 100
plot(graph, filename, **visual_style)
def calc_database_table_relations(db_args):
extra = ExtraTableInfo(db_args[3])
extra_info = extra.load_table_extra_info()
table_info_list, id_table_map, db = fetch_database_info(extra_info, *db_args)
calc_tables_relations(table_info_list, id_table_map)
return table_info_list, extra
def main(options, other_args):
# For local test
u = re.compile("(.*):(.*)@(.*)/(.*)")
a = u.match(options.source)
db_args = a.groups()
table_info_list, extra = calc_database_table_relations(db_args)
print("Press [i] to ignore this time, [n] means not an id(key), [e] means an id from an external system.")
print("")
try:
query_uncertain_id_fields(table_info_list, extra)
except KeyboardInterrupt as e:
print('Ignore all uncertain foreign keys')
table_info_list, extra = calc_database_table_relations(db_args)
    if options.graph or options.way:
        graph = init_graph_from_relations(table_info_list)
    if options.graph:
        plot(graph, options.graph)
    if options.way:
        begin_point, end_point = options.way.split(',')
        paths = graph.all_paths(begin_point, end_point)
count = 1
for path in paths:
print('-' * 5, "Way %d" % count, '-' * 5)
graph.prints(path)
count += 1
#
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-s", "--source", action="store", dest="source", help="Provide source database")
parser.add_option("-g", "--graph", action="store", dest="graph", help="Render the relations in a graph")
parser.add_option("-w", "--way", action="store", dest="way", help="Provide a way from a begin point to the end point")
options, args = parser.parse_args()
main(options, argv[2:])
# ======================================================================
# jchmura/suchary-download :: functions.py  (license: mit)
# ======================================================================
from datetime import datetime
import json
from os.path import isfile
def output_json(obj):
if isinstance(obj, datetime):
if obj.utcoffset() is not None:
obj = obj - obj.utcoffset()
return obj.strftime('%Y-%m-%d %H:%M:%S')
return str(obj)
def input_json(obj):
new_dic = {}
for key in obj:
try:
if float(key) == int(float(key)):
new_key = int(key)
else:
new_key = float(key)
new_dic[new_key] = obj[key]
continue
except ValueError:
pass
try:
new_dic[str(key)] = datetime.strptime(obj[key], '%Y-%m-%d %H:%M:%S')
continue
except (TypeError, ValueError):
pass
new_dic[str(key)] = obj[key]
return new_dic
def load_saved(file):
if isfile(file):
try:
saved = json.load(open(file, 'r'), object_hook=input_json)
except ValueError:
saved = []
else:
saved = []
ids = set()
for suchar in saved:
ids.add(suchar['id'])
return saved, ids
def convert_to_date_time(date):
year = int(date[:4])
month = int(date[5:7])
day = int(date[8:10])
hour = int(date[11:13])
minute = int(date[14:16])
second = int(date[17:19])
suchar_date = datetime(year, month, day, hour, minute, second)
return suchar_date
def create_suchar_to_save(id, date, votes, body):
dst = {'id': id, 'date': date, 'votes': votes, 'body': body}
    return dst
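
# Illustrative round trip (not part of the original file): output_json and
# input_json are designed to plug into json.dumps/json.loads.
#
#   data = {'id': 1, 'date': datetime(2014, 1, 31, 12, 0, 0)}
#   text = json.dumps(data, default=output_json)
#   back = json.loads(text, object_hook=input_json)
#   assert back == data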
# ======================================================================
# XaF/rteval :: rteval/sysinfo/services.py  (license: gpl-2.0)
# ======================================================================
# -*- coding: utf-8 -*-
#
# Copyright 2009 - 2013 Clark Williams <williams@redhat.com>
# Copyright 2009 - 2013 David Sommerseth <davids@redhat.com>
# Copyright 2012 - 2013 Raphaël Beamonte <raphael.beamonte@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# For the avoidance of doubt the "preferred form" of this code is one which
# is in an open unpatent encumbered format. Where cryptographic key signing
# forms part of the process of creating an executable the information
# including keys needed to generate an equivalently functional executable
# are deemed to be part of the source code.
#
import sys, subprocess, os, glob, fnmatch, libxml2
from rteval.sysinfo.tools import getcmdpath
from rteval.Log import Log
class SystemServices(object):
def __init__(self, logger=None):
self.__logger = logger
self.__init = "unknown"
def __log(self, logtype, msg):
if self.__logger:
self.__logger.log(logtype, msg)
def __get_services_sysvinit(self):
reject = ('functions', 'halt', 'killall', 'single', 'linuxconf', 'kudzu',
'skeleton', 'README', '*.dpkg-dist', '*.dpkg-old', 'rc', 'rcS',
'single', 'reboot', 'bootclean.sh')
        servicesdir = None
        for sdir in ('/etc/init.d', '/etc/rc.d/init.d'):
            if os.path.isdir(sdir):
                servicesdir = sdir
                break
        if not servicesdir:
            raise RuntimeError, "No services dir (init.d) found on your system"
self.__log(Log.DEBUG, "Services located in %s, going through each service file to check status" % servicesdir)
ret_services = {}
for service in glob.glob(os.path.join(servicesdir, '*')):
servicename = os.path.basename(service)
if not [1 for p in reject if fnmatch.fnmatch(servicename, p)] and os.access(service, os.X_OK):
cmd = '%s -qs "\(^\|\W\)status)" %s' % (getcmdpath('grep'), service)
c = subprocess.Popen(cmd, shell=True)
c.wait()
if c.returncode == 0:
cmd = ['env', '-i', 'LANG="%s"' % os.environ['LANG'], 'PATH="%s"' % os.environ['PATH'], 'TERM="%s"' % os.environ['TERM'], service, 'status']
c = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
c.wait()
if c.returncode == 0 and (c.stdout.read() or c.stderr.read()):
ret_services[servicename] = 'running'
else:
ret_services[servicename] = 'not running'
else:
ret_services[servicename] = 'unknown'
return ret_services
def __get_services_systemd(self):
ret_services = {}
cmd = '%s list-unit-files -t service --no-legend' % getcmdpath('systemctl')
self.__log(Log.DEBUG, "cmd: %s" % cmd)
c = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for p in c.stdout:
# p are lines like "servicename.service status"
v = p.strip().split()
ret_services[v[0].split('.')[0]] = v[1]
return ret_services
def services_get(self):
cmd = [getcmdpath('ps'), '-ocomm=', '1']
c = subprocess.Popen(cmd, stdout=subprocess.PIPE)
self.__init = c.stdout.read().strip()
if self.__init == 'systemd':
self.__log(Log.DEBUG, "Using systemd to get services status")
return self.__get_services_systemd()
elif self.__init == 'init':
self.__init = 'sysvinit'
self.__log(Log.DEBUG, "Using sysvinit to get services status")
return self.__get_services_sysvinit()
else:
raise RuntimeError, "Unknown init system (%s)" % self.__init
return {}
def MakeReport(self):
srvs = self.services_get()
rep_n = libxml2.newNode("Services")
rep_n.newProp("init", self.__init)
for s in srvs:
srv_n = libxml2.newNode("Service")
srv_n.newProp("state", srvs[s])
srv_n.addContent(s)
rep_n.addChild(srv_n)
return rep_n
def unit_test(rootdir):
from pprint import pprint
try:
syssrv = SystemServices()
pprint(syssrv.services_get())
srv_xml = syssrv.MakeReport()
xml_d = libxml2.newDoc("1.0")
xml_d.setRootElement(srv_xml)
xml_d.saveFormatFileEnc("-", "UTF-8", 1)
return 0
except Exception, e:
print "** EXCEPTION: %s" % str(e)
return 1
if __name__ == '__main__':
sys.exit(unit_test(None))
# ======================================================================
# FIXFlyer/pyflyer :: flyer/protocol.py  (license: mit)
# ======================================================================
#! /usr/bin/env python
#-----------------------------------------------------------------------
# COPYRIGHT_BEGIN
# Copyright (C) 2017, FixFlyer, LLC.
# All rights reserved.
# COPYRIGHT_END
#-----------------------------------------------------------------------
"""Flyer remote protocol definitions."""
# Carriage return + linefeed, used in properties serialisation.
FLYER_CRLF = "\r\n"
# SOH, used as field separator in FIX-style serialisation.
FLYER_SOH = "\x01"
# End-of-message marker for Flyer protocol messages.
FLYER_EOF = "EOF"
PAYLOAD_EVENT_ID = 0
RESEND_EVENT_ID = 104
SESSION_LOGON_EVENT_ID = 105
SESSION_LOGOUT_EVENT_ID = 106
RESTORE_EVENT_ID = 111
LOGON_RESPONSE_EVENT_ID = 200
HEARTBEAT_EVENT_ID = 201
HEARTBEAT_ACK_EVENT_ID = 202
LOGON_REQUEST_EVENT_ID = 203
LOGOUT_REQUEST_EVENT_ID = 204
ERROR_EVENT_ID = 301
HEARTBEAT_MESSAGE_TYPE = 0
PAYLOAD_MESSAGE_TYPE = 1
COMMON_MESSAGE_TYPE = 2
HEARTBEAT_ACK_MESSAGE_TYPE = 3
COMMIT_MESSAGE_TYPE = 4
FLYER_MESSAGE_TYPE_TAG = 50001
FLYER_REQUEST_ID_TAG = 50002
FLYER_FIX_MESSAGE_TYPE_TAG = 50003
FLYER_MESSAGE_SEQUENCE_NUMBER_TAG = 50004
FLYER_SENDER_COMP_ID_TAG = 50005
FLYER_TARGET_COMP_ID_TAG = 50006
FLYER_MESSAGE_TAG = 50007
FLYER_POSS_DUP_TAG = 50008
FLYER_POSS_RESEND_TAG = 50009
FLYER_LAST_APP_SEQ_NO_TAG = 50010
FLYER_EVENT_TYPE_TAG = 50011
FLYER_BEGIN_STRING_TAG = 50012
FLYER_SERIAL_EVENT_DATA_TAG = 50013
FLYER_ROOT_FIELD_TAG = 50014
FLYER_EOF_TAG = 50015
FLYER_CLIENT_MESSAGE_ID_TAG = 50016
FLYER_LAST_OUTGOING_MSG_SEQ_NUM_TAG = 50017
FLYER_APPLVER_ID_TAG = 50018
FLYER_CUSTOMER_APPLVER_ID_TAG = 50019
FLYER_SESSION_QUALIFIER_TAG = 50020
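
# Illustrative sketch (not part of the original file): the constants above are
# typically combined into FIX-style tag=value fields joined by FLYER_SOH. The
# particular field choices below are made up for demonstration.
#
#   fields = [
#       (FLYER_MESSAGE_TYPE_TAG, HEARTBEAT_MESSAGE_TYPE),
#       (FLYER_EVENT_TYPE_TAG, HEARTBEAT_EVENT_ID),
#       (FLYER_EOF_TAG, FLYER_EOF),
#   ]
#   wire = FLYER_SOH.join("%d=%s" % (tag, value) for tag, value in fields)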
# ======================================================================
# maestro-hybrid-cloud/heat :: heat/engine/resources/aws/cfn/wait_condition_handle.py
# (license: apache-2.0)
# ======================================================================
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.engine.resources import signal_responder
from heat.engine.resources import wait_condition as wc_base
from heat.engine import support
class WaitConditionHandle(wc_base.BaseWaitConditionHandle):
"""AWS WaitConditionHandle resource.
the main point of this class is to :
have no dependencies (so the instance can reference it)
generate a unique url (to be returned in the reference)
then the cfn-signal will use this url to post to and
WaitCondition will poll it to see if has been written to.
"""
support_status = support.SupportStatus(version='2014.1')
METADATA_KEYS = (
DATA, REASON, STATUS, UNIQUE_ID
) = (
'Data', 'Reason', 'Status', 'UniqueId'
)
def get_reference_id(self):
if self.resource_id:
wc = signal_responder.WAITCONDITION
return six.text_type(self._get_ec2_signed_url(signal_type=wc))
else:
return six.text_type(self.name)
def metadata_update(self, new_metadata=None):
"""DEPRECATED. Should use handle_signal instead."""
self.handle_signal(details=new_metadata)
def handle_signal(self, details=None):
"""Validate and update the resource metadata.
metadata must use the following format:
{
"Status" : "Status (must be SUCCESS or FAILURE)",
"UniqueId" : "Some ID, should be unique for Count>1",
"Data" : "Arbitrary Data",
"Reason" : "Reason String"
}
"""
if details is None:
return
return super(WaitConditionHandle, self).handle_signal(details)
def resource_mapping():
return {
'AWS::CloudFormation::WaitConditionHandle': WaitConditionHandle,
}
# ======================================================================
# YevgeniyaK/python_training :: test/test_modify_group.py  (license: apache-2.0)
# ======================================================================
from model.group import Group
import random
import pytest
def test_modify_group_name(app, db, check_ui):
if app.group.count() == 0:
with pytest.allure.step("Create new group if group list is empty"):
app.group.create(Group(name="Test"))
with pytest.allure.step("Given a group list"):
old_groups = db.get_group_list()
with pytest.allure.step("choose random group"):
group = random.choice(old_groups)
with pytest.allure.step("modify random group"):
group.name = "new name"
app.group.modify_group_by_id(group.id, group)
with pytest.allure.step("Get group list again"):
new_groups = db.get_group_list()
with pytest.allure.step("Compare length of old list with length of new list "):
assert len(old_groups) == len(new_groups)
if check_ui:
app_groups = app.group.get_group_list()
for new_group in new_groups:
for app_group in app_groups:
if new_group.id == app_group.id:
if new_group.id == group.id:
assert new_group.name == group.name
else:
assert new_group.name == app_group.name
break
#def test_modify_group_header(app):
# if app.group.count() == 0:
# app.group.create(Group(name="Test"))
# old_groups = app.group.get_group_list()
# app.group.modify_first_group(Group(header="new header"))
# new_groups = app.group.get_group_list()
# assert len(old_groups) == len(new_groups)
# assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
# ======================================================================
# Ziftr/counterpartyd :: lib/util.py
# ======================================================================
import time
import decimal
import sys
import json
import logging
import apsw
import collections
import inspect
import requests
from datetime import datetime
from dateutil.tz import tzlocal
from operator import itemgetter
import fractions
import warnings
import binascii
import hashlib
from . import (config, exceptions)
D = decimal.Decimal
b26_digits = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
b58_digits = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
dhash = lambda x: hashlib.sha256(hashlib.sha256(x).digest()).digest()
# Obsolete in Python 3.4, with enum module.
BET_TYPE_NAME = {0: 'BullCFD', 1: 'BearCFD', 2: 'Equal', 3: 'NotEqual'}
BET_TYPE_ID = {'BullCFD': 0, 'BearCFD': 1, 'Equal': 2, 'NotEqual': 3}
BLOCK_LEDGER = []
# TODO: This doesn’t timeout properly. (If server hangs, then unhangs, no result.)
def api (method, params):
headers = {'content-type': 'application/json'}
payload = {
"method": method,
"params": params,
"jsonrpc": "2.0",
"id": 0,
}
response = requests.post(config.RPC, data=json.dumps(payload), headers=headers)
if response == None:
raise exceptions.RPCError('Cannot communicate with {} server.'.format(config.XCP_CLIENT))
elif response.status_code != 200:
if response.status_code == 500:
raise exceptions.RPCError('Malformed API call.')
else:
raise exceptions.RPCError(str(response.status_code) + ' ' + response.reason)
response_json = response.json()
if 'error' not in response_json.keys() or response_json['error'] == None:
try:
return response_json['result']
except KeyError:
raise exceptions.RPCError(response_json)
else:
raise exceptions.RPCError('{}'.format(response_json['error']))
def price (numerator, denominator, block_index):
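    # Protocol change at block 294500: exact rational arithmetic instead of
    # Decimal division, e.g. on mainnet price(1, 3, 294500) == Fraction(1, 3).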
if block_index >= 294500 or config.TESTNET: # Protocol change.
return fractions.Fraction(numerator, denominator)
else:
numerator = D(numerator)
denominator = D(denominator)
return D(numerator / denominator)
def log (db, command, category, bindings):
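    # Emit a human-readable log line for each database change, dispatched on (command, category).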
cursor = db.cursor()
# Slow?!
def output (quantity, asset):
try:
if asset not in ('fraction', 'leverage'):
return str(devise(db, quantity, asset, 'output')) + ' ' + asset
else:
return str(devise(db, quantity, asset, 'output'))
except exceptions.AssetError:
return '<AssetError>'
except decimal.DivisionByZero:
return '<DivisionByZero>'
if command == 'update':
if category == 'order':
logging.debug('Database: set status of order {} to {}.'.format(bindings['tx_hash'], bindings['status']))
elif category == 'bet':
logging.debug('Database: set status of bet {} to {}.'.format(bindings['tx_hash'], bindings['status']))
elif category == 'order_matches':
logging.debug('Database: set status of order_match {} to {}.'.format(bindings['order_match_id'], bindings['status']))
elif category == 'bet_matches':
logging.debug('Database: set status of bet_match {} to {}.'.format(bindings['bet_match_id'], bindings['status']))
# TODO: elif category == 'balances':
# logging.debug('Database: set balance of {} in {} to {}.'.format(bindings['address'], bindings['asset'], output(bindings['quantity'], bindings['asset']).split(' ')[0]))
elif command == 'insert':
if category == 'credits':
logging.debug('Credit: {} to {} #{}# <{}>'.format(output(bindings['quantity'], bindings['asset']), bindings['address'], bindings['action'], bindings['event']))
elif category == 'debits':
logging.debug('Debit: {} from {} #{}# <{}>'.format(output(bindings['quantity'], bindings['asset']), bindings['address'], bindings['action'], bindings['event']))
elif category == 'sends':
logging.info('Send: {} from {} to {} ({}) [{}]'.format(output(bindings['quantity'], bindings['asset']), bindings['source'], bindings['destination'], bindings['tx_hash'], bindings['status']))
elif category == 'orders':
logging.info('Order: {} ordered {} for {} in {} blocks, with a provided fee of {} {} and a required fee of {} {} ({}) [{}]'.format(bindings['source'], output(bindings['give_quantity'], bindings['give_asset']), output(bindings['get_quantity'], bindings['get_asset']), bindings['expiration'], bindings['fee_provided'] / config.UNIT, config.BTC, bindings['fee_required'] / config.UNIT, config.BTC, bindings['tx_hash'], bindings['status']))
elif category == 'order_matches':
logging.info('Order Match: {} for {} ({}) [{}]'.format(output(bindings['forward_quantity'], bindings['forward_asset']), output(bindings['backward_quantity'], bindings['backward_asset']), bindings['id'], bindings['status']))
elif category == 'btcpays':
logging.info('{} Payment: {} paid {} to {} for order match {} ({}) [{}]'.format(config.BTC, bindings['source'], output(bindings['btc_amount'], config.BTC), bindings['destination'], bindings['order_match_id'], bindings['tx_hash'], bindings['status']))
elif category == 'issuances':
if bindings['transfer']:
logging.info('Issuance: {} transfered asset {} to {} ({}) [{}]'.format(bindings['source'], bindings['asset'], bindings['issuer'], bindings['tx_hash'], bindings['status']))
elif bindings['locked']:
logging.info('Issuance: {} locked asset {} ({}) [{}]'.format(bindings['issuer'], bindings['asset'], bindings['tx_hash'], bindings['status']))
else:
if bindings['divisible']:
divisibility = 'divisible'
unit = config.UNIT
else:
divisibility = 'indivisible'
unit = 1
if bindings['callable'] and (bindings['block_index'] > 283271 or config.TESTNET): # Protocol change.
callability = 'callable from {} for {} XCP/{}'.format(isodt(bindings['call_date']), bindings['call_price'], bindings['asset'])
else:
callability = 'uncallable'
try:
quantity = devise(db, bindings['quantity'], None, dest='output', divisible=bindings['divisible'])
except Exception as e:
quantity = '?'
logging.info('Issuance: {} created {} of asset {}, which is {} and {}, with description ‘{}’ ({}) [{}]'.format(bindings['issuer'], quantity, bindings['asset'], divisibility, callability, bindings['description'], bindings['tx_hash'], bindings['status']))
elif category == 'broadcasts':
if bindings['locked']:
logging.info('Broadcast: {} locked his feed ({}) [{}]'.format(bindings['source'], bindings['tx_hash'], bindings['status']))
else:
if not bindings['value']: infix = '‘{}’'.format(bindings['text'])
else: infix = '‘{}’ = {}'.format(bindings['text'], bindings['value'])
suffix = ' from ' + bindings['source'] + ' at ' + isodt(bindings['timestamp']) + ' with a fee of {}%'.format(output(D(bindings['fee_fraction_int'] / 1e8) * D(100), 'fraction')) + ' (' + bindings['tx_hash'] + ')' + ' [{}]'.format(bindings['status'])
logging.info('Broadcast: {}'.format(infix + suffix))
elif category == 'bets':
# Last text
broadcasts = list(cursor.execute('''SELECT * FROM broadcasts WHERE (status = ? AND source = ?) ORDER BY tx_index ASC''', ('valid', bindings['feed_address'])))
try:
last_broadcast = broadcasts[-1]
text = last_broadcast['text']
except IndexError:
text = '<Text>'
# Suffix
end = 'in {} blocks ({}) [{}]'.format(bindings['expiration'], bindings['tx_hash'], bindings['status'])
if 'CFD' not in BET_TYPE_NAME[bindings['bet_type']]:
log_message = 'Bet: {} against {}, by {}, on {} that ‘{}’ will {} {} at {}, {}'.format(output(bindings['wager_quantity'], config.XCP), output(bindings['counterwager_quantity'], config.XCP), bindings['source'], bindings['feed_address'], text, BET_TYPE_NAME[bindings['bet_type']], str(output(bindings['target_value'], 'value').split(' ')[0]), isodt(bindings['deadline']), end)
else:
log_message = 'Bet: {}, by {}, on {} for {} against {}, leveraged {}x, {}'.format(BET_TYPE_NAME[bindings['bet_type']], bindings['source'], bindings['feed_address'],output(bindings['wager_quantity'], config.XCP), output(bindings['counterwager_quantity'], config.XCP), output(bindings['leverage']/ 5040, 'leverage'), end)
logging.info(log_message)
elif category == 'bet_matches':
placeholder = ''
if bindings['target_value'] >= 0: # Only non‐negative values are valid.
placeholder = ' that ' + str(output(bindings['target_value'], 'value'))
if bindings['leverage']:
placeholder += ', leveraged {}x'.format(output(bindings['leverage'] / 5040, 'leverage'))
logging.info('Bet Match: {} for {} against {} for {} on {} at {}{} ({}) [{}]'.format(BET_TYPE_NAME[bindings['tx0_bet_type']], output(bindings['forward_quantity'], config.XCP), BET_TYPE_NAME[bindings['tx1_bet_type']], output(bindings['backward_quantity'], config.XCP), bindings['feed_address'], isodt(bindings['deadline']), placeholder, bindings['id'], bindings['status']))
elif category == 'dividends':
logging.info('Dividend: {} paid {} per unit of {} ({}) [{}]'.format(bindings['source'], output(bindings['quantity_per_unit'], bindings['dividend_asset']), bindings['asset'], bindings['tx_hash'], bindings['status']))
elif category == 'burns':
logging.info('Burn: {} burned {} for {} ({}) [{}]'.format(bindings['source'], output(bindings['burned'], config.BTC), output(bindings['earned'], config.XCP), bindings['tx_hash'], bindings['status']))
elif category == 'cancels':
logging.info('Cancel: {} ({}) [{}]'.format(bindings['offer_hash'], bindings['tx_hash'], bindings['status']))
elif category == 'callbacks':
logging.info('Callback: {} called back {}% of {} ({}) [{}]'.format(bindings['source'], float(D(bindings['fraction']) * D(100)), bindings['asset'], bindings['tx_hash'], bindings['status']))
elif category == 'rps':
log_message = 'RPS: {} opens game with {} possible moves and a wager of {}'.format(bindings['source'], bindings['possible_moves'], output(bindings['wager'], 'XCP'))
logging.info(log_message)
elif category == 'rps_matches':
log_message = 'RPS Match: {} is playing a {}-moves game with {} with a wager of {} ({}) [{}]'.format(bindings['tx0_address'], bindings['possible_moves'], bindings['tx1_address'], output(bindings['wager'], 'XCP'), bindings['id'], bindings['status'])
logging.info(log_message)
elif category == 'rpsresolves':
if bindings['status'] == 'valid':
rps_matches = list(cursor.execute('''SELECT * FROM rps_matches WHERE id = ?''', (bindings['rps_match_id'],)))
assert len(rps_matches) == 1
rps_match = rps_matches[0]
log_message = 'RPS Resolved: {} is playing {} on a {}-moves game with {} with a wager of {} ({}) [{}]'.format(rps_match['tx0_address'], bindings['move'], rps_match['possible_moves'], rps_match['tx1_address'], output(rps_match['wager'], 'XCP'), rps_match['id'], rps_match['status'])
else:
log_message = 'RPS Resolved: {} [{}]'.format(bindings['tx_hash'], bindings['status'])
logging.info(log_message)
elif category == 'order_expirations':
logging.info('Expired order: {}'.format(bindings['order_hash']))
elif category == 'order_match_expirations':
logging.info('Expired Order Match awaiting payment: {}'.format(bindings['order_match_id']))
elif category == 'bet_expirations':
logging.info('Expired bet: {}'.format(bindings['bet_hash']))
elif category == 'bet_match_expirations':
logging.info('Expired Bet Match: {}'.format(bindings['bet_match_id']))
elif category == 'bet_match_resolutions':
# DUPE
cfd_type_id = BET_TYPE_ID['BullCFD'] + BET_TYPE_ID['BearCFD']
equal_type_id = BET_TYPE_ID['Equal'] + BET_TYPE_ID['NotEqual']
if bindings['bet_match_type_id'] == cfd_type_id:
if bindings['settled']:
logging.info('Bet Match Settled: {} credited to the bull, {} credited to the bear, and {} credited to the feed address ({})'.format(output(bindings['bull_credit'], config.XCP), output(bindings['bear_credit'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id']))
else:
logging.info('Bet Match Force‐Liquidated: {} credited to the bull, {} credited to the bear, and {} credited to the feed address ({})'.format(output(bindings['bull_credit'], config.XCP), output(bindings['bear_credit'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id']))
elif bindings['bet_match_type_id'] == equal_type_id:
logging.info('Bet Match Settled: {} won the pot of {}; {} credited to the feed address ({})'.format(bindings['winner'], output(bindings['escrow_less_fee'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id']))
elif category == 'rps_expirations':
logging.info('Expired RPS: {}'.format(bindings['rps_hash']))
elif category == 'rps_match_expirations':
logging.info('Expired RPS Match: {}'.format(bindings['rps_match_id']))
cursor.close()
def message (db, block_index, command, category, bindings, tx_hash=None):
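    # Append the change to the sequential `messages` feed; mempool entries are
    # stripped of status/block/tx indexes so they don't look like confirmed data.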
cursor = db.cursor()
# Get last message index.
messages = list(cursor.execute('''SELECT * FROM messages
WHERE message_index = (SELECT MAX(message_index) from messages)'''))
if messages:
assert len(messages) == 1
message_index = messages[0]['message_index'] + 1
else:
message_index = 0
# Not to be misleading…
if block_index == config.MEMPOOL_BLOCK_INDEX:
try:
del bindings['status']
del bindings['block_index']
del bindings['tx_index']
except KeyError:
pass
bindings_string = json.dumps(collections.OrderedDict(sorted(bindings.items())))
cursor.execute('insert into messages values(:message_index, :block_index, :command, :category, :bindings, :timestamp)',
(message_index, block_index, command, category, bindings_string, curr_time()))
# Log only real transactions.
if block_index != config.MEMPOOL_BLOCK_INDEX:
log(db, command, category, bindings)
cursor.close()
def rowtracer(cursor, sql):
"""Converts fetched SQL data into dict-style"""
dictionary = {}
for index, (name, type_) in enumerate(cursor.getdescription()):
dictionary[name] = sql[index]
return dictionary
def exectracer(cursor, sql, bindings):
    # This means that all changes to the database must use a very simple syntax.
# TODO: Need sanity checks here.
sql = sql.lower()
# Parse SQL.
array = sql.split('(')[0].split(' ')
if 'insert' in sql:
command, category = array[0], array[2]
elif 'update' in sql:
command, category = array[0], array[1]
else:
return True
db = cursor.getconnection()
dictionary = {'command': command, 'category': category, 'bindings': bindings}
# Skip blocks, transactions.
if 'blocks' in sql or 'transactions' in sql: return True
# Record alteration in database.
if category not in ('balances', 'messages', 'mempool'):
if not (command in ('update') and category in ('orders', 'bets', 'rps', 'order_matches', 'bet_matches', 'rps_matches')): # List message manually.
message(db, bindings['block_index'], command, category, bindings)
return True
def connect_to_db(flags=None):
"""Connects to the SQLite database, returning a db Connection object"""
logging.debug('Status: Creating connection to `{}`.'.format(config.DATABASE.split('/').pop()))
if flags == None:
db = apsw.Connection(config.DATABASE)
elif flags == 'SQLITE_OPEN_READONLY':
db = apsw.Connection(config.DATABASE, flags=0x00000001)
else:
raise exceptions.DatabaseError
cursor = db.cursor()
# For speed.
cursor.execute('''PRAGMA count_changes = OFF''')
# For integrity, security.
cursor.execute('''PRAGMA foreign_keys = ON''')
cursor.execute('''PRAGMA defer_foreign_keys = ON''')
# So that writers don’t block readers.
if flags != 'SQLITE_OPEN_READONLY':
cursor.execute('''PRAGMA journal_mode = WAL''')
    # Make the LIKE operator case-sensitive.
    # For case-insensitive queries, use UPPER(fieldname) LIKE value.upper().
cursor.execute('''PRAGMA case_sensitive_like = ON''')
rows = list(cursor.execute('''PRAGMA foreign_key_check'''))
if rows: raise exceptions.DatabaseError('Foreign key check failed.')
# Integrity check
integral = False
for i in range(10): # DUPE
try:
logging.debug('Status: Checking database integrity.')
cursor.execute('''PRAGMA integrity_check''')
rows = cursor.fetchall()
if not (len(rows) == 1 and rows[0][0] == 'ok'):
raise exceptions.DatabaseError('Integrity check failed.')
integral = True
break
except exceptions.DatabaseIntegrityError:
time.sleep(1)
continue
if not integral:
raise exceptions.DatabaseError('Could not perform integrity check.')
cursor.close()
db.setrowtrace(rowtracer)
db.setexectrace(exectracer)
return db
def version_check (db):
try:
host = 'https://counterpartyxcp.github.io/counterpartyd/version.json'
response = requests.get(host, headers={'cache-control': 'no-cache'})
versions = json.loads(response.text)
except Exception as e:
raise exceptions.VersionError('Unable to check version. How’s your Internet access?')
# Check client version.
passed = True
if config.VERSION_MAJOR < versions['minimum_version_major']:
passed = False
elif config.VERSION_MAJOR == versions['minimum_version_major']:
if config.VERSION_MINOR < versions['minimum_version_minor']:
passed = False
elif config.VERSION_MINOR == versions['minimum_version_minor']:
if config.VERSION_REVISION < versions['minimum_version_revision']:
passed = False
if not passed:
explanation = 'Your version of counterpartyd is v{}, but, as of block {}, the minimum version is v{}.{}.{}. Reason: ‘{}’. Please upgrade to the latest version and restart the server.'.format(
config.VERSION_STRING, versions['block_index'], versions['minimum_version_major'], versions['minimum_version_minor'],
versions['minimum_version_revision'], versions['reason'])
if last_block(db)['block_index'] >= versions['block_index']:
raise exceptions.VersionUpdateRequiredError(explanation)
else:
warnings.warn(explanation)
logging.debug('Status: Version check passed.')
return
def database_check (db, blockcount):
"""Checks {} database to see if the {} server has caught up with Bitcoind.""".format(config.XCP_NAME, config.XCP_CLIENT)
if last_block(db)['block_index'] + 1 < blockcount:
raise exceptions.DatabaseError('{} database is behind Bitcoind. Is the {} server running?'.format(config.XCP_NAME, config.XCP_CLIENT))
return
def isodt (epoch_time):
return datetime.fromtimestamp(epoch_time, tzlocal()).isoformat()
def curr_time():
return int(time.time())
def date_passed(date):
return date <= time.time()
def sortkeypicker(keynames):
"""http://stackoverflow.com/a/1143719"""
negate = set()
for i, k in enumerate(keynames):
if k[:1] == '-':
keynames[i] = k[1:]
negate.add(k[1:])
def getit(adict):
composite = [adict[k] for k in keynames]
for i, (k, v) in enumerate(zip(keynames, composite)):
if k in negate:
composite[i] = -v
return composite
return getit
def last_block (db):
cursor = db.cursor()
blocks = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = (SELECT MAX(block_index) from blocks)'''))
if blocks:
assert len(blocks) == 1
last_block = blocks[0]
else:
raise exceptions.DatabaseError('No blocks found.')
cursor.close()
return last_block
def last_message (db):
cursor = db.cursor()
messages = list(cursor.execute('''SELECT * FROM messages WHERE message_index = (SELECT MAX(message_index) from messages)'''))
if messages:
assert len(messages) == 1
last_message = messages[0]
else:
raise exceptions.DatabaseError('No messages found.')
cursor.close()
return last_message
def asset_id (asset):
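    # Convert a Base 26 asset name to its numeric ID, e.g. asset_id('BAAA') == 26**3.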
# Special cases.
if asset == config.BTC: return 0
elif asset == config.XCP: return 1
if asset[0] == 'A': raise exceptions.AssetNameError('starts with ‘A’')
# Checksum
"""
if not checksum.verify(asset):
raise exceptions.AssetNameError('invalid checksum')
else:
asset = asset[:-1] # Strip checksum character.
"""
# Convert the Base 26 string to an integer.
n = 0
for c in asset:
n *= 26
if c not in b26_digits:
raise exceptions.AssetNameError('invalid character:', c)
digit = b26_digits.index(c)
n += digit
if n < 26**3:
raise exceptions.AssetNameError('too short')
return n
def asset_name (asset_id):
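    # Inverse of asset_id(), e.g. asset_name(26**3) == 'BAAA'.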
if asset_id == 0: return config.BTC
elif asset_id == 1: return config.XCP
if asset_id < 26**3:
raise exceptions.AssetIDError('too low')
# Divide that integer into Base 26 string.
res = []
n = asset_id
while n > 0:
n, r = divmod (n, 26)
res.append(b26_digits[r])
asset_name = ''.join(res[::-1])
"""
return asset_name + checksum.compute(asset_name)
"""
return asset_name
def debit (db, block_index, address, asset, quantity, action=None, event=None):
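    # Decrease an address's balance and record the debit; raises BalanceError on insufficient funds.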
debit_cursor = db.cursor()
assert asset != config.BTC # Never BTC.
assert type(quantity) == int
assert quantity >= 0
if asset == config.BTC:
raise exceptions.BalanceError('Cannot debit bitcoins from a {} address!'.format(config.XCP_NAME))
debit_cursor.execute('''SELECT * FROM balances \
WHERE (address = ? AND asset = ?)''', (address, asset))
balances = debit_cursor.fetchall()
if not len(balances) == 1: old_balance = 0
else: old_balance = balances[0]['quantity']
if old_balance < quantity:
raise exceptions.BalanceError('Insufficient funds.')
balance = round(old_balance - quantity)
balance = min(balance, config.MAX_INT)
assert balance >= 0
bindings = {
'quantity': balance,
'address': address,
'asset': asset
}
sql='update balances set quantity = :quantity where (address = :address and asset = :asset)'
debit_cursor.execute(sql, bindings)
# Record debit.
bindings = {
'block_index': block_index,
'address': address,
'asset': asset,
'quantity': quantity,
'action': action,
'event': event
}
sql='insert into debits values(:block_index, :address, :asset, :quantity, :action, :event)'
debit_cursor.execute(sql, bindings)
debit_cursor.close()
BLOCK_LEDGER.append('{}{}{}{}'.format(block_index, address, asset, quantity))
def credit (db, block_index, address, asset, quantity, action=None, event=None):
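    # Increase an address's balance (creating the row if needed) and record the credit.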
credit_cursor = db.cursor()
assert asset != config.BTC # Never BTC.
assert type(quantity) == int
assert quantity >= 0
credit_cursor.execute('''SELECT * FROM balances \
WHERE (address = ? AND asset = ?)''', (address, asset))
balances = credit_cursor.fetchall()
if len(balances) == 0:
assert balances == []
#update balances table with new balance
bindings = {
'address': address,
'asset': asset,
'quantity': quantity,
}
sql='insert into balances values(:address, :asset, :quantity)'
credit_cursor.execute(sql, bindings)
elif len(balances) > 1:
assert False
else:
old_balance = balances[0]['quantity']
assert type(old_balance) == int
balance = round(old_balance + quantity)
balance = min(balance, config.MAX_INT)
bindings = {
'quantity': balance,
'address': address,
'asset': asset
}
sql='update balances set quantity = :quantity where (address = :address and asset = :asset)'
credit_cursor.execute(sql, bindings)
# Record credit.
bindings = {
'block_index': block_index,
'address': address,
'asset': asset,
'quantity': quantity,
'action': action,
'event': event
}
sql='insert into credits values(:block_index, :address, :asset, :quantity, :action, :event)'
credit_cursor.execute(sql, bindings)
credit_cursor.close()
BLOCK_LEDGER.append('{}{}{}{}'.format(block_index, address, asset, quantity))
def devise (db, quantity, asset, dest, divisible=None):
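    # Convert between internal integer quantities and display units:
    # dest='output' renders a quantity for display, dest='input' parses one.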
# For output only.
def norm(num, places):
# Round only if necessary.
num = round(num, places)
fmt = '{:.' + str(places) + 'f}'
num = fmt.format(num)
return num.rstrip('0')+'0' if num.rstrip('0')[-1] == '.' else num.rstrip('0')
# TODO: remove price, odds
if asset in ('leverage', 'value', 'fraction', 'price', 'odds'):
if dest == 'output':
return norm(quantity, 6)
elif dest == 'input':
# Hackish
if asset == 'leverage':
return round(quantity)
else:
return float(quantity) # TODO: Float?!
    # Dead code: 'fraction' is already handled by the branch above, and no
    # `fraction()` helper exists in this module (it would raise NameError).
    # if asset in ('fraction',):
    #     return norm(fraction(quantity, 1e8), 6)
if divisible == None:
if asset in (config.BTC, config.XCP):
divisible = True
else:
cursor = db.cursor()
cursor.execute('''SELECT * FROM issuances \
WHERE (status = ? AND asset = ?)''', ('valid', asset))
issuances = cursor.fetchall()
cursor.close()
if not issuances: raise exceptions.AssetError('No such asset: {}'.format(asset))
divisible = issuances[0]['divisible']
if divisible:
if dest == 'output':
quantity = D(quantity) / D(config.UNIT)
if quantity == quantity.to_integral():
return str(quantity) + '.0' # For divisible assets, display the decimal point.
else:
return norm(quantity, 8)
elif dest == 'input':
quantity = D(quantity) * config.UNIT
if quantity == quantity.to_integral():
return int(quantity)
else:
raise exceptions.QuantityError('Divisible assets have only eight decimal places of precision.')
else:
return quantity
else:
quantity = D(quantity)
if quantity != round(quantity):
raise exceptions.QuantityError('Fractional quantities of indivisible assets.')
return round(quantity)
def holders(db, asset):
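    # List every holder of `asset`, including funds escrowed in open orders,
    # pending order matches and (for XCP) open bets and RPS games.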
holders = []
cursor = db.cursor()
# Balances
cursor.execute('''SELECT * FROM balances \
WHERE asset = ?''', (asset,))
for balance in list(cursor):
holders.append({'address': balance['address'], 'address_quantity': balance['quantity'], 'escrow': None})
# Funds escrowed in orders. (Protocol change.)
cursor.execute('''SELECT * FROM orders \
WHERE give_asset = ? AND status = ?''', (asset, 'open'))
for order in list(cursor):
holders.append({'address': order['source'], 'address_quantity': order['give_remaining'], 'escrow': order['tx_hash']})
# Funds escrowed in pending order matches. (Protocol change.)
cursor.execute('''SELECT * FROM order_matches \
WHERE (forward_asset = ? AND status = ?)''', (asset, 'pending'))
for order_match in list(cursor):
holders.append({'address': order_match['tx0_address'], 'address_quantity': order_match['forward_quantity'], 'escrow': order_match['id']})
cursor.execute('''SELECT * FROM order_matches \
WHERE (backward_asset = ? AND status = ?)''', (asset, 'pending'))
for order_match in list(cursor):
holders.append({'address': order_match['tx1_address'], 'address_quantity': order_match['backward_quantity'], 'escrow': order_match['id']})
# Bets and RPS (and bet/rps matches) only escrow XCP.
if asset == config.XCP:
cursor.execute('''SELECT * FROM bets \
WHERE status = ?''', ('open',))
for bet in list(cursor):
holders.append({'address': bet['source'], 'address_quantity': bet['wager_remaining'], 'escrow': bet['tx_hash']})
cursor.execute('''SELECT * FROM bet_matches \
WHERE status = ?''', ('pending',))
for bet_match in list(cursor):
holders.append({'address': bet_match['tx0_address'], 'address_quantity': bet_match['forward_quantity'], 'escrow': bet_match['id']})
holders.append({'address': bet_match['tx1_address'], 'address_quantity': bet_match['backward_quantity'], 'escrow': bet_match['id']})
cursor.execute('''SELECT * FROM rps \
WHERE status = ?''', ('open',))
for rps in list(cursor):
holders.append({'address': rps['source'], 'address_quantity': rps['wager'], 'escrow': rps['tx_hash']})
cursor.execute('''SELECT * FROM rps_matches \
WHERE status IN (?, ?, ?)''', ('pending', 'pending and resolved', 'resolved and pending'))
for rps_match in list(cursor):
holders.append({'address': rps_match['tx0_address'], 'address_quantity': rps_match['wager'], 'escrow': rps_match['id']})
holders.append({'address': rps_match['tx1_address'], 'address_quantity': rps_match['wager'], 'escrow': rps_match['id']})
cursor.close()
return holders
def xcp_supply (db):
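    # Total XCP supply = proof-of-burn earnings minus issuance and dividend fees.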
cursor = db.cursor()
# Add burns.
cursor.execute('''SELECT * FROM burns \
WHERE status = ?''', ('valid',))
burn_total = sum([burn['earned'] for burn in cursor.fetchall()])
# Subtract issuance fees.
cursor.execute('''SELECT * FROM issuances\
WHERE status = ?''', ('valid',))
issuance_fee_total = sum([issuance['fee_paid'] for issuance in cursor.fetchall()])
# Subtract dividend fees.
cursor.execute('''SELECT * FROM dividends\
WHERE status = ?''', ('valid',))
dividend_fee_total = sum([dividend['fee_paid'] for dividend in cursor.fetchall()])
cursor.close()
return burn_total - issuance_fee_total - dividend_fee_total
def supplies (db):
cursor = db.cursor()
supplies = {config.XCP: xcp_supply(db)}
cursor.execute('''SELECT * from issuances \
WHERE status = ?''', ('valid',))
for issuance in list(cursor):
asset = issuance['asset']
quantity = issuance['quantity']
if asset in supplies.keys():
supplies[asset] += quantity
else:
supplies[asset] = quantity
cursor.close()
return supplies
def get_url(url, abort_on_error=False, is_json=True, fetch_timeout=5):
try:
r = requests.get(url, timeout=fetch_timeout)
except Exception as e:
raise GetURLError("Got get_url request error: %s" % e)
else:
if r.status_code != 200 and abort_on_error:
raise GetURLError("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.text))
result = json.loads(r.text) if is_json else r.text
return result
def dhash_string(text):
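    # Hex-encoded double SHA-256 of the UTF-8 text.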
return binascii.hexlify(hashlib.sha256(hashlib.sha256(bytes(text, 'utf-8')).digest()).digest()).decode()
### Bitcoin Addresses ###
def validate_address(address, block_index):
# Get array of pubkeyhashes to check.
if is_multisig(address):
if not (config.TESTNET and block_index >= config.FIRST_MULTISIG_BLOCK_TESTNET):
raise MultiSigAddressError('Multi‐signature addresses are currently disabled.')
pubkeyhashes = pubkeyhash_array(address)
else:
pubkeyhashes = [address]
# Check validity by attempting to decode.
    for pubkeyhash in pubkeyhashes:
        base58_check_decode(pubkeyhash, config.ADDRESSVERSION)
def base58_encode(binary):
# Convert big‐endian bytes to integer
n = int('0x0' + binascii.hexlify(binary).decode('utf8'), 16)
# Divide that integer into base58
res = []
while n > 0:
n, r = divmod (n, 58)
res.append(b58_digits[r])
res = ''.join(res[::-1])
return res
def base58_check_encode(original, version):
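    # Prepend the version byte, append a 4-byte double-SHA-256 checksum, then Base58-encode.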
b = binascii.unhexlify(bytes(original, 'utf-8'))
d = version + b
binary = d + dhash(d)[:4]
res = base58_encode(binary)
# Encode leading zeros as base58 zeros
czero = 0
pad = 0
for c in d:
if c == czero: pad += 1
else: break
address = b58_digits[0] * pad + res
if bytes(original, 'utf-8') != binascii.hexlify(base58_check_decode(address, version)):
raise exceptions.AddressError('encoded address does not decode properly')
return address
def base58_check_decode (s, version):
# Convert the string to an integer
n = 0
for c in s:
n *= 58
if c not in b58_digits:
raise exceptions.InvalidBase58Error('Not a valid base58 character:', c)
digit = b58_digits.index(c)
n += digit
# Convert the integer to bytes
h = '%x' % n
if len(h) % 2:
h = '0' + h
res = binascii.unhexlify(h.encode('utf8'))
# Add padding back.
pad = 0
for c in s[:-1]:
if c == b58_digits[0]: pad += 1
else: break
k = version * pad + res
addrbyte, data, chk0 = k[0:1], k[1:-4], k[-4:]
if addrbyte != version:
raise exceptions.VersionByteError('incorrect version byte')
chk1 = dhash(addrbyte + data)[:4]
if chk0 != chk1:
raise exceptions.Base58ChecksumError('Checksum mismatch: %r ≠ %r' % (chk0, chk1))
return data
### Bitcoin Addresses ###
### Multi‐signature Addresses ###
# NOTE: a `pub` is either a pubkey or a pubkeyhash
class MultiSigAddressError (exceptions.AddressError):
pass
def is_multisig(address):
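    # Multi-signature addresses are serialized as 'required_pub1_..._pubN_possible'.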
array = address.split('_')
return (len(array) > 1)
def canonical_address(address):
if is_multisig(address):
signatures_required, pubkeyhashes, signatures_possible = extract_array(address)
if not all([base58_check_decode(pubkeyhash, config.ADDRESSVERSION) for pubkeyhash in pubkeyhashes]):
raise MultiSigAddressError('Multi‐signature address must use PubKeyHashes, not public keys.')
return construct_array(signatures_required, pubkeyhashes, signatures_possible)
else:
return address
def test_array(signatures_required, pubs, signatures_possible):
try:
signatures_required, signatures_possible = int(signatures_required), int(signatures_possible)
except ValueError:
raise MultiSigAddressError('Signature values not integers.')
if signatures_required < 1 or signatures_required > 3:
raise MultiSigAddressError('Invalid signatures_required.')
if signatures_possible < 2 or signatures_possible > 3:
raise MultiSigAddressError('Invalid signatures_possible.')
if signatures_possible != len(pubs):
raise exceptions.InputError('Incorrect number of pubkeys/pubkeyhashes in multi‐signature address.')
def construct_array(signatures_required, pubs, signatures_possible):
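    # e.g. construct_array(1, ['b', 'a'], 2) == '1_a_b_2' (pubs are sorted).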
test_array(signatures_required, pubs, signatures_possible)
address = '_'.join([str(signatures_required)] + sorted(pubs) + [str(signatures_possible)])
return address
def extract_array(address):
assert is_multisig(address)
array = address.split('_')
signatures_required, pubs, signatures_possible = array[0], sorted(array[1:-1]), array[-1]
test_array(signatures_required, pubs, signatures_possible)
return int(signatures_required), pubs, int(signatures_possible)
def pubkeyhash_array(address):
signatures_required, pubkeyhashes, signatures_possible = extract_array(address)
if not all([base58_check_decode(pubkeyhash, config.ADDRESSVERSION) for pubkeyhash in pubkeyhashes]):
raise MultiSigAddressError('Multi‐signature address must use PubKeyHashes, not public keys.')
return pubkeyhashes
### Multi‐signature Addresses ###
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | -1,116,486,481,562,769,000 | 41.676906 | 448 | 0.606536 | false | 3.9392 | true | false | false |
belangeo/cecilia4csound | Resources/Control.py | 1 | 76089 | # encoding: utf-8
"""
Copyright 2015 iACT, universite de Montreal, Olivier Belanger, Jean Piche
This file is part of Cecilia4Csound.
Cecilia4Csound is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Cecilia4Csound is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Cecilia4Csound. If not, see <http://www.gnu.org/licenses/>.
"""
import wx, os, time, math, sys
from constants import *
import CeciliaLib
from Widgets import *
from types import ListType
from TogglePopup import SamplerPopup, SamplerToggle
from Plugins import *
import wx.lib.scrolledpanel as scrolled
from pyo.lib._wxwidgets import ControlSlider
def powerOf2(value):
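    # Return the smallest power of two strictly greater than value (capped at 2**24).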
for i in range(24):
p2 = int(math.pow(2,(i+1)))
if p2 > value:
break
return p2
def chooseColourFromName(name):
def clip(x):
val = int(x*255)
if val < 0: val = 0
elif val > 255: val = 255
else: val = val
return val
def colour(name):
vals = COLOUR_CLASSES[name]
hue = vals[0]
bright = vals[1]
sat = vals[2]
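        # Standard HSV-to-RGB conversion: the hue circle is split into six 60-degree segments.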
segment = int(math.floor(hue / 60))
fraction = hue / 60 - segment
t1 = bright * (1 - sat)
t2 = bright * (1 - (sat * fraction))
t3 = bright * (1 - (sat * (1 - fraction)))
if segment == 0:
r, g, b = bright, t3, t1
elif segment == 1:
r, g, b = t2, bright, t1
elif segment == 2:
r, g, b = t1, bright, t3
elif segment == 3:
r, g, b = t1, t2, bright
elif segment == 4:
r, g, b = t3, t1, bright
elif segment == 5:
r, g, b = bright, t1, t2
return wx.Colour(clip(r),clip(g),clip(b))
lineColour = colour(name)
midColour = colour(name)
knobColour = colour(name)
sliderColour = colour(name)
return [lineColour, midColour, knobColour, sliderColour]
class CECControl(scrolled.ScrolledPanel):
def __init__(self, parent, id=-1, size=wx.DefaultSize, style=wx.SIMPLE_BORDER):
scrolled.ScrolledPanel.__init__(self, parent, id, size=size, style=style)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.parent = parent
self.outputFilename = ''
self.cfileinList = []
self.peak = ''
self.time = 0
self.charNumForLabel = 34
self.sizerMain = wx.FlexGridSizer(0,1)
self.sizerMain.Add(Separator(self, (230,1), colour=TITLE_BACK_COLOUR), 1, wx.EXPAND)
##### Control Panel #####
controlPanel = wx.Panel(self, -1)
controlPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
controlSizer = wx.FlexGridSizer(1,3)
self.transportButtons = Transport(controlPanel, outPlayFunction=self.onPlayStop,
outRecordFunction=self.onRec,
backgroundColour=TITLE_BACK_COLOUR,
borderColour=WIDGET_BORDER_COLOUR)
self.clocker = Clocker(controlPanel, backgroundColour=TITLE_BACK_COLOUR, borderColour=WIDGET_BORDER_COLOUR)
controlSizer.Add(self.transportButtons, 0, wx.ALIGN_LEFT | wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5)
fakePanel = wx.Panel(controlPanel, -1, size=(10, self.GetSize()[1]))
fakePanel.SetBackgroundColour(TITLE_BACK_COLOUR)
controlSizer.Add(fakePanel)
controlSizer.Add(self.clocker, 1, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
controlSizer.AddGrowableCol(1)
controlPanel.SetSizer(controlSizer)
self.sizerMain.Add(controlPanel, 1, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, 0)
self.sizerMain.Add(Separator(self, (230,1), colour=TITLE_BACK_COLOUR), 1, wx.EXPAND)
self.sizerMain.Add(Separator(self, (230,2), colour=BORDER_COLOUR), 1, wx.EXPAND)
self.sizerMain.AddSpacer((5,1))
self.tabs = TabsPanel(self, outFunction=self.onTogglePanels)
self.sizerMain.Add(self.tabs, 1, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, 0)
##### Input Panel #####
self.inOutSeparators = []
isEmpty = self.createInputPanel()
self.sizerMain.Add(self.inputPanel, 1, wx.EXPAND | wx.ALL, 0)
if not isEmpty:
sep = Separator(self, (230,2), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,2), colour=BORDER_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,1), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
        ##### Output Panel #####
self.createOutputPanel()
self.sizerMain.Add(self.outputPanel, 1, wx.EXPAND | wx.ALL, 0)
sep = Separator(self, (230,2), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,2), colour=BORDER_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
sep = Separator(self, (230,1), colour=BACKGROUND_COLOUR)
self.sizerMain.Add(sep, 1, wx.EXPAND)
self.inOutSeparators.append(sep)
### Plugins panel ###
self.createPluginPanel()
self.sizerMain.Add(self.pluginsPanel, 1, wx.EXPAND | wx.ALL, 0)
self.sizerMain.Show(self.pluginsPanel, False)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
controlPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.inputPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.outputPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.peakLabel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.durationSlider.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.gainSlider.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.vuMeter.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.pluginsPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.SetSizer(self.sizerMain)
self.SetAutoLayout(1)
self.SetupScrolling(scroll_x = False)
wx.CallAfter(self.updateOutputFormat)
def listenSoundfile(self):
CeciliaLib.listenSoundfile(self.outputFilename)
def editSoundfile(self):
CeciliaLib.editSoundfile(self.outputFilename)
def OnLooseFocus(self, event):
win = wx.FindWindowAtPointer()
if win != None:
win = win.GetTopLevelParent()
if win not in [CeciliaLib.getCeciliaEditor(), CeciliaLib.getInterface()]:
win.Raise()
event.Skip()
def onTogglePanels(self, state):
if state == 0:
self.sizerMain.Show(self.pluginsPanel, False, True)
self.sizerMain.Show(self.inputPanel, True, True)
self.sizerMain.Show(self.outputPanel, True, True)
[self.sizerMain.Show(sep, True, True) for sep in self.inOutSeparators]
else:
self.sizerMain.Show(self.pluginsPanel, True, True)
self.sizerMain.Show(self.inputPanel, False, True)
self.sizerMain.Show(self.outputPanel, False, True)
[self.sizerMain.Show(sep, False, True) for sep in self.inOutSeparators]
self.sizerMain.Layout()
def createGrapherLines(self, plugin):
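        # Add one automation line per plugin knob to the grapher, each bound
        # to a freshly allocated slider table number.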
knobs = [plugin.knob1, plugin.knob2, plugin.knob3]
grapher = CeciliaLib.getGrapher()
choice = grapher.toolbar.getPopupChoice()
choice.extend([knob.getLongLabel() for knob in knobs])
grapher.toolbar.setPopupChoice(choice)
tableNum = CeciliaLib.getSliderTableNum()
for knob in knobs:
tableNum += 1
knob.setTable(tableNum)
func = '0 %f 1 %f' % (knob.GetValue(), knob.GetValue())
func = [float(v.replace('"', '')) for v in func.split()]
func = [[func[i*2] * CeciliaLib.getTotalTime(), func[i*2+1]] for i in range(len(func) / 2)]
mini = knob.getRange()[0]
maxi = knob.getRange()[1]
colour = chooseColourFromName('red')
label = knob.getLongLabel()
log = knob.getLog()
name = knob.getName()
size = 8192
sl = knob
grapher.plotter.createLine(func, (mini, maxi), colour, label, log, name, tableNum, size, sl, '')
grapher.plotter.getData()[-1].setShow(0)
grapher.plotter.draw()
CeciliaLib.setSliderTableNum(tableNum+1)
def removeGrapherLines(self, plugin):
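        # Remove the plugin's knob automation lines from the grapher popup menu and plot.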
knobs = [plugin.knob1, plugin.knob2, plugin.knob3]
tmp = [knob.getLongLabel() for knob in knobs]
names = [knob.getName() for knob in knobs]
grapher = CeciliaLib.getGrapher()
choice = grapher.toolbar.getPopupChoice()
for label in tmp:
if label in choice:
choice.remove(label)
grapher.toolbar.setPopupChoice(choice)
grapher.plotter.removeLines(names)
def replacePlugin1(self, i, new):
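        # Swap the plugin in slot 1: stash the outgoing plugin's parameters,
        # build the new panel, restore its saved parameters and splice it into
        # the sizer. replacePlugin2/3 below mirror this for the other slots.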
self.pluginsParams[0][self.oldPlugins[0]] = self.plugin1.getParams()
oldPlugin1 = self.plugin1
if self.oldPlugins[0] != 0:
self.removeGrapherLines(oldPlugin1)
if new == 'None':
self.plugin1 = NonePlugin(self.pluginsPanel, self.replacePlugin1, 0)
CeciliaLib.setPlugins(None, 0)
elif new == 'Reverb':
self.plugin1 = ReverbPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Filter':
self.plugin1 = FilterPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Chorus':
self.plugin1 = ChorusPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Para EQ':
self.plugin1 = EQPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == '3 Bands EQ':
self.plugin1 = EQ3BPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Compress':
self.plugin1 = CompressPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Gate':
self.plugin1 = GatePlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Disto':
self.plugin1 = DistoPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'AmpMod':
self.plugin1 = AmpModPlugin(self.pluginsPanel, self.replacePlugin1, 0)
elif new == 'Phaser':
self.plugin1 = PhaserPlugin(self.pluginsPanel, self.replacePlugin1, 0)
if new != 'None':
CeciliaLib.setPlugins(self.plugin1, 0)
self.createGrapherLines(self.plugin1)
ind = PLUGINS_CHOICE.index(self.plugin1.getName())
self.oldPlugins[0] = ind
self.plugin1.setParams(self.pluginsParams[0][ind])
if CeciliaLib.getPlatform() == 'darwin':
self.pluginSizer.Replace(oldPlugin1, self.plugin1)
else:
item = self.pluginSizer.GetItem(oldPlugin1)
item.DeleteWindows()
self.pluginSizer.Insert(2, self.plugin1, 0)
self.pluginsPanel.Layout()
def replacePlugin2(self, i, new):
self.pluginsParams[1][self.oldPlugins[1]] = self.plugin2.getParams()
oldPlugin2 = self.plugin2
if self.oldPlugins[1] != 0:
self.removeGrapherLines(oldPlugin2)
if new == 'None':
self.plugin2 = NonePlugin(self.pluginsPanel, self.replacePlugin2, 1)
CeciliaLib.setPlugins(None, 1)
elif new == 'Reverb':
self.plugin2 = ReverbPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Filter':
self.plugin2 = FilterPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Chorus':
self.plugin2 = ChorusPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Para EQ':
self.plugin2 = EQPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == '3 Bands EQ':
self.plugin2 = EQ3BPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Compress':
self.plugin2 = CompressPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Gate':
self.plugin2 = GatePlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Disto':
self.plugin2 = DistoPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'AmpMod':
self.plugin2 = AmpModPlugin(self.pluginsPanel, self.replacePlugin2, 1)
elif new == 'Phaser':
self.plugin2 = PhaserPlugin(self.pluginsPanel, self.replacePlugin2, 1)
if new != 'None':
CeciliaLib.setPlugins(self.plugin2, 1)
self.createGrapherLines(self.plugin2)
ind = PLUGINS_CHOICE.index(self.plugin2.getName())
self.oldPlugins[1] = ind
self.plugin2.setParams(self.pluginsParams[1][ind])
if CeciliaLib.getPlatform() == 'darwin':
self.pluginSizer.Replace(oldPlugin2, self.plugin2)
else:
item = self.pluginSizer.GetItem(oldPlugin2)
item.DeleteWindows()
self.pluginSizer.Insert(8, self.plugin2, 0)
self.pluginsPanel.Layout()
def replacePlugin3(self, i, new):
self.pluginsParams[2][self.oldPlugins[2]] = self.plugin3.getParams()
oldPlugin3 = self.plugin3
if self.oldPlugins[2] != 0:
self.removeGrapherLines(oldPlugin3)
if new == 'None':
self.plugin3 = NonePlugin(self.pluginsPanel, self.replacePlugin3, 2)
CeciliaLib.setPlugins(None, 2)
elif new == 'Reverb':
self.plugin3 = ReverbPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Filter':
self.plugin3 = FilterPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Chorus':
self.plugin3 = ChorusPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Para EQ':
self.plugin3 = EQPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == '3 Bands EQ':
self.plugin3 = EQ3BPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Compress':
self.plugin3 = CompressPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Gate':
self.plugin3 = GatePlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Disto':
self.plugin3 = DistoPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'AmpMod':
self.plugin3 = AmpModPlugin(self.pluginsPanel, self.replacePlugin3, 2)
elif new == 'Phaser':
self.plugin3 = PhaserPlugin(self.pluginsPanel, self.replacePlugin3, 2)
if new != 'None':
CeciliaLib.setPlugins(self.plugin3, 2)
self.createGrapherLines(self.plugin3)
ind = PLUGINS_CHOICE.index(self.plugin3.getName())
self.oldPlugins[2] = ind
self.plugin3.setParams(self.pluginsParams[2][ind])
if CeciliaLib.getPlatform() == 'darwin':
self.pluginSizer.Replace(oldPlugin3, self.plugin3)
else:
item = self.pluginSizer.GetItem(oldPlugin3)
item.DeleteWindows()
self.pluginSizer.Insert(13, self.plugin3, 0)
self.pluginsPanel.Layout()
def setPlugins(self, pluginsDict):
for key in pluginsDict.keys():
if key == 0:
self.replacePlugin1(None, pluginsDict[key][0])
self.plugin1.setParams(pluginsDict[key][1])
self.plugin1.setStates(pluginsDict[key][2])
elif key == 1:
self.replacePlugin2(None, pluginsDict[key][0])
self.plugin2.setParams(pluginsDict[key][1])
self.plugin2.setStates(pluginsDict[key][2])
elif key == 2:
self.replacePlugin3(None, pluginsDict[key][0])
self.plugin3.setParams(pluginsDict[key][1])
self.plugin3.setStates(pluginsDict[key][2])
def updateTime(self, time):
self.setTime(time)
self.GetParent().grapher.plotter.drawCursor(time)
def updateAmps(self, amps):
self.vuMeter.setAmplitude(amps)
def createInputPanel(self):
isEmpty = True
self.inputPanel = wx.Panel(self, -1, style=wx.NO_BORDER)
inputSizer = wx.FlexGridSizer(5,1)
self.cfileinList = []
samplersList = []
widgets = CeciliaLib.getInterfaceWidgets()
for w in range(len(widgets)):
if widgets[w]['type'] == 'cfilein':
cFileIn = Cfilein(self.inputPanel, label=widgets[w].get('label', ''), name=widgets[w]['name'])
self.cfileinList.append(cFileIn)
elif widgets[w]['type'] == 'csampler':
cSampler = CSampler(self.inputPanel, label=widgets[w].get('label', ''), name=widgets[w]['name'])
self.cfileinList.append(cSampler)
samplersList.append(cSampler)
CeciliaLib.setUserSamplers(samplersList)
if self.cfileinList != []:
isEmpty = False
# Section title
inputTextPanel = wx.Panel(self.inputPanel, -1, style=wx.NO_BORDER)
inputTextPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
inputTextSizer = wx.FlexGridSizer(1,1)
inputText = wx.StaticText(inputTextPanel, -1, 'INPUT')
inputText.SetFont(wx.Font(SECTION_TITLE_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
inputText.SetBackgroundColour(TITLE_BACK_COLOUR)
inputText.SetForegroundColour(SECTION_TITLE_COLOUR)
inputTextSizer.Add(inputText, 0, wx.ALIGN_RIGHT | wx.ALL, 3)
inputTextSizer.AddGrowableCol(0)
inputTextPanel.SetSizer(inputTextSizer)
inputSizer.Add(inputTextPanel, 1, wx.EXPAND| wx.ALIGN_RIGHT | wx.ALL, 0)
for i in range(len(self.cfileinList)):
inputSizer.Add(self.cfileinList[i], 1, wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP, -1)
if i != len(self.cfileinList)-1:
inputSizer.Add(Separator(self.inputPanel, size=(230,1)), 1, wx.EXPAND)
inputSizer.AddGrowableCol(0)
self.inputPanel.SetSizer(inputSizer)
return isEmpty
def createOutputPanel(self):
self.outputPanel = wx.Panel(self, -1, style=wx.NO_BORDER)
self.outputPanel.SetBackgroundColour(BACKGROUND_COLOUR)
outputSizer = wx.FlexGridSizer(0,1)
outputTextPanel = wx.Panel(self.outputPanel, -1, style=wx.NO_BORDER)
outputTextPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
outputTextSizer = wx.FlexGridSizer(1,1)
outputText = wx.StaticText(outputTextPanel, -1, 'OUTPUT')
outputText.SetFont(wx.Font(SECTION_TITLE_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
outputText.SetBackgroundColour(TITLE_BACK_COLOUR)
outputText.SetForegroundColour(SECTION_TITLE_COLOUR)
outputTextSizer.Add(outputText, 0, wx.ALIGN_RIGHT | wx.ALL, 3)
outputTextSizer.AddGrowableCol(0)
outputTextPanel.SetSizer(outputTextSizer)
outputSizer.Add(outputTextPanel, 1, wx.EXPAND| wx.ALIGN_RIGHT | wx.ALL, 0)
outputSizer.AddSpacer((5,7))
outLine1 = wx.BoxSizer(wx.HORIZONTAL)
# File Name Label
self.filenameLabel = OutputLabel(self.outputPanel, label='', size=(130,20),
colour=CONTROLLABEL_BACK_COLOUR, outFunction=self.onSelectOutputFilename)
self.filenameLabel.SetToolTip(CECTooltip(TT_OUTPUT))
self.filenameLabel.setItalicLabel('File name')
outLine1.Add(self.filenameLabel, 0, wx.LEFT | wx.ALIGN_LEFT | wx.ALIGN_CENTER_VERTICAL, 0)
outLine1.AddSpacer((25,1))
outToolbox = ToolBox(self.outputPanel,
tools=['play','edit','recycle'],
outFunction=[self.listenSoundfile,
self.editSoundfile,
self.onReuseOutputFile])
outLine1.Add(outToolbox, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 2)
outputSizer.Add(outLine1, 1, wx.EXPAND | wx.LEFT | wx.RIGHT | wx.BOTTOM, 7)
# Duration Static Text
durationText = wx.StaticText(self.outputPanel, -1, 'Duration (sec) :')
durationText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
durationText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
outputSizer.Add(durationText, 0, wx.ALIGN_LEFT | wx.LEFT, 9)
# Duration Slider
outputSizer.AddSpacer((3,1))
self.durationSlider = ControlSlider(self.outputPanel,
0.001, 3600, CeciliaLib.getDefaultTotalTime(),
size=(220,15),
log=True,
backColour=BACKGROUND_COLOUR,
outFunction=self.setTotalTime)
self.durationSlider.setSliderHeight(10)
self.durationSlider.SetToolTip(CECTooltip(TT_DUR_SLIDER))
outputSizer.Add(self.durationSlider, 0, wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 7)
# Gain Static Text
gainText = wx.StaticText(self.outputPanel, -1, 'Gain (dB) :')
gainText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
gainText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
outputSizer.Add(gainText, 0, wx.ALIGN_LEFT | wx.LEFT, 9)
# Gain Slider
outputSizer.AddSpacer((3,1))
self.gainSlider = ControlSlider(self.outputPanel,
-48, 18, 0,
size=(220,15),
log=False,
backColour=BACKGROUND_COLOUR,
outFunction=self.onChangeGain)
self.gainSlider.setSliderHeight(10)
self.gainSlider.SetToolTip(CECTooltip(TT_GAIN_SLIDER))
CeciliaLib.setGainSlider(self.gainSlider)
outputSizer.Add(self.gainSlider, 0, wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 7)
# VU Meter
self.meterSizer = wx.BoxSizer()
self.vuMeter = VuMeter(self.outputPanel)
self.meterSizer.Add(self.vuMeter, 0, wx.EXPAND | wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 8)
# Format choice
self.lineSizer = wx.BoxSizer(wx.HORIZONTAL)
formatList, selectedNchnls = self.defineFormatsList()
formatSizer = wx.BoxSizer(wx.VERTICAL)
self.formatText = wx.StaticText(self.outputPanel, -1, 'Format :')
self.formatText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
self.formatText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
formatSizer.Add(self.formatText, 0, wx.ALIGN_LEFT | wx.LEFT, 2)
self.formatChoice = CustomMenu(self.outputPanel,
choice=formatList,
init=selectedNchnls,
outFunction=self.onFormatChange,
colour=CONTROLLABEL_BACK_COLOUR)
self.formatChoice.SetToolTip(CECTooltip(TT_CHANNELS))
formatSizer.Add(self.formatChoice, 0, wx.ALIGN_LEFT | wx.TOP, 1)
self.lineSizer.Add(formatSizer, 0, wx.ALIGN_LEFT | wx.RIGHT, 10)
# Peak
peakSizer = wx.BoxSizer(wx.VERTICAL)
self.peakText = wx.StaticText(self.outputPanel, -1, 'Peak :')
self.peakText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
self.peakText.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
peakSizer.Add(self.peakText, 0, wx.ALIGN_LEFT | wx.LEFT, 2)
self.peakLabel = PeakLabel(self.outputPanel,
label=self.peak,
size=(100,20),
font=None,
colour=CONTROLLABEL_BACK_COLOUR,
gainSlider=self.gainSlider)
self.peakLabel.SetToolTip(CECTooltip(TT_PEAK))
peakSizer.Add(self.peakLabel, 0, wx.ALIGN_LEFT | wx.TOP, 1)
self.lineSizer.Add(peakSizer, 0, wx.ALIGN_LEFT | wx.LEFT, 10)
outputSizer.Add(self.meterSizer, 1, wx.EXPAND)
outputSizer.Add(self.lineSizer, 0, wx.ALIGN_LEFT | wx.LEFT | wx.BOTTOM, 7)
outputSizer.AddGrowableRow(9)
self.outputPanel.SetSizer(outputSizer)
outputTextPanel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
outToolbox.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
def createPluginPanel(self):
self.oldPlugins = [0,0,0]
for i in range(3):
CeciliaLib.setPlugins(None, i)
self.pluginsParams = { 0: [[0,0,0,0], [.25,1,.5,1], [1,1000,1,1], [.5,.2,.5,1], [1000,500,1,1], [1,1,1,1], [-20,3,0,1], [-20,0,.005,1], [3,1000,0.5,1], [8,1,0,1], [100,.5,1,1]],
1: [[0,0,0,0], [.25,1,.5,1], [1,1000,1,1], [.5,.2,.5,1], [1000,500,1,1], [1,1,1,1], [-20,3,0,1], [-20,0,.005,1], [3,1000,0.5,1], [8,1,0,1], [100,.5,1,1]],
2: [[0,0,0,0], [.25,1,.5,1], [1,1000,1,1], [.5,.2,.5,1], [1000,500,1,1], [1,1,1,1], [-20,3,0,1], [-20,0,.005,1], [3,1000,0.5,1], [8,1,0,1], [100,.5,1,1]]}
self.pluginsPanel = wx.Panel(self, -1, style=wx.NO_BORDER)
self.pluginsPanel.SetBackgroundColour(BACKGROUND_COLOUR)
self.pluginSizer = wx.BoxSizer(wx.VERTICAL)
pluginTextPanel = wx.Panel(self.pluginsPanel, -1, style=wx.NO_BORDER)
pluginTextPanel.SetBackgroundColour(TITLE_BACK_COLOUR)
pluginTextSizer = wx.BoxSizer(wx.HORIZONTAL)
pluginText = wx.StaticText(pluginTextPanel, -1, 'POST-PROCESSING')
pluginText.SetFont(wx.Font(SECTION_TITLE_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
pluginText.SetBackgroundColour(TITLE_BACK_COLOUR)
pluginText.SetForegroundColour(SECTION_TITLE_COLOUR)
pluginTextSizer.Add(pluginText, 0, wx.ALIGN_RIGHT | wx.ALL, 3)
pluginTextPanel.SetSizer(pluginTextSizer)
self.pluginSizer.Add(pluginTextPanel, 1, wx.EXPAND| wx.ALIGN_RIGHT, 0) # 1
self.pluginSizer.AddSpacer((5,3)) # 2
self.plugin1 = NonePlugin(self.pluginsPanel, self.replacePlugin1, 0)
self.pluginSizer.Add(self.plugin1, 0) # 3
self.pluginSizer.AddSpacer((5,7)) # 4
self.pluginSizer.Add(Separator(self.pluginsPanel, (230,2), colour=BORDER_COLOUR), 0, wx.EXPAND) # 5
self.pluginSizer.AddSpacer((5,3)) # 6
self.plugin2 = NonePlugin(self.pluginsPanel, self.replacePlugin2, 1)
self.pluginSizer.Add(self.plugin2, 0) # 7
self.pluginSizer.AddSpacer((5,7)) # 8
self.pluginSizer.Add(Separator(self.pluginsPanel, (230,2), colour=BORDER_COLOUR), 0, wx.EXPAND) # 9
self.pluginSizer.AddSpacer((5,3)) # 10
self.plugin3 = NonePlugin(self.pluginsPanel, self.replacePlugin3, 2)
self.pluginSizer.Add(self.plugin3, 0) # 11
self.pluginSizer.AddSpacer((5,7)) # 12
self.pluginSizer.Add(Separator(self.pluginsPanel, (230,2), colour=BORDER_COLOUR), 0, wx.EXPAND) # 13
self.pluginSizer.AddSpacer((5,1)) # 14
self.pluginsPanel.SetSizer(self.pluginSizer)
def getCfileinList(self):
return self.cfileinList
def getCfileinFromName(self, name):
good = None
for cfilein in self.cfileinList:
if name == cfilein.getName():
good = cfilein
break
return good
def defineFormatsList(self):
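        # Build the channel-format menu: map supported nchnls values to their
        # names and pick the entry matching the current output channel count.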
formats=[]
self.formatDict=dict()
selectedNCHNLS = ''
for format in CeciliaLib.getSupportedFormats().items():
if format[0]!='Custom...':
self.formatDict[format[1]] = format[0]
if self.formatDict.has_key(CeciliaLib.getNchnls()):
selectedNCHNLS = self.formatDict[CeciliaLib.getNchnls()]
else:
selectedNCHNLS = 'Custom...'
formatsNCHNLS=self.formatDict.keys()
formatsNCHNLS.sort()
for i in formatsNCHNLS:
formats.append(self.formatDict[i])
formats.append('Custom...')
return formats, selectedNCHNLS
def getTime(self):
return self.time
def setTime(self,curTime=0):
self.time = curTime
self.clocker.setTime(curTime)
def resetMeter(self):
self.updatePeak(0)
self.resetVuMeter()
def onPlayStop(self, value):
if value:
CeciliaLib.setOutputFile('dac')
CeciliaLib.startCeciliaSound()
else:
CeciliaLib.stopCeciliaSound()
def onRec(self, value):
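        # Recording: resolve (or auto-rename) the output file before rendering;
        # abort cleanly if the user cancels the save dialog.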
if value:
if CeciliaLib.getAutoRenameFlag() and self.outputFilename != '':
filename = CeciliaLib.autoRename(self.outputFilename)
self.filenameLabel.setLabel(CeciliaLib.shortenName(os.path.split(filename)[1],self.charNumForLabel))
if self.outputFilename == '':
filename = self.onSelectOutputFilename()
if filename == None:
CeciliaLib.stopCeciliaSound()
CeciliaLib.getCeciliaEditor().transportButtons.setRecord(False)
CeciliaLib.getCeciliaEditor().transportButtons.setPlay(False)
return
self.outputFilename = filename
CeciliaLib.setOutputFile(filename)
CeciliaLib.startCeciliaSound()
self.updatePeak(0)
CeciliaLib.getCeciliaEditor().transportButtons.setRecord(True)
CeciliaLib.getCeciliaEditor().transportButtons.setPlay(True)
else:
CeciliaLib.stopCeciliaSound()
CeciliaLib.getCeciliaEditor().transportButtons.setRecord(False)
CeciliaLib.getCeciliaEditor().transportButtons.setPlay(False)
def onSelectOutputFilename(self):
if CeciliaLib.getFileType() == 'wav':
wildcard = "Wave file|*.wave;*.WAV;*.WAVE;*.Wav;*.Wave*.wav|" \
"All files|*.*"
elif CeciliaLib.getFileType() == 'aiff':
wildcard = "AIFF file|*.aiff;*.aifc;*.AIF;*.AIFF;*.Aif;*.Aiff*.aif|" \
"All files|*.*"
file = CeciliaLib.saveFileDialog(self, wildcard, type='Save audio')
if file != None:
CeciliaLib.setSaveAudioFilePath(os.path.split(file)[0])
self.filenameLabel.setLabel(CeciliaLib.shortenName(os.path.split(file)[1],self.charNumForLabel))
self.outputFilename = file
return file
def updateOutputFormat(self):
self.vuMeter.updateNchnls()
x, y = self.meterSizer.GetPosition()
w, h = self.vuMeter.GetSize()
self.meterSizer.SetMinSize((w, h+8))
self.meterSizer.SetDimension(x, y, w, h+8)
w2, h2 = self.lineSizer.GetSize()
self.lineSizer.SetDimension(7, y+h+10, w2, h2)
self.Layout()
wx.CallAfter(self.Refresh)
def onFormatChange(self, idx, choice):
if choice == 'Custom...':
nchnls = CeciliaLib.dialogSelectCustomNchnls(self)
if nchnls==None:
nchnls = CeciliaLib.getNchnls()
if nchnls in self.formatDict.keys():
self.formatChoice.setStringSelection(self.formatDict[nchnls])
else:
self.formatChoice.setStringSelection('Custom...')
return
if not nchnls in self.formatDict.keys():
CeciliaLib.setCustomSupportedFormats(nchnls)
self.formatChoice.setStringSelection('Custom...')
else:
self.formatChoice.setStringSelection(self.formatDict[nchnls])
else:
nchnls = CeciliaLib.getSupportedFormats()[choice]
CeciliaLib.setNchnls(nchnls)
self.updateOutputFormat()
def onReuseOutputFile(self):
if os.path.isfile(self.outputFilename):
if self.cfileinList != []:
self.cfileinList[0].updateMenuFromPath(self.outputFilename)
def setTotalTime(self, time):
if self.cfileinList != [] and time == 0:
dur = self.cfileinList[0].getDuration()
CeciliaLib.setTotalTime(time)
self.durationSlider.SetValue(dur)
else:
CeciliaLib.setTotalTime(time)
def updateDurationSlider(self):
self.durationSlider.SetValue(CeciliaLib.getTotalTime())
def updateNchnls(self):
nchnls = CeciliaLib.getNchnls()
if nchnls==1:
format = 'Mono'
elif nchnls==2:
format = 'Stereo'
elif nchnls==4:
format = 'Quad'
elif nchnls==6:
format = '5.1'
elif nchnls==8:
format = 'Octo'
else:
format = 'Custom...'
self.formatChoice.setStringSelection(format)
self.updateOutputFormat()
def onChangeGain(self, gain):
CeciliaLib.getCsound().setChannel("masterVolume", gain)
def updatePeak(self, peak):
self.peak = peak - 90.
label = ''
if self.peak > 0:
label += '+'
label += '%2.2f dB' % self.peak
self.peakLabel.setLabel(label)
def resetVuMeter(self):
self.vuMeter.resetMax()
class Cfilein(wx.Panel):
def __init__(self, parent, id=-1, label='', size=(-1,-1), style = wx.NO_BORDER, name=''):
wx.Panel.__init__(self, parent, id, size=size, style=style, name=name)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.label = label
self.name = name
self.duration = None
self.chnls = None
self.type = None
self.samprate = None
self.bitrate = None
self.filePath = ''
self.folderInfo = None
mainSizer = wx.FlexGridSizer(4,1)
mainSizer.AddSpacer((200,4))
# Static label for the popup menu
textLabel = wx.StaticText(self, -1, "%s :" % self.label)
textLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textLabel.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
mainSizer.Add(textLabel, 0, wx.LEFT, 9)
# Popup menu
line2 = wx.BoxSizer(wx.HORIZONTAL)
self.fileMenu = FolderPopup(self, path=None, init='', outFunction=self.onSelectSound,
emptyFunction=self.onLoadFile, backColour=CONTROLLABEL_BACK_COLOUR, tooltip=TT_SEL_SOUND)
line2.Add(self.fileMenu, 0, wx.ALIGN_CENTER | wx.TOP, 1)
line2.AddSpacer((25,5))
self.toolbox = ToolBox(self, tools=['play','edit','open'],
outFunction=[self.listenSoundfile,
self.editSoundfile,
self.onShowSampler])
line2.Add(self.toolbox, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 2)
mainSizer.Add(line2, 1, wx.LEFT, 8)
mainSizer.AddSpacer((5,2))
self.createSamplerFrame()
self.SetSizer(mainSizer)
CeciliaLib.getUserInputs()[self.name] = dict()
CeciliaLib.getUserInputs()[self.name]['type'] = 'cfilein'
CeciliaLib.getUserInputs()[self.name]['path'] = ''
def listenSoundfile(self):
CeciliaLib.listenSoundfile(self.filePath)
def editSoundfile(self):
CeciliaLib.editSoundfile(self.filePath)
def createSamplerFrame(self):
self.samplerFrame = CfileinFrame(self, self.name)
def onShowSampler(self):
if self.samplerFrame.IsShown():
self.samplerFrame.Hide()
else:
pos = wx.GetMousePosition()
framepos = (pos[0]+10, pos[1]+20)
self.samplerFrame.SetPosition(framepos)
self.samplerFrame.Show()
def getDuration(self):
return self.duration
def setTotalTime(self):
if self.duration:
CeciliaLib.getControlPanel().setTotalTime(self.duration)
CeciliaLib.getControlPanel().updateDurationSlider()
def onSelectSound(self, idx, file):
self.filePath = self.folderInfo[file]['path']
self.duration = self.folderInfo[file]['dur']
self.chnls = self.folderInfo[file]['chnls']
self.type = self.folderInfo[file]['type']
self.samprate = self.folderInfo[file]['samprate']
self.bitrate = self.folderInfo[file]['bitrate']
self.samplerFrame.offsetSlider.Enable()
self.samplerFrame.offsetSlider.SetRange(0,self.duration)
self.samplerFrame.offsetSlider.SetValue(self.getOffset())
self.samplerFrame.update(path=self.filePath,
dur=self.duration,
type=self.type,
bitDepth=self.bitrate,
chanNum=self.chnls,
sampRate=self.samprate)
nsamps = self.samprate * self.duration
tableSize = powerOf2(nsamps)
fracPart = float(nsamps) / tableSize
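        # Csound's table opcodes want a power-of-two table length; powerOf2()
        # (defined elsewhere in this module) presumably rounds nsamps up to
        # that, and fracPart records which fraction of the table actually
        # holds samples.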
CeciliaLib.getUserInputs()[self.name]['gensize%s' % self.name] = tableSize
CeciliaLib.getUserInputs()[self.name]['sr%s' % self.name] = self.samprate
CeciliaLib.getUserInputs()[self.name]['dur%s' % self.name] = self.duration
CeciliaLib.getUserInputs()[self.name]['nchnls%s' % self.name] = self.chnls
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = self.getOffset()
CeciliaLib.getUserInputs()[self.name]['path'] = self.filePath
def onLoadFile(self, filePath=''):
wildcard = "All files|*.*|" \
"AIFF file|*.aif;*.aiff;*.aifc;*.AIF;*.AIFF;*.Aif;*.Aiff|" \
"Wave file|*.wav;*.wave;*.WAV;*.WAVE;*.Wav;*.Wave"
if filePath == '':
path = CeciliaLib.openAudioFileDialog(self, wildcard, defaultPath=CeciliaLib.getOpenAudioFilePath())
elif not os.path.isfile(filePath):
return
else:
path = filePath
if path:
CeciliaLib.setOpenAudioFilePath(os.path.split(path)[0])
self.updateMenuFromPath(path)
def reset(self):
self.fileMenu.reset()
self.filePath = ''
CeciliaLib.getUserInputs()[self.name]['path'] = self.filePath
def updateMenuFromPath(self, path):
root = os.path.split(path)[0]
pathList = []
for p in os.listdir(root):
pathList.append(os.path.join(root,p))
self.folderInfo = CeciliaLib.getCsound().getSoundsFromList(pathList)
files = self.folderInfo.keys()
files.sort()
self.fileMenu.setChoice(files)
self.fileMenu.setLabel(CeciliaLib.ensureNFD(os.path.split(path)[1]))
def onOffsetSlider(self, value):
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = value
if self.duration != None:
newMaxDur = self.duration - value
CeciliaLib.getUserInputs()[self.name]['dur%s' % self.name] = newMaxDur
try:
self.samplerFrame.loopOutSlider.setRange(0, newMaxDur)
except:
pass
def setOffset(self, value):
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = value
self.samplerFrame.offsetSlider.Enable()
self.samplerFrame.offsetSlider.SetValue(value)
def getOffset(self):
try:
off = CeciliaLib.getUserInputs()[self.name]['off%s' % self.name]
except:
off = self.samplerFrame.offsetSlider.GetValue()
return off
def getName(self):
return self.name
class CSampler(Cfilein):
def __init__(self, parent, id=-1, label='', size=(-1,-1), style = wx.NO_BORDER, name=''):
wx.Panel.__init__(self, parent, id, size=size, style=style, name=name)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.frameOpen = False
self.samplerFrame = None
self.folderInfo = None
self.label = label
self.name = name
self.duration = 0.
self.chnls = 0
self.outputChnls = 1
self.gainMod = None
self.transMod = None
self.startPos = None
self.type = ''
self.samprate = 0
self.bitrate = 0
self.tableNums = [CeciliaLib.getSamplerSliderTableNum() + i for i in range(5)]
CeciliaLib.setSamplerSliderTableNum(self.tableNums[-1]+1)
self.filePath = ''
mainSizer = wx.FlexGridSizer(4,1)
mainSizer.AddSpacer((200,4))
# Static label for the popup menu
textLabel = wx.StaticText(self, -1, "%s :" % self.label)
textLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textLabel.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
mainSizer.Add(textLabel, 0, wx.LEFT, 9)
# Popup menu
line2 = wx.BoxSizer(wx.HORIZONTAL)
self.fileMenu = FolderPopup(self, path=None, init='', outFunction=self.onSelectSound,
emptyFunction=self.onLoadFile, backColour=CONTROLLABEL_BACK_COLOUR, tooltip=TT_SEL_SOUND)
line2.Add(self.fileMenu, 0, wx.ALIGN_CENTER | wx.TOP, 1)
line2.AddSpacer((25,5))
self.toolbox = ToolBox(self, tools=['play','edit','open'],
outFunction=[self.listenSoundfile,
self.editSoundfile,
self.onShowSampler],
openSampler=True)
self.toolbox.setOpen(False)
line2.Add(self.toolbox, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 2)
mainSizer.Add(line2, 1, wx.LEFT, 8)
mainSizer.AddSpacer((5,2))
self.createSamplerFrame()
self.SetSizer(mainSizer)
CeciliaLib.getUserInputs()[self.name] = dict()
CeciliaLib.getUserInputs()[self.name]['type'] = 'csampler'
CeciliaLib.getUserInputs()[self.name]['path'] = ''
def setOutputChnls(self, chnls):
self.outputChnls = chnls
def getOutputChnls(self):
return self.outputChnls
def setGainAndTrans(self, values):
if values == []:
self.gainMod = ''
self.transMod = ''
self.startPos = ''
elif len(values) == 1:
self.gainMod = values[0]
self.transMod = ''
self.startPos = ''
elif len(values) == 2:
self.gainMod = values[0]
self.transMod = values[1]
self.startPos = ''
else:
self.gainMod = values[0]
self.transMod = values[1]
self.startPos = values[2]
def createSamplerFrame(self):
self.samplerFrame = SamplerFrame(self, self.name, self.tableNums)
def onShowSampler(self):
if self.samplerFrame.IsShown():
self.samplerFrame.Hide()
else:
pos = wx.GetMousePosition()
framepos = (pos[0]+10, pos[1]+20)
self.samplerFrame.SetPosition(framepos)
self.samplerFrame.Show()
def onSelectSound(self, idx, file):
self.filePath = self.folderInfo[file]['path']
self.duration = self.folderInfo[file]['dur']
self.chnls = self.folderInfo[file]['chnls']
self.type = self.folderInfo[file]['type']
self.samprate = self.folderInfo[file]['samprate']
self.bitrate = self.folderInfo[file]['bitrate']
self.samplerFrame.offsetSlider.Enable()
self.samplerFrame.offsetSlider.SetRange(0,self.duration)
self.samplerFrame.offsetSlider.SetValue(self.getOffset())
self.samplerFrame.update(path=self.filePath,
dur=self.duration,
type=self.type,
bitDepth=self.bitrate,
chanNum=self.chnls,
sampRate=self.samprate)
nsamps = self.samprate * self.duration
tableSize = powerOf2(nsamps)
fracPart = float(nsamps) / tableSize
CeciliaLib.getUserInputs()[self.name]['gensize%s' % self.name] = tableSize
CeciliaLib.getUserInputs()[self.name]['sr%s' % self.name] = self.samprate
CeciliaLib.getUserInputs()[self.name]['dur%s' % self.name] = self.duration
CeciliaLib.getUserInputs()[self.name]['nchnls%s' % self.name] = self.chnls
CeciliaLib.getUserInputs()[self.name]['off%s' % self.name] = self.getOffset()
CeciliaLib.getUserInputs()[self.name]['path'] = self.filePath
if CeciliaLib.getGrapher():
for line in CeciliaLib.getGrapher().plotter.getData():
if line.getName() == self.samplerFrame.loopInSlider.getCName() or \
line.getName() == self.samplerFrame.loopOutSlider.getCName():
line.changeYrange((0, self.duration))
def getSamplerInfo(self):
info = {}
info['loopMode'] = self.samplerFrame.getLoopMode()
info['startFromLoop'] = self.samplerFrame.getStartFromLoop()
info['loopX'] = self.samplerFrame.getLoopX()
info['loopIn'] = self.samplerFrame.getLoopIn()
info['loopOut'] = self.samplerFrame.getLoopOut()
info['gain'] = self.samplerFrame.getGain()
info['transp'] = self.samplerFrame.getTransp()
return info
def getSamplerFrame(self):
return self.samplerFrame
def getText(self):
offset = self.getOffset()
sndnchnls = self.chnls
nchnls = self.getOutputChnls()
udoText = ''
loadTableText = ''
if self.samplerFrame.loopMenu.popup.getIndex() == 0:
tableLen = powerOf2(self.duration*self.samprate)
else:
tableLen = 0
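        # Sketch of the text emitted by the loop below, one ftgen line per
        # channel (paths and numbers are hypothetical):
        #   gitab1 ftgen 100, 0, 524288, -1, "/path/sound.aif", 0.0, 0, 1
        #   gitab2 ftgen 101, 0, 524288, -1, "/path/sound.aif", 0.0, 0, 2
        # A tableLen of 0 defers sizing to GEN01, which then follows the
        # sound file's length.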
for i in range(sndnchnls):
chnNum = i + 1
genNum = CeciliaLib.getSamplerTableNum() + i
path = CeciliaLib.convertWindowsPath(CeciliaLib.getUserInputs()[self.name]['path'])
loadTableText += 'gitab%d ftgen %d, 0, %d, -1, "%s", %f, 0, %d\n' % (chnNum, genNum, tableLen, path, offset, chnNum)
if not self.gainMod and not self.transMod:
udoText += '\nopcode Sampler_%s, ' % self.name + 'a'*nchnls + ', S\n'
udoText += 'Spath xin\n\n'
elif self.gainMod and not self.transMod and not self.startPos:
udoText += 'opcode Sampler_%s, ' % self.name + 'a'*nchnls + ', Sk\n'
udoText += 'Spath, kgainMod xin\n\n'
elif self.gainMod and self.transMod and not self.startPos:
udoText += 'opcode Sampler_%s, ' % self.name + 'a'*nchnls + ', Skk\n'
udoText += 'Spath, kgainMod, ktransMod xin\n\n'
elif self.gainMod and self.transMod and self.startPos:
udoText += 'opcode Sampler_%s, ' % self.name + 'a'*nchnls + ', Skki\n'
udoText += 'Spath, kgainMod, ktransMod, istartpos xin\n\n'
udoText += 'iHalfSr = sr / 2.2\n'
udoText += 'iSrAdjust = %f / sr\n\n' % self.samprate
istart = 0
if self.gainMod:
gain = 'ampdbfs(gk%sgain) / 0dbfs * kgainMod' % self.name
else:
gain = 'ampdbfs(gk%sgain) / 0dbfs' % self.name
if self.transMod:
udoText += 'kSamplerTrans = semitone(gk%strans) * ktransMod\n' % self.name
else:
udoText += 'kSamplerTrans = semitone(gk%strans)\n' % self.name
udoText += 'kend = gk%sstart + gk%send\n' % (self.name, self.name)
if not self.startPos:
udoText += 'if gi%sstartpoint == 0 then\n' % self.name
udoText += 'istart = 0\nelse\nistart = i(gk%sstart)\nendif\n' % self.name
else:
udoText += 'istart = istartpos\n'
for i in range(sndnchnls):
chnNum = i + 1
genNum = CeciliaLib.getSamplerTableNum() + i
if self.samplerFrame.loopMenu.popup.getIndex() == 0:
#udoText += 'aSampler%d loscil %s, kSamplerTrans * iSrAdjust, %d, 1\n' % (chnNum, gain, genNum)
udoText += 'iend = nsamp(%d)\n' % genNum
udoText += 'aphase%d lphasor kSamplerTrans * iSrAdjust, 0, iend\n' % chnNum
udoText += 'aSampler%d tablei aphase%d, %d\n' % (chnNum, chnNum, genNum)
udoText += 'aSampler%d = aSampler%d * %s\n' % (chnNum, chnNum, gain)
else:
udoText += 'aSampler%d flooper2 %s, kSamplerTrans * iSrAdjust, gk%sstart * iSrAdjust, kend * iSrAdjust, gk%sxfade * iSrAdjust, %d, istart * iSrAdjust, gi%sloopi-1\n' %(chnNum, gain, self.name, self.name, genNum, self.name)
CeciliaLib.setSamplerTableNum(genNum+1)
udoText += """
if kSamplerTrans < 1 then
kSamplerAlias = iHalfSr/kSamplerTrans
else
kSamplerAlias = iHalfSr
endif
"""
for i in range(sndnchnls):
chnNum = i + 1
udoText += 'aSampler%d tonex aSampler%d, kSamplerAlias, 4\n' % (chnNum, chnNum)
            udoText += 'aSampler%d dcblock aSampler%d\n' % (chnNum, chnNum)
samplerslist = [[] for i in range(nchnls)]
outputslist = ['aOut%d' % (i+1) for i in range(nchnls)]
if sndnchnls >= nchnls:
for i in range(sndnchnls):
mod = i % nchnls
samplerslist[mod].append('aSampler%d' % (i+1))
else:
for i in range(nchnls):
mod = i % sndnchnls
samplerslist[i].append('aSampler%d' % (mod+1))
for i in range(nchnls):
if len(samplerslist[i]) > 1:
div = len(samplerslist[i])
udoText += '\naOut%d = (' % (i+1) + '+'.join(samplerslist[i]) + ') / %d' % div
else:
udoText += '\naOut%d = (' % (i+1) + '+'.join(samplerslist[i]) + ')'
udoText += '\n\nxout ' + ', '.join(outputslist)
udoText += '\nendop\n\n'
return udoText, loadTableText
class CfileinFrame(wx.Frame):
def __init__(self, parent, name, pos=wx.DefaultPosition):
style = ( wx.CLIP_CHILDREN | wx.FRAME_NO_TASKBAR | wx.FRAME_SHAPED | wx.NO_BORDER | wx.FRAME_FLOAT_ON_PARENT)
wx.Frame.__init__(self, parent, title='', pos=pos, style=style)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.parent = parent
self.name = name
self.SetClientSize((385, 143))
panel = wx.Panel(self, -1)
w, h = self.GetSize()
panel.SetBackgroundColour(BACKGROUND_COLOUR)
box = wx.BoxSizer(wx.VERTICAL)
# Header
self.title = FrameLabel(panel, '', size=(w-2, 50))
box.Add(self.title, 0, wx.ALL, 1)
box.AddSpacer((200,2))
#toolbox
toolsBox = wx.BoxSizer(wx.HORIZONTAL)
tools = ToolBox(panel, size=(80,20), tools=['play', 'edit', 'time' ],
outFunction=[self.parent.listenSoundfile,
self.parent.editSoundfile,
self.parent.setTotalTime])
toolsBox.Add(tools, 0, wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 320)
box.Add(toolsBox, 0, wx.TOP, 5)
# Static label for the offset slider
textOffset = wx.StaticText(panel, -1, '%s Offset :' % self.parent.label)
textOffset.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textOffset.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
box.Add(textOffset, 0, wx.LEFT, 20)
box.AddSpacer((200,2))
# Offset slider
self.offsetSlider = ControlSlider(self, minvalue=0, maxvalue=100, size=(222,15), init=0,
outFunction=self.parent.onOffsetSlider, backColour=BACKGROUND_COLOUR)
self.offsetSlider.setSliderHeight(10)
self.offsetSlider.Disable()
box.Add(self.offsetSlider, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 20)
box.AddSpacer((200,10))
self.close = CloseBox(panel, outFunction=self.close)
box.Add(self.close, 0, wx.LEFT, 330)
box.AddSpacer((200,7))
panel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.title.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
panel.SetSizerAndFit(box)
self.Show(False)
def OnLooseFocus(self, event):
win = wx.FindWindowAtPointer()
if win.GetTopLevelParent() in [self, CeciliaLib.getCeciliaEditor()]:
pass
else:
win = CeciliaLib.getInterface()
win.Raise()
def close(self):
self.Hide()
self.GetParent().toolbox.setOpen(False)
def update(self, path, dur, type, bitDepth, chanNum, sampRate):
self.path = path
self.dur = dur
self.type = type
self.bitDepth = bitDepth
self.chanNum = chanNum
self.sampRate = sampRate
soundInfoText = self.createHeader()
self.title.setLabel(soundInfoText)
def createHeader(self):
if self.sampRate > 1000:
self.sampRate = self.sampRate / 1000.
header = '%s\n' % CeciliaLib.shortenName(self.path,48)
header += '%0.2f sec - %s - %dBit - %d ch. - %2.1fkHz' % (self.dur, self.type, self.bitDepth, self.chanNum, self.sampRate)
return header
class SamplerFrame(wx.Frame):
def __init__(self, parent, name, tableNums, pos=wx.DefaultPosition, size=(390, 295)):
style = ( wx.CLIP_CHILDREN | wx.FRAME_NO_TASKBAR | wx.FRAME_SHAPED | wx.NO_BORDER | wx.FRAME_FLOAT_ON_PARENT)
wx.Frame.__init__(self, parent, title='', pos=pos, style=style)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.parent = parent
self.SetClientSize(size)
self.size = size
self.name = name
self.tableNums = tableNums
self.loopList = ['Off', 'Forward', 'Backward', 'Back and Forth']
panel = wx.Panel(self, -1)
w, h = size
panel.SetBackgroundColour(BACKGROUND_COLOUR)
box = wx.BoxSizer(wx.VERTICAL)
# Header
self.title = FrameLabel(panel, '', size=(w-2, 50))
box.Add(self.title, 0, wx.ALL, 1)
# Static label for the offset slider
textOffset = wx.StaticText(panel, -1, '%s Offset :' % self.parent.label)
textOffset.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, face=FONT_FACE))
textOffset.SetForegroundColour(TEXT_LABELFORWIDGET_COLOUR)
box.Add(textOffset, 0, wx.LEFT, 20)
box.AddSpacer((200,2))
# Offset slider
offBox = wx.BoxSizer(wx.HORIZONTAL)
self.offsetSlider = ControlSlider(panel, minvalue=0, maxvalue=100, size=(345,15), init=0,
outFunction=self.parent.onOffsetSlider, backColour=BACKGROUND_COLOUR)
self.offsetSlider.SetToolTip(CECTooltip(TT_SAMPLER_OFFSET))
self.offsetSlider.setSliderHeight(10)
self.offsetSlider.Disable()
offBox.Add(self.offsetSlider, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 20)
box.Add(offBox)
box.AddSpacer((200,10))
#Loop type + toolbox
loopBox = wx.FlexGridSizer(1,5,5,5)
loopLabel = wx.StaticText(panel, -1, "Loop")
loopLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
loopLabel.SetForegroundColour("#FFFFFF")
loopBox.Add(loopLabel, 0, wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 10)
self.loopMenu = SamplerPopup(panel, self.loopList, self.loopList[1], self.name)
self.loopMenu.popup.SetToolTip(CECTooltip(TT_SAMPLER_LOOP))
loopBox.Add(self.loopMenu.popup, 0, wx.ALIGN_CENTER_VERTICAL, 20)
startLabel = wx.StaticText(panel, -1, "Start from loop")
startLabel.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
startLabel.SetForegroundColour("#FFFFFF")
loopBox.Add(startLabel, 0, wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 20)
self.startToggle = SamplerToggle(panel, 0, self.name)
self.startToggle.toggle.SetToolTip(CECTooltip(TT_SAMPLER_START))
loopBox.Add(self.startToggle.toggle, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT , 30)
tools = ToolBox(panel, size=(80,20), tools=['play', 'edit', 'time' ],
outFunction=[self.parent.listenSoundfile,
self.parent.editSoundfile,
self.parent.setTotalTime])
loopBox.Add(tools, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT)
loopBox.AddGrowableCol(2)
box.Add(loopBox, 0, wx.ALL, 10)
# Sliders
slidersBox = wx.FlexGridSizer(5, 4, 5, 5)
self.loopInSlider = SamplerSlider(panel, self.name, "Loop In", "sec", 0, 1, 0, self.tableNums[0])
self.loopInSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_LOOP_IN))
slidersBox.AddMany([(self.loopInSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.loopInSlider.buttons, 0, wx.CENTER),
(self.loopInSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.loopInSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.loopOutSlider = SamplerSlider(panel, self.name, "Loop Time", "sec", 0, 1, 1, self.tableNums[1])
self.loopOutSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_LOOP_DUR))
slidersBox.AddMany([(self.loopOutSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.loopOutSlider.buttons, 0, wx.CENTER),
(self.loopOutSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.loopOutSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.loopXSlider = SamplerSlider(panel, self.name, "Loop X", "sec", 0, 1, .05, self.tableNums[2])
self.loopXSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_CROSSFADE))
slidersBox.AddMany([(self.loopXSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.loopXSlider.buttons, 0, wx.CENTER),
(self.loopXSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.loopXSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.gainSlider = SamplerSlider(panel, self.name, "Gain", "dB", -48, 18, 0, self.tableNums[3])
self.gainSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_GAIN))
slidersBox.AddMany([(self.gainSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.gainSlider.buttons, 0, wx.CENTER),
(self.gainSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.gainSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
self.transpSlider = SamplerSlider(panel, self.name, "Transpo", "cents", -48, 48, 0, self.tableNums[4], integer=False)
self.transpSlider.slider.SetToolTip(CECTooltip(TT_SAMPLER_TRANSPO))
slidersBox.AddMany([(self.transpSlider.labelText, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT),
(self.transpSlider.buttons, 0, wx.CENTER),
(self.transpSlider.slider, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5),
(self.transpSlider.unit, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_LEFT)])
box.Add(slidersBox, 0, wx.EXPAND | wx.ALL, 6)
self.close = CloseBox(panel, outFunction=self.close)
box.Add(self.close, 0, wx.LEFT, 330)
box.AddSpacer((200,7))
sliderlist = [self.loopInSlider, self.loopOutSlider, self.loopXSlider, self.gainSlider, self.transpSlider]
samplerSliders = CeciliaLib.getSamplerSliders()
CeciliaLib.setSamplerSliders(samplerSliders + sliderlist)
userSliders = CeciliaLib.getUserSliders()
CeciliaLib.setUserSliders(userSliders + sliderlist)
samplerTogPop = CeciliaLib.getSamplerTogglePopup()
CeciliaLib.setSamplerTogglePopup(samplerTogPop + [self.loopMenu, self.startToggle])
panel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
self.title.Bind(wx.EVT_LEAVE_WINDOW, self.OnLooseFocus)
panel.SetSizerAndFit(box)
self.Show(False)
def OnLooseFocus(self, event):
win = wx.FindWindowAtPointer()
if win.GetTopLevelParent() in [self, CeciliaLib.getCeciliaEditor()]:
pass
else:
win = CeciliaLib.getInterface()
win.Raise()
def close(self):
self.Hide()
self.GetParent().toolbox.setOpen(False)
def update(self, path, dur, type, bitDepth, chanNum, sampRate):
self.path = path
self.dur = dur
self.type = type
self.bitDepth = bitDepth
self.chanNum = chanNum
self.sampRate = sampRate
soundInfoText = self.createHeader()
self.title.setLabel(soundInfoText)
self.loopInSlider.setRange(0, self.dur)
self.loopInSlider.setValue(0)
self.loopOutSlider.setRange(0, self.dur)
self.loopOutSlider.setValue(self.dur)
self.loopXSlider.setRange(0, self.dur)
def createHeader(self):
if self.sampRate > 1000:
self.sampRate = self.sampRate / 1000.
header = '%s\n' % CeciliaLib.shortenName(self.path,48)
header += '%0.2f sec - %s - %dBit - %d ch. - %2.1fkHz' % (self.dur, self.type, self.bitDepth, self.chanNum, self.sampRate)
return header
def setLoopMode(self, index):
self.loopMenu.popup.setByIndex(index)
def getLoopMode(self):
return self.loopMenu.getValue()
def setStartFromLoop(self, value):
self.startToggle.setValue(value)
def getStartFromLoop(self):
return self.startToggle.getValue()
def setLoopX(self, values):
self.loopXSlider.setValue(values[0])
self.loopXSlider.setPlay(values[1])
def getLoopX(self):
return [self.loopXSlider.getValue(), self.loopXSlider.getPlay()]
def setLoopIn(self, values):
self.loopInSlider.setValue(values[0])
self.loopInSlider.setPlay(values[1])
def getLoopIn(self):
return [self.loopInSlider.getValue(), self.loopInSlider.getPlay()]
def setLoopOut(self, values):
self.loopOutSlider.setValue(values[0])
self.loopOutSlider.setPlay(values[1])
def getLoopOut(self):
return [self.loopOutSlider.getValue(), self.loopOutSlider.getPlay()]
def setGain(self, values):
self.gainSlider.setValue(values[0])
self.gainSlider.setPlay(values[1])
def getGain(self):
return [self.gainSlider.getValue(), self.gainSlider.getPlay()]
def setTransp(self, values):
self.transpSlider.setValue(values[0])
self.transpSlider.setPlay(values[1])
def getTransp(self):
return [self.transpSlider.getValue(), self.transpSlider.getPlay()]
class SamplerPlayRecButtons(wx.Panel):
def __init__(self, parent, id=wx.ID_ANY, pos=(0,0), size=(40,20)):
wx.Panel.__init__(self, parent=parent, id=wx.ID_ANY, pos=pos, size=size)
self.SetMaxSize(self.GetSize())
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self.SetBackgroundColour(BACKGROUND_COLOUR)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_MOTION, self.OnMotion)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)
self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
self.playColour = SLIDER_PLAY_COLOUR_HOT
self.recColour = SLIDER_REC_COLOUR_HOT
self.playOver = False
self.recOver = False
self.playOverWait = True
self.recOverWait = True
self.play = False
self.rec = False
if CeciliaLib.getPlatform() == "win32":
self.dcref = wx.BufferedPaintDC
else:
self.dcref = wx.PaintDC
def setOverWait(self, which):
if which == 0:
self.playOverWait = False
elif which == 1:
self.recOverWait = False
def checkForOverReady(self, pos):
if not wx.Rect(2, 2, 17, 17).Contains(pos):
self.playOverWait = True
if not wx.Rect(21, 2, 38, 17).Contains(pos):
self.recOverWait = True
def setPlay(self, x):
if x == 0:
self.play = False
self.playColour = SLIDER_PLAY_COLOUR_HOT
elif x == 1:
self.play = True
self.playColour = SLIDER_PLAY_COLOUR_NO_BIND
wx.CallAfter(self.Refresh)
def setRec(self, x):
if x == 0:
self.rec = False
self.recColour = SLIDER_REC_COLOUR_HOT
else:
self.rec = True
self.recColour = SLIDER_REC_COLOUR_PRESSED
def MouseDown(self, evt):
pos = evt.GetPosition()
if wx.Rect(2, 2, 17, 17).Contains(pos):
if self.play:
self.play = False
self.playColour = SLIDER_PLAY_COLOUR_HOT
else:
self.play = True
self.playColour = SLIDER_PLAY_COLOUR_NO_BIND
self.setOverWait(0)
elif wx.Rect(21, 2, 38, 17).Contains(pos):
if self.rec:
self.rec = False
self.recColour = SLIDER_REC_COLOUR_HOT
else:
self.rec = True
self.recColour = SLIDER_REC_COLOUR_PRESSED
self.setOverWait(1)
self.playOver = False
self.recOver = False
wx.CallAfter(self.Refresh)
self.CaptureMouse()
evt.Skip()
def MouseUp(self, evt):
if self.HasCapture():
self.ReleaseMouse()
def OnMotion(self, evt):
pos = evt.GetPosition()
if wx.Rect(2, 2, 17, 17).Contains(pos) and self.playOverWait:
self.playOver = True
self.recOver = False
elif wx.Rect(21, 2, 38, 17).Contains(pos) and self.recOverWait:
self.playOver = False
self.recOver = True
self.checkForOverReady(pos)
wx.CallAfter(self.Refresh)
evt.Skip()
def OnLeave(self, evt):
self.playOver = False
self.recOver = False
self.playOverWait = True
self.recOverWait = True
wx.CallAfter(self.Refresh)
evt.Skip()
def OnPaint(self, evt):
w,h = self.GetSize()
dc = self.dcref(self)
gc = wx.GraphicsContext_Create(dc)
dc.SetBrush(wx.Brush(BACKGROUND_COLOUR, wx.SOLID))
dc.Clear()
dc.SetPen(wx.Pen(BACKGROUND_COLOUR, width=0, style=wx.SOLID))
dc.DrawRectangle(0, 0, w, h)
# Draw triangle
if self.playOver: playColour = SLIDER_PLAY_COLOUR_OVER
else: playColour = self.playColour
gc.SetPen(wx.Pen(playColour, width=1, style=wx.SOLID))
gc.SetBrush(wx.Brush(playColour, wx.SOLID))
tri = [(14,h/2), (9,6), (9,h-6), (14,h/2)]
gc.DrawLines(tri)
dc.SetPen(wx.Pen('#333333', width=1, style=wx.SOLID))
dc.DrawLine(w/2,4,w/2,h-4)
# Draw circle
if self.recOver: recColour = SLIDER_REC_COLOUR_OVER
else: recColour = self.recColour
gc.SetPen(wx.Pen(recColour, width=1, style=wx.SOLID))
gc.SetBrush(wx.Brush(recColour, wx.SOLID))
gc.DrawEllipse(w/4+w/2-4, h/2-4, 8, 8)
evt.Skip()
def getPlay(self):
return self.play
def getRec(self):
return self.rec
class SamplerSlider:
def __init__(self, parent, name, label, unit, mini, maxi, init, tableNum, integer=False):
self.name = name
self.tableNum = tableNum
self.automationLength = None
self.automationData = []
self.path = None
self.label = name + ' ' + label
self.cname = {'Loop In': name+'start', 'Loop Time': name+'end',
'Loop X': name+'xfade', 'Gain': name+'gain', 'Transpo': name+'trans'}[label]
self.labelText = wx.StaticText(parent, -1, label)
self.labelText.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
self.labelText.SetForegroundColour("#FFFFFF")
self.buttons = SamplerPlayRecButtons(parent)
self.slider = ControlSlider(parent, mini, maxi, init, size=(236, 15),
integer=integer, outFunction=self.sendValue,
backColour=BACKGROUND_COLOUR)
self.slider.setSliderHeight(10)
self.unit = wx.StaticText(parent, -1, unit)
self.unit.SetFont(wx.Font(TEXT_LABELFORWIDGET_FONT, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, face=FONT_FACE))
self.unit.SetForegroundColour("#FFFFFF")
def getLog(self):
return False
def getMinValue(self):
return self.slider.getMinValue()
def getMaxValue(self):
return self.slider.getMaxValue()
def setAutomationLength(self, x):
self.automationLength = x
def getAutomationLength(self):
return self.automationLength
def sendValue(self, value):
if self.getPlay() == False or self.getRec() == True:
CeciliaLib.getCsound().setChannel("%s_value" % self.getCName(), value)
def setRange(self, minval, maxval):
self.slider.SetRange(minval, maxval)
self.setValue(self.getValue())
def setValue(self, val):
self.slider.SetValue(val)
def getValue(self):
return self.slider.GetValue()
def getLabel(self):
return self.label
def getCName(self):
return self.cname
def getName(self):
return self.name
def setPlay(self, x):
self.buttons.setPlay(x)
def getPlay(self):
return self.buttons.getPlay()
def setRec(self, x):
self.buttons.setRec(x)
def getRec(self):
return self.buttons.getRec()
def getTable(self):
return self.tableNum
def getPath(self):
return self.path
def setAutomationData(self, data):
# convert values on scaling
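        # Each recorded value is normalized into [0, 1] against the slider's
        # range (logarithmically when getLog() is true) and paired with its
        # relative time position, so the grapher can redraw the automation
        # curve independently of the slider's units.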
temp = []
log = self.getLog()
minval = self.getMinValue()
maxval = self.getMaxValue()
automationlength = self.getAutomationLength()
frac = automationlength / CeciliaLib.getTotalTime()
virtuallength = len(data) / frac
data.extend([data[-1]] * int(((1 - frac) * virtuallength)))
totallength = float(len(data))
oldpos = 0
oldval = data[0]
if log:
maxOnMin = maxval / minval
torec = math.log10(oldval/minval) / math.log10(maxOnMin)
else:
maxMinusMin = maxval - minval
torec = (oldval - minval) / maxMinusMin
temp.append([0.0, torec])
for i, val in enumerate(data):
length = (i - oldpos) / totallength
pos = oldpos / totallength + length
if log:
torec = math.log10(val/minval) / math.log10(maxOnMin)
else:
torec = (val - minval) / maxMinusMin
temp.append([pos, torec])
oldval = val
oldpos = i
self.automationData = temp
def getAutomationData(self):
return [[x[0],x[1]] for x in self.automationData]
def getCeciliaText(self):
orchtext = ''
scotext = ''
if self.cname.find('start') != -1 or self.cname.find('end') != -1 or self.cname.find('xfade') != -1:
porta = 0.
else:
porta = .05
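        # Loop-point sliders get no portamento (edits must land exactly on
        # the requested sample positions), while gain/transposition changes
        # are smoothed over 50 ms by the port opcode emitted below.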
orchtext += 'instr Cecilia_%s\n' % self.cname
orchtext += 'ksliderValue init %f\n' % self.getValue()
if self.getPlay():
orchtext += 'ksliderDown init 0\n'
orchtext += 'kreadValue init %f\n' % self.getValue()
orchtext += 'ksliderValue chnget "%s_value"\n' % self.cname
if self.getPlay():
orchtext += 'ksliderDown chnget "%s_down"\n' % self.cname
orchtext += '\nkreadValue \t oscil1i \t 0, 1, p3, %d\n' % self.getTable()
orchtext += '\nif ksliderDown == 1 then\n'
orchtext += 'gk%s port ksliderValue, %f, %f\n' % (self.cname, porta, self.getValue())
if self.getPlay():
orchtext += 'else\n'
orchtext += 'gk%s = kreadValue\n' % self.cname
orchtext += 'endif\n\n'
if self.getRec():
self.path = os.path.join(AUTOMATION_SAVE_PATH, '%s.auto' % self.cname)
orchtext += 'dumpk gk%s, "%s", 8, 4/kr\n\n' % (self.cname, self.path)
orchtext += 'endin\n\n'
scotext += 'i "Cecilia_%s" 0 %f\n' % (self.cname, CeciliaLib.getTotalTime())
return orchtext, scotext
| gpl-3.0 | -1,204,763,819,855,194,000 | 41.555369 | 238 | 0.583711 | false | 3.504468 | false | false | false |
qPCR4vir/orange3 | Orange/widgets/visualize/owscatterplot.py | 1 | 20625 | import numpy as np
from PyQt4.QtCore import Qt, QTimer
from PyQt4 import QtGui
from PyQt4.QtGui import QApplication
from sklearn.neighbors import NearestNeighbors
from sklearn.metrics import r2_score
import Orange
from Orange.data import Table, Domain, StringVariable, ContinuousVariable, \
DiscreteVariable
from Orange.canvas import report
from Orange.data.sql.table import SqlTable, AUTO_DL_LIMIT
from Orange.preprocess.score import ReliefF, RReliefF
from Orange.widgets import gui
from Orange.widgets.settings import \
DomainContextHandler, Setting, ContextSetting, SettingProvider
from Orange.widgets.visualize.owscatterplotgraph import OWScatterPlotGraph
from Orange.widgets.visualize.utils import VizRankDialogAttrPair
from Orange.widgets.widget import OWWidget, Default, AttributeList
def font_resize(font, factor, minsize=None, maxsize=None):
font = QtGui.QFont(font)
fontinfo = QtGui.QFontInfo(font)
size = fontinfo.pointSizeF() * factor
if minsize is not None:
size = max(size, minsize)
if maxsize is not None:
size = min(size, maxsize)
font.setPointSizeF(size)
return font
class ScatterPlotVizRank(VizRankDialogAttrPair):
captionTitle = "Score plots"
K = 10
def check_preconditions(self):
if not super().check_preconditions():
return False
if not self.master.data.domain.class_var:
self.information(33, "Data with a class variable is required.")
return False
self.master.information(33)
return True
def iterate_states(self, initial_state):
# If we put initialization of `self.attrs` to `initialize`,
# `score_heuristic` would be run on every call to `set_data`.
if initial_state is None: # on the first call, compute order
self.attrs = self.score_heuristic()
yield from super().iterate_states(initial_state)
def compute_score(self, state):
graph = self.master.graph
ind12 = [graph.data_domain.index(self.attrs[x]) for x in state]
valid = graph.get_valid_list(ind12)
X = graph.scaled_data[ind12, :][:, valid].T
Y = self.master.data.Y[valid]
if X.shape[0] < self.K:
return
n_neighbors = min(self.K, len(X) - 1)
knn = NearestNeighbors(n_neighbors=n_neighbors).fit(X)
ind = knn.kneighbors(return_distance=False)
if self.master.data.domain.has_discrete_class:
return -np.sum(Y[ind] == Y.reshape(-1, 1))
else:
return -r2_score(Y, np.mean(Y[ind], axis=1)) * \
(len(Y) / len(self.master.data))
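    # Scoring note: the negations above suggest VizRank ranks lower scores
    # as better. For a discrete class the score is the negated count of
    # neighbour label agreements under k-NN in the projected 2D space; for
    # a continuous class it is a negated R^2 of the k-NN mean prediction,
    # weighted by the fraction of rows valid for this attribute pair.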
def score_heuristic(self):
X = self.master.graph.scaled_data.T
Y = self.master.data.Y
mdomain = self.master.data.domain
dom = Domain([ContinuousVariable(str(i)) for i in range(X.shape[1])],
mdomain.class_vars)
data = Table(dom, X, Y)
relief = ReliefF if isinstance(dom.class_var, DiscreteVariable) \
else RReliefF
weights = relief(n_iterations=100, k_nearest=self.K)(data)
attrs = sorted(zip(weights, mdomain.attributes), reverse=True)
return [a for _, a in attrs]
class OWScatterPlot(OWWidget):
name = 'Scatter Plot'
description = 'Scatterplot visualization with explorative analysis and intelligent data visualization enhancements.'
icon = "icons/ScatterPlot.svg"
inputs = [("Data", Table, "set_data", Default),
("Data Subset", Table, "set_subset_data"),
("Features", AttributeList, "set_shown_attributes")]
outputs = [("Selected Data", Table, Default),
("Other Data", Table),
("Features", Table)]
settingsHandler = DomainContextHandler()
auto_send_selection = Setting(True)
auto_sample = Setting(True)
toolbar_selection = Setting(0)
attr_x = ContextSetting("")
attr_y = ContextSetting("")
graph = SettingProvider(OWScatterPlotGraph)
jitter_sizes = [0, 0.1, 0.5, 1, 2, 3, 4, 5, 7, 10]
graph_name = "graph.plot_widget.plotItem"
def __init__(self):
super().__init__()
box = gui.vBox(self.mainArea, True, margin=0)
self.graph = OWScatterPlotGraph(self, box, "ScatterPlot")
box.layout().addWidget(self.graph.plot_widget)
plot = self.graph.plot_widget
axispen = QtGui.QPen(self.palette().color(QtGui.QPalette.Text))
axis = plot.getAxis("bottom")
axis.setPen(axispen)
axis = plot.getAxis("left")
axis.setPen(axispen)
self.data = None # Orange.data.Table
self.subset_data = None # Orange.data.Table
self.data_metas_X = None # self.data, where primitive metas are moved to X
self.sql_data = None # Orange.data.sql.table.SqlTable
self.attribute_selection_list = None # list of Orange.data.Variable
self.__timer = QTimer(self, interval=1200)
self.__timer.timeout.connect(self.add_data)
common_options = dict(
labelWidth=50, orientation=Qt.Horizontal, sendSelectedValue=True,
valueType=str)
box = gui.vBox(self.controlArea, "Axis Data")
self.cb_attr_x = gui.comboBox(box, self, "attr_x", label="Axis x:",
callback=self.update_attr,
**common_options)
self.cb_attr_y = gui.comboBox(box, self, "attr_y", label="Axis y:",
callback=self.update_attr,
**common_options)
self.vizrank = ScatterPlotVizRank(self)
vizrank_box = gui.hBox(box)
gui.separator(vizrank_box, width=common_options["labelWidth"])
self.vizrank_button = gui.button(
vizrank_box, self, "Score Plots", callback=self.vizrank.reshow,
tooltip="Find informative projections", enabled=False)
self.vizrank.pairSelected.connect(self.set_attr)
gui.separator(box)
gui.valueSlider(
box, self, value='graph.jitter_size', label='Jittering: ',
values=self.jitter_sizes, callback=self.reset_graph_data,
labelFormat=lambda x:
"None" if x == 0 else ("%.1f %%" if x < 1 else "%d %%") % x)
gui.checkBox(
gui.indentedBox(box), self, 'graph.jitter_continuous',
'Jitter continuous values', callback=self.reset_graph_data)
self.sampling = gui.auto_commit(
self.controlArea, self, "auto_sample", "Sample", box="Sampling",
callback=self.switch_sampling, commit=lambda: self.add_data(1))
self.sampling.setVisible(False)
box = gui.vBox(self.controlArea, "Points")
self.cb_attr_color = gui.comboBox(
box, self, "graph.attr_color", label="Color:",
emptyString="(Same color)", callback=self.update_colors,
**common_options)
self.cb_attr_label = gui.comboBox(
box, self, "graph.attr_label", label="Label:",
emptyString="(No labels)", callback=self.graph.update_labels,
**common_options)
self.cb_attr_shape = gui.comboBox(
box, self, "graph.attr_shape", label="Shape:",
emptyString="(Same shape)", callback=self.graph.update_shapes,
**common_options)
self.cb_attr_size = gui.comboBox(
box, self, "graph.attr_size", label="Size:",
emptyString="(Same size)", callback=self.graph.update_sizes,
**common_options)
g = self.graph.gui
box2 = g.point_properties_box(self.controlArea, box)
box = gui.vBox(self.controlArea, "Plot Properties")
g.add_widgets([g.ShowLegend, g.ShowGridLines], box)
gui.checkBox(
box, self, value='graph.tooltip_shows_all',
label='Show all data on mouse hover')
self.cb_class_density = gui.checkBox(
box, self, value='graph.class_density', label='Show class density',
callback=self.update_density)
gui.checkBox(
box, self, 'graph.label_only_selected',
'Label only selected points', callback=self.graph.update_labels)
self.zoom_select_toolbar = g.zoom_select_toolbar(
gui.vBox(self.controlArea, "Zoom/Select"), nomargin=True,
buttons=[g.StateButtonsBegin, g.SimpleSelect, g.Pan, g.Zoom,
g.StateButtonsEnd, g.ZoomReset]
)
buttons = self.zoom_select_toolbar.buttons
buttons[g.Zoom].clicked.connect(self.graph.zoom_button_clicked)
buttons[g.Pan].clicked.connect(self.graph.pan_button_clicked)
buttons[g.SimpleSelect].clicked.connect(self.graph.select_button_clicked)
buttons[g.ZoomReset].clicked.connect(self.graph.reset_button_clicked)
self.controlArea.layout().addStretch(100)
self.icons = gui.attributeIconDict
p = self.graph.plot_widget.palette()
self.graph.set_palette(p)
gui.auto_commit(self.controlArea, self, "auto_send_selection",
"Send Selection", "Send Automatically")
def zoom(s):
"""Zoom in/out by factor `s`."""
viewbox = plot.getViewBox()
# scaleBy scales the view's bounds (the axis range)
viewbox.scaleBy((1 / s, 1 / s))
def fit_to_view():
viewbox = plot.getViewBox()
viewbox.autoRange()
zoom_in = QtGui.QAction(
"Zoom in", self, triggered=lambda: zoom(1.25)
)
zoom_in.setShortcuts([QtGui.QKeySequence(QtGui.QKeySequence.ZoomIn),
QtGui.QKeySequence(self.tr("Ctrl+="))])
zoom_out = QtGui.QAction(
"Zoom out", self, shortcut=QtGui.QKeySequence.ZoomOut,
triggered=lambda: zoom(1 / 1.25)
)
zoom_fit = QtGui.QAction(
"Fit in view", self,
shortcut=QtGui.QKeySequence(Qt.ControlModifier | Qt.Key_0),
triggered=fit_to_view
)
self.addActions([zoom_in, zoom_out, zoom_fit])
# def settingsFromWidgetCallback(self, handler, context):
# context.selectionPolygons = []
# for curve in self.graph.selectionCurveList:
# xs = [curve.x(i) for i in range(curve.dataSize())]
# ys = [curve.y(i) for i in range(curve.dataSize())]
# context.selectionPolygons.append((xs, ys))
# def settingsToWidgetCallback(self, handler, context):
# selections = getattr(context, "selectionPolygons", [])
# for (xs, ys) in selections:
# c = SelectionCurve("")
# c.setData(xs,ys)
# c.attach(self.graph)
# self.graph.selectionCurveList.append(c)
def reset_graph_data(self, *_):
self.graph.rescale_data()
self.update_graph()
def set_data(self, data):
self.information(1)
self.__timer.stop()
self.sampling.setVisible(False)
self.sql_data = None
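        # Large SQL tables are not materialized outright: a time-boxed sample
        # is downloaded first and, when auto-sampling is on, the QTimer from
        # __init__ keeps calling add_data() to grow it until roughly 2000
        # rows are held.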
if isinstance(data, SqlTable):
if data.approx_len() < 4000:
data = Table(data)
else:
self.information(1, "Large SQL table (showing a sample)")
self.sql_data = data
data_sample = data.sample_time(0.8, no_cache=True)
data_sample.download_data(2000, partial=True)
data = Table(data_sample)
self.sampling.setVisible(True)
if self.auto_sample:
self.__timer.start()
if data is not None and (len(data) == 0 or len(data.domain) == 0):
data = None
if self.data and data and self.data.checksum() == data.checksum():
return
self.closeContext()
same_domain = (self.data and data and
data.domain.checksum() == self.data.domain.checksum())
self.data = data
self.data_metas_X = self.move_primitive_metas_to_X(data)
if not same_domain:
self.init_attr_values()
self.vizrank.initialize()
self.vizrank_button.setEnabled(
self.data is not None and self.data.domain.class_var is not None
and len(self.data.domain.attributes) > 1 and len(self.data) > 1)
self.openContext(self.data)
def add_data(self, time=0.4):
if self.data and len(self.data) > 2000:
return self.__timer.stop()
data_sample = self.sql_data.sample_time(time, no_cache=True)
if data_sample:
data_sample.download_data(2000, partial=True)
data = Table(data_sample)
self.data = Table.concatenate((self.data, data), axis=0)
self.data_metas_X = self.move_primitive_metas_to_X(self.data)
self.handleNewSignals()
def switch_sampling(self):
self.__timer.stop()
if self.auto_sample and self.sql_data:
self.add_data()
self.__timer.start()
def move_primitive_metas_to_X(self, data):
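        """Return `data` with primitive (discrete/continuous) metas moved
        into the attributes so they can be plotted like ordinary features;
        non-primitive metas stay as metas. `None` passes through unchanged."""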
if data is not None:
new_attrs = [a for a in data.domain.attributes + data.domain.metas
if a.is_primitive()]
new_metas = [m for m in data.domain.metas if not m.is_primitive()]
data = Table.from_table(Domain(new_attrs, data.domain.class_vars,
new_metas), data)
return data
def set_subset_data(self, subset_data):
self.warning(0)
if isinstance(subset_data, SqlTable):
if subset_data.approx_len() < AUTO_DL_LIMIT:
subset_data = Table(subset_data)
else:
self.warning(0, "Data subset does not support large Sql tables")
subset_data = None
self.subset_data = self.move_primitive_metas_to_X(subset_data)
# called when all signals are received, so the graph is updated only once
def handleNewSignals(self):
self.graph.new_data(self.data_metas_X, self.subset_data)
if self.attribute_selection_list and \
all(attr in self.graph.data_domain
for attr in self.attribute_selection_list):
self.attr_x = self.attribute_selection_list[0].name
self.attr_y = self.attribute_selection_list[1].name
self.attribute_selection_list = None
self.update_graph()
self.cb_class_density.setEnabled(self.graph.can_draw_density())
self.unconditional_commit()
def set_shown_attributes(self, attributes):
if attributes and len(attributes) >= 2:
self.attribute_selection_list = attributes[:2]
else:
self.attribute_selection_list = None
def get_shown_attributes(self):
return self.attr_x, self.attr_y
def init_attr_values(self):
self.cb_attr_x.clear()
self.cb_attr_y.clear()
self.attr_x = None
self.attr_y = None
self.cb_attr_color.clear()
self.cb_attr_color.addItem("(Same color)")
self.cb_attr_label.clear()
self.cb_attr_label.addItem("(No labels)")
self.cb_attr_shape.clear()
self.cb_attr_shape.addItem("(Same shape)")
self.cb_attr_size.clear()
self.cb_attr_size.addItem("(Same size)")
if not self.data:
return
for var in self.data.domain.metas:
if not var.is_primitive():
self.cb_attr_label.addItem(self.icons[var], var.name)
for attr in self.data.domain.variables:
self.cb_attr_x.addItem(self.icons[attr], attr.name)
self.cb_attr_y.addItem(self.icons[attr], attr.name)
self.cb_attr_color.addItem(self.icons[attr], attr.name)
if attr.is_discrete:
self.cb_attr_shape.addItem(self.icons[attr], attr.name)
else:
self.cb_attr_size.addItem(self.icons[attr], attr.name)
self.cb_attr_label.addItem(self.icons[attr], attr.name)
for var in self.data.domain.metas:
if var.is_primitive():
self.cb_attr_x.addItem(self.icons[var], var.name)
self.cb_attr_y.addItem(self.icons[var], var.name)
self.cb_attr_color.addItem(self.icons[var], var.name)
if var.is_discrete:
self.cb_attr_shape.addItem(self.icons[var], var.name)
else:
self.cb_attr_size.addItem(self.icons[var], var.name)
self.cb_attr_label.addItem(self.icons[var], var.name)
self.attr_x = self.cb_attr_x.itemText(0)
if self.cb_attr_y.count() > 1:
self.attr_y = self.cb_attr_y.itemText(1)
else:
self.attr_y = self.cb_attr_y.itemText(0)
if self.data.domain.class_var:
self.graph.attr_color = self.data.domain.class_var.name
else:
self.graph.attr_color = ""
self.graph.attr_shape = ""
self.graph.attr_size = ""
self.graph.attr_label = ""
def set_attr(self, attr_x, attr_y):
self.attr_x, self.attr_y = attr_x.name, attr_y.name
self.update_attr()
def update_attr(self):
self.update_graph()
self.cb_class_density.setEnabled(self.graph.can_draw_density())
self.send_features()
def update_colors(self):
self.graph.update_colors()
self.cb_class_density.setEnabled(self.graph.can_draw_density())
def update_density(self):
self.update_graph(reset_view=False)
def update_graph(self, reset_view=True, **_):
self.graph.zoomStack = []
if not self.graph.have_data:
return
self.graph.update_data(self.attr_x, self.attr_y, reset_view)
def selection_changed(self):
self.send_data()
def send_data(self):
selected = unselected = None
# TODO: Implement selection for sql data
if isinstance(self.data, SqlTable):
selected = unselected = self.data
elif self.data is not None:
selection = self.graph.get_selection()
selected = self.data[selection]
unselection = np.full(len(self.data), True, dtype=bool)
unselection[selection] = False
unselected = self.data[unselection]
self.send("Selected Data", selected)
self.send("Other Data", unselected)
def send_features(self):
features = None
if self.attr_x or self.attr_y:
dom = Domain([], metas=(StringVariable(name="feature"),))
features = Table(dom, [[self.attr_x], [self.attr_y]])
features.name = "Features"
self.send("Features", features)
def commit(self):
self.send_data()
self.send_features()
def closeEvent(self, ce):
self.vizrank.close()
super().closeEvent(ce)
def hideEvent(self, he):
self.vizrank.hide()
super().hideEvent(he)
def get_widget_name_extension(self):
if self.data is not None:
return "{} vs {}".format(self.combo_value(self.cb_attr_x),
self.combo_value(self.cb_attr_y))
def send_report(self):
disc_attr = False
if self.data:
domain = self.data.domain
disc_attr = domain[self.attr_x].is_discrete or \
domain[self.attr_y].is_discrete
caption = report.render_items_vert((
("Color", self.combo_value(self.cb_attr_color)),
("Label", self.combo_value(self.cb_attr_label)),
("Shape", self.combo_value(self.cb_attr_shape)),
("Size", self.combo_value(self.cb_attr_size)),
("Jittering", (self.graph.jitter_continuous or disc_attr) and
self.graph.jitter_size)))
self.report_plot()
if caption:
self.report_caption(caption)
def onDeleteWidget(self):
super().onDeleteWidget()
self.graph.plot_widget.getViewBox().deleteLater()
self.graph.plot_widget.clear()
def test_main(argv=None):
import sys
if argv is None:
argv = sys.argv
argv = list(argv)
a = QApplication(argv)
if len(argv) > 1:
filename = argv[1]
else:
filename = "iris"
ow = OWScatterPlot()
ow.show()
ow.raise_()
data = Orange.data.Table(filename)
ow.set_data(data)
ow.set_subset_data(data[:30])
ow.handleNewSignals()
    rval = a.exec_()
ow.set_data(None)
ow.set_subset_data(None)
ow.handleNewSignals()
ow.saveSettings()
ow.onDeleteWidget()
return rval
if __name__ == "__main__":
test_main()
| bsd-2-clause | -2,149,829,242,724,676,000 | 37.551402 | 120 | 0.592921 | false | 3.665363 | false | false | false |
ramusus/django-twitter-api | twitter_api/parser.py | 1 | 1282 | import json
import re
from bs4 import BeautifulSoup
from oauth_tokens.models import AccessToken
HEADERS = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:33.0) Gecko/20100101 Firefox/33.0',
    'Accept-Language': 'en'
}
IDS_RE = re.compile(r'data-tweet-id="(\d+)"')
def get_replies(status):
"Return all replies ids of tweet"
replies_ids = set()
url = 'https://twitter.com/i/%s/conversation/%s' % (status.author.screen_name, status.pk)
ar = AccessToken.objects.get_token('twitter').auth_request
headers = dict(HEADERS)
headers['X-Requested-With'] = 'XMLHttpRequest'
resp = ar.authorized_request(url=status.get_url(), headers=headers)
params = {'max_position': BeautifulSoup(resp.content).find('div', **{'class': 'stream-container'})['data-min-position']}
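    # Twitter pages the conversation with opaque position cursors: the first
    # max_position is scraped from the stream container on the status page,
    # and each JSON response supplies the min_position used as the next
    # max_position until has_more_items goes false.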
while True:
r = ar.authorized_request(url=url, params=params, headers=headers)
response = r.json()
if 'descendants' in response:
response = response['descendants']
ids = IDS_RE.findall(response['items_html'])
[replies_ids.add(id) for id in ids]
if response['has_more_items'] and len(ids):
params = {'max_position': response['min_position']}
else:
break
return list(replies_ids)
| bsd-3-clause | 4,366,527,782,937,351,000 | 30.268293 | 124 | 0.641186 | false | 3.455526 | false | false | false |
bomjacob/htxaarhuslan | main/admin/food.py | 1 | 2598 | from collections import Counter, OrderedDict
from django.contrib import admin
from django.http import HttpResponse
from main.models import FoodOrder
@admin.register(FoodOrder)
class FoodOrderAdmin(admin.ModelAdmin):
list_filter = ('time', 'paid', 'collected')
list_display = ('pk', 'time', 'get_lan', 'order', 'get_profile', 'price', 'paid', 'collected')
list_display_links = ('pk', 'time', 'order')
search_fields = ('lanprofile__profile__user__first_name', 'lanprofile__profile__user__username', 'order')
def get_queryset(self, request):
return (super().get_queryset(request)
.select_related('lanprofile')
.select_related('lanprofile__profile')
.select_related('lanprofile__profile__user'))
def get_profile(self, food_order):
return food_order.lanprofile.profile
get_profile.short_description = 'profil'
get_profile.admin_order_field = 'lanprofile__profile'
def get_lan(self, food_order):
return food_order.lanprofile.lan
get_lan.short_description = 'lan'
get_lan.admin_order_field = 'lanprofile__lan'
actions = ['paid', 'not_paid', 'collected', 'not_collected', 'generate_summary']
def paid(self, request, queryset):
queryset.update(paid=True)
paid.short_description = "Makér som betalt."
def not_paid(self, request, queryset):
queryset.update(paid=False)
not_paid.short_description = "Markér som ikke betalt."
def collected(self, request, queryset):
queryset.update(collected=True)
collected.short_description = "Makér som afhentet."
def not_collected(self, request, queryset):
queryset.update(collected=False)
not_collected.short_description = "Markér som ikke afhentet."
def generate_summary(self, request, queryset):
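        """Count identical orders and return the tally as a plain-text
        attachment. Orders are grouped on the token before the first '-' in
        str(order), with a blank line between groups, and each line shows the
        count plus the remaining tokens, e.g. (hypothetical):

        2 stk. Margherita
        1 stk. Pepperoni
        """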
out = Counter()
for order in queryset:
out[str(order)] += 1
out = OrderedDict(sorted(out.items(), key=lambda x: x[0]))
texts, last = [], ''
for key, value in out.items():
splitted = [x.strip() for x in key.split('-')]
if splitted[0] != last:
texts.append('')
last = splitted[0]
key = ' - '.join(splitted[1:])
texts.append('{} stk. {}'.format(value, key))
texts = texts[1:]
response = HttpResponse('\r\n'.join(texts), content_type='text/plain')
response['Content-Disposition'] = 'attachment; filename="Madbestillinger.txt"'
return response
generate_summary.short_description = "Vis oversigt."
| mit | -1,193,600,915,261,688,600 | 32.688312 | 109 | 0.628373 | false | 3.737752 | false | false | false |
intel-hpdd/intel-manager-for-lustre | chroma_core/lib/storage_plugin/manager.py | 1 | 14650 | # Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
"""This module defines StoragePluginManager which loads and provides
access to StoragePlugins and their StorageResources"""
import sys
import traceback
from django.conf import settings
from chroma_core.lib.storage_plugin.api import relations
from chroma_core.lib.storage_plugin.base_resource import (
BaseStorageResource,
BaseScannableResource,
ResourceProgrammingError,
)
from chroma_core.lib.storage_plugin.base_plugin import BaseStoragePlugin
from chroma_core.lib.storage_plugin.log import storage_plugin_log
from chroma_core.lib.util import all_subclasses
from chroma_core.models.storage_plugin import StoragePluginRecord
from chroma_core.models.storage_plugin import StorageResourceRecord, StorageResourceClass
class PluginNotFound(Exception):
def __str__(self):
return "PluginNotFound: %s" % self.message
class PluginProgrammingError(Exception):
pass
class VersionMismatchError(PluginProgrammingError):
"""Raised when a plugin is loaded that declares a different version.
The version requested by the plugin is saved to the Error.
"""
def __init__(self, version):
self.version = version
class VersionNotFoundError(PluginProgrammingError):
"""Raised when a plugin doesn't declare a version attribute."""
pass
class LoadedResourceClass(object):
"""Convenience store of introspected information about BaseStorageResource
subclasses from loaded modules."""
def __init__(self, resource_class, resource_class_id):
self.resource_class = resource_class
self.resource_class_id = resource_class_id
class LoadedPlugin(object):
"""Convenience store of introspected information about loaded
plugin modules."""
def __init__(self, plugin_manager, module, module_name, plugin_class):
# Populate _resource_classes from all BaseStorageResource in the same module
# (or leave it untouched if the plugin author overrode it)
if not hasattr(plugin_class, "_resource_classes"):
import inspect
plugin_class._resource_classes = []
for name, cls in inspect.getmembers(module):
if inspect.isclass(cls) and issubclass(cls, BaseStorageResource) and cls != BaseStorageResource:
plugin_class._resource_classes.append(cls)
# Map of name string to class
self.resource_classes = {}
self.plugin_class = plugin_class
self.plugin_record, created = StoragePluginRecord.objects.get_or_create(module_name=module_name)
if created:
self.plugin_record.internal = plugin_class.internal
self.plugin_record.save()
self.scannable_resource_classes = []
for cls in plugin_class._resource_classes:
if not hasattr(cls._meta, "identifier"):
raise ResourceProgrammingError(cls.__name__, "No Meta.identifier")
# Populate database records for the classes
vrc, created = StorageResourceClass.objects.get_or_create(
storage_plugin=self.plugin_record, class_name=cls.__name__
)
if created:
vrc.user_creatable = issubclass(cls, BaseScannableResource)
vrc.save()
plugin_manager.resource_class_id_to_class[vrc.id] = cls
plugin_manager.resource_class_class_to_id[cls] = vrc.id
self.resource_classes[cls.__name__] = LoadedResourceClass(cls, vrc.id)
if issubclass(cls, BaseScannableResource):
self.scannable_resource_classes.append(cls.__name__)
class StoragePluginManager(object):
def __init__(self):
self.loaded_plugins = {}
self.errored_plugins = []
self.resource_class_id_to_class = {}
self.resource_class_class_to_id = {}
from settings import INSTALLED_STORAGE_PLUGINS
for plugin in INSTALLED_STORAGE_PLUGINS:
try:
self.load_plugin(plugin)
except (ImportError, SyntaxError, ResourceProgrammingError, PluginProgrammingError) as e:
storage_plugin_log.error("Failed to load plugin '%s': %s" % (plugin, traceback.format_exc()))
self.errored_plugins.append((plugin, e))
for id, klass in self.resource_class_id_to_class.items():
klass._meta.relations = list(klass._meta.orig_relations)
def can_satisfy_relation(klass, attributes):
for attribute in attributes:
if not attribute in klass._meta.storage_attributes:
return False
return True
for id, klass in self.resource_class_id_to_class.items():
for relation in klass._meta.relations:
# If ('linux', 'ScsiDevice') form was used, substitute the real class
if isinstance(relation, relations.Provide):
if isinstance(relation.provide_to, tuple):
prov_klass, prov_klass_id = self.get_plugin_resource_class(*relation.provide_to)
relation.provide_to = prov_klass
elif isinstance(relation, relations.Subscribe):
if isinstance(relation.subscribe_to, tuple):
sub_klass, sub_klass_id = self.get_plugin_resource_class(*relation.subscribe_to)
relation.subscribe_to = sub_klass
# Generate reverse-Subscribe relations
if isinstance(relation, relations.Provide):
# Synthesize Subscribe objects on the objects which might
                # be on the receiving end of a Provide relation. The original
# Provide object plays no further role.
subscription = relations.Subscribe(klass, relation.attributes, relation.ignorecase)
if can_satisfy_relation(relation.provide_to, relation.attributes):
relation.provide_to._meta.relations.append(subscription)
for sc in all_subclasses(relation.provide_to):
if can_satisfy_relation(sc, relation.attributes):
sc._meta.relations.append(subscription)
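    # Illustrative sketch (attribute name assumed): a plugin resource that
    # declares
    #     relations = [relations.Provide(provide_to=('linux', 'ScsiDevice'),
    #                                    attributes=['serial'])]
    # first has the ('linux', 'ScsiDevice') tuple swapped for the real class
    # by the loop above, and then a synthesized Subscribe is appended to
    # ScsiDevice and to every subclass that also carries a 'serial' attribute.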
@property
def loaded_plugin_names(self):
return self.loaded_plugins.keys()
def get_errored_plugins(self):
return [e[0] for e in self.errored_plugins]
def get_resource_class_id(self, klass):
try:
return self.resource_class_class_to_id[klass]
except KeyError:
raise PluginNotFound("Looking for class %s" % klass.__name__)
def get_resource_class_by_id(self, id):
try:
return self.resource_class_id_to_class[id]
except KeyError:
raise PluginNotFound("Looking for class id %s " % id)
def get_scannable_resource_ids(self, plugin):
loaded_plugin = self.loaded_plugins[plugin]
records = (
StorageResourceRecord.objects.filter(resource_class__storage_plugin=loaded_plugin.plugin_record)
.filter(resource_class__class_name__in=loaded_plugin.scannable_resource_classes)
.filter(parents=None)
.values("id")
)
return [r["id"] for r in records]
def get_resource_classes(self, scannable_only=False, show_internal=False):
"""Return a list of StorageResourceClass records
:param scannable_only: Only report BaseScannableResource subclasses
:param show_internal: Include plugins with the internal=True attribute (excluded by default)
"""
class_records = []
for k, v in self.loaded_plugins.items():
if not show_internal and v.plugin_class.internal:
continue
filter = {}
filter["storage_plugin"] = v.plugin_record
if scannable_only:
filter["class_name__in"] = v.scannable_resource_classes
class_records.extend(list(StorageResourceClass.objects.filter(**filter)))
return class_records
def register_plugin(self, plugin_instance):
"""Register a particular instance of a BaseStoragePlugin"""
# FIXME: session ID not really used for anything, it's a vague
# nod to the future remote-run plugins.
session_id = plugin_instance.__class__.__name__
storage_plugin_log.info("Registered plugin instance %s with id %s" % (plugin_instance, session_id))
return session_id
def get_plugin_resource_class(self, plugin_module, resource_class_name):
"""Return a BaseStorageResource subclass"""
try:
loaded_plugin = self.loaded_plugins[plugin_module]
except KeyError:
raise PluginNotFound("Plugin %s not found (not one of %s)" % (plugin_module, self.loaded_plugins.keys()))
try:
loaded_resource = loaded_plugin.resource_classes[resource_class_name]
except KeyError:
raise PluginNotFound(
"Resource %s not found in %s (not one of %s)"
% (resource_class_name, plugin_module, loaded_plugin.resource_classes.keys())
)
return loaded_resource.resource_class, loaded_resource.resource_class_id
# FIXME: rename to get_all_resource_classes
def get_all_resources(self):
for plugin in self.loaded_plugins.values():
for loaded_res in plugin.resource_classes.values():
yield (loaded_res.resource_class_id, loaded_res.resource_class)
def get_plugin_class(self, module):
try:
return self.loaded_plugins[module].plugin_class
except KeyError:
raise PluginNotFound(module)
def validate_plugin(self, module):
errors = []
try:
self.load_plugin(module)
except ResourceProgrammingError as e:
errors.append(e.__str__())
except VersionNotFoundError as e:
errors.append(
"Add version=<int> to your plugin module. Consult "
"Comand Center documentation for API version "
"supported."
)
except VersionMismatchError as e:
plugin_version = e.version
command_center_version = settings.STORAGE_API_VERSION
errors.append(
"The plugin declares version %s. "
"However, this manager server version supports "
"version %s of the Plugin API." % (plugin_version, command_center_version)
)
except PluginProgrammingError as e:
errors.append(e.__str__())
except SyntaxError as e:
errors.append("SyntaxError: %s:%s:%s: %s" % (e.filename, e.lineno, e.offset, e.text))
except ImportError as e:
errors.append(e.__str__())
return errors
def _validate_api_version(self, module):
if not hasattr(module, "version"):
raise VersionNotFoundError()
if type(module.version) != int or settings.STORAGE_API_VERSION != module.version:
raise VersionMismatchError(module.version)
def _load_plugin(self, module, module_name, plugin_klass):
storage_plugin_log.debug("_load_plugin %s %s" % (module_name, plugin_klass))
self.loaded_plugins[module_name] = LoadedPlugin(self, module, module_name, plugin_klass)
def load_plugin(self, module):
"""Load a BaseStoragePlugin class from a module given a
        python path like 'chroma_core.lib.lvm',
or simply return it if it was already loaded. Note that the
BaseStoragePlugin within the module will not be instantiated when this
        returns; the caller is responsible for instantiating it.
@return A subclass of BaseStoragePlugin"""
if module in self.loaded_plugins:
raise PluginProgrammingError("Duplicate storage plugin module %s" % module)
if module in sys.modules:
storage_plugin_log.warning("Reloading module %s (okay if testing)" % module)
mod = sys.modules[module]
else:
# Load the module
try:
mod = __import__(module)
except (ImportError, ResourceProgrammingError, SyntaxError) as e:
storage_plugin_log.error("Error importing %s: %s" % (module, e))
raise
components = module.split(".")
plugin_name = module
for comp in components[1:]:
mod = getattr(mod, comp)
plugin_name = comp
plugin_module = mod
self._validate_api_version(plugin_module)
# Find all BaseStoragePlugin subclasses in the module
from chroma_core.lib.storage_plugin.api.plugin import Plugin
plugin_klasses = []
import inspect
for name, cls in inspect.getmembers(plugin_module):
if (
inspect.isclass(cls)
and issubclass(cls, BaseStoragePlugin)
and cls != BaseStoragePlugin
and cls != Plugin
):
plugin_klasses.append(cls)
# Make sure we have exactly one BaseStoragePlugin subclass
if len(plugin_klasses) > 1:
raise PluginProgrammingError(
"Module %s defines more than one BaseStoragePlugin: %s!" % (module, plugin_klasses)
)
elif len(plugin_klasses) == 0:
raise PluginProgrammingError("Module %s does not define a BaseStoragePlugin!" % module)
else:
plugin_klass = plugin_klasses[0]
# Hook in a logger to the BaseStoragePlugin subclass
if not plugin_klass._log:
import logging
import settings
log = logging.getLogger("storage_plugin_log_%s" % module)
if module in settings.STORAGE_PLUGIN_DEBUG_PLUGINS or settings.STORAGE_PLUGIN_DEBUG:
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.WARNING)
plugin_klass._log = log
plugin_klass._log_format = "[%%(asctime)s: %%(levelname)s/%s] %%(message)s" % module
else:
storage_plugin_log.warning("Double load of %s (okay if testing)" % plugin_name)
try:
self._load_plugin(plugin_module, plugin_name, plugin_klass)
except ResourceProgrammingError as e:
storage_plugin_log.error("Error loading %s: %s" % (plugin_name, e))
raise
else:
return plugin_klass
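# Illustrative sketch (not part of the shipped code): the smallest plugin
# module that load_plugin() above would accept. The module/class names and
# the 'address' attribute are hypothetical.
#
#     from chroma_core.lib.storage_plugin.api import (attributes, identifiers,
#                                                     plugin, resources)
#
#     version = 1  # must match settings.STORAGE_API_VERSION
#
#     class MyController(resources.ScannableResource):
#         class Meta:
#             identifier = identifiers.GlobalId('address')
#         address = attributes.String()
#
#     class MyPlugin(plugin.Plugin):  # exactly one Plugin subclass per module
#         def initial_scan(self, scannable_resource):
#             pass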
storage_plugin_manager = StoragePluginManager()
| mit | -6,246,934,484,721,304,000 | 39.027322 | 117 | 0.625188 | false | 4.379671 | false | false | false |
BeyondTheClouds/nova | nova/db/api.py | 1 | 70629 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Defines interface for DB access.
Discovery modifies the original `nova.db.api' module so that the operator
can choose between the MySQL backend and the Redis backend. The backend
is selected in the `nova.conf' file.
```
[discovery]
db_backend = (redis | mysql)
gen_logs = (True | False)
```
- `db_backend' targets the database backend.
- `gen_logs' records the execution time of db methods.
-----
Functions in this module are imported into the nova.db namespace. Call
these functions from the nova.db namespace, not the nova.db.api namespace.
All functions in this module return objects that implement a
dictionary-like interface. Currently, many of these objects are
sqlalchemy objects that implement a dictionary interface. However, a
future goal is to have all of these objects be simple dictionaries.
"""
from oslo_config import cfg
from oslo_log import log as logging
from nova.cells import rpcapi as cells_rpcapi
from nova.i18n import _LE
import json
import time
import inspect
def get_time_ms():
    return int(round(time.time() * 1000))
db_opts = [
cfg.BoolOpt('enable_new_services',
default=True,
help='Services to be added to the available pool on create'),
cfg.StrOpt('instance_name_template',
default='instance-%08x',
help='Template string to be used to generate instance names'),
cfg.StrOpt('snapshot_name_template',
default='snapshot-%s',
help='Template string to be used to generate snapshot names')]
# Discovery parameters in the nova.conf file
discovery_opts = [
cfg.StrOpt('db_backend',
default='redis',
help='Database backend'),
cfg.BoolOpt('gen_logs',
default=False,
help='Generates logs')
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
CONF.register_opts(discovery_opts, group='discovery')
class ApiProxy:
"""Class that enables the comparison between MySQL and Discovery
implementations. It logs the execution time of db methods.
"""
def __init__(self):
self.backend = None
self.label = ''
def _init_backend(self):
# Proxy that targets the correct backend
if CONF.discovery.db_backend.upper() == 'REDIS':
from nova.db.discovery import api as discovery_api
self.backend = discovery_api
self.label = "[Discovery_impl]"
else:
from nova.db.sqlalchemy import api as mysql_api
self.backend = mysql_api
self.label = "[MySQL_impl]"
def __getattr__(self, attr):
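        # Serve the proxy's own attributes straight from __dict__ to avoid
        # recursing back into __getattr__.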
if attr in ["backend", "label"]:
return self.__dict__[attr]
self._init_backend()
ret = object.__getattribute__(self.backend, attr)
if hasattr(ret, "__call__") and CONF.discovery.gen_logs:
return self.FunctionWrapper(ret, attr, self.label)
return ret
class FunctionWrapper:
"""Class used to measure the execution time of a method and log it
        inside `/opt/logs/db_api_<backend>.log'.
"""
        def __init__(self, func, call_name, label):
            self.callable = func
self.call_name = call_name
self.label = label
def __call__(self, *args, **kwargs):
time_before = get_time_ms()
result_callable = self.callable(*args, **kwargs)
time_after = get_time_ms()
duration = time_after - time_before
frm = inspect.stack()[1]
mod = inspect.getmodule(frm[0])
dct = {
'backend': self.label,
                # inspect.getmodule() may return None for some callers
                'class': mod.__name__ if mod else '<unknown>',
'method': self.call_name,
'args': str(args),
'kwargs': str(kwargs),
'result': str(result_callable),
'timestamp': get_time_ms(),
'duration': duration
}
ppjson = json.dumps(dct)
print(ppjson)
if self.label == "[MySQL_impl]":
with open("/opt/logs/db_api_mysql.log", "a") as f:
f.write(ppjson+"\n")
else:
with open("/opt/logs/db_api_disco.log", "a") as f:
f.write(ppjson+"\n")
return result_callable
IMPL = ApiProxy()
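# Illustrative sketch (not executed; `ctx` and all field values below are
# placeholders): every nova.db call resolves through the proxy above, e.g.
#
#     from nova import db
#     db.service_get(ctx, 42)
#
# With `gen_logs = True`, FunctionWrapper appends one JSON record per call to
# /opt/logs/db_api_mysql.log or /opt/logs/db_api_disco.log, shaped like:
#
#     {"backend": "[MySQL_impl]", "class": "nova.compute.api",
#      "method": "service_get", "args": "(...)", "kwargs": "{}",
#      "result": "...", "timestamp": 1454000000000, "duration": 3}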
LOG = logging.getLogger(__name__)
# The maximum value a signed INT type may have
MAX_INT = 0x7FFFFFFF
###################
def constraint(**conditions):
"""Return a constraint object suitable for use with some updates."""
return IMPL.constraint(**conditions)
def equal_any(*values):
"""Return an equality condition object suitable for use in a constraint.
Equal_any conditions require that a model object's attribute equal any
one of the given values.
"""
return IMPL.equal_any(*values)
def not_equal(*values):
"""Return an inequality condition object suitable for use in a constraint.
Not_equal conditions require that a model object's attribute differs from
all of the given values.
"""
return IMPL.not_equal(*values)
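# Illustrative sketch (names assumed): constraints guard conditional updates,
# e.g. only destroy an instance that is still in one of the given vm_states.
#
#     constraint = db.constraint(vm_state=db.equal_any('active', 'error'))
#     db.instance_destroy(ctx, instance_uuid, constraint)
#
# The destroy raises if the instance's vm_state no longer matches.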
def create_context_manager(connection):
"""Return a context manager for a cell database connection."""
return IMPL.create_context_manager(connection=connection)
###################
def select_db_reader_mode(f):
"""Decorator to select synchronous or asynchronous reader mode.
The kwarg argument 'use_slave' defines reader mode. Asynchronous reader
will be used if 'use_slave' is True and synchronous reader otherwise.
"""
return IMPL.select_db_reader_mode(f)
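# Illustrative sketch (function and host names are hypothetical): the wrapped
# function is expected to take a `use_slave` keyword argument.
#
#     @select_db_reader_mode
#     def instance_get_all_by_host(context, host, use_slave=False):
#         ...
#
#     instance_get_all_by_host(ctx, 'node-1', use_slave=True)  # async reader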
###################
def service_destroy(context, service_id):
"""Destroy the service or raise if it does not exist."""
return IMPL.service_destroy(context, service_id)
def service_get(context, service_id):
"""Get a service or raise if it does not exist."""
return IMPL.service_get(context, service_id)
def service_get_minimum_version(context, binary):
"""Get the minimum service version in the database."""
return IMPL.service_get_minimum_version(context, binary)
def service_get_by_host_and_topic(context, host, topic):
"""Get a service by hostname and topic it listens to."""
return IMPL.service_get_by_host_and_topic(context, host, topic)
def service_get_by_host_and_binary(context, host, binary):
"""Get a service by hostname and binary."""
return IMPL.service_get_by_host_and_binary(context, host, binary)
def service_get_all(context, disabled=None):
"""Get all services."""
return IMPL.service_get_all(context, disabled)
def service_get_all_by_topic(context, topic):
"""Get all services for a given topic."""
return IMPL.service_get_all_by_topic(context, topic)
def service_get_all_by_binary(context, binary, include_disabled=False):
"""Get services for a given binary.
Includes disabled services if 'include_disabled' parameter is True
"""
return IMPL.service_get_all_by_binary(context, binary,
include_disabled=include_disabled)
def service_get_all_by_host(context, host):
"""Get all services for a given host."""
return IMPL.service_get_all_by_host(context, host)
def service_get_by_compute_host(context, host):
"""Get the service entry for a given compute host.
Returns the service entry joined with the compute_node entry.
"""
return IMPL.service_get_by_compute_host(context, host)
def service_create(context, values):
"""Create a service from the values dictionary."""
return IMPL.service_create(context, values)
def service_update(context, service_id, values):
"""Set the given properties on a service and update it.
Raises NotFound if service does not exist.
"""
return IMPL.service_update(context, service_id, values)
###################
def compute_node_get(context, compute_id):
"""Get a compute node by its id.
:param context: The security context
:param compute_id: ID of the compute node
:returns: Dictionary-like object containing properties of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_get(context, compute_id)
# TODO(edleafe): remove once the compute node resource provider migration is
# complete, and this distinction is no longer necessary.
def compute_node_get_model(context, compute_id):
"""Get a compute node sqlalchemy model object by its id.
:param context: The security context
:param compute_id: ID of the compute node
:returns: Sqlalchemy model object containing properties of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_get_model(context, compute_id)
def compute_nodes_get_by_service_id(context, service_id):
"""Get a list of compute nodes by their associated service id.
:param context: The security context
:param service_id: ID of the associated service
:returns: List of dictionary-like objects, each containing properties of
the compute node, including its corresponding service and
statistics
Raises ServiceNotFound if service with the given ID doesn't exist.
"""
return IMPL.compute_nodes_get_by_service_id(context, service_id)
def compute_node_get_by_host_and_nodename(context, host, nodename):
"""Get a compute node by its associated host and nodename.
:param context: The security context (admin)
:param host: Name of the host
:param nodename: Name of the node
:returns: Dictionary-like object containing properties of the compute node,
including its statistics
Raises ComputeHostNotFound if host with the given name doesn't exist.
"""
return IMPL.compute_node_get_by_host_and_nodename(context, host, nodename)
def compute_node_get_all(context):
"""Get all computeNodes.
:param context: The security context
:returns: List of dictionaries each containing compute node properties
"""
return IMPL.compute_node_get_all(context)
def compute_node_get_all_by_host(context, host):
"""Get compute nodes by host name
:param context: The security context (admin)
:param host: Name of the host
:returns: List of dictionaries each containing compute node properties
"""
return IMPL.compute_node_get_all_by_host(context, host)
def compute_node_search_by_hypervisor(context, hypervisor_match):
"""Get compute nodes by hypervisor hostname.
:param context: The security context
:param hypervisor_match: The hypervisor hostname
:returns: List of dictionary-like objects each containing compute node
properties
"""
return IMPL.compute_node_search_by_hypervisor(context, hypervisor_match)
def compute_node_create(context, values):
"""Create a compute node from the values dictionary.
:param context: The security context
:param values: Dictionary containing compute node properties
:returns: Dictionary-like object containing the properties of the created
node, including its corresponding service and statistics
"""
return IMPL.compute_node_create(context, values)
def compute_node_update(context, compute_id, values):
"""Set the given properties on a compute node and update it.
:param context: The security context
:param compute_id: ID of the compute node
:param values: Dictionary containing compute node properties to be updated
:returns: Dictionary-like object containing the properties of the updated
compute node, including its corresponding service and statistics
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_update(context, compute_id, values)
def compute_node_delete(context, compute_id):
"""Delete a compute node from the database.
:param context: The security context
:param compute_id: ID of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_delete(context, compute_id)
def compute_node_statistics(context):
"""Get aggregate statistics over all compute nodes.
:param context: The security context
:returns: Dictionary containing compute node characteristics summed up
over all the compute nodes, e.g. 'vcpus', 'free_ram_mb' etc.
"""
return IMPL.compute_node_statistics(context)
###################
def certificate_create(context, values):
"""Create a certificate from the values dictionary."""
return IMPL.certificate_create(context, values)
def certificate_get_all_by_project(context, project_id):
"""Get all certificates for a project."""
return IMPL.certificate_get_all_by_project(context, project_id)
def certificate_get_all_by_user(context, user_id):
"""Get all certificates for a user."""
return IMPL.certificate_get_all_by_user(context, user_id)
def certificate_get_all_by_user_and_project(context, user_id, project_id):
"""Get all certificates for a user and project."""
return IMPL.certificate_get_all_by_user_and_project(context,
user_id,
project_id)
###################
def floating_ip_get(context, id):
return IMPL.floating_ip_get(context, id)
def floating_ip_get_pools(context):
"""Returns a list of floating IP pools."""
return IMPL.floating_ip_get_pools(context)
def floating_ip_allocate_address(context, project_id, pool,
auto_assigned=False):
"""Allocate free floating IP from specified pool and return the address.
Raises if one is not available.
"""
return IMPL.floating_ip_allocate_address(context, project_id, pool,
auto_assigned)
def floating_ip_bulk_create(context, ips, want_result=True):
"""Create a lot of floating IPs from the values dictionary.
:param want_result: If set to True, return floating IPs inserted
"""
return IMPL.floating_ip_bulk_create(context, ips, want_result=want_result)
def floating_ip_bulk_destroy(context, ips):
"""Destroy a lot of floating IPs from the values dictionary."""
return IMPL.floating_ip_bulk_destroy(context, ips)
def floating_ip_create(context, values):
"""Create a floating IP from the values dictionary."""
return IMPL.floating_ip_create(context, values)
def floating_ip_deallocate(context, address):
"""Deallocate a floating IP by address."""
return IMPL.floating_ip_deallocate(context, address)
def floating_ip_destroy(context, address):
"""Destroy the floating_ip or raise if it does not exist."""
return IMPL.floating_ip_destroy(context, address)
def floating_ip_disassociate(context, address):
"""Disassociate a floating IP from a fixed IP by address.
:returns: the fixed IP record joined to network record or None
    if the floating IP was not associated with a fixed IP.
"""
return IMPL.floating_ip_disassociate(context, address)
def floating_ip_fixed_ip_associate(context, floating_address,
fixed_address, host):
"""Associate a floating IP to a fixed_ip by address.
:returns: the fixed IP record joined to network record or None
    if the floating IP was already associated with the fixed IP.
"""
return IMPL.floating_ip_fixed_ip_associate(context,
floating_address,
fixed_address,
host)
def floating_ip_get_all(context):
"""Get all floating IPs."""
return IMPL.floating_ip_get_all(context)
def floating_ip_get_all_by_host(context, host):
"""Get all floating IPs by host."""
return IMPL.floating_ip_get_all_by_host(context, host)
def floating_ip_get_all_by_project(context, project_id):
"""Get all floating IPs by project."""
return IMPL.floating_ip_get_all_by_project(context, project_id)
def floating_ip_get_by_address(context, address):
"""Get a floating IP by address or raise if it doesn't exist."""
return IMPL.floating_ip_get_by_address(context, address)
def floating_ip_get_by_fixed_address(context, fixed_address):
"""Get a floating IPs by fixed address."""
return IMPL.floating_ip_get_by_fixed_address(context, fixed_address)
def floating_ip_get_by_fixed_ip_id(context, fixed_ip_id):
"""Get a floating IPs by fixed address."""
return IMPL.floating_ip_get_by_fixed_ip_id(context, fixed_ip_id)
def floating_ip_update(context, address, values):
"""Update a floating IP by address or raise if it doesn't exist."""
return IMPL.floating_ip_update(context, address, values)
def dnsdomain_get_all(context):
"""Get a list of all dnsdomains in our database."""
return IMPL.dnsdomain_get_all(context)
def dnsdomain_register_for_zone(context, fqdomain, zone):
"""Associated a DNS domain with an availability zone."""
return IMPL.dnsdomain_register_for_zone(context, fqdomain, zone)
def dnsdomain_register_for_project(context, fqdomain, project):
"""Associated a DNS domain with a project id."""
return IMPL.dnsdomain_register_for_project(context, fqdomain, project)
def dnsdomain_unregister(context, fqdomain):
"""Purge associations for the specified DNS zone."""
return IMPL.dnsdomain_unregister(context, fqdomain)
def dnsdomain_get(context, fqdomain):
"""Get the db record for the specified domain."""
return IMPL.dnsdomain_get(context, fqdomain)
####################
def migration_update(context, id, values):
"""Update a migration instance."""
return IMPL.migration_update(context, id, values)
def migration_create(context, values):
"""Create a migration record."""
return IMPL.migration_create(context, values)
def migration_get(context, migration_id):
"""Finds a migration by the id."""
return IMPL.migration_get(context, migration_id)
def migration_get_by_id_and_instance(context, migration_id, instance_uuid):
"""Finds a migration by the migration id and the instance uuid."""
return IMPL.migration_get_by_id_and_instance(context,
migration_id,
instance_uuid)
def migration_get_by_instance_and_status(context, instance_uuid, status):
"""Finds a migration by the instance uuid its migrating."""
return IMPL.migration_get_by_instance_and_status(context, instance_uuid,
status)
def migration_get_unconfirmed_by_dest_compute(context, confirm_window,
dest_compute):
"""Finds all unconfirmed migrations within the confirmation window for
a specific destination compute host.
"""
return IMPL.migration_get_unconfirmed_by_dest_compute(context,
confirm_window, dest_compute)
def migration_get_in_progress_by_host_and_node(context, host, node):
"""Finds all migrations for the given host + node that are not yet
confirmed or reverted.
"""
return IMPL.migration_get_in_progress_by_host_and_node(context, host, node)
def migration_get_all_by_filters(context, filters):
"""Finds all migrations in progress."""
return IMPL.migration_get_all_by_filters(context, filters)
def migration_get_in_progress_by_instance(context, instance_uuid,
migration_type=None):
"""Finds all migrations of an instance in progress."""
return IMPL.migration_get_in_progress_by_instance(context, instance_uuid,
migration_type)
####################
def fixed_ip_associate(context, address, instance_uuid, network_id=None,
reserved=False, virtual_interface_id=None):
"""Associate fixed IP to instance.
Raises if fixed IP is not available.
"""
return IMPL.fixed_ip_associate(context, address, instance_uuid, network_id,
reserved, virtual_interface_id)
def fixed_ip_associate_pool(context, network_id, instance_uuid=None,
host=None, virtual_interface_id=None):
"""Find free IP in network and associate it to instance or host.
Raises if one is not available.
"""
return IMPL.fixed_ip_associate_pool(context, network_id,
instance_uuid, host,
virtual_interface_id)
def fixed_ip_create(context, values):
"""Create a fixed IP from the values dictionary."""
return IMPL.fixed_ip_create(context, values)
def fixed_ip_bulk_create(context, ips):
"""Create a lot of fixed IPs from the values dictionary."""
return IMPL.fixed_ip_bulk_create(context, ips)
def fixed_ip_disassociate(context, address):
"""Disassociate a fixed IP from an instance by address."""
return IMPL.fixed_ip_disassociate(context, address)
def fixed_ip_disassociate_all_by_timeout(context, host, time):
"""Disassociate old fixed IPs from host."""
return IMPL.fixed_ip_disassociate_all_by_timeout(context, host, time)
def fixed_ip_get(context, id, get_network=False):
"""Get fixed IP by id or raise if it does not exist.
If get_network is true, also return the associated network.
"""
return IMPL.fixed_ip_get(context, id, get_network)
def fixed_ip_get_all(context):
"""Get all defined fixed IPs."""
return IMPL.fixed_ip_get_all(context)
def fixed_ip_get_by_address(context, address, columns_to_join=None):
"""Get a fixed IP by address or raise if it does not exist."""
return IMPL.fixed_ip_get_by_address(context, address,
columns_to_join=columns_to_join)
def fixed_ip_get_by_floating_address(context, floating_address):
"""Get a fixed IP by a floating address."""
return IMPL.fixed_ip_get_by_floating_address(context, floating_address)
def fixed_ip_get_by_instance(context, instance_uuid):
"""Get fixed IPs by instance or raise if none exist."""
return IMPL.fixed_ip_get_by_instance(context, instance_uuid)
def fixed_ip_get_by_host(context, host):
"""Get fixed IPs by compute host."""
return IMPL.fixed_ip_get_by_host(context, host)
def fixed_ip_get_by_network_host(context, network_uuid, host):
"""Get fixed IP for a host in a network."""
return IMPL.fixed_ip_get_by_network_host(context, network_uuid, host)
def fixed_ips_by_virtual_interface(context, vif_id):
"""Get fixed IPs by virtual interface or raise if none exist."""
return IMPL.fixed_ips_by_virtual_interface(context, vif_id)
def fixed_ip_update(context, address, values):
"""Create a fixed IP from the values dictionary."""
return IMPL.fixed_ip_update(context, address, values)
####################
def virtual_interface_create(context, values):
"""Create a virtual interface record in the database."""
return IMPL.virtual_interface_create(context, values)
def virtual_interface_get(context, vif_id):
"""Gets a virtual interface from the table."""
return IMPL.virtual_interface_get(context, vif_id)
def virtual_interface_get_by_address(context, address):
"""Gets a virtual interface from the table filtering on address."""
return IMPL.virtual_interface_get_by_address(context, address)
def virtual_interface_get_by_uuid(context, vif_uuid):
"""Gets a virtual interface from the table filtering on vif uuid."""
return IMPL.virtual_interface_get_by_uuid(context, vif_uuid)
def virtual_interface_get_by_instance(context, instance_id):
"""Gets all virtual_interfaces for instance."""
return IMPL.virtual_interface_get_by_instance(context, instance_id)
def virtual_interface_get_by_instance_and_network(context, instance_id,
network_id):
"""Gets all virtual interfaces for instance."""
return IMPL.virtual_interface_get_by_instance_and_network(context,
instance_id,
network_id)
def virtual_interface_delete_by_instance(context, instance_id):
"""Delete virtual interface records associated with instance."""
return IMPL.virtual_interface_delete_by_instance(context, instance_id)
def virtual_interface_get_all(context):
"""Gets all virtual interfaces from the table."""
return IMPL.virtual_interface_get_all(context)
####################
def instance_create(context, values):
"""Create an instance from the values dictionary."""
return IMPL.instance_create(context, values)
def instance_destroy(context, instance_uuid, constraint=None):
"""Destroy the instance or raise if it does not exist."""
return IMPL.instance_destroy(context, instance_uuid, constraint)
def instance_get_by_uuid(context, uuid, columns_to_join=None):
"""Get an instance or raise if it does not exist."""
return IMPL.instance_get_by_uuid(context, uuid, columns_to_join)
def instance_get(context, instance_id, columns_to_join=None):
"""Get an instance or raise if it does not exist."""
return IMPL.instance_get(context, instance_id,
columns_to_join=columns_to_join)
def instance_get_all(context, columns_to_join=None):
"""Get all instances."""
return IMPL.instance_get_all(context, columns_to_join=columns_to_join)
def instance_get_all_by_filters(context, filters, sort_key='created_at',
sort_dir='desc', limit=None, marker=None,
columns_to_join=None):
"""Get all instances that match all filters."""
# Note: This function exists for backwards compatibility since calls to
# the instance layer coming in over RPC may specify the single sort
# key/direction values; in this case, this function is invoked instead
# of the 'instance_get_all_by_filters_sort' function.
return IMPL.instance_get_all_by_filters(context, filters, sort_key,
sort_dir, limit=limit,
marker=marker,
columns_to_join=columns_to_join)
def instance_get_all_by_filters_sort(context, filters, limit=None,
marker=None, columns_to_join=None,
sort_keys=None, sort_dirs=None):
"""Get all instances that match all filters sorted by multiple keys.
sort_keys and sort_dirs must be a list of strings.
"""
return IMPL.instance_get_all_by_filters_sort(
context, filters, limit=limit, marker=marker,
columns_to_join=columns_to_join, sort_keys=sort_keys,
sort_dirs=sort_dirs)
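# Illustrative sketch (filter values assumed): each entry in sort_keys is
# paired with the entry at the same index in sort_dirs.
#
#     instances = instance_get_all_by_filters_sort(
#         ctx, {'host': 'node-1', 'deleted': False}, limit=50,
#         sort_keys=['created_at', 'id'], sort_dirs=['desc', 'asc'])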
def instance_get_active_by_window_joined(context, begin, end=None,
project_id=None, host=None,
columns_to_join=None):
"""Get instances and joins active during a certain time window.
Specifying a project_id will filter for a certain project.
Specifying a host will filter for instances on a given compute host.
"""
return IMPL.instance_get_active_by_window_joined(context, begin, end,
project_id, host,
columns_to_join=columns_to_join)
def instance_get_all_by_host(context, host, columns_to_join=None):
"""Get all instances belonging to a host."""
return IMPL.instance_get_all_by_host(context, host, columns_to_join)
def instance_get_all_by_host_and_node(context, host, node,
columns_to_join=None):
"""Get all instances belonging to a node."""
return IMPL.instance_get_all_by_host_and_node(
context, host, node, columns_to_join=columns_to_join)
def instance_get_all_by_host_and_not_type(context, host, type_id=None):
"""Get all instances belonging to a host with a different type_id."""
return IMPL.instance_get_all_by_host_and_not_type(context, host, type_id)
def instance_get_all_by_grantee_security_groups(context, group_ids):
"""Get instances with rules granted to them by a list of secgroups ids."""
return IMPL.instance_get_all_by_grantee_security_groups(context, group_ids)
def instance_floating_address_get_all(context, instance_uuid):
"""Get all floating IP addresses of an instance."""
return IMPL.instance_floating_address_get_all(context, instance_uuid)
# NOTE(hanlind): This method can be removed as conductor RPC API moves to v2.0.
def instance_get_all_hung_in_rebooting(context, reboot_window):
"""Get all instances stuck in a rebooting state."""
return IMPL.instance_get_all_hung_in_rebooting(context, reboot_window)
def instance_update(context, instance_uuid, values, expected=None):
"""Set the given properties on an instance and update it.
Raises NotFound if instance does not exist.
"""
return IMPL.instance_update(context, instance_uuid, values,
expected=expected)
def instance_update_and_get_original(context, instance_uuid, values,
columns_to_join=None, expected=None):
"""Set the given properties on an instance and update it. Return
a shallow copy of the original instance reference, as well as the
updated one.
:param context: = request context object
:param instance_uuid: = instance id or uuid
:param values: = dict containing column values
:returns: a tuple of the form (old_instance_ref, new_instance_ref)
Raises NotFound if instance does not exist.
"""
rv = IMPL.instance_update_and_get_original(context, instance_uuid, values,
columns_to_join=columns_to_join,
expected=expected)
return rv
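# Illustrative sketch (values are placeholders): callers unpack the
# (old, new) pair, and `expected` makes the update conditional on the
# instance still being in the expected state.
#
#     old_ref, new_ref = instance_update_and_get_original(
#         ctx, instance_uuid, {'task_state': None},
#         expected={'task_state': 'rebooting'})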
def instance_add_security_group(context, instance_id, security_group_id):
"""Associate the given security group with the given instance."""
return IMPL.instance_add_security_group(context, instance_id,
security_group_id)
def instance_remove_security_group(context, instance_id, security_group_id):
"""Disassociate the given security group from the given instance."""
return IMPL.instance_remove_security_group(context, instance_id,
security_group_id)
####################
def instance_group_create(context, values, policies=None, members=None):
"""Create a new group.
Each group will receive a unique uuid. This will be used for access to the
group.
"""
return IMPL.instance_group_create(context, values, policies, members)
def instance_group_get(context, group_uuid):
"""Get a specific group by id."""
return IMPL.instance_group_get(context, group_uuid)
def instance_group_get_by_instance(context, instance_uuid):
"""Get the group an instance is a member of."""
return IMPL.instance_group_get_by_instance(context, instance_uuid)
def instance_group_update(context, group_uuid, values):
"""Update the attributes of an group."""
return IMPL.instance_group_update(context, group_uuid, values)
def instance_group_delete(context, group_uuid):
"""Delete an group."""
return IMPL.instance_group_delete(context, group_uuid)
def instance_group_get_all(context):
"""Get all groups."""
return IMPL.instance_group_get_all(context)
def instance_group_get_all_by_project_id(context, project_id):
"""Get all groups for a specific project_id."""
return IMPL.instance_group_get_all_by_project_id(context, project_id)
def instance_group_members_add(context, group_uuid, members,
set_delete=False):
"""Add members to the group."""
return IMPL.instance_group_members_add(context, group_uuid, members,
set_delete=set_delete)
def instance_group_member_delete(context, group_uuid, instance_id):
"""Delete a specific member from the group."""
return IMPL.instance_group_member_delete(context, group_uuid, instance_id)
def instance_group_members_get(context, group_uuid):
"""Get the members from the group."""
return IMPL.instance_group_members_get(context, group_uuid)
###################
def instance_info_cache_get(context, instance_uuid):
"""Gets an instance info cache from the table.
:param instance_uuid: = uuid of the info cache's instance
"""
return IMPL.instance_info_cache_get(context, instance_uuid)
def instance_info_cache_update(context, instance_uuid, values):
"""Update an instance info cache record in the table.
:param instance_uuid: = uuid of info cache's instance
:param values: = dict containing column values to update
"""
return IMPL.instance_info_cache_update(context, instance_uuid, values)
def instance_info_cache_delete(context, instance_uuid):
"""Deletes an existing instance_info_cache record
:param instance_uuid: = uuid of the instance tied to the cache record
"""
return IMPL.instance_info_cache_delete(context, instance_uuid)
###################
def instance_extra_get_by_instance_uuid(context, instance_uuid, columns=None):
"""Get the instance extra record
:param instance_uuid: = uuid of the instance tied to the topology record
:param columns: A list of the columns to load, or None for 'all of them'
"""
return IMPL.instance_extra_get_by_instance_uuid(
context, instance_uuid, columns=columns)
def instance_extra_update_by_uuid(context, instance_uuid, updates):
"""Update the instance extra record by instance uuid
:param instance_uuid: = uuid of the instance tied to the record
:param updates: A dict of updates to apply
"""
return IMPL.instance_extra_update_by_uuid(context, instance_uuid,
updates)
###################
def key_pair_create(context, values):
"""Create a key_pair from the values dictionary."""
return IMPL.key_pair_create(context, values)
def key_pair_destroy(context, user_id, name):
"""Destroy the key_pair or raise if it does not exist."""
return IMPL.key_pair_destroy(context, user_id, name)
def key_pair_get(context, user_id, name):
"""Get a key_pair or raise if it does not exist."""
return IMPL.key_pair_get(context, user_id, name)
def key_pair_get_all_by_user(context, user_id):
"""Get all key_pairs by user."""
return IMPL.key_pair_get_all_by_user(context, user_id)
def key_pair_count_by_user(context, user_id):
"""Count number of key pairs for the given user ID."""
return IMPL.key_pair_count_by_user(context, user_id)
####################
def network_associate(context, project_id, network_id=None, force=False):
"""Associate a free network to a project."""
return IMPL.network_associate(context, project_id, network_id, force)
def network_count_reserved_ips(context, network_id):
"""Return the number of reserved IPs in the network."""
return IMPL.network_count_reserved_ips(context, network_id)
def network_create_safe(context, values):
"""Create a network from the values dict.
The network is only returned if the create succeeds. If the create violates
constraints because the network already exists, no exception is raised.
"""
return IMPL.network_create_safe(context, values)
def network_delete_safe(context, network_id):
"""Delete network with key network_id.
This method assumes that the network is not associated with any project
"""
return IMPL.network_delete_safe(context, network_id)
def network_disassociate(context, network_id, disassociate_host=True,
disassociate_project=True):
"""Disassociate the network from project or host
Raises if it does not exist.
"""
return IMPL.network_disassociate(context, network_id, disassociate_host,
disassociate_project)
def network_get(context, network_id, project_only="allow_none"):
"""Get a network or raise if it does not exist."""
return IMPL.network_get(context, network_id, project_only=project_only)
def network_get_all(context, project_only="allow_none"):
"""Return all defined networks."""
return IMPL.network_get_all(context, project_only)
def network_get_all_by_uuids(context, network_uuids,
project_only="allow_none"):
"""Return networks by ids."""
return IMPL.network_get_all_by_uuids(context, network_uuids,
project_only=project_only)
def network_in_use_on_host(context, network_id, host=None):
"""Indicates if a network is currently in use on host."""
return IMPL.network_in_use_on_host(context, network_id, host)
def network_get_associated_fixed_ips(context, network_id, host=None):
"""Get all network's IPs that have been associated."""
return IMPL.network_get_associated_fixed_ips(context, network_id, host)
def network_get_by_uuid(context, uuid):
"""Get a network by uuid or raise if it does not exist."""
return IMPL.network_get_by_uuid(context, uuid)
def network_get_by_cidr(context, cidr):
"""Get a network by cidr or raise if it does not exist."""
return IMPL.network_get_by_cidr(context, cidr)
def network_get_all_by_host(context, host):
"""All networks for which the given host is the network host."""
return IMPL.network_get_all_by_host(context, host)
def network_set_host(context, network_id, host_id):
"""Safely set the host for network."""
return IMPL.network_set_host(context, network_id, host_id)
def network_update(context, network_id, values):
"""Set the given properties on a network and update it.
Raises NotFound if network does not exist.
"""
return IMPL.network_update(context, network_id, values)
###############
def quota_create(context, project_id, resource, limit, user_id=None):
"""Create a quota for the given project and resource."""
return IMPL.quota_create(context, project_id, resource, limit,
user_id=user_id)
def quota_get(context, project_id, resource, user_id=None):
"""Retrieve a quota or raise if it does not exist."""
return IMPL.quota_get(context, project_id, resource, user_id=user_id)
def quota_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all quotas associated with a given project and user."""
return IMPL.quota_get_all_by_project_and_user(context, project_id, user_id)
def quota_get_all_by_project(context, project_id):
"""Retrieve all quotas associated with a given project."""
return IMPL.quota_get_all_by_project(context, project_id)
def quota_get_all(context, project_id):
"""Retrieve all user quotas associated with a given project."""
return IMPL.quota_get_all(context, project_id)
def quota_update(context, project_id, resource, limit, user_id=None):
"""Update a quota or raise if it does not exist."""
return IMPL.quota_update(context, project_id, resource, limit,
user_id=user_id)
###################
def quota_class_create(context, class_name, resource, limit):
"""Create a quota class for the given name and resource."""
return IMPL.quota_class_create(context, class_name, resource, limit)
def quota_class_get(context, class_name, resource):
"""Retrieve a quota class or raise if it does not exist."""
return IMPL.quota_class_get(context, class_name, resource)
def quota_class_get_default(context):
"""Retrieve all default quotas."""
return IMPL.quota_class_get_default(context)
def quota_class_get_all_by_name(context, class_name):
"""Retrieve all quotas associated with a given quota class."""
return IMPL.quota_class_get_all_by_name(context, class_name)
def quota_class_update(context, class_name, resource, limit):
"""Update a quota class or raise if it does not exist."""
return IMPL.quota_class_update(context, class_name, resource, limit)
###################
def quota_usage_get(context, project_id, resource, user_id=None):
"""Retrieve a quota usage or raise if it does not exist."""
return IMPL.quota_usage_get(context, project_id, resource, user_id=user_id)
def quota_usage_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project_and_user(context,
project_id, user_id)
def quota_usage_get_all_by_project(context, project_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project(context, project_id)
def quota_usage_update(context, project_id, user_id, resource, **kwargs):
"""Update a quota usage or raise if it does not exist."""
return IMPL.quota_usage_update(context, project_id, user_id, resource,
**kwargs)
###################
def quota_reserve(context, resources, quotas, user_quotas, deltas, expire,
until_refresh, max_age, project_id=None, user_id=None):
"""Check quotas and create appropriate reservations."""
return IMPL.quota_reserve(context, resources, quotas, user_quotas, deltas,
expire, until_refresh, max_age,
project_id=project_id, user_id=user_id)
def reservation_commit(context, reservations, project_id=None, user_id=None):
"""Commit quota reservations."""
return IMPL.reservation_commit(context, reservations,
project_id=project_id,
user_id=user_id)
def reservation_rollback(context, reservations, project_id=None, user_id=None):
"""Roll back quota reservations."""
return IMPL.reservation_rollback(context, reservations,
project_id=project_id,
user_id=user_id)
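# Illustrative sketch of the reserve/commit/rollback flow (resources, quotas,
# pid and uid are placeholders):
#
#     reservations = quota_reserve(ctx, resources, quotas, user_quotas,
#                                  {'instances': 1}, expire, 0, 0,
#                                  project_id=pid, user_id=uid)
#     try:
#         ...  # do the work the quota was reserved for
#         reservation_commit(ctx, reservations, project_id=pid, user_id=uid)
#     except Exception:
#         reservation_rollback(ctx, reservations, project_id=pid, user_id=uid)
#         raise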
def quota_destroy_all_by_project_and_user(context, project_id, user_id):
"""Destroy all quotas associated with a given project and user."""
return IMPL.quota_destroy_all_by_project_and_user(context,
project_id, user_id)
def quota_destroy_all_by_project(context, project_id):
"""Destroy all quotas associated with a given project."""
return IMPL.quota_destroy_all_by_project(context, project_id)
def reservation_expire(context):
"""Roll back any expired reservations."""
return IMPL.reservation_expire(context)
###################
def ec2_volume_create(context, volume_id, forced_id=None):
return IMPL.ec2_volume_create(context, volume_id, forced_id)
def ec2_volume_get_by_id(context, volume_id):
return IMPL.ec2_volume_get_by_id(context, volume_id)
def ec2_volume_get_by_uuid(context, volume_uuid):
return IMPL.ec2_volume_get_by_uuid(context, volume_uuid)
def ec2_snapshot_create(context, snapshot_id, forced_id=None):
return IMPL.ec2_snapshot_create(context, snapshot_id, forced_id)
def ec2_snapshot_get_by_ec2_id(context, ec2_id):
return IMPL.ec2_snapshot_get_by_ec2_id(context, ec2_id)
def ec2_snapshot_get_by_uuid(context, snapshot_uuid):
return IMPL.ec2_snapshot_get_by_uuid(context, snapshot_uuid)
####################
def block_device_mapping_create(context, values, legacy=True):
"""Create an entry of block device mapping."""
return IMPL.block_device_mapping_create(context, values, legacy)
def block_device_mapping_update(context, bdm_id, values, legacy=True):
"""Update an entry of block device mapping."""
return IMPL.block_device_mapping_update(context, bdm_id, values, legacy)
def block_device_mapping_update_or_create(context, values, legacy=True):
"""Update an entry of block device mapping.
    If no entry exists, create a new one.
"""
return IMPL.block_device_mapping_update_or_create(context, values, legacy)
def block_device_mapping_get_all_by_instance_uuids(context, instance_uuids):
"""Get all block device mapping belonging to a list of instances."""
return IMPL.block_device_mapping_get_all_by_instance_uuids(context,
instance_uuids)
def block_device_mapping_get_all_by_instance(context, instance_uuid):
"""Get all block device mapping belonging to an instance."""
return IMPL.block_device_mapping_get_all_by_instance(context,
instance_uuid)
def block_device_mapping_get_all_by_volume_id(context, volume_id,
columns_to_join=None):
"""Get block device mapping for a given volume."""
return IMPL.block_device_mapping_get_all_by_volume_id(context, volume_id,
columns_to_join)
def block_device_mapping_get_by_instance_and_volume_id(context, volume_id,
instance_uuid,
columns_to_join=None):
"""Get block device mapping for a given volume ID and instance UUID."""
return IMPL.block_device_mapping_get_by_instance_and_volume_id(
context, volume_id, instance_uuid, columns_to_join)
def block_device_mapping_destroy(context, bdm_id):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy(context, bdm_id)
def block_device_mapping_destroy_by_instance_and_device(context, instance_uuid,
device_name):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy_by_instance_and_device(
context, instance_uuid, device_name)
def block_device_mapping_destroy_by_instance_and_volume(context, instance_uuid,
volume_id):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy_by_instance_and_volume(
context, instance_uuid, volume_id)
####################
def security_group_get_all(context):
"""Get all security groups."""
return IMPL.security_group_get_all(context)
def security_group_get(context, security_group_id, columns_to_join=None):
"""Get security group by its id."""
return IMPL.security_group_get(context, security_group_id,
columns_to_join)
def security_group_get_by_name(context, project_id, group_name,
columns_to_join=None):
"""Returns a security group with the specified name from a project."""
return IMPL.security_group_get_by_name(context, project_id, group_name,
                                           columns_to_join=columns_to_join)
def security_group_get_by_project(context, project_id):
"""Get all security groups belonging to a project."""
return IMPL.security_group_get_by_project(context, project_id)
def security_group_get_by_instance(context, instance_uuid):
"""Get security groups to which the instance is assigned."""
return IMPL.security_group_get_by_instance(context, instance_uuid)
def security_group_in_use(context, group_id):
"""Indicates if a security group is currently in use."""
return IMPL.security_group_in_use(context, group_id)
def security_group_create(context, values):
"""Create a new security group."""
return IMPL.security_group_create(context, values)
def security_group_update(context, security_group_id, values,
columns_to_join=None):
"""Update a security group."""
return IMPL.security_group_update(context, security_group_id, values,
columns_to_join=columns_to_join)
def security_group_ensure_default(context):
"""Ensure default security group exists for a project_id.
Returns a tuple with the first element being a bool indicating
if the default security group previously existed. Second
element is the dict used to create the default security group.
"""
return IMPL.security_group_ensure_default(context)
def security_group_destroy(context, security_group_id):
"""Deletes a security group."""
return IMPL.security_group_destroy(context, security_group_id)
####################
def security_group_rule_create(context, values):
"""Create a new security group."""
return IMPL.security_group_rule_create(context, values)
def security_group_rule_get_by_security_group(context, security_group_id,
columns_to_join=None):
"""Get all rules for a given security group."""
return IMPL.security_group_rule_get_by_security_group(
context, security_group_id, columns_to_join=columns_to_join)
def security_group_rule_get_by_instance(context, instance_uuid):
"""Get all rules for a given instance."""
return IMPL.security_group_rule_get_by_instance(context, instance_uuid)
def security_group_rule_destroy(context, security_group_rule_id):
"""Deletes a security group rule."""
return IMPL.security_group_rule_destroy(context, security_group_rule_id)
def security_group_rule_get(context, security_group_rule_id):
"""Gets a security group rule."""
return IMPL.security_group_rule_get(context, security_group_rule_id)
def security_group_rule_count_by_group(context, security_group_id):
"""Count rules in a given security group."""
return IMPL.security_group_rule_count_by_group(context, security_group_id)
###################
def security_group_default_rule_get(context, security_group_rule_default_id):
return IMPL.security_group_default_rule_get(context,
security_group_rule_default_id)
def security_group_default_rule_destroy(context,
security_group_rule_default_id):
return IMPL.security_group_default_rule_destroy(
context, security_group_rule_default_id)
def security_group_default_rule_create(context, values):
return IMPL.security_group_default_rule_create(context, values)
def security_group_default_rule_list(context):
return IMPL.security_group_default_rule_list(context)
###################
def provider_fw_rule_create(context, rule):
"""Add a firewall rule at the provider level (all hosts & instances)."""
return IMPL.provider_fw_rule_create(context, rule)
def provider_fw_rule_get_all(context):
"""Get all provider-level firewall rules."""
return IMPL.provider_fw_rule_get_all(context)
def provider_fw_rule_destroy(context, rule_id):
"""Delete a provider firewall rule from the database."""
return IMPL.provider_fw_rule_destroy(context, rule_id)
###################
def project_get_networks(context, project_id, associate=True):
"""Return the network associated with the project.
If associate is true, it will attempt to associate a new
network if one is not found, otherwise it returns None.
"""
return IMPL.project_get_networks(context, project_id, associate)
###################
def console_pool_create(context, values):
"""Create console pool."""
return IMPL.console_pool_create(context, values)
def console_pool_get_by_host_type(context, compute_host, proxy_host,
console_type):
"""Fetch a console pool for a given proxy host, compute host, and type."""
return IMPL.console_pool_get_by_host_type(context,
compute_host,
proxy_host,
console_type)
def console_pool_get_all_by_host_type(context, host, console_type):
"""Fetch all pools for given proxy host and type."""
return IMPL.console_pool_get_all_by_host_type(context,
host,
console_type)
def console_create(context, values):
"""Create a console."""
return IMPL.console_create(context, values)
def console_delete(context, console_id):
"""Delete a console."""
return IMPL.console_delete(context, console_id)
def console_get_by_pool_instance(context, pool_id, instance_uuid):
"""Get console entry for a given instance and pool."""
return IMPL.console_get_by_pool_instance(context, pool_id, instance_uuid)
def console_get_all_by_instance(context, instance_uuid, columns_to_join=None):
"""Get consoles for a given instance."""
return IMPL.console_get_all_by_instance(context, instance_uuid,
columns_to_join)
def console_get(context, console_id, instance_uuid=None):
"""Get a specific console (possibly on a given instance)."""
return IMPL.console_get(context, console_id, instance_uuid)
##################
def flavor_create(context, values, projects=None):
"""Create a new instance type."""
return IMPL.flavor_create(context, values, projects=projects)
def flavor_get_all(context, inactive=False, filters=None, sort_key='flavorid',
sort_dir='asc', limit=None, marker=None):
"""Get all instance flavors."""
return IMPL.flavor_get_all(
context, inactive=inactive, filters=filters, sort_key=sort_key,
sort_dir=sort_dir, limit=limit, marker=marker)
def flavor_get(context, id):
"""Get instance type by id."""
return IMPL.flavor_get(context, id)
def flavor_get_by_name(context, name):
"""Get instance type by name."""
return IMPL.flavor_get_by_name(context, name)
def flavor_get_by_flavor_id(context, id, read_deleted=None):
"""Get instance type by flavor id."""
return IMPL.flavor_get_by_flavor_id(context, id, read_deleted)
def flavor_destroy(context, name):
"""Delete an instance type."""
return IMPL.flavor_destroy(context, name)
def flavor_access_get_by_flavor_id(context, flavor_id):
"""Get flavor access by flavor id."""
return IMPL.flavor_access_get_by_flavor_id(context, flavor_id)
def flavor_access_add(context, flavor_id, project_id):
"""Add flavor access for project."""
return IMPL.flavor_access_add(context, flavor_id, project_id)
def flavor_access_remove(context, flavor_id, project_id):
"""Remove flavor access for project."""
return IMPL.flavor_access_remove(context, flavor_id, project_id)
def flavor_extra_specs_get(context, flavor_id):
"""Get all extra specs for an instance type."""
return IMPL.flavor_extra_specs_get(context, flavor_id)
def flavor_extra_specs_delete(context, flavor_id, key):
"""Delete the given extra specs item."""
IMPL.flavor_extra_specs_delete(context, flavor_id, key)
def flavor_extra_specs_update_or_create(context, flavor_id,
extra_specs):
"""Create or update instance type extra specs.
    This adds or modifies the key/value pairs specified in the
    extra_specs dict argument.
"""
IMPL.flavor_extra_specs_update_or_create(context, flavor_id,
extra_specs)
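# Illustrative merge semantics for the call above (the flavor id and spec
# key shown are hypothetical):
#
#   flavor_extra_specs_update_or_create(ctxt, "42",
#                                       {"hw:cpu_policy": "dedicated"})
#   # keys present in the dict are created or updated in place; keys absent
#   # from the dict are left untouched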
####################
def pci_device_get_by_addr(context, node_id, dev_addr):
"""Get PCI device by address."""
return IMPL.pci_device_get_by_addr(context, node_id, dev_addr)
def pci_device_get_by_id(context, id):
"""Get PCI device by id."""
return IMPL.pci_device_get_by_id(context, id)
def pci_device_get_all_by_node(context, node_id):
"""Get all PCI devices for one host."""
return IMPL.pci_device_get_all_by_node(context, node_id)
def pci_device_get_all_by_instance_uuid(context, instance_uuid):
"""Get PCI devices allocated to instance."""
return IMPL.pci_device_get_all_by_instance_uuid(context, instance_uuid)
def pci_device_get_all_by_parent_addr(context, node_id, parent_addr):
"""Get all PCI devices by parent address."""
return IMPL.pci_device_get_all_by_parent_addr(context, node_id,
parent_addr)
def pci_device_destroy(context, node_id, address):
"""Delete a PCI device record."""
return IMPL.pci_device_destroy(context, node_id, address)
def pci_device_update(context, node_id, address, value):
"""Update a pci device."""
return IMPL.pci_device_update(context, node_id, address, value)
###################
def cell_create(context, values):
"""Create a new child Cell entry."""
return IMPL.cell_create(context, values)
def cell_update(context, cell_name, values):
"""Update a child Cell entry."""
return IMPL.cell_update(context, cell_name, values)
def cell_delete(context, cell_name):
"""Delete a child Cell."""
return IMPL.cell_delete(context, cell_name)
def cell_get(context, cell_name):
"""Get a specific child Cell."""
return IMPL.cell_get(context, cell_name)
def cell_get_all(context):
"""Get all child Cells."""
return IMPL.cell_get_all(context)
####################
def instance_metadata_get(context, instance_uuid):
"""Get all metadata for an instance."""
return IMPL.instance_metadata_get(context, instance_uuid)
def instance_metadata_delete(context, instance_uuid, key):
"""Delete the given metadata item."""
IMPL.instance_metadata_delete(context, instance_uuid, key)
def instance_metadata_update(context, instance_uuid, metadata, delete):
"""Update metadata if it exists, otherwise create it."""
return IMPL.instance_metadata_update(context, instance_uuid,
metadata, delete)
####################
def instance_system_metadata_get(context, instance_uuid):
"""Get all system metadata for an instance."""
return IMPL.instance_system_metadata_get(context, instance_uuid)
def instance_system_metadata_update(context, instance_uuid, metadata, delete):
"""Update metadata if it exists, otherwise create it."""
IMPL.instance_system_metadata_update(
context, instance_uuid, metadata, delete)
####################
def agent_build_create(context, values):
"""Create a new agent build entry."""
return IMPL.agent_build_create(context, values)
def agent_build_get_by_triple(context, hypervisor, os, architecture):
"""Get agent build by hypervisor/OS/architecture triple."""
return IMPL.agent_build_get_by_triple(context, hypervisor, os,
architecture)
def agent_build_get_all(context, hypervisor=None):
"""Get all agent builds."""
return IMPL.agent_build_get_all(context, hypervisor)
def agent_build_destroy(context, agent_update_id):
"""Destroy agent build entry."""
IMPL.agent_build_destroy(context, agent_update_id)
def agent_build_update(context, agent_build_id, values):
"""Update agent build entry."""
IMPL.agent_build_update(context, agent_build_id, values)
####################
def bw_usage_get(context, uuid, start_period, mac):
"""Return bw usage for instance and mac in a given audit period."""
return IMPL.bw_usage_get(context, uuid, start_period, mac)
def bw_usage_get_by_uuids(context, uuids, start_period):
"""Return bw usages for instance(s) in a given audit period."""
return IMPL.bw_usage_get_by_uuids(context, uuids, start_period)
def bw_usage_update(context, uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed=None,
update_cells=True):
"""Update cached bandwidth usage for an instance's network based on mac
address. Creates new record if needed.
"""
rv = IMPL.bw_usage_update(context, uuid, mac, start_period, bw_in,
bw_out, last_ctr_in, last_ctr_out, last_refreshed=last_refreshed)
if update_cells:
try:
cells_rpcapi.CellsAPI().bw_usage_update_at_top(context,
uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed)
except Exception:
LOG.exception(_LE("Failed to notify cells of bw_usage update"))
return rv
###################
def vol_get_usage_by_time(context, begin):
"""Return volumes usage that have been updated after a specified time."""
return IMPL.vol_get_usage_by_time(context, begin)
def vol_usage_update(context, id, rd_req, rd_bytes, wr_req, wr_bytes,
instance_id, project_id, user_id, availability_zone,
update_totals=False):
"""Update cached volume usage for a volume
Creates new record if needed.
"""
return IMPL.vol_usage_update(context, id, rd_req, rd_bytes, wr_req,
wr_bytes, instance_id, project_id, user_id,
availability_zone,
update_totals=update_totals)
###################
def s3_image_get(context, image_id):
"""Find local s3 image represented by the provided id."""
return IMPL.s3_image_get(context, image_id)
def s3_image_get_by_uuid(context, image_uuid):
"""Find local s3 image represented by the provided uuid."""
return IMPL.s3_image_get_by_uuid(context, image_uuid)
def s3_image_create(context, image_uuid):
"""Create local s3 image represented by provided uuid."""
return IMPL.s3_image_create(context, image_uuid)
####################
def aggregate_create(context, values, metadata=None):
"""Create a new aggregate with metadata."""
return IMPL.aggregate_create(context, values, metadata)
def aggregate_get(context, aggregate_id):
"""Get a specific aggregate by id."""
return IMPL.aggregate_get(context, aggregate_id)
def aggregate_get_by_host(context, host, key=None):
"""Get a list of aggregates that host belongs to."""
return IMPL.aggregate_get_by_host(context, host, key)
def aggregate_metadata_get_by_host(context, host, key=None):
"""Get metadata for all aggregates that host belongs to.
    Returns a dictionary where each value is a set; this covers the case
    where two aggregates have different values for the same key.
    An optional key filter can be applied.
"""
return IMPL.aggregate_metadata_get_by_host(context, host, key)
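# Example of the set-valued result described above (hypothetical data): if
# the host belongs to two aggregates that set availability_zone to "az1"
# and "az2" respectively, the returned dict is
# {"availability_zone": set(["az1", "az2"])}.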
def aggregate_get_by_metadata_key(context, key):
    """Get all aggregates that have the given metadata key set."""
    return IMPL.aggregate_get_by_metadata_key(context, key)
def aggregate_update(context, aggregate_id, values):
"""Update the attributes of an aggregates.
If values contains a metadata key, it updates the aggregate metadata too.
"""
return IMPL.aggregate_update(context, aggregate_id, values)
def aggregate_delete(context, aggregate_id):
"""Delete an aggregate."""
return IMPL.aggregate_delete(context, aggregate_id)
def aggregate_get_all(context):
"""Get all aggregates."""
return IMPL.aggregate_get_all(context)
def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False):
"""Add/update metadata. If set_delete=True, it adds only."""
IMPL.aggregate_metadata_add(context, aggregate_id, metadata, set_delete)
def aggregate_metadata_get(context, aggregate_id):
"""Get metadata for the specified aggregate."""
return IMPL.aggregate_metadata_get(context, aggregate_id)
def aggregate_metadata_delete(context, aggregate_id, key):
"""Delete the given metadata key."""
IMPL.aggregate_metadata_delete(context, aggregate_id, key)
def aggregate_host_add(context, aggregate_id, host):
"""Add host to the aggregate."""
IMPL.aggregate_host_add(context, aggregate_id, host)
def aggregate_host_get_all(context, aggregate_id):
"""Get hosts for the specified aggregate."""
return IMPL.aggregate_host_get_all(context, aggregate_id)
def aggregate_host_delete(context, aggregate_id, host):
"""Delete the given host from the aggregate."""
IMPL.aggregate_host_delete(context, aggregate_id, host)
####################
def instance_fault_create(context, values):
"""Create a new Instance Fault."""
return IMPL.instance_fault_create(context, values)
def instance_fault_get_by_instance_uuids(context, instance_uuids):
"""Get all instance faults for the provided instance_uuids."""
return IMPL.instance_fault_get_by_instance_uuids(context, instance_uuids)
####################
def action_start(context, values):
"""Start an action for an instance."""
return IMPL.action_start(context, values)
def action_finish(context, values):
"""Finish an action for an instance."""
return IMPL.action_finish(context, values)
def actions_get(context, uuid):
"""Get all instance actions for the provided instance."""
return IMPL.actions_get(context, uuid)
def action_get_by_request_id(context, uuid, request_id):
"""Get the action by request_id and given instance."""
return IMPL.action_get_by_request_id(context, uuid, request_id)
def action_event_start(context, values):
"""Start an event on an instance action."""
return IMPL.action_event_start(context, values)
def action_event_finish(context, values):
"""Finish an event on an instance action."""
return IMPL.action_event_finish(context, values)
def action_events_get(context, action_id):
"""Get the events by action id."""
return IMPL.action_events_get(context, action_id)
def action_event_get_by_id(context, action_id, event_id):
    """Get a single event of an instance action."""
    return IMPL.action_event_get_by_id(context, action_id, event_id)
####################
def get_instance_uuid_by_ec2_id(context, ec2_id):
"""Get uuid through ec2 id from instance_id_mappings table."""
return IMPL.get_instance_uuid_by_ec2_id(context, ec2_id)
def ec2_instance_create(context, instance_uuid, id=None):
"""Create the ec2 id to instance uuid mapping on demand."""
return IMPL.ec2_instance_create(context, instance_uuid, id)
def ec2_instance_get_by_uuid(context, instance_uuid):
    """Get the ec2 instance id mapping for the given instance uuid."""
    return IMPL.ec2_instance_get_by_uuid(context, instance_uuid)
def ec2_instance_get_by_id(context, instance_id):
    """Get the instance uuid mapping for the given ec2 instance id."""
    return IMPL.ec2_instance_get_by_id(context, instance_id)
####################
def task_log_end_task(context, task_name,
period_beginning,
period_ending,
host,
errors,
message=None):
"""Mark a task as complete for a given host/time period."""
return IMPL.task_log_end_task(context, task_name,
period_beginning,
period_ending,
host,
errors,
message)
def task_log_begin_task(context, task_name,
period_beginning,
period_ending,
host,
task_items=None,
message=None):
"""Mark a task as started for a given host/time period."""
return IMPL.task_log_begin_task(context, task_name,
period_beginning,
period_ending,
host,
task_items,
message)
def task_log_get_all(context, task_name, period_beginning,
                     period_ending, host=None, state=None):
    """Get all task logs for a task/time period, optionally filtered."""
    return IMPL.task_log_get_all(context, task_name, period_beginning,
                                 period_ending, host, state)
def task_log_get(context, task_name, period_beginning,
                 period_ending, host, state=None):
    """Get the task log for a given host and time period."""
    return IMPL.task_log_get(context, task_name, period_beginning,
                             period_ending, host, state)
####################
def archive_deleted_rows(max_rows=None):
"""Move up to max_rows rows from production tables to corresponding shadow
tables.
:returns: dict that maps table name to number of rows archived from that
table, for example:
::
{
'instances': 5,
'block_device_mapping': 5,
'pci_devices': 2,
}
"""
return IMPL.archive_deleted_rows(max_rows=max_rows)
def pcidevice_online_data_migration(context, max_count):
    """Perform the online data migration for pci device records."""
    return IMPL.pcidevice_online_data_migration(context, max_count)
def aggregate_uuids_online_data_migration(context, max_count):
    """Perform the online data migration for aggregate uuids."""
    return IMPL.aggregate_uuids_online_data_migration(context, max_count)
def computenode_uuids_online_data_migration(context, max_count):
    """Perform the online data migration for compute node uuids."""
    return IMPL.computenode_uuids_online_data_migration(context, max_count)
####################
def instance_tag_add(context, instance_uuid, tag):
"""Add tag to the instance."""
return IMPL.instance_tag_add(context, instance_uuid, tag)
def instance_tag_set(context, instance_uuid, tags):
"""Replace all of the instance tags with specified list of tags."""
return IMPL.instance_tag_set(context, instance_uuid, tags)
def instance_tag_get_by_instance_uuid(context, instance_uuid):
"""Get all tags for a given instance."""
return IMPL.instance_tag_get_by_instance_uuid(context, instance_uuid)
def instance_tag_delete(context, instance_uuid, tag):
"""Delete specified tag from the instance."""
return IMPL.instance_tag_delete(context, instance_uuid, tag)
def instance_tag_delete_all(context, instance_uuid):
"""Delete all tags from the instance."""
return IMPL.instance_tag_delete_all(context, instance_uuid)
def instance_tag_exists(context, instance_uuid, tag):
"""Check if specified tag exist on the instance."""
return IMPL.instance_tag_exists(context, instance_uuid, tag)
| apache-2.0 | 232,568,456,855,932,960 | 32.457603 | 79 | 0.656487 | false | 3.892906 | false | false | false |
pchaigno/grr | lib/queue_manager.py | 2 | 31207 | #!/usr/bin/env python
"""This is the manager for the various queues."""
import os
import random
import socket
import time
import logging
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import stats
from grr.lib import utils
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
class Error(Exception):
"""Base class for errors in this module."""
class MoreDataException(Error):
"""Raised when there is more data available."""
class QueueManager(object):
"""This class manages the representation of the flow within the data store.
The workflow for client task scheduling is as follows:
1) Create a bunch of tasks (rdf_flows.GrrMessage()). Tasks must
be assigned to queues and contain arbitrary values.
2) Call QueueManager.Schedule(task) to add the tasks to their queues.
3) In another thread, call QueueManager.QueryAndOwn(queue) to
obtain a list of tasks leased for a particular time.
4) If the lease time expires, the tasks automatically become
available for consumption. When done with the task we can remove it
from the scheduler using QueueManager.Delete(tasks).
5) Tasks can be re-leased by calling QueueManager.Schedule(task)
repeatedly. Each call will extend the lease by the specified amount.
  An important feature of the QueueManager is the ability to freeze the
  timestamp used for time-limiting Resolve and Delete queries to the
  datastore. A "with" statement should be used to freeze the timestamp,
  like:
with queue_manager.QueueManager(token=self.token) as manager:
...
Another option is to use FreezeTimestamp()/UnfreezeTimestamp() methods:
queue_manager.FreezeTimestamp()
...
queue_manager.UnfreezeTimestamp()
"""
  # These attributes are related to a flow's internal data structures.
  # Requests are protobufs of type RequestState. They have a constant
  # prefix followed by the request number:
FLOW_REQUEST_PREFIX = "flow:request:"
FLOW_REQUEST_TEMPLATE = FLOW_REQUEST_PREFIX + "%08X"
# When a status message is received from the client, we write it with the
# request using the following template.
FLOW_STATUS_TEMPLATE = "flow:status:%08X"
FLOW_STATUS_REGEX = "flow:status:.*"
# This regex will return all the requests in order
FLOW_REQUEST_REGEX = FLOW_REQUEST_PREFIX + ".*"
# Each request may have any number of responses. Responses are kept in their
# own subject object. The subject name is derived from the session id.
FLOW_RESPONSE_PREFIX = "flow:response:%08X:"
FLOW_RESPONSE_TEMPLATE = FLOW_RESPONSE_PREFIX + "%08X"
# This regex will return all the responses in order
FLOW_RESPONSE_REGEX = "flow:response:.*"
TASK_PREDICATE_PREFIX = "task:%s"
NOTIFY_PREDICATE_PREFIX = "notify:%s"
STUCK_PRIORITY = "Flow stuck"
request_limit = 1000000
response_limit = 1000000
notification_shard_counters = {}
def __init__(self, store=None, sync=True, token=None):
self.sync = sync
self.token = token
if store is None:
store = data_store.DB
self.data_store = store
# We cache all these and write/delete in one operation.
self.to_write = {}
self.to_delete = {}
# A queue of client messages to remove. Keys are client ids, values are
# lists of task ids.
self.client_messages_to_delete = {}
self.new_client_messages = []
self.notifications = []
self.prev_frozen_timestamps = []
self.frozen_timestamp = None
self.num_notification_shards = config_lib.CONFIG["Worker.queue_shards"]
def GetNotificationShard(self, queue):
queue_name = str(queue)
QueueManager.notification_shard_counters.setdefault(queue_name, 0)
QueueManager.notification_shard_counters[queue_name] += 1
notification_shard_index = (
QueueManager.notification_shard_counters[queue_name] %
self.num_notification_shards)
if notification_shard_index > 0:
return queue.Add(str(notification_shard_index))
else:
return queue
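  # GetNotificationShard example: with Worker.queue_shards = 3, successive
  # calls for the same queue yield queue/1, queue/2, queue, queue/1, ...
  # (the per-queue counter modulo 3, where shard index 0 maps to the
  # unsuffixed queue itself).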
def GetAllNotificationShards(self, queue):
result = [queue]
for i in range(1, self.num_notification_shards):
result.append(queue.Add(str(i)))
return result
def Copy(self):
"""Return a copy of the queue manager.
Returns:
Copy of the QueueManager object.
NOTE: pending writes/deletions are not copied. On the other hand, if the
original object has a frozen timestamp, a copy will have it as well.
"""
result = QueueManager(store=self.data_store, sync=self.sync,
token=self.token)
result.prev_frozen_timestamps = self.prev_frozen_timestamps
result.frozen_timestamp = self.frozen_timestamp
return result
def FreezeTimestamp(self):
"""Freezes the timestamp used for resolve/delete database queries.
Frozen timestamp is used to consistently limit the datastore resolve and
delete queries by time range: from 0 to self.frozen_timestamp. This is
done to avoid possible race conditions, like accidentally deleting
notifications that were written by another process while we were
processing requests.
"""
self.prev_frozen_timestamps.append(self.frozen_timestamp)
self.frozen_timestamp = rdfvalue.RDFDatetime().Now()
def UnfreezeTimestamp(self):
"""Unfreezes the timestamp used for resolve/delete database queries."""
if not self.prev_frozen_timestamps:
raise RuntimeError("Unbalanced UnfreezeTimestamp call.")
self.frozen_timestamp = self.prev_frozen_timestamps.pop()
def __enter__(self):
"""Supports 'with' protocol."""
self.FreezeTimestamp()
return self
def __exit__(self, unused_type, unused_value, unused_traceback):
"""Supports 'with' protocol."""
self.UnfreezeTimestamp()
self.Flush()
def GetFlowResponseSubject(self, session_id, request_id):
"""The subject used to carry all the responses for a specific request_id."""
return session_id.Add("state/request:%08X" % request_id)
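  # E.g. request_id 3 under a session id of aff4:/flows/W:ABCDEF (value
  # hypothetical) yields the subject
  # aff4:/flows/W:ABCDEF/state/request:00000003.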
def DeQueueClientRequest(self, client_id, task_id):
"""Remove the message from the client queue that this request forms."""
# Check this request was actually bound for a client.
if client_id:
client_id = rdf_client.ClientURN(client_id)
self.client_messages_to_delete.setdefault(client_id, []).append(task_id)
def MultiCheckStatus(self, messages):
"""Checks if there is a client status queued for a number of requests."""
subjects = [m.session_id.Add("state") for m in messages]
statuses_found = {}
for subject, result in self.data_store.MultiResolveRegex(
subjects, self.FLOW_STATUS_REGEX,
token=self.token):
for predicate, _, _ in result:
request_nr = int(predicate.split(":")[-1], 16)
statuses_found.setdefault(subject, set()).add(request_nr)
status_available = set()
for m in messages:
if m.request_id in statuses_found.get(m.session_id.Add("state"), set()):
status_available.add(m)
return status_available
def FetchCompletedRequests(self, session_id, timestamp=None):
"""Fetch all the requests with a status message queued for them."""
subject = session_id.Add("state")
requests = {}
status = {}
if timestamp is None:
timestamp = (0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())
for predicate, serialized, _ in self.data_store.ResolveRegex(
subject, [self.FLOW_REQUEST_REGEX, self.FLOW_STATUS_REGEX],
token=self.token, limit=self.request_limit, timestamp=timestamp):
parts = predicate.split(":", 3)
request_id = parts[2]
if parts[1] == "status":
status[request_id] = serialized
else:
requests[request_id] = serialized
for request_id, serialized in sorted(requests.items()):
if request_id in status:
yield (rdf_flows.RequestState(serialized),
rdf_flows.GrrMessage(status[request_id]))
def FetchCompletedResponses(self, session_id, timestamp=None, limit=10000):
"""Fetch only completed requests and responses up to a limit."""
response_subjects = {}
if timestamp is None:
timestamp = (0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())
total_size = 0
for request, status in self.FetchCompletedRequests(
session_id, timestamp=timestamp):
# Make sure at least one response is fetched.
response_subject = self.GetFlowResponseSubject(session_id, request.id)
response_subjects[response_subject] = request
# Quit if there are too many responses.
total_size += status.response_id
if total_size > limit:
break
response_data = dict(self.data_store.MultiResolveRegex(
response_subjects, self.FLOW_RESPONSE_REGEX, token=self.token,
timestamp=timestamp))
for response_urn, request in sorted(response_subjects.items()):
responses = []
for _, serialized, _ in response_data.get(response_urn, []):
responses.append(rdf_flows.GrrMessage(serialized))
yield (request, sorted(responses, key=lambda msg: msg.response_id))
# Indicate to the caller that there are more messages.
if total_size > limit:
raise MoreDataException()
def FetchRequestsAndResponses(self, session_id, timestamp=None):
"""Fetches all outstanding requests and responses for this flow.
We first cache all requests and responses for this flow in memory to
prevent round trips.
Args:
session_id: The session_id to get the requests/responses for.
      timestamp: Tuple (start, end) with a time range. Fetched requests and
        responses will have timestamps in this range.
Yields:
      a tuple (request protobuf, list of response messages) in ascending
      order of request ids.
Raises:
MoreDataException: When there is more data available than read by the
limited query.
"""
subject = session_id.Add("state")
requests = {}
if timestamp is None:
timestamp = (0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())
# Get some requests.
for predicate, serialized, _ in self.data_store.ResolveRegex(
subject, self.FLOW_REQUEST_REGEX, token=self.token,
limit=self.request_limit, timestamp=timestamp):
request_id = predicate.split(":", 1)[1]
requests[str(subject.Add(request_id))] = serialized
# And the responses for them.
response_data = dict(self.data_store.MultiResolveRegex(
requests.keys(), self.FLOW_RESPONSE_REGEX,
limit=self.response_limit, token=self.token,
timestamp=timestamp))
for urn, request_data in sorted(requests.items()):
request = rdf_flows.RequestState(request_data)
responses = []
for _, serialized, _ in response_data.get(urn, []):
responses.append(rdf_flows.GrrMessage(serialized))
yield (request, sorted(responses, key=lambda msg: msg.response_id))
if len(requests) >= self.request_limit:
raise MoreDataException()
def DeleteFlowRequestStates(self, session_id, request_state):
"""Deletes the request and all its responses from the flow state queue."""
queue = self.to_delete.setdefault(session_id.Add("state"), [])
queue.append(self.FLOW_REQUEST_TEMPLATE % request_state.id)
queue.append(self.FLOW_STATUS_TEMPLATE % request_state.id)
if request_state and request_state.HasField("request"):
self.DeQueueClientRequest(request_state.client_id,
request_state.request.task_id)
# Efficiently drop all responses to this request.
response_subject = self.GetFlowResponseSubject(session_id, request_state.id)
self.data_store.DeleteSubject(response_subject, token=self.token)
def DestroyFlowStates(self, session_id):
"""Deletes all states in this flow and dequeues all client messages."""
self.MultiDestroyFlowStates([session_id])
def MultiDestroyFlowStates(self, session_ids):
"""Deletes all states in multiple flows and dequeues all client messages."""
subjects = [session_id.Add("state") for session_id in session_ids]
to_delete = []
for subject, values in self.data_store.MultiResolveRegex(
subjects, self.FLOW_REQUEST_REGEX, token=self.token,
limit=self.request_limit):
for _, serialized, _ in values:
request = rdf_flows.RequestState(serialized)
# Drop all responses to this request.
response_subject = self.GetFlowResponseSubject(request.session_id,
request.id)
to_delete.append(response_subject)
if request.HasField("request"):
# Client request dequeueing is cached so we can call it directly.
self.DeQueueClientRequest(request.client_id, request.request.task_id)
# Mark the request itself for deletion.
to_delete.append(subject)
# Drop them all at once.
self.data_store.DeleteSubjects(to_delete, token=self.token)
def Flush(self):
"""Writes the changes in this object to the datastore."""
session_ids = set(self.to_write) | set(self.to_delete)
for session_id in session_ids:
try:
self.data_store.MultiSet(session_id, self.to_write.get(session_id, {}),
to_delete=self.to_delete.get(session_id, []),
sync=False, token=self.token)
except data_store.Error:
pass
for client_id, messages in self.client_messages_to_delete.iteritems():
self.Delete(client_id.Queue(), messages)
if self.new_client_messages:
for timestamp, messages in utils.GroupBy(
self.new_client_messages, lambda x: x[1]).iteritems():
self.Schedule([x[0] for x in messages], timestamp=timestamp)
# We need to make sure that notifications are written after the requests so
# we flush here and only notify afterwards.
if self.sync and session_ids:
self.data_store.Flush()
for notification, timestamp in self.notifications:
self.NotifyQueue(notification, timestamp=timestamp, sync=False)
if self.sync:
self.data_store.Flush()
self.to_write = {}
self.to_delete = {}
self.client_messages_to_delete = {}
self.notifications = []
self.new_client_messages = []
def QueueResponse(self, session_id, response, timestamp=None):
"""Queues the message on the flow's state."""
if timestamp is None:
timestamp = self.frozen_timestamp
# Status messages cause their requests to be marked as complete. This allows
# us to quickly enumerate all the completed requests - it is essentially an
# index for completed requests.
if response.type == rdf_flows.GrrMessage.Type.STATUS:
subject = session_id.Add("state")
queue = self.to_write.setdefault(subject, {})
queue.setdefault(
self.FLOW_STATUS_TEMPLATE % response.request_id, []).append((
response.SerializeToString(), timestamp))
subject = self.GetFlowResponseSubject(session_id, response.request_id)
queue = self.to_write.setdefault(subject, {})
queue.setdefault(
QueueManager.FLOW_RESPONSE_TEMPLATE % (
response.request_id, response.response_id),
[]).append((response.SerializeToString(), timestamp))
def QueueRequest(self, session_id, request_state, timestamp=None):
if timestamp is None:
timestamp = self.frozen_timestamp
subject = session_id.Add("state")
queue = self.to_write.setdefault(subject, {})
queue.setdefault(
self.FLOW_REQUEST_TEMPLATE % request_state.id, []).append(
(request_state.SerializeToString(), timestamp))
def QueueClientMessage(self, msg, timestamp=None):
if timestamp is None:
timestamp = self.frozen_timestamp
self.new_client_messages.append((msg, timestamp))
def QueueNotification(self, notification=None, timestamp=None, **kw):
"""Queues a notification for a flow."""
if notification is None:
notification = rdf_flows.GrrNotification(**kw)
if notification.session_id:
if timestamp is None:
timestamp = self.frozen_timestamp
self.notifications.append((notification, timestamp))
def _TaskIdToColumn(self, task_id):
"""Return a predicate representing this task."""
return self.TASK_PREDICATE_PREFIX % ("%08d" % task_id)
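  # E.g. task_id 42 maps to the column name "task:00000042".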
def Delete(self, queue, tasks):
"""Removes the tasks from the queue.
Note that tasks can already have been removed. It is not an error
to re-delete an already deleted task.
Args:
queue: A queue to clear.
tasks: A list of tasks to remove. Tasks may be Task() instances
or integers representing the task_id.
"""
if queue:
predicates = []
for task in tasks:
try:
task_id = task.task_id
except AttributeError:
task_id = int(task)
predicates.append(self._TaskIdToColumn(task_id))
data_store.DB.DeleteAttributes(
queue, predicates, token=self.token, sync=False)
def Schedule(self, tasks, sync=False, timestamp=None):
"""Schedule a set of Task() instances."""
if timestamp is None:
timestamp = self.frozen_timestamp
for queue, queued_tasks in utils.GroupBy(
tasks, lambda x: x.queue).iteritems():
if queue:
to_schedule = dict(
[(self._TaskIdToColumn(task.task_id),
[task.SerializeToString()]) for task in queued_tasks])
self.data_store.MultiSet(
queue, to_schedule, timestamp=timestamp, sync=sync,
token=self.token)
def _SortByPriority(self, notifications, queue, output_dict=None):
"""Sort notifications by priority into output_dict."""
if not output_dict:
output_dict = {}
for notification in notifications:
priority = notification.priority
if notification.in_progress:
priority = self.STUCK_PRIORITY
output_dict.setdefault(priority, []).append(notification)
for priority in output_dict:
stats.STATS.SetGaugeValue("notification_queue_count",
len(output_dict[priority]),
fields=[queue.Basename(), str(priority)])
random.shuffle(output_dict[priority])
return output_dict
def GetNotificationsByPriority(self, queue):
"""Retrieves session ids for processing grouped by priority."""
# Check which sessions have new data.
# Read all the sessions that have notifications.
queue_shard = self.GetNotificationShard(queue)
return self._SortByPriority(
self._GetUnsortedNotifications(queue_shard).values(), queue)
def GetNotificationsByPriorityForAllShards(self, queue):
"""Same as GetNotificationsByPriority but for all shards.
Used by worker_test to cover all shards with a single worker.
Args:
queue: usually rdfvalue.RDFURN("aff4:/W")
Returns:
dict of notifications objects keyed by priority.
"""
output_dict = {}
for queue_shard in self.GetAllNotificationShards(queue):
output_dict = self._GetUnsortedNotifications(
queue_shard, notifications_by_session_id=output_dict)
output_dict = self._SortByPriority(output_dict.values(), queue)
return output_dict
def GetNotifications(self, queue):
"""Returns all queue notifications sorted by priority."""
queue_shard = self.GetNotificationShard(queue)
notifications = self._GetUnsortedNotifications(queue_shard).values()
notifications.sort(key=lambda notification: notification.priority,
reverse=True)
return notifications
def GetNotificationsForAllShards(self, queue):
"""Returns notifications for all shards of a queue at once.
Used by test_lib.MockWorker to cover all shards with a single worker.
Args:
queue: usually rdfvalue.RDFURN("aff4:/W")
Returns:
List of rdf_flows.GrrNotification objects
"""
notifications_by_session_id = {}
for queue_shard in self.GetAllNotificationShards(queue):
notifications_by_session_id = self._GetUnsortedNotifications(
queue_shard, notifications_by_session_id=notifications_by_session_id)
notifications = notifications_by_session_id.values()
notifications.sort(key=lambda notification: notification.priority,
reverse=True)
return notifications
def _GetUnsortedNotifications(self, queue_shard,
notifications_by_session_id=None):
"""Returns all the available notifications for a queue_shard.
Args:
queue_shard: urn of queue shard
notifications_by_session_id: store notifications in this dict rather than
creating a new one
Returns:
      dict of notifications, keyed by session id.
"""
if not notifications_by_session_id:
notifications_by_session_id = {}
end_time = self.frozen_timestamp or rdfvalue.RDFDatetime().Now()
for predicate, serialized_notification, ts in data_store.DB.ResolveRegex(
queue_shard, self.NOTIFY_PREDICATE_PREFIX % ".*",
timestamp=(0, end_time),
token=self.token, limit=10000):
# Parse the notification.
try:
notification = rdf_flows.GrrNotification(serialized_notification)
except Exception: # pylint: disable=broad-except
logging.exception("Can't unserialize notification, deleting it: "
"predicate=%s, ts=%d", predicate, ts)
data_store.DB.DeleteAttributes(
queue_shard, [predicate], token=self.token,
# Make the time range narrow, but be sure to include the needed
# notification.
start=ts, end=ts, sync=True)
continue
# Strip the prefix from the predicate to get the session_id.
session_id = predicate[len(self.NOTIFY_PREDICATE_PREFIX % ""):]
notification.session_id = session_id
notification.timestamp = ts
existing = notifications_by_session_id.get(notification.session_id)
if existing:
# If we have a notification for this session_id already, we only store
# the one that was scheduled last.
if notification.first_queued > existing.first_queued:
notifications_by_session_id[notification.session_id] = notification
else:
notifications_by_session_id[notification.session_id] = notification
return notifications_by_session_id
def NotifyQueue(self, notification, **kwargs):
"""This signals that there are new messages available in a queue."""
self._MultiNotifyQueue(notification.session_id.Queue(), [notification],
**kwargs)
def MultiNotifyQueue(self, notifications, timestamp=None, sync=True):
"""This is the same as NotifyQueue but for several session_ids at once.
Args:
notifications: A list of notifications.
timestamp: An optional timestamp for this notification.
sync: If True, sync to the data_store immediately.
Raises:
RuntimeError: An invalid session_id was passed.
"""
extract_queue = lambda notification: notification.session_id.Queue()
for queue, notifications in utils.GroupBy(
notifications, extract_queue).iteritems():
self._MultiNotifyQueue(
queue, notifications, timestamp=timestamp, sync=sync)
def _MultiNotifyQueue(self, queue, notifications, timestamp=None, sync=True):
"""Does the actual queuing."""
serialized_notifications = {}
now = rdfvalue.RDFDatetime().Now()
expiry_time = config_lib.CONFIG["Worker.notification_expiry_time"]
for notification in notifications:
if not notification.first_queued:
notification.first_queued = (self.frozen_timestamp or
rdfvalue.RDFDatetime().Now())
else:
diff = now - notification.first_queued
if diff.seconds >= expiry_time:
# This notification has been around for too long, we drop it.
logging.debug("Dropping notification: %s", str(notification))
continue
session_id = notification.session_id
# Don't serialize session ids to save some bytes.
notification.session_id = None
notification.timestamp = None
serialized_notifications[session_id] = notification.SerializeToString()
data_store.DB.MultiSet(
self.GetNotificationShard(queue),
dict([(self.NOTIFY_PREDICATE_PREFIX % session_id,
[(data, timestamp)])
for session_id, data in serialized_notifications.iteritems()]),
sync=sync, replace=False, token=self.token)
def DeleteNotification(self, session_id, start=None, end=None):
"""This deletes the notification when all messages have been processed."""
if not isinstance(session_id, rdfvalue.SessionID):
raise RuntimeError(
"Can only delete notifications for rdfvalue.SessionIDs.")
if start is None:
start = 0
else:
start = int(start)
if end is None:
end = self.frozen_timestamp or rdfvalue.RDFDatetime().Now()
for queue_shard in self.GetAllNotificationShards(session_id.Queue()):
data_store.DB.DeleteAttributes(
queue_shard, [self.NOTIFY_PREDICATE_PREFIX % session_id],
token=self.token, start=start, end=end, sync=True)
def Query(self, queue, limit=1, task_id=None):
"""Retrieves tasks from a queue without leasing them.
This is good for a read only snapshot of the tasks.
Args:
queue: The task queue that this task belongs to, usually client.Queue()
where client is the ClientURN object you want to schedule msgs on.
limit: Number of values to fetch.
task_id: If an id is provided we only query for this id.
Returns:
A list of Task() objects.
"""
# This function is usually used for manual testing so we also accept client
# ids and get the queue from it.
if isinstance(queue, rdf_client.ClientURN):
queue = queue.Queue()
if task_id is None:
regex = self.TASK_PREDICATE_PREFIX % ".*"
else:
regex = utils.SmartStr(task_id)
all_tasks = []
for _, serialized, ts in self.data_store.ResolveRegex(
queue, regex, timestamp=self.data_store.ALL_TIMESTAMPS,
token=self.token):
task = rdf_flows.GrrMessage(serialized)
task.eta = ts
all_tasks.append(task)
# Sort the tasks in order of priority.
all_tasks.sort(key=lambda task: task.priority, reverse=True)
return all_tasks[:limit]
def DropQueue(self, queue):
"""Deletes a queue - all tasks will be lost."""
data_store.DB.DeleteSubject(queue, token=self.token)
def QueryAndOwn(self, queue, lease_seconds=10, limit=1):
"""Returns a list of Tasks leased for a certain time.
Args:
queue: The queue to query from.
lease_seconds: The tasks will be leased for this long.
limit: Number of values to fetch.
Returns:
A list of GrrMessage() objects leased.
"""
user = ""
if self.token:
user = self.token.username
# Do the real work in a transaction
try:
res = self.data_store.RetryWrapper(
queue, self._QueryAndOwn, lease_seconds=lease_seconds, limit=limit,
token=self.token, user=user)
return res
except data_store.TransactionError:
# This exception just means that we could not obtain the lock on the queue
# so we just return an empty list, let the worker sleep and come back to
# fetch more tasks.
return []
except data_store.Error as e:
logging.warning("Datastore exception: %s", e)
return []
def _QueryAndOwn(self, transaction, lease_seconds=100,
limit=1, user=""):
"""Does the real work of self.QueryAndOwn()."""
tasks = []
lease = long(lease_seconds * 1e6)
ttl_exceeded_count = 0
# Only grab attributes with timestamps in the past.
for predicate, task, timestamp in transaction.ResolveRegex(
self.TASK_PREDICATE_PREFIX % ".*",
timestamp=(0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now())):
task = rdf_flows.GrrMessage(task)
task.eta = timestamp
task.last_lease = "%s@%s:%d" % (user,
socket.gethostname(),
os.getpid())
# Decrement the ttl
task.task_ttl -= 1
if task.task_ttl <= 0:
# Remove the task if ttl is exhausted.
transaction.DeleteAttribute(predicate)
ttl_exceeded_count += 1
stats.STATS.IncrementCounter("grr_task_ttl_expired_count")
else:
if task.task_ttl != rdf_flows.GrrMessage.max_ttl - 1:
stats.STATS.IncrementCounter("grr_task_retransmission_count")
# Update the timestamp on the value to be in the future
transaction.Set(predicate, task.SerializeToString(), replace=True,
timestamp=long(time.time() * 1e6) + lease)
tasks.append(task)
if len(tasks) >= limit:
break
if ttl_exceeded_count:
logging.info("TTL exceeded for %d messages on queue %s",
ttl_exceeded_count, transaction.subject)
return tasks
class WellKnownQueueManager(QueueManager):
"""A flow manager for well known flows."""
response_limit = 10000
def DeleteWellKnownFlowResponses(self, session_id, responses):
"""Deletes given responses from the flow state queue."""
subject = session_id.Add("state/request:00000000")
predicates = []
for response in responses:
predicates.append(QueueManager.FLOW_RESPONSE_TEMPLATE % (
response.request_id, response.response_id))
data_store.DB.DeleteAttributes(
subject, predicates, sync=True, start=0, token=self.token)
def FetchRequestsAndResponses(self, session_id):
"""Well known flows do not have real requests.
This manages retrieving all the responses without requiring corresponding
requests.
Args:
session_id: The session_id to get the requests/responses for.
Yields:
A tuple of request (None) and responses.
"""
subject = session_id.Add("state/request:00000000")
# Get some requests
for _, serialized, _ in sorted(self.data_store.ResolveRegex(
subject, self.FLOW_RESPONSE_REGEX, token=self.token,
limit=self.response_limit,
timestamp=(0, self.frozen_timestamp or rdfvalue.RDFDatetime().Now()))):
# The predicate format is flow:response:REQUEST_ID:RESPONSE_ID. For well
# known flows both request_id and response_id are randomized.
response = rdf_flows.GrrMessage(serialized)
yield rdf_flows.RequestState(id=0), [response]
class QueueManagerInit(registry.InitHook):
"""Registers vars used by the QueueManager."""
pre = ["StatsInit"]
def Run(self):
# Counters used by the QueueManager.
stats.STATS.RegisterCounterMetric("grr_task_retransmission_count")
stats.STATS.RegisterCounterMetric("grr_task_ttl_expired_count")
stats.STATS.RegisterGaugeMetric("notification_queue_count", int,
fields=[("queue_name", str),
("priority", str)])
| apache-2.0 | -6,282,211,440,012,330,000 | 35.714118 | 80 | 0.673118 | false | 4.130642 | false | false | false |
OpServ-Monitoring/opserv-backend | test/gathering_tests/test_measuring.py | 1 | 5682 | import logging
import sys
import time
import pytest
from test_general import start_gather_thread, mock_db_open
import misc.queue_manager as queue_manager
from misc.constants import implemented_hardware, HARDWARE_DEFAULTS, SYSTEM_METRICS_TO_COMPS
DATA_TEST_TIMEOUT = 5
log = logging.getLogger("opserv.test")
log.setLevel(logging.DEBUG)
# Test system gathering more specifically, e.g. check results for fitting structure
def test_system_gathering():
mock_db_open()
with start_gather_thread() as t:
pass
return
def test_all_components():
'''
    Tests all components that don't require an argument.
'''
mock_db_open()
with start_gather_thread() as t:
check_all_hardware()
return
def test_gathering_delete():
'''
Sets a gathering rate and then deletes it
'''
test_comp = "cpu"
test_metric = "usage"
mock_db_open()
with start_gather_thread() as t:
queue_manager.set_gathering_rate(test_comp, test_metric, 500)
time.sleep(1)
queue_manager.set_gathering_rate(test_comp, test_metric, 0)
# Add some extra sleep to ensure no function is still inserting data into the queue
time.sleep(0.5)
# Empty the whole queue
while queue_manager.read_measurement_from_queue(test_comp, test_metric) is not None:
pass
time.sleep(2)
# Queue Should still be empty
assert queue_manager.real_time_queue_empty(test_comp, test_metric)
return
def check_all_hardware():
'''
    Sends a data request to the gathering backend and immediately checks for
    a response. Each wait for a response times out after DATA_TEST_TIMEOUT
    seconds.
'''
# For each hardware in the hardware list
for hw in implemented_hardware:
        # For each metric of that specific hardware
for met in implemented_hardware[hw]:
if HARDWARE_DEFAULTS[hw][0] and HARDWARE_DEFAULTS[hw][1] is not None:
queue_manager.request_data_queue.put({"component": hw, "metric": met,
"args": HARDWARE_DEFAULTS[hw][1]})
queue_manager.get_queue(hw, met,
HARDWARE_DEFAULTS[hw][1]).get(timeout=DATA_TEST_TIMEOUT)
elif not HARDWARE_DEFAULTS[hw][0]:
queue_manager.request_data_queue.put({"component": hw, "metric": met})
queue_manager.get_queue(hw, met).get(timeout=DATA_TEST_TIMEOUT)
# Check that system gathering is always a list
def test_system_is_list():
'''
    Test that system gathering data is always of list type
'''
mock_db_open()
with start_gather_thread() as t:
for metric in implemented_hardware["system"]:
queue_manager.request_data("system", metric)
            return_type = type(queue_manager.read_measurement_from_queue(
                "system", metric, blocking=True)["value"])
            assert return_type is list
return
@pytest.mark.skipif(sys.platform != 'win32',
                    reason="only runs on windows")
def test_ohm():
from gathering.measuring.ohm_source import OHMSource
ohm = OHMSource()
if ohm.can_measure("cpu", "temperature"):
newTemp = ohm.get_measurement("cpu", "temperature", "0")
ohm.deinit()
def test_advanced_all_components():
'''
    Similar to the test of all components, but here all the arguments are
    gathered as well and are used to really test all the available hardware
'''
    # Get available args for each component
    # Request data for each comp/arg/metric; only check one process
    # Wait for a queue entry for each combo
SYSTEM_DATA_TIMEOUT = 6
mock_db_open()
with start_gather_thread() as t:
available_args = {}
for component in implemented_hardware["system"]:
queue_manager.request_data("system", component)
new_args = queue_manager.read_measurement_from_queue("system", component,
None, True, SYSTEM_DATA_TIMEOUT)
available_args[SYSTEM_METRICS_TO_COMPS[component]] = new_args["value"]
# Specifically add memory
available_args["memory"] = [None]
# For each component in the system
for comp in available_args:
# For each possible argument in the
for i, arg in enumerate(available_args[comp]):
            # For the process component, only check the entry at index 3
if not (comp == "process" and i != 3):
for metric in implemented_hardware[comp]:
queue_manager.request_data(comp, metric, arg)
result = queue_manager.read_measurement_from_queue(comp, metric, arg, True,
SYSTEM_DATA_TIMEOUT)
log.info("result: %s", result)
def test_psutil_network():
'''
    Tests the psutil measuring source directly.
    Currently covers only the network measures.
'''
from gathering.measuring.psutil_source import PsUtilWrap
ps = PsUtilWrap()
all_netif = ps.get_measurement("system", "networks", None)
for netif in all_netif:
log.info(ps.get_measurement("network", "receivepersec", netif))
log.info(ps.get_measurement("network", "transmitpersec", netif))
log.info(ps.get_measurement("network", "info", netif))
time.sleep(0.5)
# Get System Data, and test everything ADVANCED
# Test measuring wrong metric, component or argument
| gpl-3.0 | -4,369,075,741,567,289,000 | 35.423077 | 99 | 0.611228 | false | 4.135371 | true | false | false |
ananth95/ananth95.github.io-simQuad | sim/simquad.py | 2 | 1069 | import matplotlib.pyplot as plt
import time
import numpy as np
import quadcopter
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.animation as animation
#don't try to understand these imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
# NP.ARRAYS are NOT Matrices. #
# Always print your array operation results to #
# check result with expected dimensionality and #
# values. #
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
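#quick example of why: for a = np.array([1,2]) and b = np.array([3,4]),
#a*b is element-wise -> array([3, 8]), while np.dot(a,b) -> 11.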
#create the figure object, this will hold subplots, which will hold (multiple) axes.
fig = plt.figure()
#add an axis to first subplot (111) of the fig-object
ax = fig.add_subplot(111, projection='3d')
#set limits. Refer to the todo.md for a cautionary note on limits
#IF LIMITS SET, THEN PAN AND ZOOM FAIL.
ax.set_xlim3d(-1.3,1.3)
ax.set_ylim3d(-1.3,1.3)
ax.set_zlim3d(-1.3,1.3)
quad = quadcopter.copter(ax, False) #false => NO tracking
#make the animation object
quad_anim = animation.FuncAnimation(fig, quad.update_ends, interval=15, blit=True)
plt.show() | gpl-2.0 | -6,147,418,515,710,489,000 | 35.896552 | 84 | 0.638915 | false | 3.382911 | false | false | false |
Naeka/cmsplugin-dailymotion | cmsplugin_dailymotion/models.py | 1 | 1767 | import os
import re
import urllib
from django.db import models
from django.core.validators import RegexValidator
from django.utils.translation import ugettext_lazy as _
from cms.models import CMSPlugin
class DailymotionViewer(CMSPlugin):
"""
Plugin for embedding a Dailymotion video.
"""
video_src = models.URLField(_('video address'))
width = models.CharField(_('width'), max_length=6, default='480px', validators=[RegexValidator(r'\d+(px|\%)')], help_text=_('Width in pixels or percent'))
height = models.CharField(_('height'), max_length=6, default='270px', validators=[RegexValidator(r'\d+(px|\%)')], help_text=_('Height in pixels or percent'))
allow_fullscreen = models.BooleanField(_('allow fullscreen'), default=True)
start_at = models.PositiveIntegerField(_('start at'), blank=True, null=True, help_text=_('Start delay in seconds'))
auto_start = models.BooleanField(_('auto start'), blank=True, default=False)
@property
def src(self):
kwargs = dict()
if self.start_at:
kwargs['start'] = self.start_at
if self.auto_start:
kwargs['autoPlay'] = 1
base_url = self.get_base_url()
return '{0}{1}'.format(base_url, '?{0}'.format(urllib.urlencode(kwargs)) if kwargs else '')
def get_base_url(self):
short = re.findall(r'://dai\.ly/([a-zA-Z0-9]+)', self.video_src)
if short:
return 'http://www.dailymotion.com/embed/video/{0}'.format(short[0])
classic = re.findall(r'dailymotion.com/video/([a-zA-Z0-9]+)?', self.video_src)
if classic:
return 'http://www.dailymotion.com/embed/video/{0}'.format(classic[0])
return self.video_src
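    # Example of the normalisation above (the video id is hypothetical):
    # both "http://dai.ly/x26m1j4" and
    # "http://www.dailymotion.com/video/x26m1j4_some-title" resolve to
    # "http://www.dailymotion.com/embed/video/x26m1j4".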
def __unicode__(self):
return self.video_src
| mit | -1,962,434,410,417,595,000 | 40.093023 | 161 | 0.642332 | false | 3.650826 | false | false | false |
Alwnikrotikz/opencamlib | scripts/ocl_stlsurf_polydata.py | 8 | 1228 | import ocl
import camvtk
import time
if __name__ == "__main__":
p = ocl.Point()
p.x=7
p.y=8
p.z=-3
print p
q = ocl.Point(1,2,3)
r = p + q
t = ocl.Triangle(p,q,r)
print t
s= ocl.STLSurf()
print s
s.addTriangle(t)
s.addTriangle(t)
print s
print "end."
myscreen = camvtk.VTKScreen()
print "screen created"
stl = camvtk.STLSurf("../stl/sphere.stl")
print "STL surface read"
myscreen.addActor(stl)
b = stl.src.GetOutput()
print b
print "Verts:",b.GetNumberOfVerts()
print "Cells:",b.GetNumberOfCells()
print "Lines:",b.GetNumberOfLines()
print "Polys:",b.GetNumberOfPolys()
print "Strips:",b.GetNumberOfStrips()
c = b.GetCell(0)
print c
print "Points:",c.GetNumberOfPoints()
print "Edges:",c.GetNumberOfEdges()
print "Faces:",c.GetNumberOfFaces()
ps = c.GetPoints()
print ps
n=ps.GetNumberOfPoints()
print "Nr of Points:",n
for id in range(0,n):
print id,"=",
print ps.GetPoint(id)
myscreen.addActor( camvtk.Sphere(radius=0.5,center=ps.GetPoint(id)) )
myscreen.render()
myscreen.iren.Start()
#raw_input("Press Enter to terminate")
| gpl-3.0 | 1,589,877,246,728,596,000 | 22.615385 | 77 | 0.594463 | false | 3.002445 | false | false | false |
Hack4Eugene/Hack4Cause2016 | src/SERT/ParcadeArcade/game-element/gameElement.py | 1 | 2182 | # -*- coding: utf-8 -*-
"""
Created on Sun Feb 14 08:28:34 2016
@author: South Eugene Robotics Team
ParcadeArcade gameElement
This is intended to encapsulate the general case of a game element.
"""
import grovepi
import lcd
import requests
import json
import mote
import socket
import fcntl
import struct
from flask import Flask, request
# create a mote object to track the device-specific state
mote = mote.Mote( "Name", "Description", "10.0.0.1" )
# the device is configured below, once configure() has been defined
# create the device specific I/O
greenLed = 2
grovepi.pinMode( greenLed, "OUTPUT" )
button1 = 3
grovepi.pinMode( button1, "INPUT" )
# call game()
# create a web listener hook (the server itself is started at the bottom of
# this file, after all routes have been registered)
app = Flask(__name__)
# configure this device by reading its config.ini and registering it (and
# its capabilities) with the listener service
def configure(mote):
    mote.loadConfig()
    url = 'http://andrew.local:1337/add_listener'
    header = {'content-type': 'application/json'}
    foo = requests.post(url, params=mote.toDict(), headers=header)
    rslt = json.loads(foo.text)
    id = rslt["response"]["id"]
    mote.id = id
    for ob in mote.capabilities:
        ob.moteId = id
    # send a test signal
    #grovepi.digitalWrite( greenLed, 0 )
    addCapUrl = 'http://andrew.local:1337/add_capability'
    clist = [requests.post(addCapUrl, params=ob.toDict(), headers=header)
             for ob in mote.capabilities]
    print(mote.id)
    print(mote.name)
    print(mote.description)
    lcd.settext(mote.name)
    return id
myId = configure(mote)
@app.route("/set", methods=['POST'])
def respond():
port = request.args["port"]
value = request.args["value"]
ioType = request.args["ioType"]
print( 'port: ' + port )
print( 'value: ' + value )
print( 'ioType: ' + ioType )
grovepi.digitalWrite( int(port), int(value) )
# ToDo: validate that this capability exists
return "Success\n"
# hack to get this device's ip address
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
    )[20:24])
if __name__ == "__main__":
    # start the web listener only after all routes have been registered
    app.run(host='0.0.0.0') | mit | -7,986,441,928,178,751,000 | 22.223404 | 109 | 0.652154 | false | 3.199413 | false | false | false |
frgomes/copperhead | copperhead/compiler/__init__.py | 5 | 1312 | #
# Copyright 2012 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import binarygenerator
import parsetypes
import passes
import pltools
import pyast
import rewrites
import typeinference
import unifier
import utility
import visitor
import imp as _imp
import os as _os
import glob as _glob
_cur_dir, _cur_file = _os.path.split(__file__)
def _find_module(name):
_ext_poss = [ path for path in _glob.glob(_os.path.join(_cur_dir, name+'*')) if _os.path.splitext(path)[1] in ['.so', '.dll'] ]
if len(_ext_poss) != 1:
raise ImportError(name)
return _imp.load_dynamic(name, _ext_poss[0])
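# e.g. on Linux, _find_module("backendcompiler") globs for
# <package dir>/backendcompiler* and dynamically loads the single .so
# (or .dll on Windows) match via imp.load_dynamic.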
backendcompiler = _find_module('backendcompiler')
backendsyntax = _find_module('backendsyntax')
backendtypes = _find_module('backendtypes')
import conversions
| apache-2.0 | -6,850,180,489,521,163,000 | 31 | 131 | 0.721037 | false | 3.526882 | false | false | false |
xpansa/pmis | purchase_stock_analytic/purchase.py | 2 | 1448 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Eficent (<http://www.eficent.com/>)
# Jordi Ballester Alomar <jordi.ballester@eficent.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class purchase_order(osv.osv):
_inherit = "purchase.order"
def _prepare_order_line_move(self, cr, uid, order, order_line, picking_id, group_id, context=None):
res = super(purchase_order, self)._prepare_order_line_move(
cr, uid, order, order_line, picking_id, group_id, context=context)
# res['analytic_account_id'] = order_line.account_analytic_id.id
return res
| agpl-3.0 | 6,970,221,989,417,657,000 | 42.878788 | 103 | 0.616713 | false | 4 | false | false | false |
jeromecc/doctoctocbot | src/display/migrations/0001_initial.py | 1 | 1592 | # Generated by Django 2.0.8 on 2018-12-18 07:24
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='WebTweet',
fields=[
('statusid', models.BigIntegerField(primary_key=True, serialize=False, unique=True)),
('conversationid', models.BigIntegerField()),
('userid', models.BigIntegerField()),
('username', models.CharField(max_length=15)),
('name', models.CharField(max_length=50)),
('time', models.PositiveIntegerField(help_text='time in seconds since epoch')),
('html', models.TextField()),
('text', models.TextField()),
('reply', models.PositiveIntegerField(null=True)),
('like', models.PositiveIntegerField(null=True)),
('retweet', models.PositiveIntegerField(null=True)),
('parentid', models.BigIntegerField(null=True)),
('rtl', models.BooleanField(help_text='right to left')),
('image0', models.URLField(null=True)),
('image1', models.URLField(null=True)),
('image2', models.URLField(null=True)),
('image3', models.URLField(null=True)),
('avatar_mini', models.URLField(null=True)),
('avatar_normal', models.URLField(null=True)),
('avatar_bigger', models.URLField(null=True)),
],
),
]
| mpl-2.0 | 7,850,544,495,661,705,000 | 39.820513 | 101 | 0.547739 | false | 4.710059 | false | false | false |
lightbase/WSCServer | wscserver/model/Rede.py | 1 | 6744 | from pyramid_restler.model import SQLAlchemyORMContext
from sqlalchemy import Table
from sqlalchemy.orm import mapper
from sqlalchemy.schema import Column
from sqlalchemy.types import *
from wscserver.model import Base, session
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
class Rede():
"""
    Class that defines the 'rede' table
"""
__tablename__ = 'rede'
id_rede = Column(Integer, primary_key=True, nullable=False)
id_local = Column(Integer)
id_servidor_autenticacao = Column(Integer)
te_ip_rede = Column(String(15), nullable=False)
nm_rede = Column(String(100))
te_observacao = Column(String(100))
nm_pessoa_contato1 = Column(String(50))
nm_pessoa_contato2 = Column(String(50))
nu_telefone1 = Column(String(11))
te_email_contato2 = Column(String(50))
nu_telefone2 = Column(String(11))
te_email_contato1 = Column(String(50))
te_serv_cacic = Column(String(60), nullable=False)
te_serv_updates = Column(String(60), nullable=False)
te_path_serv_updates = Column(String(255))
nm_usuario_login_serv_updates = Column(String(20))
te_senha_login_serv_updates = Column(String(20))
nu_porta_serv_updates = Column(String(4))
te_mascara_rede = Column(String(15))
dt_verifica_updates = Column(DateTime)
nm_usuario_login_serv_updates_gerente = Column(String(20))
te_senha_login_serv_updates_gerente = Column(String(20))
nu_limite_ftp = Column(Integer, nullable=False)
cs_permitir_desativar_srcacic = Column(String(1), nullable=False)
te_debugging = Column(String)
dt_debug = Column(String(8))
def __init__(self, id_rede, id_local, id_servidor_autenticacao,
te_ip_rede, nm_rede, te_observacao, nm_pessoa_contato1,
nm_pessoa_contato2, nu_telefone1, te_email_contato2,
nu_telefone2, te_email_contato1, te_serv_cacic,
te_serv_updates, te_path_serv_updates,
nm_usuario_login_serv_updates, te_senha_login_serv_updates,
nu_porta_serv_updates, te_mascara_rede, dt_verifica_updates,
nm_usuario_login_serv_updates_gerente,
te_senha_login_serv_updates_gerente,
nu_limite_ftp, cs_permitir_desativar_srcacic, te_debugging,
dt_debug):
"""
Metodo que chama as colunas
"""
self.id_rede = id_rede
self.id_local = id_local
self.id_servidor_autenticacao = id_servidor_autenticacao
self.te_ip_rede = te_ip_rede
self.nm_rede = nm_rede
self.te_observacao = te_observacao
self.nm_pessoa_contato1 = nm_pessoa_contato1
self.nm_pessoa_contato2 = nm_pessoa_contato2
self.nu_telefone1 = nu_telefone1
self.te_email_contato2 = te_email_contato2
self.nu_telefone2 = nu_telefone2
self.te_email_contato1 = te_email_contato1
self.te_serv_cacic = te_serv_cacic
self.te_serv_updates = te_serv_updates
self.te_path_serv_updates = te_path_serv_updates
self.nm_usuario_login_serv_updates = nm_usuario_login_serv_updates
self.te_senha_login_serv_updates = te_senha_login_serv_updates
self.nu_porta_serv_updates = nu_porta_serv_updates
self.te_mascara_rede = te_mascara_rede
self.dt_verifica_updates = dt_verifica_updates
self.nm_usuario_login_serv_updates_gerente = nm_usuario_login_serv_updates_gerente
self.te_senha_login_serv_updates_gerente = te_senha_login_serv_updates_gerente
self.nu_limite_ftp = nu_limite_ftp
self.cs_permitir_desativar_srcacic = cs_permitir_desativar_srcacic
self.te_debugging = te_debugging
self.dt_debug = dt_debug
def __repr__(self):
"""
Metodo que passa a lista de parametros da classe
"""
return "<Rede('%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,\
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)>" %\
(self.id_rede,
self.id_local,
self.id_servidor_autenticacao,
self.te_ip_rede,
self.nm_rede,
self.te_observacao,
self.nm_pessoa_contato1,
self.nm_pessoa_contato2,
self.nu_telefone1,
self.te_email_contato2,
self.nu_telefone2,
self.te_email_contato1,
self.te_serv_cacic,
self.te_serv_updates,
self.te_path_serv_updates,
self.nm_usuario_login_serv_updates,
self.te_senha_login_serv_updates,
self.nu_porta_serv_updates,
self.te_mascara_rede,
self.dt_verifica_updates,
self.nm_usuario_login_serv_updates_gerente,
self.te_senha_login_serv_updates_gerente,
self.nu_limite_ftp,
self.cs_permitir_desativar_srcacic,
self.te_debugging,
self.dt_debug
)
class RedeContextFactory(SQLAlchemyORMContext):
entity = Rede
def session_factory(self):
return session
rede = Table('rede', Base.metadata,
Column('id_rede', Integer, primary_key=True, nullable=False),
Column('id_local', Integer),
Column('id_servidor_autenticacao', Integer),
Column('te_ip_rede', String(15), nullable=False),
Column('nm_rede', String(100)),
Column('te_observacao', String(100)),
Column('nm_pessoa_contato1', String(50)),
             Column('nm_pessoa_contato2', String(50)),
             # restored: the class defines and sets nu_telefone1, but the
             # column was missing from this table definition
             Column('nu_telefone1', String(11)),
             Column('te_email_contato2', String(50)),
Column('nu_telefone2', String(11)),
Column('te_email_contato1', String(50)),
Column('te_serv_cacic', String(60), nullable=False),
Column('te_serv_updates', String(60), nullable=False),
Column('te_path_serv_updates', String(255)),
Column('nm_usuario_login_serv_updates', String(20)),
Column('te_senha_login_serv_updates', String(20)),
Column('nu_porta_serv_updates', String(4)),
Column('te_mascara_rede', String(15)),
Column('dt_verifica_updates', DateTime),
Column('nm_usuario_login_serv_updates_gerente', String(20)),
Column('te_senha_login_serv_updates_gerente', String(20)),
Column('nu_limite_ftp', Integer, nullable=False),
Column('cs_permitir_desativar_srcacic', String(1),
nullable=False),
Column('te_debugging', String),
Column('dt_debug', String(8)),
extend_existing=True
)
mapper(Rede, rede)
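# Example usage (illustrative; relies on the `session` imported from
# wscserver.model above being bound to a configured database):
#
#   rede = session.query(Rede).filter_by(te_ip_rede='10.0.0.0').first()
#   if rede is not None:
#       print(rede.nm_rede)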
| gpl-2.0 | 4,116,666,593,917,616,600 | 41.415094 | 90 | 0.604389 | false | 3.169173 | false | false | false |
enthought/uchicago-pyanno | pyanno/ui/main.py | 1 | 1028 | # Copyright (c) 2011, Enthought, Ltd.
# Author: Pietro Berkes <pberkes@enthought.com>
# License: Modified BSD license (2-clause)
"""Entry point for pyanno UI application.
At present, the application is based on the wx backend of the traitsui library.
It also supports 2 screen formats:
* for large displays (larger than 1300x850), the main window will be
1300 x 850 pixels large
* for small displays it will be 1024x768
"""
from traits.etsconfig.api import ETSConfig
ETSConfig.toolkit = 'wx'
import pyanno.ui.appbase.wx_utils as wx_utils
wx_utils.set_background_color_to_white()
from pyanno.ui.pyanno_ui_application import pyanno_application
import numpy
import logging
def main():
"""Create and start the application."""
# deactivate warnings for operations like log(0.) and log(-inf), which
# are handled correctly by pyanno
numpy.seterr(divide='ignore', invalid='ignore')
with pyanno_application(logging_level=logging.INFO) as app:
app.open()
if __name__ == '__main__':
main()
| bsd-2-clause | -5,457,398,681,339,882,000 | 24.7 | 79 | 0.725681 | false | 3.484746 | false | false | false |
scemama/irpf90 | src/init.py | 1 | 2443 | #!/usr/bin/env python2
# IRPF90 is a Fortran90 preprocessor written in Python for programming using
# the Implicit Reference to Parameters (IRP) method.
# Copyright (C) 2009 Anthony SCEMAMA
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Anthony Scemama
# LCPQ - IRSAMC - CNRS
# Universite Paul Sabatier
# 118, route de Narbonne
# 31062 Toulouse Cedex 4
# scemama@irsamc.ups-tlse.fr
import os
import util
import makefile
import irpf90_t
from command_line import command_line
initialized = False
def init():
global initialized
if initialized:
return
# Create directories
for dir in [ irpf90_t.irpdir, irpf90_t.mandir ]:
try:
wd = os.getcwd()
os.chdir(dir)
os.chdir(wd)
except OSError:
os.mkdir(dir)
for dir in command_line.include_dir:
dir = irpf90_t.irpdir+dir
try:
wd = os.getcwd()
os.chdir(dir)
os.chdir(wd)
except OSError:
os.mkdir(dir)
# Create makefile
makefile.create()
# Copy current files in the irpdir
for dir in ['./']+command_line.include_dir:
try:
os.stat(dir)
    except OSError:
      print dir,'does not exist'
      continue
for filename in os.listdir(dir):
filename = dir+filename
if not filename.startswith(".") and not os.path.isdir(filename):
try:
file = open(filename,"r")
except IOError:
if command_line.do_warnings:
print "Warning : Unable to read file %s."%(filename)
else:
buffer = file.read()
file.close()
if not util.same_file(irpf90_t.irpdir+filename,buffer):
file = open(irpf90_t.irpdir+filename,"w")
file.write(buffer)
file.close()
initialized = True
| gpl-2.0 | -3,043,614,512,485,690,000 | 26.449438 | 78 | 0.653704 | false | 3.520173 | false | false | false |
kamyu104/LeetCode | Python/network-delay-time.py | 2 | 1728 | # Time: O((|E| + |V|) * log|V|) = O(|E| * log|V|) by using binary heap,
#        if we further use a Fibonacci heap, it would be O(|E| + |V| * log|V|)
# Space: O(|E| + |V|) = O(|E|)
# There are N network nodes, labelled 1 to N.
#
# Given times, a list of travel times as directed edges times[i] = (u, v, w),
# where u is the source node, v is the target node,
# and w is the time it takes for a signal to travel from source to target.
#
# Now, we send a signal from a certain node K.
# How long will it take for all nodes to receive the signal? If it is impossible, return -1.
#
# Note:
# - N will be in the range [1, 100].
# - K will be in the range [1, N].
# - The length of times will be in the range [1, 6000].
# - All edges times[i] = (u, v, w) will have 1 <= u, v <= N and 1 <= w <= 100.
import collections
import heapq
# Dijkstra's algorithm
class Solution(object):
def networkDelayTime(self, times, N, K):
"""
:type times: List[List[int]]
:type N: int
:type K: int
:rtype: int
"""
adj = [[] for _ in xrange(N)]
for u, v, w in times:
adj[u-1].append((v-1, w))
result = 0
lookup = set()
best = collections.defaultdict(lambda: float("inf"))
min_heap = [(0, K-1)]
while min_heap and len(lookup) != N:
result, u = heapq.heappop(min_heap)
lookup.add(u)
if best[u] < result:
continue
for v, w in adj[u]:
if v in lookup: continue
if result+w < best[v]:
best[v] = result+w
heapq.heappush(min_heap, (result+w, v))
return result if len(lookup) == N else -1
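if __name__ == "__main__":
    # Quick self-check with the example graph from the problem statement:
    # a signal sent from node 2 reaches nodes 1, 3 and 4 within 2 time units.
    print(Solution().networkDelayTime([[2, 1, 1], [2, 3, 1], [3, 4, 1]], 4, 2))  # 2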
| mit | -489,586,891,004,168,770 | 33.56 | 92 | 0.539352 | false | 3.141818 | false | false | false |
jgardner1/JGRPGTools | jgrpg/model/Race.py | 1 | 3479 | from PyQt5.QtCore import QObject, pyqtSignal
from .ObjectStore import ObjectStore, ObjectStoreObject
from math import sqrt
# normalvariate(mu, sigma) is assumed to be the `normal` intended by
# generate_height_weight below (it was referenced but never imported).
from random import choice, uniform, normalvariate as normal
class Race(ObjectStoreObject):
"""A race is a type of creature."""
def update(self, *,
id=None,
name="",
male_names=[],
female_names=[],
family_names=[],
attribute_modifiers={},
# Avg 95%
               height=[ 65.0,  9.5],  # inches
weight=[160.0, 85.0], # lbs
m_f_ratio=1.0
):
self.male_names = male_names
self.female_names = female_names
self.family_names = family_names
self.attribute_modifiers = attribute_modifiers
self.height = height
self.weight = weight
self.m_f_ratio = m_f_ratio
super(Race, self).update(id=id, name=name)
def data(self):
data = super(Race, self).data()
data.update({
"male_names": self.male_names,
"female_names": self.female_names,
"family_names": self.family_names,
"attribute_modifiers": self.attribute_modifiers,
"height": self.height,
"weight": self.weight,
"m_f_ratio": self.m_f_ratio
})
return data
def generate_name(self, *, male=False, female=False):
first_names = None
if male:
first_names = self.male_names
elif female:
first_names = self.female_names
else:
first_names = self.male_names + self.female_names
name = "{} {}".format(
self.choose_name(first_names),
self.choose_name(self.family_names))
return name.title()
@staticmethod
def choose_name(names):
if not names:
return "Fred"
# Sort the names
prefixes = []
suffixes = []
whole_names = []
for name in names:
if name.startswith('-'):
suffixes.append(name[1:])
elif name.endswith('-'):
prefixes.append(name[:-1])
else:
whole_names.append(name)
# How many of each?
combos = len(prefixes) * len(suffixes)
print("prefixes={}, suffixes={}, combos={}".format(
prefixes, suffixes, combos))
# Whole or composed names?
which = uniform(0, combos+len(whole_names))
print("which={}, combos={}, which > combos={}".format(
which,
combos,
which > combos))
if which > combos:
print("Whole")
return choice(whole_names)
else:
print("composed")
return choice(prefixes)+choice(suffixes)
def generate_height_weight(self,
gender='M',
attrs={},
height=0.5,
weight=0.5,
):
        # Default missing attributes to 0 so the empty attrs={} default works.
        size_mod = pow(
            sqrt(4.0/3.0),
            attrs.get('strength', 0)
            - attrs.get('dexterity', 0))
height = normal(self.height[0], self.height[1]/2.0)*size_mod
height_variance = height - self.height[0]
weight = normal(self.weight[0], self.weight[1]/2.0) \
* height_variance \
* height_variance
if gender.lower() in ('f', 'female'):
height = height/self.m_f_ratio
weight = weight/self.m_f_ratio
return (height, weight)
Races = ObjectStore(Race)
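# Example usage (illustrative; assumes a Race instance `human` obtained via
# the Races ObjectStore above):
#
#   print(human.generate_name(male=True))   # e.g. "Bob Smith"
#   height, weight = human.generate_height_weight(
#       gender='F', attrs={'strength': 12, 'dexterity': 10})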
| gpl-2.0 | 1,627,887,318,513,343,200 | 27.752066 | 68 | 0.510492 | false | 3.998851 | false | false | false |
nmmmnu/MessageQueue | protocols/redishandler.py | 1 | 4643 | #
# Redis protocol (RESP) implementation
# Nikolay Mihaylov nmmm@nmmm.nu
#
# For the Redis wire protocol (RESP) see:
# http://redis.io/topics/protocol
import asynchat
import time
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class RedisHandler(asynchat.async_chat):
def __init__(self, sock, addr, processor):
#
# Constructs new Redis protocol handler
#
# @param sock : socket from asyncore
# @param addr : address from asyncore
# @param processor : processor class
#
asynchat.async_chat.__init__(self, sock=sock)
self.addr = addr
self.started = time.time()
self.lastping = time.time()
self.processor = processor
self.state_change("read_count")
def state_change(self, state, size = 0):
self.io = StringIO()
if state == "read_count":
self.state_params_count = 0
self.state_params_waiting = 0
self.state_params_temp = 0
self.state_params = []
self.state = state
self.set_terminator("\r\n")
return True
if state == "read_param":
self.state = state
self.set_terminator("\r\n")
return True
if state == "read_data":
# size == 0 is an error, but we will ignore it.
if size < 0:
return False
self.state = state
self.set_terminator(size + len("\r\n") )
return True
# Unknown state ?
return False
def cmd_parse(self):
self.lastping = time.time()
args = self.state_params
command = args[0].lower()
if command == "spop":
key = args[1]
x = self.processor.get(key)
if x is None:
				# NULL response
self.push("$-1\r\n")
return
msg = "$%d\r\n%s\r\n" % (len(x), x)
self.push(msg)
return
if command == "del":
key = args[1]
x = self.processor.delete(key)
if x:
self.cmd_int(1)
return
self.cmd_int(0)
return
# this is ADD. We do not implement SET
if command == "sadd":
key = args[1]
val = args[2]
x = self.processor.add(key, val)
if x:
self.cmd_int(1)
return
self.cmd_int(0)
return
# Non standard command
if command == "scard":
key = args[1]
x = self.processor.len(key)
if x is None:
x = "0"
self.cmd_int(int(x))
return
if command == "sismember":
key = args[1]
val = args[2]
x = self.processor.contains(key, val)
if x:
self.cmd_int(1)
return
self.cmd_int(0)
return
if command == "quit":
self.push("+OK\r\n")
self.close()
return
# error, not implemented
self.cmd_error("Not implemented")
return
def cmd_int(self, id):
self.push(":%d\r\n" % id)
def cmd_error(self, msg = None):
if msg is None:
s = "-ERR\r\n"
else:
s = "-ERR %s\r\n" % msg
self.push(s)
self.state_change("read_count")
def state_read_count(self):
x = self.io.getvalue()
if not x:
self.cmd_error()
return False
if x[0] != "*":
self.cmd_error("proceed with number of params")
return False
try:
self.state_params_count = int(x[1:])
self.state_params_waiting = self.state_params_count
except:
self.cmd_error("wrong number of params")
return False
		if self.state_params_count == 0:
self.cmd_error("wrong number of params, *0 is not allowed")
return False
self.state_change("read_param")
return True
def state_read_param(self):
x = self.io.getvalue()
if not x:
self.cmd_error("proceed with size of param")
return False
if x[0] != "$":
self.cmd_error("proceed with size of param")
return False
try:
self.state_params_temp = int(x[1:])
except:
self.cmd_error("wrong size of param")
return False
self.state_change("read_data", self.state_params_temp )
return True
def state_read_data(self):
x = self.io.getvalue()
if not self.state_params_temp:
self.state_params_temp = 0
x = x[0:self.state_params_temp]
self.state_params.append(x)
self.state_params_waiting -= 1
if self.state_params_waiting > 0:
self.state_change("read_param")
return True
# Proceed with request
self.cmd_parse()
self.state_change("read_count")
def found_terminator(self):
if self.state == "read_count":
# *2
return self.state_read_count()
if self.state == "read_param":
# $3
return self.state_read_param()
if self.state == "read_data":
# <data>
return self.state_read_data()
# Unknown state ?
return False
def collect_incoming_data(self, data):
self.io.write(data)
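# Example RESP exchange handled by this class (illustrative):
#
#   client: *3\r\n$4\r\nsadd\r\n$5\r\nqueue\r\n$5\r\nhello\r\n
#   server: :1\r\n                (cmd_int)
#
#   client: *2\r\n$4\r\nspop\r\n$5\r\nqueue\r\n
#   server: $5\r\nhello\r\n       (bulk reply; $-1\r\n when the key is empty)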
| gpl-3.0 | 3,239,277,496,775,437,000 | 15.464539 | 74 | 0.60112 | false | 2.853719 | false | false | false |
whosken/destinate | destinate/facebook.py | 1 | 1592 | import requests
HOST = 'https://graph.facebook.com/v2.5'
USER_FIELDS = 'fields=id,first_name,email,tagged_places{place{name,place_type,place_topics,location{city}}},events{name,description},location'
def get_user(token):
data = get('me', USER_FIELDS, token)
if 'error' in data:
print data
raise ValueError
return map_details(data)
def map_details(data):
places = data.get('tagged_places',{}).get('data',[])
events = data.get('events',{}).get('data',[])
return {
'facebook_id':data['id'],
'name':data['first_name'],
'email':data['email'],
'city':map_place(data['location']) if 'location' in data else None,
'places':[map_place(p['place']) for p in places if p['place'].get('location')],
'events':[map_event(e) for e in events if 'description' in e]
}
def map_place(data):
place = {
'name':data['name'],
'city':data['location']['city'],
}
if data['place_type'] == 'PLACE' and data.get('place_topics',{}).get('data'):
place['topics'] = [t['name'] for t in data['place_topics']['data']]
return place
def map_event(data):
return {
'name':data['name'],
'description':data['description'],
}
def get(path, query, token):
uri = '{}/{}?{}&access_token={}'.format(HOST,path,query,token)
try:
response = requests.get(uri)
response.raise_for_status()
except requests.exceptions.HTTPError as error:
print error.response.status_code, error.response.content
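    # Fall through and return the JSON body even after an HTTP error;
    # get_user() above checks the payload for an 'error' key.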
return response.json()
| apache-2.0 | -9,129,957,803,148,151,000 | 32.166667 | 142 | 0.592965 | false | 3.514349 | false | false | false |
predicthq/sdk-py | usecases/broadcasts/search_by_location.py | 1 | 1561 | from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The events endpoint supports two types of search by location:
# - by latitude and longitude coordinates (location.origin)
# - by geoname place ID (location.place_id, see places endpoint for more details about geoname place ID)
# The `location.origin` parameter allows you to search for broadcasts
# happening in the county of the provided geopoint
# (a latitude and longitude coordinate string in the form {latitude},{longitude}).
# https://docs.predicthq.com/resources/broadcasts/#param-location.origin
for broadcast in phq.broadcasts.search(location__origin='45.5051,-122.6750'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.location.places[0].place_id)
# You can also specify a geoname place ID or a list of place IDs.
# The `location.place_id` suffix includes broadcasts being children
# or parent of the specified place ID.
# For the broadcasts happening in the US, the default location level
# is the county level. If you specify a city place ID, you'll get all
# the broadcasts for this city's county.
# https://docs.predicthq.com/resources/events/#param-place
for broadcast in phq.broadcasts.search(location__place_id='5746545'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.location.country,
broadcast.location.places[0].place_id)
| mit | 1,714,105,414,977,001,200 | 46.30303 | 104 | 0.771941 | false | 3.655738 | false | false | false |
momahajogami/Prime-Numbers | simple_factor.py | 1 | 1913 | from practical_sieve import primes_in_block
primes_under_100 = [2,3,5,7]+primes_in_block([3,5,7],10,100,5)
def next_ten_thousand_primes(primes):
L = primes[-1]+1
R = L + 10000
B = 1000
return primes_in_block(primes[1:],L,R,B)
def next_primes(primes):
"""
Square the size of your window.
Usually, this is too far to go.
If you start with just the primes under a
hundred and run this twice, you're looking
at checking congruence for tens of millions
of numbers, and this might take some
minutes already.
See: next_ten_thousand_primes(primes)
"""
L = primes[-1] + 1
# Choose the largest possible upper bound
R = L*L
# Choose an appropriate block size,
# which is rather arbitrary.
B = (R-L)/10
# Now adjust R so that B|R-L
R = L + 10*B
return primes_in_block(primes[1:],L,R,B)
def factor_with_primes(n, primes):
    # Work on a copy: the caller reuses its prime list afterwards, and `ps`
    # was previously referenced without ever being defined.
    ps = list(primes)
    ps.reverse()
pairs = []
while ps and n > 1:
p = ps.pop()
e = 0
while n % p == 0:
e = e + 1
n = n/p
if e>0:
pairs.append((p,e))
return n,pairs
def simple_factor(n):
"""
This is a demonstration of how to use
Algorithm 3.2.1 as well a as a practical
and simple way to factor numbers of modest
size. Of course, the near term goal will
be to explore the modern factoring
algorithms, but this will do as a sandbox
for testing and playing.
"""
primes = [2,3,5,7]+primes_in_block([3,5,7],10,100,10)
n, pairs = factor_with_primes(n, primes)
if n == 1:
return pairs
primes = next_primes(primes)
n, new_pairs = factor_with_primes(n, primes)
pairs = pairs + new_pairs
while n>1:
primes = next_ten_thousand_primes(primes)
n, new_pairs = factor_with_primes(n, primes)
pairs = pairs + new_pairs
return pairs
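if __name__ == '__main__':
    # Quick sanity check: 1632 = 2**5 * 3 * 17.
    print(simple_factor(1632))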
| mit | 880,722,730,485,058,400 | 24.851351 | 62 | 0.592263 | false | 3.105519 | false | false | false |
leriomaggio/code-coherence-evaluation-tool | code_comments_coherence/source_code_analysis/code_analysis/kernels.py | 1 | 4215 | """
Author: Valerio Maggio (@leriomaggio)
Mail: valeriomaggio@gmail.com
"""
from itertools import ifilter, product
from functools import wraps
from math import sqrt
from numpy import sum as np_sum
# --------------------------------------------------------------------------
# Node Similarities (Kernels on Nodes)
# --------------------------------------------------------------------------
def match(n1, n2):
""" Matching Function: determines wether two nodes are comparable. """
return n1.instruction_class == n2.instruction_class
def features_similarity(n1, n2):
"""
Feature Similarity: Computes a similarity value according to nodes attributes.
"""
if n1.is_leaf_node and n2.is_leaf_node:
return int(n1.instruction == n2.instruction and n1.label == n2.label)
return int(n1.instruction == n2.instruction)
def structural_similarity(n1, n2):
"""
Structural Similarity function (used to detect (without errors) up to Type 2 clones)
"""
if n1.instruction == n2.instruction:
return 1.0
return 0.0
#------------------------------------------------------------------------------
# 01. Iterative Contiguous Kernel (Partial Trees)
#------------------------------------------------------------------------------
# Supporting functions
def compute_pairs_similarities(node_pairs_list, similarity=features_similarity):
"""
Reminder: Improve using numpy.sum
"""
return np_sum([similarity(n1, n2) for n1, n2 in node_pairs_list])
def extract_contiguous_kernel_nodes(t1, t2):
"""
Extract all the possibile pairs of nodes that match
--- (Improved version using itertools - TO BE TESTED.) ---
Note that ifilter returns a Generator, rather than a list (this should me more
efficient in terms of memory consumption).
Nevertheless, the list could be trivially returned instead by removing
the "i" from `ifilter` :-)
(This will call the built-in Python `filter` function)
"""
# return [(n1, n2) for n1 in t1.children for n2 in t2.children if match(n1, n2)]
return ifilter(lambda p: match(p[0], p[1]), product(t1.children, t2.children))
# Memoization in Python with wraps - useful for normalization to avoid repeating calculations
# The memoization is exploited only in case of t1 == t2, i.e., we are computing
# normalization values.
# This is to avoid repeating useless calculations, while not wasting memory storing the
# computation of each pair.
def memo(func):
cache = {}
@wraps(func)
def wrap(t1, t2, node_sim_func):
if t1 == t2:
if t1 not in cache:
cache[t1] = func(t1, t2, node_sim_func)
return cache[t1]
return func(t1, t2, node_sim_func)
return wrap
def iterative_kernel_function(node_pairs_list, node_similarity=features_similarity):
"""
Iterative Tree Kernel Function
"""
if not node_pairs_list or not len(node_pairs_list):
return 0.0
k = 0.0
while len(node_pairs_list):
pair = node_pairs_list.pop(0)
k += compute_pairs_similarities([pair], similarity=node_similarity)
matching_subtrees = extract_contiguous_kernel_nodes(pair[0], pair[1])
node_pairs_list.extend(matching_subtrees)
return k
@memo
def iterative_tree_kernel(tree1, tree2, node_similarity=features_similarity):
'''
Iterative Tree Kernel
'''
if not match(tree1, tree2):
return 0.0
return iterative_kernel_function([(tree1, tree2)], node_similarity)
# --------------------------------------------------------------------------
# Normalized Tree Kernel function
# --------------------------------------------------------------------------
def contiguous_tree_kernel(t1, t2, node_similarity=features_similarity):
"""
Compute the Normalized version of the Contiguous Tree Kernel function
(Value that range from 0 to 1)
"""
kernel_sim = iterative_tree_kernel(t1, t2, node_similarity)
#Normalization
return float(kernel_sim) / sqrt(iterative_tree_kernel(t1, t1, node_similarity) *
iterative_tree_kernel(t2, t2, node_similarity)) | bsd-3-clause | 6,587,368,949,784,121,000 | 32.728 | 93 | 0.594069 | false | 3.931903 | false | false | false |
yfdyh000/pontoon | pontoon/base/utils.py | 1 | 12890 | import json
import logging
import os
import re
import requests
import traceback
from datetime import datetime
from django.conf import settings
from django.contrib.auth.models import Permission
from django.http import HttpResponse
from django.utils import timezone
from django.utils.translation import trans_real
from translate.filters import checks
from translate.storage import base as storage_base
from translate.storage.placeables import base, general, parse
from translate.storage.placeables.interfaces import BasePlaceable
from translate.lang import data as lang_data
log = logging.getLogger('pontoon')
def add_can_localize(user):
email = user.email
log.debug(email)
# Grant permission to Mozilla localizers
url = "https://mozillians.org/api/v1/users/"
payload = {
"app_name": "pontoon",
"app_key": settings.MOZILLIANS_API_KEY,
"email": email,
"is_vouched": True,
"groups": "localization",
}
try:
response = requests.get(url, params=payload)
mozillians = response.json()["objects"]
if len(mozillians) > 0:
can_localize = Permission.objects.get(codename="can_localize")
user.user_permissions.add(can_localize)
log.debug("Permission can_localize set.")
# Fallback if profile does not allow accessing data
user.first_name = mozillians[0].get("full_name", email)
user.save()
except Exception as e:
log.debug(e)
log.debug("Is your MOZILLIANS_API_KEY set?")
user.save()
def get_project_locale_from_request(request, locales):
"""Get Pontoon locale from Accept-language request header."""
header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
accept = trans_real.parse_accept_lang_header(header)
for a in accept:
try:
return locales.get(code__iexact=a[0]).code
except:
continue
class NewlineEscapePlaceable(base.Ph):
"""Placeable handling newline escapes."""
istranslatable = False
regex = re.compile(r'\\n')
parse = classmethod(general.regex_parse)
class TabEscapePlaceable(base.Ph):
"""Placeable handling tab escapes."""
istranslatable = False
regex = re.compile(r'\t')
parse = classmethod(general.regex_parse)
class EscapePlaceable(base.Ph):
"""Placeable handling escapes."""
istranslatable = False
regex = re.compile(r'\\')
parse = classmethod(general.regex_parse)
class SpacesPlaceable(base.Ph):
"""Placeable handling spaces."""
istranslatable = False
regex = re.compile('^ +| +$|[\r\n\t] +| {2,}')
parse = classmethod(general.regex_parse)
def mark_placeables(text):
"""Wrap placeables to easily distinguish and manipulate them.
Source: http://bit.ly/1yQOC9B
"""
PARSERS = [
NewlineEscapePlaceable.parse,
TabEscapePlaceable.parse,
EscapePlaceable.parse,
# The spaces placeable can match '\n ' and mask the newline,
# so it has to come later.
SpacesPlaceable.parse,
general.XMLTagPlaceable.parse,
general.AltAttrPlaceable.parse,
general.XMLEntityPlaceable.parse,
general.PythonFormattingPlaceable.parse,
general.JavaMessageFormatPlaceable.parse,
general.FormattingPlaceable.parse,
# The Qt variables can consume the %1 in %1$s which will mask a printf
# placeable, so it has to come later.
general.QtFormattingPlaceable.parse,
general.UrlPlaceable.parse,
general.FilePlaceable.parse,
general.EmailPlaceable.parse,
general.CapsPlaceable.parse,
general.CamelCasePlaceable.parse,
general.OptionPlaceable.parse,
general.PunctuationPlaceable.parse,
general.NumberPlaceable.parse,
]
TITLES = {
'NewlineEscapePlaceable': "Escaped newline",
'TabEscapePlaceable': "Escaped tab",
'EscapePlaceable': "Escaped sequence",
'SpacesPlaceable': "Unusual space in string",
'AltAttrPlaceable': "'alt' attribute inside XML tag",
'NewlinePlaceable': "New-line",
'NumberPlaceable': "Number",
'QtFormattingPlaceable': "Qt string formatting variable",
'PythonFormattingPlaceable': "Python string formatting variable",
'JavaMessageFormatPlaceable': "Java Message formatting variable",
'FormattingPlaceable': "String formatting variable",
'UrlPlaceable': "URI",
'FilePlaceable': "File location",
'EmailPlaceable': "Email",
'PunctuationPlaceable': "Punctuation",
'XMLEntityPlaceable': "XML entity",
'CapsPlaceable': "Long all-caps string",
'CamelCasePlaceable': "Camel case string",
'XMLTagPlaceable': "XML tag",
'OptionPlaceable': "Command line option",
}
output = u""
# Get a flat list of placeables and StringElem instances
flat_items = parse(text, PARSERS).flatten()
for item in flat_items:
# Placeable: mark
if isinstance(item, BasePlaceable):
class_name = item.__class__.__name__
placeable = unicode(item)
# CSS class used to mark the placeable
css = {
'TabEscapePlaceable': "escape ",
'EscapePlaceable': "escape ",
'SpacesPlaceable': "space ",
'NewlinePlaceable': "escape ",
}.get(class_name, "")
title = TITLES.get(class_name, "Unknown placeable")
spaces = ' ' * len(placeable)
if not placeable.startswith(' '):
spaces = placeable[0] + ' ' * (len(placeable) - 1)
# Correctly render placeables in translation editor
content = {
'TabEscapePlaceable': u'\\t',
'EscapePlaceable': u'\\',
'SpacesPlaceable': spaces,
'NewlinePlaceable': {
u'\r\n': u'\\r\\n<br/>\n',
u'\r': u'\\r<br/>\n',
u'\n': u'\\n<br/>\n',
}.get(placeable),
                'XMLEntityPlaceable': placeable.replace('&', '&amp;'),
'XMLTagPlaceable':
                    placeable.replace('<', '&lt;').replace('>', '&gt;'),
}.get(class_name, placeable)
output += ('<mark class="%splaceable" title="%s">%s</mark>') \
% (css, title, content)
# Not a placeable: skip
else:
            output += unicode(item).replace('<', '&lt;').replace('>', '&gt;')
return output
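# Example (illustrative): mark_placeables(u'Hello %s\n') returns the string
# with the '%s' variable and the trailing newline each wrapped in a <mark>
# tag so the translation editor can highlight them.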
def quality_check(original, string, locale, ignore):
"""Check for obvious errors like blanks and missing interpunction."""
if not ignore:
original = lang_data.normalized_unicode(original)
string = lang_data.normalized_unicode(string)
unit = storage_base.TranslationUnit(original)
unit.target = string
checker = checks.StandardChecker(
checkerconfig=checks.CheckerConfig(targetlanguage=locale.code))
warnings = checker.run_filters(unit)
if warnings:
# https://github.com/translate/pootle/
check_names = {
'accelerators': 'Accelerators',
'acronyms': 'Acronyms',
'blank': 'Blank',
'brackets': 'Brackets',
'compendiumconflicts': 'Compendium conflict',
'credits': 'Translator credits',
'doublequoting': 'Double quotes',
'doublespacing': 'Double spaces',
'doublewords': 'Repeated word',
'emails': 'E-mail',
'endpunc': 'Ending punctuation',
'endwhitespace': 'Ending whitespace',
'escapes': 'Escapes',
'filepaths': 'File paths',
'functions': 'Functions',
'gconf': 'GConf values',
'kdecomments': 'Old KDE comment',
'long': 'Long',
'musttranslatewords': 'Must translate words',
'newlines': 'Newlines',
'nplurals': 'Number of plurals',
'notranslatewords': 'Don\'t translate words',
'numbers': 'Numbers',
'options': 'Options',
'printf': 'printf()',
'puncspacing': 'Punctuation spacing',
'purepunc': 'Pure punctuation',
'sentencecount': 'Number of sentences',
'short': 'Short',
'simplecaps': 'Simple capitalization',
'simpleplurals': 'Simple plural(s)',
'singlequoting': 'Single quotes',
'startcaps': 'Starting capitalization',
'startpunc': 'Starting punctuation',
'startwhitespace': 'Starting whitespace',
'tabs': 'Tabs',
'unchanged': 'Unchanged',
'untranslated': 'Untranslated',
'urls': 'URLs',
'validchars': 'Valid characters',
'variables': 'Placeholders',
'xmltags': 'XML tags',
}
warnings_array = []
for key in warnings.keys():
warning = check_names.get(key, key)
warnings_array.append(warning)
return HttpResponse(json.dumps({
'warnings': warnings_array,
}), content_type='application/json')
def req(method, project, resource, locale,
username, password, payload=False):
"""
Make request to Transifex server.
Args:
method: Request method
project: Transifex project name
resource: Transifex resource name
locale: Locale code
username: Transifex username
password: Transifex password
payload: Data to be sent to the server
Returns:
A server response or error message.
"""
url = os.path.join(
'https://www.transifex.com/api/2/project/', project,
'resource', resource, 'translation', locale, 'strings')
try:
if method == 'get':
r = requests.get(
url + '?details', auth=(username, password), timeout=10)
elif method == 'put':
r = requests.put(url, auth=(username, password), timeout=10,
data=json.dumps(payload),
headers={'content-type': 'application/json'})
log.debug(r.status_code)
if r.status_code == 401:
return "authenticate"
elif r.status_code != 200:
log.debug("Response not 200")
return "error"
return r
# Network problem (DNS failure, refused connection, etc.)
except requests.exceptions.ConnectionError as e:
log.debug('ConnectionError: ' + str(e))
return "error"
# Invalid HTTP response
except requests.exceptions.HTTPError as e:
log.debug('HTTPError: ' + str(e))
return "error"
# A valid URL is required
    except requests.exceptions.URLRequired as e:
log.debug('URLRequired: ' + str(e))
return "error"
# Request times out
except requests.exceptions.Timeout as e:
log.debug('Timeout: ' + str(e))
return "error"
# Request exceeds the number of maximum redirections
except requests.exceptions.TooManyRedirects as e:
log.debug('TooManyRedirects: ' + str(e))
return "error"
    # Ambiguous exception occurs
except requests.exceptions.RequestException as e:
log.debug('RequestException: ' + str(e))
return "error"
except Exception:
log.debug('Generic exception: ' + traceback.format_exc())
return "error"
def first(collection, test, default=None):
"""
Return the first item that, when passed to the given test function,
returns True. If no item passes the test, return the default value.
"""
return next((c for c in collection if test(c)), default)
def match_attr(collection, **attributes):
"""
Return the first item that has matching values for the given
attributes, or None if no item is found to match.
"""
return first(
collection,
lambda i: all(getattr(i, attrib) == value
for attrib, value in attributes.items()),
default=None
)
def aware_datetime(*args, **kwargs):
"""Return an aware datetime using Django's configured timezone."""
return timezone.make_aware(datetime(*args, **kwargs))
def extension_in(filename, extensions):
"""
Check if the extension for the given filename is in the list of
allowed extensions. Uses os.path.splitext rules for getting the
extension.
"""
filename, extension = os.path.splitext(filename)
if extension and extension[1:] in extensions:
return True
else:
return False
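# Examples for the generic helpers above (illustrative):
#
#   >>> first([1, 2, 3], lambda x: x % 2 == 0)
#   2
#   >>> extension_in('foo.po', ['po', 'xliff'])
#   True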
| bsd-3-clause | 1,437,675,914,982,552,000 | 33.190981 | 78 | 0.590225 | false | 4.240132 | false | false | false |
hack4impact/maps4all | app/utils.py | 1 | 2163 | import os, json, boto3
from flask import url_for
from .models import EditableHTML, SiteAttribute
from werkzeug.utils import secure_filename
from uuid import uuid4
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
@app.context_processor
def inject_name():
return dict(site_name=SiteAttribute.get_value("ORG_NAME"),
logo_url=SiteAttribute.get_value("SITE_LOGO"),
style_timestamp=SiteAttribute.get_value("STYLE_TIME"),
style_sheet=SiteAttribute.get_value("STYLE_SHEET"),
site_color=SiteAttribute.get_value("SITE_COLOR"))
app.add_template_global(index_for_role)
@app.template_filter('pages')
def inject_pages(s):
pages = EditableHTML.query.order_by(EditableHTML.page_name)
pages_list = [p.__dict__ for p in pages]
return pages_list
def index_for_role(role):
return url_for(role.index)
def s3_upload(source_file, acl='public-read'):
# Load necessary information into the application
S3_KEY = os.environ.get('S3_KEY')
S3_SECRET = os.environ.get('S3_SECRET')
S3_BUCKET = os.environ.get('S3_BUCKET')
S3_REGION = os.environ.get('S3_REGION')
TARGET_FOLDER = ''
source_filename = secure_filename(source_file.data.filename)
source_extension = os.path.splitext(source_filename)[1]
destination_filename = uuid4().hex + source_extension
# Connect to S3 and upload file.
s3 = boto3.client(
's3',
aws_access_key_id=S3_KEY,
aws_secret_access_key=S3_SECRET,
)
try:
        s3.upload_fileobj(
            source_file.data,
            S3_BUCKET,
            # Upload under the generated UUID key so the returned
            # destination_filename actually matches the stored object.
            destination_filename,
            ExtraArgs={
                "ACL": acl
            }
        )
except Exception as e:
print("Error: ", e)
return e
return destination_filename
| mit | -8,044,579,873,987,158,000 | 26.379747 | 74 | 0.619972 | false | 3.672326 | false | false | false |
messente/messente-python | messente/api/sms/api/number_verification.py | 1 | 5307 | # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from messente.api.sms.api import api
from messente.api.sms.api import utils
from messente.api.sms.api.error import ERROR_CODES
from messente.api.sms.api.response import Response
error_map = ERROR_CODES.copy()
error_map.update({
"ERROR 109": "PIN code field is missing in the template value.",
"ERROR 110": "Verification Session with following ID was not found.",
"ERROR 111": " ".join([
"Sender parameter 'from' is invalid."
"You have not activated this sender name from Messente.com"
]),
})
class NumberVerificationResponse(Response):
_VERIFIED = "VERIFIED"
_INVALID = "INVALID"
_EXPIRED = "EXPIRED"
_THROTTLED = "THROTTLED"
def __init__(self, *args, **kwargs):
self._verification_id = kwargs.pop("verification_id", "")
Response.__init__(self, *args, **kwargs)
def _get_error_map(self):
return error_map
def _parse(self):
custom_statuses = [
self._VERIFIED, self._INVALID, self._EXPIRED, self._THROTTLED
]
stripped = self.raw_response.text.strip()
if stripped in custom_statuses:
self.status = stripped
else:
Response._parse(self)
self._verification_id = self.status_text
def is_ok(self):
return (
self.is_replied() and
self.status in ["OK", self._VERIFIED]
)
def get_verification_id(self):
return self._verification_id
def is_verified(self):
return (self.status == self._VERIFIED)
def is_invalid(self):
return (self.status == self._INVALID)
def is_expired(self):
return (self.status == self._EXPIRED)
def is_throttled(self):
return (self.status == self._THROTTLED)
class NumberVerificationAPI(api.API):
"""
    API supporting PIN-code-based user verification.
Documentation:
http://messente.com/documentation/number-verification/number-verification-api
"""
def __init__(self, **kwargs):
api.API.__init__(self, "number-verification", **kwargs)
def send_pin(self, data, **kwargs):
self.adapt(data)
if kwargs.get("validate", True):
self.validate(data, mode="send_pin", fatal=True)
r = NumberVerificationResponse(
self.call_api(
"verify/start",
data
),
)
self.log_response(r)
return r
def verify_pin(self, data, **kwargs):
if kwargs.pop("validate", True):
self.validate(data, mode="verify_pin", fatal=True)
r = NumberVerificationResponse(
self.call_api(
"verify/pin",
data
)
)
self.log_response(r)
return r
def adapt(self, data):
data["to"] = utils.adapt_phone_number(data.get("to", ""))
return data
def _validate(self, data, **kwargs):
self.adapt(data)
errors = {}
if kwargs.get("mode", "") == "send_pin":
to = data.get("to", "")
if not to:
self.set_error_required(errors, "to")
elif not utils.is_phone_number_valid(to):
self.set_error(errors, "to")
template = data.get("template", None)
if template is not None and "<PIN>" not in str(template):
self.set_error(errors, "template")
max_tries = data.get("max_tries", None)
if max_tries is not None:
if not str(data["max_tries"]).isdigit() or max_tries < 1:
self.set_error(errors, "max_tries")
retry_delay = data.get("retry_delay", None)
if retry_delay is not None and not str(retry_delay).isdigit():
self.set_error(errors, "retry_delay")
validity = data.get("validity", None)
if validity is not None:
is_positive_int = str(data["validity"]).isdigit()
if not is_positive_int or validity > 1800:
self.set_error(errors, "validity")
elif kwargs.get("mode", "") == "verify_pin":
pin = data.get("pin", "")
if not pin:
self.set_error_required(errors, "pin")
else:
if not str(pin).isdigit() or not int(pin):
self.set_error(errors, "pin")
verification_id = data.get("verification_id", None)
if not verification_id:
self.set_error_required(errors, "verification_id")
elif not isinstance(verification_id, str):
self.set_error(errors, "verification_id")
return (not len(errors), errors)
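# Example usage (illustrative; the constructor options come from the base
# api.API class, so the exact keyword arguments may differ):
#
#   api = NumberVerificationAPI(username='me', password='secret')
#   r = api.send_pin({'to': '+372...', 'template': 'Your PIN: <PIN>'})
#   if r.is_ok():
#       verification_id = r.get_verification_id()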
| apache-2.0 | 1,170,721,060,193,383,200 | 31.552147 | 81 | 0.579721 | false | 3.956749 | false | false | false |
indexofire/django-muser | muser/extensions/__init__.py | 1 | 3783 | # -*- coding: utf-8 -*-
import inspect
from django.utils import six
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from muser.utils import get_object
class ExtensionsMixin(object):
"""Extension mixin class to include all content type extension. """
@classmethod
def register_extensions(cls, *extensions):
"""
Register all extensions passed as arguments.
        Extensions should be specified as a dotted-path string naming the
        Python module that contains the extension.
"""
if not hasattr(cls, '_extensions'):
cls._extensions = []
cls._extensions_seen = []
for ext in extensions:
if ext in cls._extensions:
continue
extension = None
# check if ext is a Class
if inspect.isclass(ext) and issubclass(ext, Extension):
extension = ext
elif isinstance(ext, six.string_types):
try:
extension = get_object(ext)
except (AttributeError, ImportError, ValueError):
if not extension:
raise ImproperlyConfigured(
'%s is not a valid extension for %s' % (
ext, cls.__name__))
if hasattr(extension, 'Extension'):
extension = extension.Extension
elif hasattr(extension, 'register'):
extension = extension.register
elif hasattr(extension, '__call__'):
pass
else:
raise ImproperlyConfigured(
'%s is not a valid extension for %s' % (
ext, cls.__name__))
if extension in cls._extensions_seen:
continue
cls._extensions_seen.append(extension)
if hasattr(extension, 'handle_model'):
cls._extensions.append(extension(cls))
else:
raise ImproperlyConfigured(
'%r is an invalid extension.' % extension)
class Extension(object):
"""Handle a extension object for content type. """
def __init__(self, model, **kwargs):
self.model = model
for key, value in kwargs.items():
if not hasattr(self, key):
raise TypeError('%s() received an invalid keyword %r' % (
self.__class__.__name__, key))
setattr(self, key, value)
self.handle_model()
def handle_model(self):
raise NotImplementedError
def handle_modeladmin(self, modeladmin):
pass
class ExtensionModelAdmin(admin.ModelAdmin):
def __init__(self, *args, **kwargs):
super(ExtensionModelAdmin, self).__init__(*args, **kwargs)
self.initialize_extensions()
def initialize_extensions(self):
if not hasattr(self, '_extensions_initialized'):
self._extensions_initialized = True
for extension in getattr(self.model, '_extensions', []):
extension.handle_modeladmin(self)
def add_extension_options(self, *f):
if self.fieldsets is None:
return
if isinstance(f[-1], dict): # called with a fieldset
self.fieldsets.insert(self.fieldset_insertion_index, f)
f[1]['classes'] = list(f[1].get('classes', []))
f[1]['classes'].append('collapse')
elif f: # assume called with "other" fields
try:
self.fieldsets[1][1]['fields'].extend(f)
except IndexError:
# Fall back to first fieldset if second does not exist
# XXX This is really messy.
self.fieldsets[0][1]['fields'].extend(f)
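# Example (illustrative): a model opts in to the machinery above with
#
#   class Entry(models.Model, ExtensionsMixin):
#       ...
#
#   Entry.register_extensions('myapp.extensions.datepublisher')
#
# where the dotted path resolves to a module exposing an Extension subclass,
# a `register` attribute, or a plain callable.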
| bsd-3-clause | 2,942,409,199,333,107,700 | 33.081081 | 73 | 0.550621 | false | 5.010596 | false | false | false |
alvinwan/md2py | md2py/md2py.py | 1 | 5548 | from markdown import markdownFromFile, markdown
from bs4 import BeautifulSoup
class TreeOfContents:
"""Tree abstraction for markdown source"""
source_type = BeautifulSoup
valid_tags = ('a', 'abbr', 'address', 'area', 'article', 'aside', 'audio',
'b', 'base', 'bdi', 'bdo', 'blockquote', 'body', 'br', 'button',
'canvas', 'caption', 'cite', 'code', 'col', 'colgroup', 'data',
'datalist', 'dd', 'del', 'details', 'dfn', 'dialog', 'div', 'dl', 'dt',
'em', 'embed', 'fieldset', 'figcaption', 'figure', 'footer', 'form',
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'head', 'header', 'hgroup', 'hr',
'html', 'i', 'iframe', 'img', 'input', 'ins', 'kbd', 'keygen', 'label',
'legend', 'li', 'link', 'main', 'map', 'mark', 'menu', 'menuitem',
'meta', 'meter', 'nav', 'noscript', 'object', 'ol', 'optgroup',
'option', 'output', 'p', 'param', 'picture', 'pre', 'progress', 'q',
'rp', 'rt', 'ruby', 's', 'samp', 'script', 'section', 'select', 'small',
'source', 'span', 'strong', 'style', 'sub', 'summary', 'sup', 'table',
'tbody', 'td', 'template', 'textarea', 'tfoot', 'th', 'thead', 'time',
'title', 'tr', 'track', 'u', 'ul', 'var', 'video', 'wbr')
allowed_attrs = ('string', 'name')
def __init__(self, root, branches=(), descendants=(), source=None,
depth=None):
"""
Construct TreeOfContents object using source
:param SourceType source: parsed source
:param list TreeOfContents branches: list of direct children
:param list SourceType descendants: all descendants
"""
assert source is not None, 'NoneType source passed into TreeOfContents'
self.source = source
self.depth = depth or self.parseTopDepth()
self.descendants = descendants or self.expandDescendants(branches)
self.branches = branches or self.parseBranches(descendants)
@staticmethod
def getHeadingLevel(bs):
"""
>>> bsify = lambda html: BeautifulSoup(html, 'html.parser')
>>> bs = bsify('<h1>Hello</h1>').h1
>>> TOC.getHeadingLevel(bs)
1
>>> bs2 = bsify('<p>Hello</p>').p
>>> TOC.getHeadingLevel(bs2)
>>> bs3 = bsify('<article>Hello</article>').article
>>> TOC.getHeadingLevel(bs3)
"""
try:
return int(bs.name[1])
except (ValueError, IndexError, TypeError):
return None
def parseTopDepth(self):
"""
Parse highest heading in markdown
>>> TOC.fromHTML('<h2>haha</h2><h1>hoho</h1>').parseTopDepth()
1
>>> TOC.fromHTML('<h3>haha</h3><h2>hoho</h2>').parseTopDepth()
2
"""
for i in range(1, 7):
if getattr(self.source, 'h%d' % i):
return i
def expandDescendants(self, branches):
"""
Expand descendants from list of branches
:param list branches: list of immediate children as TreeOfContents objs
:return: list of all descendants
"""
        # `descendants` is an attribute (set in __init__), not a method
        return sum([b.descendants for b in branches], []) + \
            [b.source for b in branches]
def parseBranches(self, descendants):
"""
Parse top level of markdown
:param list elements: list of source objects
:return: list of filtered TreeOfContents objects
"""
parsed, parent, cond = [], False, lambda b: (b.string or '').strip()
for branch in filter(cond, descendants):
if self.getHeadingLevel(branch) == self.depth:
parsed.append({'root':branch.string, 'source':branch})
parent = True
elif not parent:
parsed.append({'root':branch.string, 'source':branch})
else:
parsed[-1].setdefault('descendants', []).append(branch)
return [TOC(depth=self.depth+1, **kwargs) for kwargs in parsed]
def __getattr__(self, attr, *default):
"""Check source for attributes"""
tag = attr[:-1]
if attr in self.allowed_attrs:
return getattr(self.source, attr, *default)
if attr in self.valid_tags:
return next(filter(lambda t: t.name == attr, self.branches), None)
if len(default):
return default[0]
if attr[-1] == 's' and tag in self.valid_tags:
condition = lambda t: t.name == tag
return filter(condition, self.branches)
raise AttributeError("'TreeOfContents' object has no attribute '%s'" % attr)
def __repr__(self):
"""Display contents"""
return str(self)
def __str__(self):
"""Display contents"""
return self.string or ''
def __iter__(self):
"""Iterator over children"""
return iter(self.branches)
def __getitem__(self, i):
return self.branches[i]
@staticmethod
def fromMarkdown(md, *args, **kwargs):
"""
Creates abstraction using path to file
:param str path: path to markdown file
:return: TreeOfContents object
"""
return TOC.fromHTML(markdown(md, *args, **kwargs))
@staticmethod
def fromHTML(html, *args, **kwargs):
"""
Creates abstraction using HTML
:param str html: HTML
:return: TreeOfContents object
"""
source = BeautifulSoup(html, 'html.parser', *args, **kwargs)
return TOC('[document]',
source=source,
descendants=source.children)
TOC = TreeOfContents
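if __name__ == '__main__':
    # Smoke test (relies only on the `markdown` and `beautifulsoup4`
    # packages imported at the top of this module).
    toc = TOC.fromMarkdown('# Hello\n\n## World\n')
    print(toc.h1)      # Hello
    print(toc.h1.h2)   # World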
| apache-2.0 | 905,091,113,787,922,600 | 35.5 | 84 | 0.558219 | false | 3.882435 | false | false | false |
mcalmer/spacewalk | backend/server/handlers/config_mgmt/rhn_config_management.py | 10 | 19913 | #
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# Config file handler (management tool)
#
import sys
import difflib
from spacewalk.common.rhnLog import log_debug
from spacewalk.common.usix import raise_with_tb, next
from spacewalk.common.rhnException import rhnFault
from spacewalk.server import rhnSQL, configFilesHandler
from spacewalk.common.fileutils import f_date, ostr_to_sym
class ConfigManagement(configFilesHandler.ConfigFilesHandler):
def __init__(self):
log_debug(3)
configFilesHandler.ConfigFilesHandler.__init__(self)
self.functions.update({
'management.get_file': 'management_get_file',
'management.list_config_channels': 'management_list_channels',
'management.create_config_channel': 'management_create_channel',
'management.remove_config_channel': 'management_remove_channel',
'management.list_file_revisions': 'management_list_file_revisions',
'management.list_files': 'management_list_files',
'management.has_file': 'management_has_file',
'management.put_file': 'management_put_file',
'management.remove_file': 'management_remove_file',
'management.diff': 'management_diff',
'management.get_default_delimiters': 'management_get_delimiters',
'management.get_maximum_file_size': 'management_get_maximum_file_size',
})
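        # The keys above are the XML-RPC function names exposed to clients;
        # each value names the method on this class that implements the call.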
self.user = None
self.default_delimiter = '@'
_query_list_config_channels = rhnSQL.Statement("""
select cc.name,
cc.label,
cct.label channel_type
from rhnConfigChannelType cct,
rhnConfigChannel cc
where cc.org_id = :org_id
and cc.confchan_type_id = cct.id
and cct.label = 'normal'
order by cc.label, cc.name
""")
def _get_and_validate_session(self, dict):
session = dict.get('session')
self._validate_session(session)
def management_list_channels(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
return [x['label'] for x in rhnSQL.fetchall_dict(self._query_list_config_channels,
org_id=self.org_id) or []]
_query_lookup_config_channel = rhnSQL.Statement("""
select id
from rhnConfigChannel
where org_id = :org_id
and label = :config_channel
""")
def management_create_channel(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
config_channel_name = dict.get('config_channel_name') or config_channel
config_channel_description = dict.get('description') or config_channel
row = rhnSQL.fetchone_dict(self._query_lookup_config_channel,
org_id=self.org_id, config_channel=config_channel)
if row:
raise rhnFault(4010, "Configuration channel %s already exists" %
config_channel, explain=0)
insert_call = rhnSQL.Function('rhn_config.insert_channel',
rhnSQL.types.NUMBER())
config_channel_id = insert_call(self.org_id,
'normal',
config_channel_name,
config_channel,
config_channel_description)
rhnSQL.commit()
return {}
_query_config_channel_by_label = rhnSQL.Statement("""
select id
from rhnConfigChannel
where org_id = :org_id
and label = :label
""")
def management_remove_channel(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
row = rhnSQL.fetchone_dict(self._query_config_channel_by_label,
org_id=self.org_id, label=config_channel)
if not row:
raise rhnFault(4009, "Channel not found")
delete_call = rhnSQL.Procedure('rhn_config.delete_channel')
try:
delete_call(row['id'])
except rhnSQL.SQLError:
e = sys.exc_info()[1]
errno = e.args[0]
if errno == 2292:
raise_with_tb(rhnFault(4005, "Cannot remove non-empty channel %s" %
config_channel, explain=0), sys.exc_info()[2])
raise
log_debug(5, "Removed:", config_channel)
rhnSQL.commit()
return ""
_query_management_list_files = rhnSQL.Statement("""
select cc.label config_channel,
cfn.path
from rhnConfigFileName cfn,
rhnConfigFileState cfs,
rhnConfigFile cf,
rhnConfigChannel cc
where cc.org_id = :org_id
and cc.label = :config_channel
and cc.id = cf.config_channel_id
and cf.state_id = cfs.id
and cfs.label = 'alive'
and cf.config_file_name_id = cfn.id
""")
def management_list_files(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the config channel
log_debug(3, "Org id", self.org_id, "Config channel", config_channel)
h = rhnSQL.prepare(self._query_management_list_files)
h.execute(org_id=self.org_id, config_channel=config_channel)
retval = []
while 1:
row = h.fetchone_dict()
if not row:
break
val = {}
# Only copy a subset of the keys
for f in ['config_channel', 'path']:
val[f] = row[f]
retval.append(val)
log_debug(4, "pre sort", retval)
        retval.sort(key=lambda x: x['path'])
log_debug(4, "Return value", retval)
return retval
def management_get_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
revision = dict.get('revision')
row = self._get_file(config_channel, path, revision=revision)
if not row:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
return self._format_file_results(row)
_query_list_file_revisions = rhnSQL.Statement("""
select cr.revision
from rhnConfigChannel cc,
rhnConfigRevision cr,
rhnConfigFile cf
where cf.config_channel_id = cc.id
and cc.label = :config_channel
and cc.org_id = :org_id
and cf.config_file_name_id = lookup_config_filename(:path)
and cr.config_file_id = cf.id
order by revision desc
""")
def management_list_file_revisions(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
retval = [x['revision'] for x in rhnSQL.fetchall_dict(self._query_list_file_revisions,
org_id=self.org_id, config_channel=config_channel, path=path) or []]
if not retval:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
return retval
def management_has_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
row = self._get_file(config_channel, path)
if not row:
return {}
return {
'revision': row['revision'],
}
_query_get_file = """
select :path path,
cc.label config_channel,
ccont.contents file_contents,
ccont.is_binary,
c.checksum_type,
c.checksum,
ccont.delim_start, ccont.delim_end,
cr.revision,
cf.modified,
ci.username,
ci.groupname,
ci.filemode,
cft.label,
ci.selinux_ctx,
case
when cft.label='symlink' then (select path from rhnConfigFileName where id = ci.SYMLINK_TARGET_FILENAME_ID)
else ''
end as symlink
from rhnConfigChannel cc,
rhnConfigInfo ci,
rhnConfigRevision cr
left join rhnConfigContent ccont
on cr.config_content_id = ccont.id
left join rhnChecksumView c
on ccont.checksum_id = c.id,
rhnConfigFile cf,
rhnConfigFileType cft
where cf.config_channel_id = cc.id
and cc.label = :config_channel
and cc.org_id = :org_id
and cf.config_file_name_id = lookup_config_filename(:path)
and cr.config_file_id = cf.id
and cr.config_info_id = ci.id
and cr.config_file_type_id = cft.id
"""
_query_get_file_latest = rhnSQL.Statement(_query_get_file + """
and cf.latest_config_revision_id = cr.id
""")
_query_get_file_revision = rhnSQL.Statement(_query_get_file + """
and cr.revision = :revision
""")
def _get_file(self, config_channel, path, revision=None):
log_debug(2, config_channel, path)
params = {
'org_id': self.org_id,
'config_channel': config_channel,
'path': path,
}
if revision is None:
# Fetch the latest
q = self._query_get_file_latest
else:
params['revision'] = revision
q = self._query_get_file_revision
log_debug(4, params)
return rhnSQL.fetchone_dict(q, **params)
_query_lookup_config_file_by_channel = rhnSQL.Statement("""
select cf.id,
cf.state_id
from rhnConfigFile cf,
rhnConfigChannel cc
where cc.org_id = :org_id
and cf.config_channel_id = cc.id
and cc.label = :config_channel
and cf.config_file_name_id = lookup_config_filename(:path)
""")
def management_remove_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
row = rhnSQL.fetchone_dict(self._query_lookup_config_file_by_channel,
org_id=self.org_id, config_channel=config_channel, path=path)
if not row:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
config_file_id = row['id']
delete_call = rhnSQL.Procedure("rhn_config.delete_file")
delete_call(config_file_id)
rhnSQL.commit()
return {}
_query_update_file_state = rhnSQL.Statement("""
update rhnConfigFile
set state_id = :state_id
where id = :config_file_id
""")
def management_disable_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
# XXX Validate the namespace
path = dict.get('path')
t = rhnSQL.Table('rhnConfigFileState', 'label')
state_id_dead = t['dead']['id']
row = rhnSQL.fetchone_dict(self._query_lookup_config_file_by_channel,
config_channel=config_channel, path=path)
if not row or row['state_id'] == state_id_dead:
raise rhnFault(4011, "File %s does not exist in channel %s" %
(path, config_channel), explain=0)
rhnSQL.execute(self._query_update_file_state,
config_file_id=row['id'], state_id=state_id_dead)
rhnSQL.commit()
return {}
def management_put_file(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
config_channel = dict.get('config_channel')
row = self.lookup_org_config_channel_by_name(config_channel)
conf_channel_id = row['id']
file_path = dict.get('path')
result = self.push_file(conf_channel_id, dict)
file_too_large = result.get('file_too_large')
if file_too_large:
raise rhnFault(4003, "File %s is too large (%s bytes)" %
(dict['path'], dict['size']), explain=0)
rhnSQL.commit()
return {}
def management_get_delimiters(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
return self._get_delimiters()
def management_get_maximum_file_size(self, dict={}):
log_debug(1)
self._get_and_validate_session(dict)
return self._get_maximum_file_size()
def __attributes_differ(self, fsrc, fdst):
""" Returns true if acl, ownership, type or selinux context differ. """
return (fsrc['filemode'] != fdst['filemode']) or (fsrc['label'] != fdst['label']) or \
(fsrc['username'] != fdst['username']) or (fsrc['groupname'] != fdst['groupname']) or \
(fsrc['selinux_ctx'] != fdst['selinux_ctx'])
def __header(self, path, fsrc, config_channel_src, fdst, config_channel_dst):
""" Returns diff like header for this two files. """
template = "--- %s\t%s\tattributes: %s %s %s %s\tconfig channel: %s\trevision: %s"
first_row = template % (path, f_date(fsrc['modified']), ostr_to_sym(fsrc['filemode'], fsrc['label']),
fsrc['username'], fsrc['groupname'], fsrc['selinux_ctx'], config_channel_src,
fsrc['revision'],
)
second_row = template % (path, f_date(fdst['modified']), ostr_to_sym(fdst['filemode'], fdst['label']),
fdst['username'], fdst['groupname'], fdst['selinux_ctx'], config_channel_dst,
fdst['revision'],
)
return (first_row, second_row)
def management_diff(self, dict):
log_debug(1)
self._get_and_validate_session(dict)
param_names = ['config_channel_src', 'revision_src', 'path', ]
for p in param_names:
val = dict.get(p)
if val is None:
raise rhnFault(4007, "No content sent for `%s'" % p)
log_debug(4, "Params sent", dict)
path = dict['path']
config_channel_src = dict['config_channel_src']
revision_src = dict.get('revision_src')
fsrc = self._get_file_revision(config_channel_src, revision_src, path)
config_channel_dst = dict.get('config_channel_dst')
if config_channel_dst is None:
config_channel_dst = config_channel_src
revision_dst = dict.get('revision_dst')
fdst = self._get_file_revision(config_channel_dst, revision_dst, path)
if fsrc['label'] != fdst['label']:
raise rhnFault(4017,
"Path %s is a %s in channel %s while it is a %s in channel %s"
% (path, fsrc['label'],
config_channel_src, fdst['label'], config_channel_dst),
explain=0)
if fsrc['label'] == 'symlink':
if (fsrc["symlink"] != fdst['symlink']) or self.__attributes_differ(fsrc, fdst):
(first_row, second_row) = self.__header(path, fsrc, config_channel_src, fdst, config_channel_dst)
first_row += ' target: %s' % fsrc["symlink"]
second_row += ' target: %s' % fdst["symlink"]
return first_row + "\n" + second_row + "\n"
return ""
diff = difflib.unified_diff(
fsrc['file_content'], fdst['file_content'], path, path, fsrc['modified'], fdst['modified'], lineterm='')
try:
first_row = next(diff)
except StopIteration:
return ""
if not first_row.startswith('---'):
# Hmm, weird
return first_row + '\n'.join(list(diff))
try:
second_row = next(diff)
except StopIteration:
second_row = ''
if not second_row.startswith('+++'):
# Hmm, weird
return second_row + '\n'.join(list(diff))
(first_row, second_row) = self.__header(path, fsrc, config_channel_src, fdst, config_channel_dst)
return first_row + "\n" + second_row + '\n' + '\n'.join(list(diff))
def _get_file_revision(self, config_channel, revision, path):
if revision and not revision.isdigit():
raise rhnFault(4016, "Invalid revision number '%s' specified for path %s "
"in channel %s" % (revision, path, config_channel),
explain=0)
f = self._get_file(config_channel, path, revision=revision)
if not f:
raise rhnFault(4011, "File %s (revision %s) does not exist "
"in channel %s" % (path, revision, config_channel),
explain=0)
if f['label'] == 'file' and f['is_binary'] == 'Y':
raise rhnFault(4004, "File %s (revision %s) seems to contain "
"binary data" % (path, revision),
explain=0)
# We have to read the contents of the first file here, because the LOB
# object is tied to a cursor; if we re-execute the cursor, the LOB
# seems to be invalid (bug 151220)
# Empty files or directories may have NULL instead of lobs
fc_lob = f.get('file_contents')
if fc_lob:
f['file_content'] = rhnSQL.read_lob(fc_lob).splitlines()
else:
f['file_content'] = ''
return f
# Helper functions
_query_org_config_channels = rhnSQL.Statement("""
select cc.id, cc.label, cc.name, cct.label channel_type
from rhnConfigChannelType cct, rhnConfigChannel cc
where cc.label = :config_channel
and cc.org_id = :org_id
and cc.confchan_type_id = cct.id
""")
def lookup_org_config_channel_by_name(self, config_channel):
row = rhnSQL.fetchone_dict(self._query_org_config_channels,
config_channel=config_channel, org_id=self.org_id)
if not row:
raise rhnFault(4009, "Configuration channel %s does not exist" %
config_channel, explain=0)
return row
def _check_user_role(self):
user_roles = self.user.get_roles()
if 'config_admin' in user_roles or 'org_admin' in user_roles:
# All good
return
        raise rhnFault(4006,
                       "User is not allowed to manage config files")
| gpl-2.0 | 4,206,492,181,144,334,000 | 36.714015 | 123 | 0.555868 | false | 3.926065 | true | false | false |
afeno/pymysensors | mysensors/const_14.py | 1 | 6398 | """MySensors constants for version 1.4 of MySensors."""
from enum import IntEnum
class MessageType(IntEnum):
"""MySensors message types."""
# pylint: disable=too-few-public-methods
presentation = 0 # sent by a node when presenting attached sensors
set = 1 # sent from/to sensor when value should be updated
req = 2 # requests a variable value
internal = 3 # internal message
stream = 4 # OTA firmware updates
class Presentation(IntEnum):
"""MySensors presentation sub-types."""
# pylint: disable=too-few-public-methods
S_DOOR = 0 # Door and window sensors
S_MOTION = 1 # Motion sensors
S_SMOKE = 2 # Smoke sensor
S_LIGHT = 3 # Light Actuator (on/off)
S_DIMMER = 4 # Dimmable device of some kind
S_COVER = 5 # Window covers or shades
S_TEMP = 6 # Temperature sensor
S_HUM = 7 # Humidity sensor
S_BARO = 8 # Barometer sensor (Pressure)
S_WIND = 9 # Wind sensor
S_RAIN = 10 # Rain sensor
S_UV = 11 # UV sensor
S_WEIGHT = 12 # Weight sensor for scales etc.
S_POWER = 13 # Power measuring device, like power meters
S_HEATER = 14 # Heater device
S_DISTANCE = 15 # Distance sensor
S_LIGHT_LEVEL = 16 # Light sensor
S_ARDUINO_NODE = 17 # Arduino node device
S_ARDUINO_RELAY = 18 # Arduino repeating node device
S_LOCK = 19 # Lock device
S_IR = 20 # Ir sender/receiver device
S_WATER = 21 # Water meter
S_AIR_QUALITY = 22 # Air quality sensor e.g. MQ-2
S_CUSTOM = 23 # Use this for custom sensors
S_DUST = 24 # Dust level sensor
S_SCENE_CONTROLLER = 25 # Scene controller device
class SetReq(IntEnum):
"""MySensors set/req sub-types."""
# pylint: disable=too-few-public-methods
V_TEMP = 0 # Temperature
V_HUM = 1 # Humidity
V_LIGHT = 2 # Light status. 0=off 1=on
V_DIMMER = 3 # Dimmer value. 0-100%
V_PRESSURE = 4 # Atmospheric Pressure
# Weather forecast. One of "stable", "sunny", "cloudy", "unstable",
# "thunderstorm" or "unknown"
V_FORECAST = 5
V_RAIN = 6 # Amount of rain
V_RAINRATE = 7 # Rate of rain
V_WIND = 8 # Windspeed
V_GUST = 9 # Gust
V_DIRECTION = 10 # Wind direction
V_UV = 11 # UV light level
V_WEIGHT = 12 # Weight (for scales etc)
V_DISTANCE = 13 # Distance
V_IMPEDANCE = 14 # Impedance value
# Armed status of a security sensor. 1=Armed, 0=Bypassed
V_ARMED = 15
# Tripped status of a security sensor. 1=Tripped, 0=Untripped
V_TRIPPED = 16
V_WATT = 17 # Watt value for power meters
V_KWH = 18 # Accumulated number of KWH for a power meter
V_SCENE_ON = 19 # Turn on a scene
V_SCENE_OFF = 20 # Turn off a scene
# Mode of heater. One of "Off", "HeatOn", "CoolOn", or "AutoChangeOver"
V_HEATER = 21
V_HEATER_SW = 22 # Heater switch power. 1=On, 0=Off
V_LIGHT_LEVEL = 23 # Light level. 0-100%
V_VAR1 = 24 # Custom value
V_VAR2 = 25 # Custom value
V_VAR3 = 26 # Custom value
V_VAR4 = 27 # Custom value
V_VAR5 = 28 # Custom value
V_UP = 29 # Window covering. Up.
V_DOWN = 30 # Window covering. Down.
V_STOP = 31 # Window covering. Stop.
V_IR_SEND = 32 # Send out an IR-command
V_IR_RECEIVE = 33 # This message contains a received IR-command
V_FLOW = 34 # Flow of water (in meter)
V_VOLUME = 35 # Water volume
V_LOCK_STATUS = 36 # Set or get lock status. 1=Locked, 0=Unlocked
V_DUST_LEVEL = 37 # Dust level
V_VOLTAGE = 38 # Voltage level
V_CURRENT = 39 # Current level
class Internal(IntEnum):
"""MySensors internal sub-types."""
# pylint: disable=too-few-public-methods
# Use this to report the battery level (in percent 0-100).
I_BATTERY_LEVEL = 0
# Sensors can request the current time from the Controller using this
# message. The time will be reported as the seconds since 1970
I_TIME = 1
# Sensors report their library version at startup using this message type
I_VERSION = 2
# Use this to request a unique node id from the controller.
I_ID_REQUEST = 3
# Id response back to sensor. Payload contains sensor id.
I_ID_RESPONSE = 4
# Start/stop inclusion mode of the Controller (1=start, 0=stop).
I_INCLUSION_MODE = 5
# Config request from node. Reply with (M)etric or (I)mperal back to sensor
I_CONFIG = 6
# When a sensor starts up, it broadcast a search request to all neighbor
# nodes. They reply with a I_FIND_PARENT_RESPONSE.
I_FIND_PARENT = 7
# Reply message type to I_FIND_PARENT request.
I_FIND_PARENT_RESPONSE = 8
# Sent by the gateway to the Controller to trace-log a message
I_LOG_MESSAGE = 9
# A message that can be used to transfer child sensors
# (from EEPROM routing table) of a repeating node.
I_CHILDREN = 10
# Optional sketch name that can be used to identify sensor in the
# Controller GUI
I_SKETCH_NAME = 11
# Optional sketch version that can be reported to keep track of the version
# of sensor in the Controller GUI.
I_SKETCH_VERSION = 12
# Used by OTA firmware updates. Request for node to reboot.
I_REBOOT = 13
# Send by gateway to controller when startup is complete
I_GATEWAY_READY = 14
class Stream(IntEnum):
"""MySensors stream sub-types."""
# Request new FW, payload contains current FW details
ST_FIRMWARE_CONFIG_REQUEST = 0
# New FW details to initiate OTA FW update
ST_FIRMWARE_CONFIG_RESPONSE = 1
ST_FIRMWARE_REQUEST = 2 # Request FW block
ST_FIRMWARE_RESPONSE = 3 # Response FW block
ST_SOUND = 4 # Sound
ST_IMAGE = 5 # Image
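# --- Editor's illustration (not part of the original module) ---
# Hedged sketch of decoding one line of the MySensors 1.4 serial protocol,
# conventionally "node-id;child-sensor-id;command;ack;type;payload". The
# helper name and the sample line below are assumptions for illustration.
_SUBTYPE_FOR_COMMAND = {
    MessageType.presentation: Presentation,
    MessageType.set: SetReq,
    MessageType.req: SetReq,
    MessageType.internal: Internal,
    MessageType.stream: Stream,
}
def _example_decode(line):
    node_id, child_id, command, ack, sub_type, payload = line.strip().split(';', 5)
    command = MessageType(int(command))
    sub_type = _SUBTYPE_FOR_COMMAND[command](int(sub_type))
    return int(node_id), int(child_id), command, int(ack), sub_type, payload
# _example_decode("12;6;1;0;0;21.5")
# -> (12, 6, MessageType.set, 0, SetReq.V_TEMP, '21.5')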
| mit | 1,872,258,917,628,189,000 | 41.653333 | 79 | 0.573617 | false | 3.645584 | false | false | false |
SmingHub/Sming | Sming/Arch/Esp8266/Tools/decode-stacktrace.py | 2 | 1381 | #!/usr/bin/env python
########################################################
#
# Stack Trace Decoder
# Author: Slavey Karadzhov <slav@attachix.com>
#
########################################################
import shlex
import subprocess
import sys
import re
def usage():
print("Usage: \n\t%s <file.elf> [<error-stack.log>]" % sys.argv[0])
def extractAddresses(data):
    m = re.findall(r"(40[0-2](\d|[a-f]){5})", data)
if len(m) == 0:
return m
addresses = []
for item in m:
addresses.append(item[0])
return addresses
if __name__ == "__main__":
    if len(sys.argv) not in (2, 3):
usage()
sys.exit(1)
command = "xtensa-lx106-elf-addr2line -aipfC -e '%s' " % sys.argv[1]
pipe = subprocess.Popen(shlex.split(command), bufsize=1, stdin=subprocess.PIPE)
if len(sys.argv) > 2:
data = open(sys.argv[2]).read()
pipe.communicate("\n".join(extractAddresses(data)).encode('ascii'))
else:
while True:
data = sys.stdin.readline()
addresses = extractAddresses(data)
if len(addresses) == 0:
continue
# print ( "[",addresses,"]" )
line = "\r\n".join(addresses)+"\r\n"
# line = line.ljust(125," ")
pipe.stdin.write(line.encode('ascii'))
pipe.stdin.flush()
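# --- Editor's illustration (not part of the original script) ---
# extractAddresses() pulls anything shaped like an ESP8266 code address
# (0x40000000-0x402fffff) out of a crash log. Sample behaviour, with
# invented addresses:
#   extractAddresses("epc1=0x4023f1a2 ... 40201b30 40100fc4")
#   -> ['4023f1a2', '40201b30', '40100fc4']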
| lgpl-3.0 | 6,130,658,258,010,436,000 | 25.557692 | 83 | 0.503983 | false | 3.478589 | false | false | false |
bkerster/utilities | lnbin.py | 1 | 1964 | import numpy as np
#x must be a np array
def lnbin(x, BinNum):
"""
    Logarithmically bins a numpy array, returns (midpts, Freq)
    This function takes the input of a data vector x, which is to be binned;
    it also takes the number of bins one would like the data binned into. The
    output is two vectors: one containing the normalised frequency of each bin
    (Freq), the other the midpoint of each bin (midpts).
    An error estimate for the binned frequency, eFreq, is also computed
    internally (added June 30 2010) but is not part of the return value.
Updated 2/6/14 to change the min to scale automatically
"""
if type(x) != np.ndarray:
try:
x = np.array(x)
except:
            print('Improper input format!')
raise
x = np.sort(x)
i = 0
while x[i] <= 0:
i += 1
percent_binned = float((x.size-(i+1))) / x.size*100
#print 'Percentage of input vec binned {}'.format(percent_binned)
FPT = x[i:]
LFPT = np.log(FPT)
max1 = np.log( np.ceil(np.amax(FPT)))
#min1 = 1
min1 = np.log(np.floor(np.min(FPT)))
LFreq = np.zeros((BinNum, 1))
LTime = np.zeros((BinNum, 1))
Lends = np.zeros((BinNum, 2))
step = (max1-min1) / BinNum
#LOG Binning Data ###########################
for i in range(FPT.size):
for k in range(BinNum):
if( k*step+min1 <= LFPT[i] and LFPT[i] < (k+1)*step+min1):
LFreq[k] += 1 #check LFreq on the first bin
LTime[k] = (k+1)*step-(0.5*step)+min1
Lends[k, 0] = k*step+min1
Lends[k, 1] = (k+1)*step+min1
ends = np.exp(Lends)
widths = ends[:,1] - ends[:,0]
Freq = LFreq.T / widths / x.size
eFreq = 1.0 / np.sqrt(LFreq) * Freq
midpts = np.exp(LTime)
return (midpts[:,0], Freq.T[:,0]) | gpl-2.0 | -1,842,046,911,617,157,000 | 30.693548 | 82 | 0.55499 | false | 3.162641 | false | false | false |
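# --- Editor's illustration for lnbin() above (not part of the original file) ---
# Hedged usage sketch with synthetic data; assumes numpy is imported as np:
#   samples = np.random.pareto(2.0, size=10000) + 1.0   # heavy-tailed sample
#   midpts, freq = lnbin(samples, 20)
#   # midpts[i] is the midpoint of bin i; freq[i] is the width- and
#   # size-normalised count, suitable for log-log plotting of the distribution.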
transifex/openformats | openformats/tests/formats/yaml/test_utils.py | 1 | 4844 | import unittest
from collections import OrderedDict
import six
from mock import MagicMock
from openformats.formats.yaml.utils import YamlGenerator
from openformats.formats.yaml.yaml_representee_classes import (BlockList,
FlowList,
FlowStyleOrderedDict,
double_quoted_unicode,
single_quoted_unicode)
class YamlGeneratorTestCase(unittest.TestCase):
def test_insert_translation_in_dict_empty_parent(self):
keys = ["one", "two", "[0]"]
flags = "block:block:'".split(':')
translation_string = "test"
result = OrderedDict()
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', OrderedDict([
# (u'two', BlockList([
# single_quoted_unicode(u'test')
# ]))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertIsInstance(result['one'], OrderedDict)
self.assertIsInstance(result['one']['two'], BlockList)
self.assertIsInstance(result['one']['two'][0], single_quoted_unicode)
def test_insert_translation_in_dict_non_empty_parent(self):
result = OrderedDict()
result['one'] = OrderedDict()
result['one']['three'] = 'a string'
keys = ["one", "two", "[0]"]
flags = "block:block:'".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', OrderedDict([
# (u'three', 'a string'),
# (u'two', BlockList([
# single_quoted_unicode(u'test')
# ]))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertListEqual(list(six.iterkeys(result['one'])),
['three', 'two'])
self.assertIsInstance(result['one']['two'], BlockList)
self.assertIsInstance(result['one']['two'][0], single_quoted_unicode)
def test_insert_translation_in_dict_flow_list(self):
result = OrderedDict()
keys = ["one", "two", "[0]"]
flags = "block:flow:\"".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', OrderedDict([
# (u'two', FlowList([
# double_quoted_unicode(u'test')
# ]))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertIsInstance(result['one'], OrderedDict)
self.assertIsInstance(result['one']['two'], FlowList)
self.assertIsInstance(result['one']['two'][0], double_quoted_unicode)
def test_insert_translation_in_dict_flow_dict(self):
result = OrderedDict()
keys = ["one", "two"]
flags = "flow:\"".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
# produced result
# OrderedDict([
# (u'one', FlowStyleOrderedDict([
# (u'two', double_quoted_unicode(u'test'))
# ]))
# ])
self.assertListEqual(list(six.iterkeys(result)), ['one'])
self.assertIsInstance(result['one'], FlowStyleOrderedDict)
self.assertIsInstance(result['one']['two'], double_quoted_unicode)
def test_insert_translation_in_dict_list_of_dicts(self):
result = OrderedDict()
keys = ["one", "[0]", "two"]
flags = "block:flow:\"".split(':')
translation_string = "test"
YamlGenerator(MagicMock())._insert_translation_in_dict(
result, keys, flags, translation_string
)
        # produced result
        # OrderedDict([
        #   (u'one', BlockList([
        #       FlowStyleOrderedDict([
        #           (u'two', double_quoted_unicode(u'test'))
        #       ])
        #   ]))
        # ])
self.assertListEqual(list(list(six.iterkeys(result))), ['one'])
self.assertIsInstance(result['one'], BlockList)
self.assertIsInstance(result['one'][0], FlowStyleOrderedDict)
self.assertIsInstance(result['one'][0]['two'], double_quoted_unicode)
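# --- Editor's illustration (not part of the original tests) ---
# For orientation, the structure asserted in the last test would serialize to
# YAML roughly as below (a sketch, assuming BlockList renders as a block
# sequence and FlowStyleOrderedDict as a flow mapping):
#   one:
#   - {two: "test"}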
| gpl-3.0 | -4,205,691,273,339,091,000 | 37.141732 | 85 | 0.528282 | false | 4.427788 | true | false | false |
dishbreak/bluesync | python/modules/types.py | 1 | 1380 | from collections import namedtuple
Device = namedtuple('Device', 'addr_str addr last_heard')
bluesync_uuid = [0x92, 0xb8, 0xb4, 0xf7, 0xd9,
0x96, 0xb3, 0xaf, 0x24, 0x48,
0x03, 0x20, 0x35, 0x74, 0x67,
0x86]
timestamp_uuid = [0x6e, 0x16, 0x71, 0x8f, 0xb9,
0xde, 0x2b, 0x92, 0xa0, 0x4b,
0x9d, 0x92, 0xaa, 0x49, 0xd4,
0x63]
trigger_scanning_uuid = [0x04, 0x48, 0x2f, 0x9a, 0x2e,
0x35, 0xc7, 0xa8, 0xcd, 0x4c,
0x4b, 0x90, 0x9a, 0xcb, 0xec,
0xe8]
reference_time_uuid = [0x1d, 0x4f, 0xc4, 0xeb, 0xf5,
0x2c, 0x94, 0xb9, 0xfc, 0x42,
0xca, 0x9e, 0x4a, 0x3b, 0xd9,
0x33]
bluesync_slave_adv_data = [
0x02, 0x01, 0x06, 0x02, 0x0A, 0x03,
0x06, 0xFF, 0xFF, 0xFF, 0xBE, 0xEF, 0xFE
]
bluesync_master_adv_data_prefix = [
0x02, 0x01, 0x06,
0x0A, 0xFF, 0xFF, 0xFF, 0xBE, 0xEF, 0xEF
]
sequence_number_uuid = [
0x4e, 0x14, 0x6c, 0xa0, 0x98,
0xa2, 0xd7, 0x83, 0x81, 0x4f,
0xc1, 0x48, 0xd2, 0x10, 0x9c,
0xaa
]
standard_flag_uuid = [
0x0b, 0xd1, 0x67, 0xa6, 0xfb,
0xbb, 0xe5, 0x9a, 0x64, 0x46,
0x0d, 0x3d, 0xf2, 0x73, 0xc9,
0xe7
]
def array_to_integer(array):
    num_bytes = min(len(array), 4)  # avoid shadowing the built-in 'bytes'
    return sum([array[i] << (8*i) for i in range(0, num_bytes)])
def integer_to_array(integer):
return [int((integer & (0xFF << 8*i)) >> 8*i) for i in range (0,4)] | bsd-2-clause | -5,105,000,262,508,354,000 | 23.660714 | 71 | 0.605072 | false | 1.963016 | false | false | false |
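# --- Editor's illustration for the helpers above (not part of the original file) ---
# Round-trip sketch, little-endian byte order:
#   integer_to_array(0xDEADBEEF)                 -> [239, 190, 173, 222]
#   hex(array_to_integer([239, 190, 173, 222])) -> '0xdeadbeef'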
hiteshgarg14/openstates | scrapers/tx/events.py | 2 | 4807 | from utils import LXMLMixin
import re
import datetime as dt
from openstates.scrape import Scraper, Event
import pytz
class TXEventScraper(Scraper, LXMLMixin):
_tz = pytz.timezone("US/Central")
def scrape(self, session=None, chamber=None):
if not session:
session = self.latest_session()
self.info("No session specified; using %s", session)
if chamber:
yield from self.scrape_committee_upcoming(session, chamber)
else:
yield from self.scrape_committee_upcoming(session, "upper")
yield from self.scrape_committee_upcoming(session, "lower")
def scrape_event_page(self, session, chamber, url, datetime):
page = self.lxmlize(url)
info = page.xpath("//p")
metainfo = {}
plaintext = ""
for p in info:
content = re.sub(r"\s+", " ", p.text_content())
plaintext += content + "\n"
if ":" in content:
key, val = content.split(":", 1)
metainfo[key.strip()] = val.strip()
committee = metainfo["COMMITTEE"]
where = metainfo["PLACE"]
if "CHAIR" in where:
where, chair = where.split("CHAIR:")
metainfo["PLACE"] = where.strip()
metainfo["CHAIR"] = chair.strip()
chair = None
if "CHAIR" in metainfo:
chair = metainfo["CHAIR"]
plaintext = re.sub(r"\s+", " ", plaintext).strip()
regexp = r"(S|J|H)(B|M|R) (\d+)"
bills = re.findall(regexp, plaintext)
event = Event(
name=committee, start_date=self._tz.localize(datetime), location_name=where
)
event.add_source(url)
event.add_participant(committee, type="committee", note="host")
if chair is not None:
event.add_participant(chair, type="legislator", note="chair")
for bill in bills:
chamber, type, number = bill
bill_id = "%s%s %s" % (chamber, type, number)
item = event.add_agenda_item("Bill up for discussion")
item.add_bill(bill_id)
event.add_agenda_item(plaintext)
yield event
def scrape_page(self, session, chamber, url):
page = self.lxmlize(url)
events = page.xpath("//a[contains(@href, 'schedules/html')]")
for event in events:
peers = event.getparent().getparent().xpath("./*")
date = peers[0].text_content()
time = peers[1].text_content()
tad = "%s %s" % (date, time)
tad = re.sub(r"(PM|AM).*", r"\1", tad)
tad_fmt = "%m/%d/%Y %I:%M %p"
if "AM" not in tad and "PM" not in tad:
tad_fmt = "%m/%d/%Y"
tad = date
# Time expressed as 9:00 AM, Thursday, May 17, 2012
datetime = dt.datetime.strptime(tad, tad_fmt)
yield from self.scrape_event_page(
session, chamber, event.attrib["href"], datetime
)
def scrape_upcoming_page(self, session, chamber, url):
page = self.lxmlize(url)
date = None
time = None
for row in page.xpath(".//tr"):
title = row.xpath(".//div[@class='sectionTitle']")
if len(title) > 0:
date = title[0].text_content()
time_elem = row.xpath(".//td/strong")
if time_elem:
time = time_elem[0].text_content()
events = row.xpath(".//a[contains(@href, 'schedules/html')]")
for event in events:
# Ignore text after the datetime proper (ie, after "AM" or "PM")
datetime = "{} {}".format(date, time)
datetime = re.search(r"(?i)(.+?[ap]m).+", datetime)
if not datetime:
self.warning("invalid datetime: %s %s", date, time)
continue
datetime = datetime.group(1)
datetime = dt.datetime.strptime(datetime, "%A, %B %d, %Y %I:%M %p")
yield from self.scrape_event_page(
session, chamber, event.attrib["href"], datetime
)
def scrape_committee_upcoming(self, session, chamber):
chid = {"upper": "S", "lower": "H", "other": "J"}[chamber]
url = (
"https://capitol.texas.gov/Committees/Committees.aspx" + "?Chamber=" + chid
)
page = self.lxmlize(url)
refs = page.xpath("//div[@id='content']//a")
for ref in refs:
yield from self.scrape_page(session, chamber, ref.attrib["href"])
url = (
"http://capitol.texas.gov/Committees/MeetingsUpcoming.aspx"
+ "?Chamber="
+ chid
)
yield from self.scrape_upcoming_page(session, chamber, url)
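# --- Editor's illustration (not part of the original scraper) ---
# Hedged sketch of the date normalisation in scrape_page() above; the sample
# cell values are invented:
#   tad = "05/17/2012 9:00 AM, E1.014"
#   re.sub(r"(PM|AM).*", r"\1", tad)  -> "05/17/2012 9:00 AM"
#   dt.datetime.strptime("05/17/2012 9:00 AM", "%m/%d/%Y %I:%M %p")
#   -> datetime.datetime(2012, 5, 17, 9, 0)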
| gpl-3.0 | 7,876,155,375,849,433,000 | 34.345588 | 87 | 0.52798 | false | 3.598054 | false | false | false |
messense/teambition-api | teambition/api/works.py | 3 | 10083 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from optionaldict import optionaldict
from teambition.api.base import TeambitionAPI
class Works(TeambitionAPI):
def get(self, id=None, parent_id=None, page=None, count=None, all=None):
"""
        Get file information
        For details see
        http://docs.teambition.com/wiki/works#works-get
        :param id: optional, file ID
        :param parent_id: optional, parent ID
        :param page: optional, current page, defaults to 1
        :param count: optional, items per page, defaults to 30
        :param all: optional, if provided all results are returned
        :return: the JSON payload of the response
"""
assert id or parent_id
params = optionaldict(
page=page,
count=count,
all=all
)
if id:
endpoint = 'api/works/{0}'.format(id)
elif parent_id:
endpoint = 'api/works'
params['_parentId'] = parent_id
return self._get(endpoint, params=params)
def create(self, parent_id, file_name, file_size, file_type, file_category,
file_key, image_width=None, image_height=None,
involve_members=None):
"""
        Create a file
        For details see
        http://docs.teambition.com/wiki/works#works-create
        :param parent_id: ID of the parent directory
        :param file_name: file name
        :param file_size: file size
        :param file_type: file type
        :param file_category: file category
        :param file_key: obtained after uploading via the striker service
        :param image_width: optional, image width
        :param image_height: optional, image height
        :param involve_members: optional
        :return: the JSON payload of the response
"""
data = optionaldict(
_parentId=parent_id,
fileName=file_name,
fileSize=file_size,
fileType=file_type,
fileCategory=file_category,
fileKey=file_key,
imageWidth=image_width,
imageHeight=image_height,
involveMembers=involve_members
)
return self._post(
'api/works',
data=data
)
def like(self, id):
"""
        Like a file
        For details see
        http://docs.teambition.com/wiki/works#works-like
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._post('api/works/{0}/like'.format(id))
def update(self, id, file_name, description=None):
"""
        Update a file
        For details see
        http://docs.teambition.com/wiki/works#works-update
        :param id: file ID
        :param file_name: file name
        :param description: optional, description
        :return: the JSON payload of the response
"""
data = optionaldict(
fileName=file_name,
description=description
)
return self._put(
'api/works/{0}'.format(id),
data=data
)
def move(self, id, parent_id):
"""
        Move a file
        For details see
        http://docs.teambition.com/wiki/works#works-move
        :param id: file ID
        :param parent_id: ID of the destination directory
        :return: the JSON payload of the response
"""
return self._put(
'api/works/{0}'.format(id),
data={
'_parentId': parent_id
}
)
def delete(self, id):
"""
        Delete a file
        For details see
        http://docs.teambition.com/wiki/works#works-delete
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._delete('api/works/{0}'.format(id))
def update_members(self, id, members):
"""
        Update a file's participants
        For details see
        http://docs.teambition.com/wiki/works#works-update-involvemembers
        :param id: file ID
        :param members: list of participant IDs
        :return: the JSON payload of the response
"""
return self._put(
'api/works/{0}/involveMembers'.format(id),
data={
'involveMembers': members
}
)
def get_tags(self, id):
"""
        Get the file's tag list
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._get('api/works/{0}/tags'.format(id))
def remove_tag(self, id, tag_id):
"""
        Remove a tag
        :param id: file ID
        :param tag_id: tag ID
        :return: the JSON payload of the response
"""
return self._delete('api/works/{0}/tags/{1}'.format(id, tag_id))
def add_tag(self, id, tag_id):
"""
        Attach a tag
        :param id: file ID
        :param tag_id: tag ID
        :return: the JSON payload of the response
"""
return self._put('api/works/{0}/tags/{1}'.format(id, tag_id))
def get_objectlinks(self, id):
"""
        Get the list of objectlinks associated with the file
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._get('api/works/{0}/objectlinks'.format(id))
def create_objectlink(self, id, linked_id, linked_type):
"""
        Link an object to the file
        :param id: file ID
        :param linked_id: ID of the object to link
        :param linked_type: type of the object to link
        :return: the JSON payload of the response
"""
return self._post(
'api/objectlinks',
data={
'_parentId': id,
'parentType': 'work',
'_linkedId': linked_id,
'linkedType': linked_type
}
)
def get_versions(self, id):
"""
        Get the version history of a file
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-list
        :param id: file ID
        :return: list of historical versions
"""
return self._get('api/works/{0}/versions'.format(id))
def get_version(self, id, version_id):
"""
        Get a single historical version
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-get
        :param id: file ID
        :param version_id: version ID
        :return: version information
"""
return self._get('api/works/{0}/versions/{1}'.format(id, version_id))
def update_version(self, id, version_id, file_name=None, description=None):
"""
        Update a single historical version
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-update
        :param id: file ID
        :param version_id: version ID
        :param file_name: optional, file name
        :param description: optional, description
        :return: the JSON payload of the response
"""
data = optionaldict(fileName=file_name, description=description)
return self._put(
'api/works/{0}/versions/{1}'.format(id, version_id),
data=data
)
def delete_version(self, id, version_id):
"""
        Delete a single historical version
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-delete
        :param id: file ID
        :param version_id: version ID
        :return: the JSON payload of the response
"""
return self._delete(
'api/works/{0}/versions/{1}'.format(id, version_id)
)
def create_version(self, id, file_name, file_size, file_type,
file_category, file_key, image_width=None,
image_height=None, involve_members=None):
"""
        Create a new version of a file
        For details see
        http://docs.teambition.com/wiki/works-versions#works-versions-post
        :param id: file ID
        :param file_name: file name
        :param file_size: file size
        :param file_type: file type
        :param file_category: file category
        :param file_key: obtained after uploading via the striker service
        :param image_width: optional, image width
        :param image_height: optional, image height
        :param involve_members: optional
        :return: the JSON payload of the response
"""
data = optionaldict(
fileName=file_name,
fileSize=file_size,
fileType=file_type,
fileCategory=file_category,
fileKey=file_key,
imageWidth=image_width,
imageHeight=image_height,
involveMembers=involve_members
)
return self._post(
'api/works/{0}/versions'.format(id),
data=data
)
def link_task(self, id, linked_id):
"""
        Link a task to the file
        :param id: file ID
        :param linked_id: ID of the task to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'task')
def link_post(self, id, linked_id):
"""
        Link a post to the file
        :param id: file ID
        :param linked_id: ID of the post to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'post')
def link_event(self, id, linked_id):
"""
        Link an event to the file
        :param id: file ID
        :param linked_id: ID of the event to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'event')
def link_work(self, id, linked_id):
"""
        Link another file to the file
        :param id: file ID
        :param linked_id: ID of the file to link
        :return: the JSON payload of the response
"""
return self.create_objectlink(id, linked_id, 'work')
def get_activities(self, id):
"""
        Get the file's activity feed
        :param id: file ID
        :return: the JSON payload of the response
"""
return self._get(
'api/activities',
params={'_boundToObjectId': id}
)
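# --- Editor's illustration (not part of the original module) ---
# Hedged usage sketch; 'works' is an already-constructed Works instance and
# all IDs and field values below are assumptions for illustration.
def _example_works_usage(works, folder_id, file_id, task_id):
    listing = works.get(parent_id=folder_id, page=1, count=30)
    works.update(file_id, file_name='spec-v2.pdf', description='final draft')
    works.link_task(file_id, task_id)    # cross-links the file and a task
    return listing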
| mit | 208,490,466,223,318,500 | 24.182584 | 79 | 0.51411 | false | 2.935494 | false | false | false |
ThomasZh/legend-club-wxpub | foo/wx/wx_order.py | 1 | 3884 | #!/usr/bin/env python
# _*_ coding: utf-8_*_
#
# Copyright 2016 planc2c.com
# dev@tripc2c.com
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tornado.web
import logging
import uuid
import time
import re
import json as JSON # alias so it does not clash with local variables named "json" inside methods
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../dao"))
from tornado.escape import json_encode, json_decode
from tornado.httpclient import HTTPClient
from tornado.httputil import url_concat
from bson import json_util
from comm import *
from dao import budge_num_dao
from dao import category_dao
from dao import activity_dao
from dao import group_qrcode_dao
from dao import cret_template_dao
from dao import bonus_template_dao
from dao import bonus_dao
from dao import apply_dao
from dao import order_dao
from dao import group_qrcode_dao
from dao import vendor_member_dao
from dao import voucher_dao
from dao import insurance_template_dao
from dao import contact_dao
from dao import vendor_hha_dao
from dao import voucher_pay_dao
from dao import vendor_wx_dao
from dao import voucher_order_dao
from dao import trip_router_dao
from dao import triprouter_share_dao
from dao import club_dao
from dao import activity_share_dao
from foo.wx import wx_wrap
from xml_parser import parseWxOrderReturn, parseWxPayReturn
from global_const import *
# Club operator: view order details
class WxVendorOrderInfoHandler(BaseHandler):
def get(self, club_id, order_id):
logging.info("GET %r", self.request.uri)
access_token = DEFAULT_USER_ID
order = self.get_order_index(order_id)
order['create_time'] = timestamp_datetime(order['create_time'])
order['amount'] = float(order['amount']) / 100
order['actual_payment'] = float(order['actual_payment']) / 100
if order['pay_status'] == 30:
order['pay_status'] = u"支付成功"
elif order['pay_status'] == 31:
order['pay_status'] = u"支付失败"
elif order['pay_status'] == 21:
order['pay_status'] = u"下单失败"
elif order['pay_status'] == 20:
order['pay_status'] = u"未支付"
if order['_status'] == 0:
order['_status'] = u"未填报"
if order['_status'] == 50:
order['_status'] = u"填报成功"
activity = self.get_activity(order['item_id'])
params = {"filter":"order", "order_id":order_id, "page":1, "limit":20}
url = url_concat(API_DOMAIN + "/api/applies", params)
http_client = HTTPClient()
headers = {"Authorization":"Bearer " + access_token}
response = http_client.fetch(url, method="GET", headers=headers)
logging.info("got response.body %r", response.body)
data = json_decode(response.body)
rs = data['rs']
applies = rs['data']
for _apply in applies:
            # Order creation time: timestamp -> "%m月%d 星期%w" (month/day, weekday)
_apply['create_time'] = timestamp_datetime(float(_apply['create_time']))
if _apply['gender'] == 'male':
_apply['gender'] = u'男'
else:
_apply['gender'] = u'女'
self.render('order/order.html',
activity=activity,
applies=applies,
order=order)
| apache-2.0 | 223,390,345,666,522,800 | 32.274336 | 84 | 0.657447 | false | 3.360143 | false | false | false |
jhesketh/zookeepr | zk/model/product.py | 3 | 25319 | """The application's model objects"""
import sqlalchemy as sa
from meta import Base
from meta import Session
from ceiling import Ceiling
from product_category import ProductCategory
from product_ceiling_map import product_ceiling_map
def setup(meta):
category_ticket = ProductCategory.find_by_name('Ticket')
ceiling_conference = Ceiling.find_by_name('conference-paid')
ceiling_all_conference = Ceiling.find_by_name('conference-all')
ceiling_earlybird = Ceiling.find_by_name('conference-earlybird')
ceiling_nonearlybird = Ceiling.find_by_name('conference-non-earlybird')
# Tickets
ticket_student = Product(category=category_ticket, active=True, description="Student Ticket",
cost="12500", auth=None, validate=None)
ticket_student.ceilings.append(ceiling_conference)
ticket_student.ceilings.append(ceiling_all_conference)
meta.Session.add(ticket_student);
ticket_hobbyist_eb = Product(category=category_ticket, active=True, description="Earlybird Hobbyist Ticket",
cost="29900", auth=None, validate=None)
ticket_hobbyist_eb.ceilings.append(ceiling_conference)
ticket_hobbyist_eb.ceilings.append(ceiling_all_conference)
ticket_hobbyist_eb.ceilings.append(ceiling_earlybird)
meta.Session.add(ticket_hobbyist_eb);
ticket_hobbyist = Product(category=category_ticket, active=True, description="Hobbyist Ticket",
cost="37500", auth=None, validate=None)
ticket_hobbyist.ceilings.append(ceiling_conference)
ticket_hobbyist.ceilings.append(ceiling_all_conference)
ticket_hobbyist.ceilings.append(ceiling_nonearlybird)
meta.Session.add(ticket_hobbyist);
ticket_professional_eb = Product(category=category_ticket, active=True, description="Earlybird Professional Ticket",
cost="63500", auth=None, validate=None)
ticket_professional_eb.ceilings.append(ceiling_conference)
ticket_professional_eb.ceilings.append(ceiling_all_conference)
ticket_professional_eb.ceilings.append(ceiling_earlybird)
meta.Session.add(ticket_professional_eb);
ticket_professional = Product(category=category_ticket, active=True, description="Professional Ticket",
cost="79500", auth=None, validate=None)
ticket_professional.ceilings.append(ceiling_conference)
ticket_professional.ceilings.append(ceiling_all_conference)
ticket_professional.ceilings.append(ceiling_nonearlybird)
meta.Session.add(ticket_professional);
ticket_fairy_penguin = Product(category=category_ticket, active=True, description="Fairy Penguin Sponsor",
cost="150000", auth=None, validate=None)
ticket_fairy_penguin.ceilings.append(ceiling_conference)
ticket_fairy_penguin.ceilings.append(ceiling_all_conference)
meta.Session.add(ticket_fairy_penguin);
ticket_speaker = Product(category=category_ticket, active=True, description="Speaker Ticket",
cost="0", auth="self.is_speaker()", validate=None)
ticket_speaker.ceilings.append(ceiling_all_conference)
meta.Session.add(ticket_speaker);
ticket_miniconf = Product(category=category_ticket, active=True, description="Miniconf Organiser Ticket",
cost="0", auth="self.is_miniconf_org()", validate=None)
ticket_miniconf.ceilings.append(ceiling_all_conference)
meta.Session.add(ticket_miniconf);
ticket_volunteer_free = Product(category=category_ticket, active=True, description="Volunteer Ticket (Free)",
cost="0", auth="self.is_volunteer(product)", validate=None)
ticket_volunteer_free.ceilings.append(ceiling_all_conference)
meta.Session.add(ticket_volunteer_free);
ticket_volunteer_paid = Product(category=category_ticket, active=True, description="Volunteer Ticket (paid)",
cost="12500", auth="self.is_volunteer(product)", validate=None)
ticket_volunteer_paid.ceilings.append(ceiling_all_conference)
meta.Session.add(ticket_volunteer_paid);
ticket_press = Product(category=category_ticket, active=True, description="Press Ticket",
cost="0", auth="self.is_role('press')", validate=None)
ticket_press.ceilings.append(ceiling_all_conference)
meta.Session.add(ticket_press)
    ticket_team = Product(category=category_ticket, active=True, description="Team Ticket",
                          cost="0", auth="self.is_role('team')", validate=None)
    meta.Session.add(ticket_team)
# Miniconfs
category_miniconf = ProductCategory.find_by_name('Miniconfs')
ceiling_miniconf_all = Ceiling.find_by_name('miniconf-all')
ceiling_miniconf_monday = Ceiling.find_by_name('miniconf-monday')
ceiling_miniconf_tuesday = Ceiling.find_by_name('miniconf-tuesday')
ceiling_rocketry = Ceiling.find_by_name('miniconf-rocketry')
product = Product(category=category_miniconf, active=True, description="Monday Southern Plumbers",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Monday Haecksen",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Monday Multimedia + Music",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Monday Arduino",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Monday Open Programming",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Monday The Business of Open Source",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Monday Freedom in the cloud",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Multicore and Parallel Computing",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_tuesday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Rocketry",
cost="20000", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_tuesday)
product.ceilings.append(ceiling_rocketry)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Systems Administration",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_tuesday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Open in the public sector ",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_tuesday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Mobile FOSS",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_tuesday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Data Storage",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_tuesday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Research and Student Innovation",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_tuesday)
meta.Session.add(product)
product = Product(category=category_miniconf, active=True, description="Tuesday Libre Graphics Day",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_miniconf_all)
product.ceilings.append(ceiling_miniconf_monday)
meta.Session.add(product)
# Shirts
category_shirt = ProductCategory.find_by_name('T-Shirt')
ceiling_shirt_all = Ceiling.find_by_name('shirt-all')
ceiling_shirt_men = Ceiling.find_by_name('shirt-men')
ceiling_shirt_women = Ceiling.find_by_name('shirt-women')
product = Product(category=category_shirt, active=True, description="Men's Small", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Men's Medium", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Men's Large", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Men's XL", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Men's 2XL", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Men's 3XL", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Men's 5XL", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Men's 7XL", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_men)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 6", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 8", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 10", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 12", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 14", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 16", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 18", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 20", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
product = Product(category=category_shirt, active=True, description="Women's Size 22", cost="2500", auth=None, validate=None)
product.ceilings.append(ceiling_shirt_all)
product.ceilings.append(ceiling_shirt_women)
meta.Session.add(product)
# Penguin Dinner
category_penguin = ProductCategory.find_by_name('Penguin Dinner Ticket')
ceiling_penguin_all = Ceiling.find_by_name('penguindinner-all')
product = Product(category=category_penguin, active=True, description="Adult", cost="9000", auth=None, validate="ProDinner(dinner_field='product_Penguin Dinner Ticket_Adult_qty',ticket_category='category_Ticket',ticket_id=[4,5,6,7,8,11,12])")
product.ceilings.append(ceiling_penguin_all)
meta.Session.add(product)
product = Product(category=category_penguin, active=True, description="Child", cost="2000", auth=None, validate=None)
product.ceilings.append(ceiling_penguin_all)
meta.Session.add(product)
Product(category=category_penguin, active=True, description="Infant", cost="0", auth=None, validate=None)
meta.Session.add(product)
# Speakers Dinner
category_speakers = ProductCategory.find_by_name('Speakers Dinner Ticket')
ceiling_speakers_all = Ceiling.find_by_name('speakersdinner-all')
product = Product(category=category_speakers, active=True, description="Adult", cost="0", validate="ProDinner(dinner_field='product_Speakers Dinner Ticket_Adult_qty',ticket_category='category_Ticket',ticket_id=[7,8,12])", auth="self.is_speaker() or self.is_miniconf_org() or self.is_role('team')")
product.ceilings.append(ceiling_speakers_all)
meta.Session.add(product)
product = Product(category=category_speakers, active=True, description="Child", cost="0", validate=None , auth="self.is_speaker() or self.is_miniconf_org() or self.is_role('team')")
product.ceilings.append(ceiling_speakers_all)
meta.Session.add(product)
product = Product(category=category_speakers, active=True, description="Infant", cost="0", validate=None , auth="self.is_speaker() or self.is_miniconf_org() or self.is_role('team')")
meta.Session.add(product)
# Accommodation
category_accomodation = ProductCategory.find_by_name('Accommodation')
ceiling_accom_all = Ceiling.find_by_name('accomodation-all')
product = Product(category=category_accomodation, active=True, description="I will organise my own",
cost="0", auth=None, validate=None)
product.ceilings.append(ceiling_accom_all)
meta.Session.add(product);
# Partners' Programme
category_partners = ProductCategory.find_by_name('Partners\' Programme')
ceiling_partners_all = Ceiling.find_by_name('partners-all')
partners_adult = Product(category=category_partners, active=True, description="Adult", cost="23500", auth=None, validate="PPDetails(adult_field='product_Partners Programme_Adult_qty', email_field='partner_email', name_field='partner_name', mobile_field='partner_mobile')")
partners_adult.ceilings.append(ceiling_partners_all)
meta.Session.add(partners_adult);
product = Product(category=category_partners, active=True, description="Child (3-14 years old)", cost="16500", auth=None, validate="PPChildrenAdult(current_field='product_Partners Programme_Child (3_14 years old)_qty',adult_field='product_Partners Programme_Adult_qty')")
product.ceilings.append(ceiling_partners_all)
meta.Session.add(product);
product = Product(category=category_partners, active=True, description="Infant (0-2 years old)", cost="0", auth=None, validate="PPChildrenAdult(current_field='product_Partners Programme_Child (0_2 years old)_qty',adult_field='product_Partners Programme_Adult_qty')")
product.ceilings.append(ceiling_partners_all)
meta.Session.add(product);
# Product includes
meta.Session.add_all(
[
# Include 1 Shirt in all registration types
ProductInclude(product=ticket_student, include_category=category_shirt, include_qty='1'), # Student
ProductInclude(product=ticket_hobbyist_eb, include_category=category_shirt, include_qty='1'), # Hobbyist EB
ProductInclude(product=ticket_hobbyist, include_category=category_shirt, include_qty='1'), # Hobbyist
ProductInclude(product=ticket_professional_eb, include_category=category_shirt, include_qty='1'), # Pro EB
ProductInclude(product=ticket_professional, include_category=category_shirt, include_qty='1'), # Pro
ProductInclude(product=ticket_fairy_penguin, include_category=category_shirt, include_qty='1'), # Fairy
ProductInclude(product=ticket_speaker, include_category=category_shirt, include_qty='1'), # Speaker
ProductInclude(product=ticket_miniconf, include_category=category_shirt, include_qty='1'), # Miniconf
ProductInclude(product=ticket_volunteer_free, include_category=category_shirt, include_qty='2'), # Volunteer
ProductInclude(product=ticket_volunteer_paid, include_category=category_shirt, include_qty='2'), # Volunteer
ProductInclude(product=ticket_press, include_category=category_shirt, include_qty='1'), # Press
ProductInclude(product=ticket_team, include_category=category_shirt, include_qty='6'), # Team
#ProductInclude(product=partners_adult, include_category=category_shirt, include_qty='1'), # Partner's Programme get a t-shirt
# Include 1 Dinner for Professional+miniconf and for Speaker registrations
ProductInclude(product=ticket_professional_eb, include_category=category_penguin, include_qty='1'), # Pro EB
ProductInclude(product=ticket_professional, include_category=category_penguin, include_qty='1'), # Pro
ProductInclude(product=ticket_fairy_penguin, include_category=category_penguin, include_qty='1'), # Fairy
ProductInclude(product=ticket_speaker, include_category=category_penguin, include_qty='1'), # Speaker
ProductInclude(product=ticket_miniconf, include_category=category_penguin, include_qty='1'), # Miniconf
ProductInclude(product=ticket_press, include_category=category_penguin, include_qty='1'), # Press
ProductInclude(product=ticket_team, include_category=category_penguin, include_qty='2'), # Team
# Include 2 partners in the partners program for speakers
ProductInclude(product=ticket_speaker, include_category=category_partners, include_qty='2'),
]
)
class Product(Base):
"""Stores the products used for registration
"""
# table
__tablename__ = 'product'
__table_args__ = (
# Descriptions should be unique within a category
sa.UniqueConstraint('category_id', 'description'),
{}
)
id = sa.Column(sa.types.Integer, primary_key=True)
category_id = sa.Column(sa.types.Integer, sa.ForeignKey('product_category.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=True)
fulfilment_type_id = sa.Column(sa.types.Integer, sa.ForeignKey('fulfilment_type.id'), nullable=True)
display_order = sa.Column(sa.types.Integer, nullable=False, default=10)
active = sa.Column(sa.types.Boolean, nullable=False)
description = sa.Column(sa.types.Text, nullable=False)
badge_text = sa.Column(sa.types.Text, nullable=True)
cost = sa.Column(sa.types.Integer, nullable=False)
auth = sa.Column(sa.types.Text, nullable=True)
validate = sa.Column(sa.types.Text, nullable=True)
# relations
category = sa.orm.relation(ProductCategory, lazy=True, backref=sa.orm.backref('products', order_by=lambda: [Product.display_order, Product.cost]))
ceilings = sa.orm.relation(Ceiling, secondary=product_ceiling_map, lazy=True, order_by=Ceiling.name, backref='products')
fulfilment_type = sa.orm.relation('FulfilmentType')
def __init__(self, **kwargs):
super(Product, self).__init__(**kwargs)
@classmethod
def find_all(self):
return Session.query(Product).order_by(Product.display_order).order_by(Product.cost).all()
@classmethod
def find_by_id(cls, id):
return Session.query(Product).filter_by(id=id).first()
@classmethod
def find_by_category(cls, id):
return Session.query(Product).filter_by(category_id=id).order_by(Product.display_order).order_by(Product.cost)
def qty_free(self):
qty = 0
for ii in self.invoice_items:
if not ii.invoice.void and ii.invoice.is_paid:
qty += ii.free_qty
return qty
def qty_sold(self):
qty = 0
for ii in self.invoice_items:
if not ii.invoice.void and ii.invoice.is_paid:
qty += (ii.qty - ii.free_qty)
return qty
def qty_invoiced(self, date=True):
# date: bool? only count items that are not overdue
qty = 0
for ii in self.invoice_items:
# also count sold items as invoiced since they are valid
if not ii.invoice.void and ((ii.invoice.is_paid or not ii.invoice.is_overdue or not date)):
qty += ii.qty
return qty
def remaining(self):
max_ceiling = None
for c in self.ceilings:
            if c.remaining() > max_ceiling:
                max_ceiling = c.remaining()
return max_ceiling
def available(self, stock=True, qty=0):
# bool stock: care about if the product is in stock (ie sold out?)
if self.active:
for c in self.ceilings:
if not c.available(stock, qty):
return False
return True
else:
return False
def can_i_sell(self, person, qty):
if not self.available():
return False
if not self.category.can_i_sell(person, qty):
return False
        for c in self.ceilings:
if not c.can_i_sell(qty):
return False
return True
def available_until(self):
until = []
for ceiling in self.ceilings:
if ceiling.available_until != None:
until.append(ceiling.available_until)
if len(until) > 0:
return max(until)
def clean_description(self, category=False):
if category == True:
return self.category.clean_name() + '_' + self.description.replace('-','_').replace("'",'')
else:
            return self.description.replace('-','_').replace("'",'')
def __repr__(self):
        return '<Product id=%r active=%r description=%r cost=%r auth=%r validate=%r>' % (self.id, self.active, self.description, self.cost, self.auth, self.validate)
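# Illustrative usage (a sketch, not from the original code base; `person`
# and the product id are hypothetical):
#
#     ticket = Product.find_by_id(42)
#     if ticket is not None and ticket.can_i_sell(person, 1):
#         pass  # safe to add one ticket to the invoice
#
# can_i_sell() consults the product's own ceilings as well as its category,
# so either a sold-out ceiling or a per-person category limit blocks the sale.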
class ProductInclude(Base):
"""Stores the product categories that are included in another product
"""
__tablename__ = 'product_include'
product_id = sa.Column(sa.types.Integer, sa.ForeignKey('product.id'), primary_key=True)
include_category_id = sa.Column(sa.types.Integer, sa.ForeignKey('product_category.id'), primary_key=True)
include_qty = sa.Column(sa.types.Integer, nullable=False)
product = sa.orm.relation(Product, backref='included', lazy=False)
include_category = sa.orm.relation(ProductCategory)
def __init__(self, **kwargs):
super(ProductInclude, self).__init__(**kwargs)
@classmethod
def find_by_category(cls, id):
return Session.query(ProductInclude).filter_by(include_category_id=id)
@classmethod
def find_by_product(cls, id):
return Session.query(ProductInclude).filter_by(product_id=id)
| gpl-2.0 | -1,276,178,584,637,553,400 | 49.638 | 301 | 0.69829 | false | 3.530749 | false | false | false |
openstack/python-designateclient | designateclient/v2/tsigkeys.py | 1 | 1715 | # Copyright 2017 SAP SE
#
# Author: Rudolf Vriend <rudolf.vriend@sap.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designateclient.v2.base import V2Controller
from designateclient.v2 import utils as v2_utils
class TSIGKeysController(V2Controller):
def create(self, name, algorithm, secret, scope, resource_id):
data = {
'name': name,
'algorithm': algorithm,
'secret': secret,
'scope': scope,
'resource_id': resource_id
}
return self._post('/tsigkeys', data=data)
def list(self, criterion=None, marker=None, limit=None):
url = self.build_url('/tsigkeys', criterion, marker, limit)
return self._get(url, response_key='tsigkeys')
def get(self, tsigkey):
tsigkey = v2_utils.resolve_by_name(self.list, tsigkey)
return self._get('/tsigkeys/%s' % tsigkey)
def update(self, tsigkey, values):
tsigkey = v2_utils.resolve_by_name(self.list, tsigkey)
return self._patch('/tsigkeys/%s' % tsigkey, data=values)
def delete(self, tsigkey):
tsigkey = v2_utils.resolve_by_name(self.list, tsigkey)
return self._delete('/tsigkeys/%s' % tsigkey)
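# Example usage (a sketch; assumes an authenticated designate `client` whose
# `tsigkeys` attribute is an instance of this controller; names and ids are
# hypothetical):
#
#     key = client.tsigkeys.create('transfer-key', 'hmac-sha256',
#                                  secret, 'POOL', pool_id)
#     client.tsigkeys.update('transfer-key', {'secret': new_secret})
#
# update(), get() and delete() accept either a name or an id, thanks to
# resolve_by_name() above.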
| apache-2.0 | 5,331,771,878,661,510,000 | 33.3 | 75 | 0.670554 | false | 3.528807 | false | false | false |
wenli810620/python-django | django_cron/management/commands/runcrons.py | 1 | 1651 | import sys
from optparse import make_option
import traceback
from django.core.management.base import BaseCommand
from django.conf import settings
from django_cron import CronJobManager, get_class
from django.db import close_connection
DEFAULT_LOCK_TIME = 24 * 60 * 60 # 24 hours
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--force', action='store_true', help='Force cron runs'),
make_option('--silent', action='store_true', help='Do not push any message on console'),
)
def handle(self, *args, **options):
"""
Iterates over all the CRON_CLASSES (or if passed in as a commandline argument)
and runs them.
"""
if args:
cron_class_names = args
else:
cron_class_names = getattr(settings, 'CRON_CLASSES', [])
try:
crons_to_run = map(lambda x: get_class(x), cron_class_names)
        except Exception:
error = traceback.format_exc()
print('Make sure these are valid cron class names: %s\n%s' % (cron_class_names, error))
sys.exit()
for cron_class in crons_to_run:
run_cron_with_cache_check(cron_class, force=options['force'],
silent=options['silent'])
close_connection()
def run_cron_with_cache_check(cron_class, force=False, silent=False):
"""
Checks the cache and runs the cron or not.
@cron_class - cron class to run.
@force - run job even if not scheduled
@silent - suppress notifications
"""
with CronJobManager(cron_class, silent) as manager:
manager.run(force)
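# Typical invocations (assuming django_cron is installed and configured):
#
#     python manage.py runcrons                         # run every CRON_CLASSES entry
#     python manage.py runcrons --force myapp.crons.MyJob
#
# --force bypasses each job's schedule check; --silent suppresses console output.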
| mit | 5,591,232,158,106,197,000 | 29.018182 | 99 | 0.623864 | false | 3.921615 | false | false | false |
Svolcano/python_exercise | dianhua/worker/crawler/china_mobile/jiangxi/des_js.py | 1 | 23467 | # -*- coding: utf-8 -*-
import execjs
import sys
reload(sys)
sys.setdefaultencoding("utf8")
def des_encode(encode_data):
des_js = execjs.compile(des_js_code)
encoded_string = des_js.call("enString", str(encode_data))
return encoded_string
des_js_code = """
function enString(data){
var key1 = "YHXWWLKJYXGS";
var key2 = "ZFCHHYXFL10C";
var key3 = "DES";
var enchex = strEnc(data,key1,key2,key3);
return enchex;
}
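/* Example (illustrative): every 4-character chunk of the input is one 64-bit
   DES block, so enString("440678") (6 chars -> 2 blocks) returns 32 uppercase
   hex digits, encrypted in turn under key1, key2 and key3 above. */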
/**
 RSA encryption/decryption
**/
function EncryString(enString)
{
    if(window.ActiveXObject) // IE browser
{
xmlHttpRequest = new ActiveXObject("Microsoft.XMLHTTP");
}
    else if(window.XMLHttpRequest) // any non-IE browser
{
xmlHttpRequest = new XMLHttpRequest();
}
if(null != xmlHttpRequest)
{
xmlHttpRequest.open("POST", "http://127.0.0.1:8081/sso6qtn/EncryptServlet", false);
        // invoke this callback whenever the request's readyState changes
//xmlHttpRequest.onreadystatechange = ajaxCallBack;
        // this header line is required when submitting via POST
xmlHttpRequest.setRequestHeader("Content-Type","application/x-www-form-urlencoded");
        // send the request to the server
xmlHttpRequest.send("enString="+enString);
return xmlHttpRequest.responseText;
}
}
function ajaxCallBack()
{
if(xmlHttpRequest.readyState == 4)
{
if(xmlHttpRequest.status == 200)
{
var content = xmlHttpRequest.responseText;
}
}
}
/**
* DES加密/解密
* @Copyright Copyright (c) 2009
* @author linsi
*/
/*
* encrypt the string to string made up of hex
* return the encrypted string
*/
function strEnc(data,firstKey,secondKey,thirdKey){
var leng = data.length;
var encData = "";
var firstKeyBt,secondKeyBt,thirdKeyBt,firstLength,secondLength,thirdLength;
if(firstKey != null && firstKey != ""){
firstKeyBt = getKeyBytes(firstKey);
firstLength = firstKeyBt.length;
}
if(secondKey != null && secondKey != ""){
secondKeyBt = getKeyBytes(secondKey);
secondLength = secondKeyBt.length;
}
if(thirdKey != null && thirdKey != ""){
thirdKeyBt = getKeyBytes(thirdKey);
thirdLength = thirdKeyBt.length;
}
if(leng > 0){
if(leng < 4){
var bt = strToBt(data);
var encByte ;
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != "" && thirdKey != null && thirdKey != ""){
var tempBt;
var x,y,z;
tempBt = bt;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
for(z = 0;z < thirdLength ;z ++){
tempBt = enc(tempBt,thirdKeyBt[z]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != ""){
var tempBt;
var x,y;
tempBt = bt;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !=""){
var tempBt;
var x = 0;
tempBt = bt;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
encByte = tempBt;
}
}
}
encData = bt64ToHex(encByte);
}else{
var iterator = parseInt(leng/4);
var remainder = leng%4;
var i=0;
for(i = 0;i < iterator;i++){
var tempData = data.substring(i*4+0,i*4+4);
var tempByte = strToBt(tempData);
var encByte ;
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != "" && thirdKey != null && thirdKey != ""){
var tempBt;
var x,y,z;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
for(z = 0;z < thirdLength ;z ++){
tempBt = enc(tempBt,thirdKeyBt[z]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != ""){
var tempBt;
var x,y;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !=""){
var tempBt;
var x;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
encByte = tempBt;
}
}
}
encData += bt64ToHex(encByte);
}
if(remainder > 0){
var remainderData = data.substring(iterator*4+0,leng);
var tempByte = strToBt(remainderData);
var encByte ;
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != "" && thirdKey != null && thirdKey != ""){
var tempBt;
var x,y,z;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
for(z = 0;z < thirdLength ;z ++){
tempBt = enc(tempBt,thirdKeyBt[z]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != ""){
var tempBt;
var x,y;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !=""){
var tempBt;
var x;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
encByte = tempBt;
}
}
}
encData += bt64ToHex(encByte);
}
}
}
return encData;
}
/*
* chang the string into the bit array
*
* return bit array(it's length % 64 = 0)
*/
function getKeyBytes(key){
var keyBytes = new Array();
var leng = key.length;
var iterator = parseInt(leng/4);
var remainder = leng%4;
var i = 0;
for(i = 0;i < iterator; i ++){
keyBytes[i] = strToBt(key.substring(i*4+0,i*4+4));
}
if(remainder > 0){
keyBytes[i] = strToBt(key.substring(i*4+0,leng));
}
return keyBytes;
}
/*
* chang the string(it's length <= 4) into the bit array
*
* return bit array(it's length = 64)
*/
function strToBt(str){
var leng = str.length;
var bt = new Array(64);
if(leng < 4){
var i=0,j=0,p=0,q=0;
for(i = 0;i<leng;i++){
var k = str.charCodeAt(i);
for(j=0;j<16;j++){
var pow=1,m=0;
for(m=15;m>j;m--){
pow *= 2;
}
bt[16*i+j]=parseInt(k/pow)%2;
}
}
for(p = leng;p<4;p++){
var k = 0;
for(q=0;q<16;q++){
var pow=1,m=0;
for(m=15;m>q;m--){
pow *= 2;
}
bt[16*p+q]=parseInt(k/pow)%2;
}
}
}else{
for(i = 0;i<4;i++){
var k = str.charCodeAt(i);
for(j=0;j<16;j++){
var pow=1;
for(m=15;m>j;m--){
pow *= 2;
}
bt[16*i+j]=parseInt(k/pow)%2;
}
}
}
return bt;
}
/*
* chang the bit(it's length = 4) into the hex
*
* return hex
*/
function bt4ToHex(binary) {
var hex;
switch (binary) {
case "0000" : hex = "0"; break;
case "0001" : hex = "1"; break;
case "0010" : hex = "2"; break;
case "0011" : hex = "3"; break;
case "0100" : hex = "4"; break;
case "0101" : hex = "5"; break;
case "0110" : hex = "6"; break;
case "0111" : hex = "7"; break;
case "1000" : hex = "8"; break;
case "1001" : hex = "9"; break;
case "1010" : hex = "A"; break;
case "1011" : hex = "B"; break;
case "1100" : hex = "C"; break;
case "1101" : hex = "D"; break;
case "1110" : hex = "E"; break;
case "1111" : hex = "F"; break;
}
return hex;
}
/*
* chang the hex into the bit(it's length = 4)
*
* return the bit(it's length = 4)
*/
function hexToBt4(hex) {
var binary;
switch (hex) {
case "0" : binary = "0000"; break;
case "1" : binary = "0001"; break;
case "2" : binary = "0010"; break;
case "3" : binary = "0011"; break;
case "4" : binary = "0100"; break;
case "5" : binary = "0101"; break;
case "6" : binary = "0110"; break;
case "7" : binary = "0111"; break;
case "8" : binary = "1000"; break;
case "9" : binary = "1001"; break;
case "A" : binary = "1010"; break;
case "B" : binary = "1011"; break;
case "C" : binary = "1100"; break;
case "D" : binary = "1101"; break;
case "E" : binary = "1110"; break;
case "F" : binary = "1111"; break;
}
return binary;
}
/*
* chang the bit(it's length = 64) into the string
*
* return string
*/
function byteToString(byteData){
var str="";
for(i = 0;i<4;i++){
var count=0;
for(j=0;j<16;j++){
var pow=1;
for(m=15;m>j;m--){
pow*=2;
}
count+=byteData[16*i+j]*pow;
}
if(count != 0){
str+=String.fromCharCode(count);
}
}
return str;
}
function bt64ToHex(byteData){
var hex = "";
for(i = 0;i<16;i++){
var bt = "";
for(j=0;j<4;j++){
bt += byteData[i*4+j];
}
hex+=bt4ToHex(bt);
}
return hex;
}
function hexToBt64(hex){
var binary = "";
for(i = 0;i<16;i++){
binary+=hexToBt4(hex.substring(i,i+1));
}
return binary;
}
/*
* the 64 bit des core arithmetic
*/
function enc(dataByte,keyByte){
var keys = generateKeys(keyByte);
var ipByte = initPermute(dataByte);
var ipLeft = new Array(32);
var ipRight = new Array(32);
var tempLeft = new Array(32);
var i = 0,j = 0,k = 0,m = 0, n = 0;
for(k = 0;k < 32;k ++){
ipLeft[k] = ipByte[k];
ipRight[k] = ipByte[32+k];
}
for(i = 0;i < 16;i ++){
for(j = 0;j < 32;j ++){
tempLeft[j] = ipLeft[j];
ipLeft[j] = ipRight[j];
}
var key = new Array(48);
for(m = 0;m < 48;m ++){
key[m] = keys[i][m];
}
var tempRight = xor(pPermute(sBoxPermute(xor(expandPermute(ipRight),key))), tempLeft);
for(n = 0;n < 32;n ++){
ipRight[n] = tempRight[n];
}
}
var finalData =new Array(64);
for(i = 0;i < 32;i ++){
finalData[i] = ipRight[i];
finalData[32+i] = ipLeft[i];
}
return finallyPermute(finalData);
}
function dec(dataByte,keyByte){
var keys = generateKeys(keyByte);
var ipByte = initPermute(dataByte);
var ipLeft = new Array(32);
var ipRight = new Array(32);
var tempLeft = new Array(32);
var i = 0,j = 0,k = 0,m = 0, n = 0;
for(k = 0;k < 32;k ++){
ipLeft[k] = ipByte[k];
ipRight[k] = ipByte[32+k];
}
for(i = 15;i >= 0;i --){
for(j = 0;j < 32;j ++){
tempLeft[j] = ipLeft[j];
ipLeft[j] = ipRight[j];
}
var key = new Array(48);
for(m = 0;m < 48;m ++){
key[m] = keys[i][m];
}
var tempRight = xor(pPermute(sBoxPermute(xor(expandPermute(ipRight),key))), tempLeft);
for(n = 0;n < 32;n ++){
ipRight[n] = tempRight[n];
}
}
var finalData =new Array(64);
for(i = 0;i < 32;i ++){
finalData[i] = ipRight[i];
finalData[32+i] = ipLeft[i];
}
return finallyPermute(finalData);
}
function initPermute(originalData){
var ipByte = new Array(64);
for (i = 0, m = 1, n = 0; i < 4; i++, m += 2, n += 2) {
for (j = 7, k = 0; j >= 0; j--, k++) {
ipByte[i * 8 + k] = originalData[j * 8 + m];
ipByte[i * 8 + k + 32] = originalData[j * 8 + n];
}
}
return ipByte;
}
function expandPermute(rightData){
var epByte = new Array(48);
for (i = 0; i < 8; i++) {
if (i == 0) {
epByte[i * 6 + 0] = rightData[31];
} else {
epByte[i * 6 + 0] = rightData[i * 4 - 1];
}
epByte[i * 6 + 1] = rightData[i * 4 + 0];
epByte[i * 6 + 2] = rightData[i * 4 + 1];
epByte[i * 6 + 3] = rightData[i * 4 + 2];
epByte[i * 6 + 4] = rightData[i * 4 + 3];
if (i == 7) {
epByte[i * 6 + 5] = rightData[0];
} else {
epByte[i * 6 + 5] = rightData[i * 4 + 4];
}
}
return epByte;
}
function xor(byteOne,byteTwo){
var xorByte = new Array(byteOne.length);
for(i = 0;i < byteOne.length; i ++){
xorByte[i] = byteOne[i] ^ byteTwo[i];
}
return xorByte;
}
function sBoxPermute(expandByte){
var sBoxByte = new Array(32);
var binary = "";
var s1 = [
[14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7],
[0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8],
[4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0],
[15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13 ]];
/* Table - s2 */
var s2 = [
[15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10],
[3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5],
[0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15],
[13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9 ]];
/* Table - s3 */
var s3= [
[10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8],
[13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1],
[13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7],
[1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12 ]];
/* Table - s4 */
var s4 = [
[7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15],
[13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9],
[10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4],
[3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14 ]];
/* Table - s5 */
var s5 = [
[2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9],
[14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6],
[4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14],
[11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3 ]];
/* Table - s6 */
var s6 = [
[12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11],
[10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8],
[9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6],
[4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13 ]];
/* Table - s7 */
var s7 = [
[4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1],
[13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6],
[1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2],
[6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12]];
/* Table - s8 */
var s8 = [
[13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7],
[1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2],
[7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8],
[2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11]];
for(m=0;m<8;m++){
var i=0,j=0;
i = expandByte[m*6+0]*2+expandByte[m*6+5];
j = expandByte[m * 6 + 1] * 2 * 2 * 2
+ expandByte[m * 6 + 2] * 2* 2
+ expandByte[m * 6 + 3] * 2
+ expandByte[m * 6 + 4];
switch (m) {
case 0 :
binary = getBoxBinary(s1[i][j]);
break;
case 1 :
binary = getBoxBinary(s2[i][j]);
break;
case 2 :
binary = getBoxBinary(s3[i][j]);
break;
case 3 :
binary = getBoxBinary(s4[i][j]);
break;
case 4 :
binary = getBoxBinary(s5[i][j]);
break;
case 5 :
binary = getBoxBinary(s6[i][j]);
break;
case 6 :
binary = getBoxBinary(s7[i][j]);
break;
case 7 :
binary = getBoxBinary(s8[i][j]);
break;
}
sBoxByte[m*4+0] = parseInt(binary.substring(0,1));
sBoxByte[m*4+1] = parseInt(binary.substring(1,2));
sBoxByte[m*4+2] = parseInt(binary.substring(2,3));
sBoxByte[m*4+3] = parseInt(binary.substring(3,4));
}
return sBoxByte;
}
function pPermute(sBoxByte){
var pBoxPermute = new Array(32);
pBoxPermute[ 0] = sBoxByte[15];
pBoxPermute[ 1] = sBoxByte[ 6];
pBoxPermute[ 2] = sBoxByte[19];
pBoxPermute[ 3] = sBoxByte[20];
pBoxPermute[ 4] = sBoxByte[28];
pBoxPermute[ 5] = sBoxByte[11];
pBoxPermute[ 6] = sBoxByte[27];
pBoxPermute[ 7] = sBoxByte[16];
pBoxPermute[ 8] = sBoxByte[ 0];
pBoxPermute[ 9] = sBoxByte[14];
pBoxPermute[10] = sBoxByte[22];
pBoxPermute[11] = sBoxByte[25];
pBoxPermute[12] = sBoxByte[ 4];
pBoxPermute[13] = sBoxByte[17];
pBoxPermute[14] = sBoxByte[30];
pBoxPermute[15] = sBoxByte[ 9];
pBoxPermute[16] = sBoxByte[ 1];
pBoxPermute[17] = sBoxByte[ 7];
pBoxPermute[18] = sBoxByte[23];
pBoxPermute[19] = sBoxByte[13];
pBoxPermute[20] = sBoxByte[31];
pBoxPermute[21] = sBoxByte[26];
pBoxPermute[22] = sBoxByte[ 2];
pBoxPermute[23] = sBoxByte[ 8];
pBoxPermute[24] = sBoxByte[18];
pBoxPermute[25] = sBoxByte[12];
pBoxPermute[26] = sBoxByte[29];
pBoxPermute[27] = sBoxByte[ 5];
pBoxPermute[28] = sBoxByte[21];
pBoxPermute[29] = sBoxByte[10];
pBoxPermute[30] = sBoxByte[ 3];
pBoxPermute[31] = sBoxByte[24];
return pBoxPermute;
}
function finallyPermute(endByte){
var fpByte = new Array(64);
fpByte[ 0] = endByte[39];
fpByte[ 1] = endByte[ 7];
fpByte[ 2] = endByte[47];
fpByte[ 3] = endByte[15];
fpByte[ 4] = endByte[55];
fpByte[ 5] = endByte[23];
fpByte[ 6] = endByte[63];
fpByte[ 7] = endByte[31];
fpByte[ 8] = endByte[38];
fpByte[ 9] = endByte[ 6];
fpByte[10] = endByte[46];
fpByte[11] = endByte[14];
fpByte[12] = endByte[54];
fpByte[13] = endByte[22];
fpByte[14] = endByte[62];
fpByte[15] = endByte[30];
fpByte[16] = endByte[37];
fpByte[17] = endByte[ 5];
fpByte[18] = endByte[45];
fpByte[19] = endByte[13];
fpByte[20] = endByte[53];
fpByte[21] = endByte[21];
fpByte[22] = endByte[61];
fpByte[23] = endByte[29];
fpByte[24] = endByte[36];
fpByte[25] = endByte[ 4];
fpByte[26] = endByte[44];
fpByte[27] = endByte[12];
fpByte[28] = endByte[52];
fpByte[29] = endByte[20];
fpByte[30] = endByte[60];
fpByte[31] = endByte[28];
fpByte[32] = endByte[35];
fpByte[33] = endByte[ 3];
fpByte[34] = endByte[43];
fpByte[35] = endByte[11];
fpByte[36] = endByte[51];
fpByte[37] = endByte[19];
fpByte[38] = endByte[59];
fpByte[39] = endByte[27];
fpByte[40] = endByte[34];
fpByte[41] = endByte[ 2];
fpByte[42] = endByte[42];
fpByte[43] = endByte[10];
fpByte[44] = endByte[50];
fpByte[45] = endByte[18];
fpByte[46] = endByte[58];
fpByte[47] = endByte[26];
fpByte[48] = endByte[33];
fpByte[49] = endByte[ 1];
fpByte[50] = endByte[41];
fpByte[51] = endByte[ 9];
fpByte[52] = endByte[49];
fpByte[53] = endByte[17];
fpByte[54] = endByte[57];
fpByte[55] = endByte[25];
fpByte[56] = endByte[32];
fpByte[57] = endByte[ 0];
fpByte[58] = endByte[40];
fpByte[59] = endByte[ 8];
fpByte[60] = endByte[48];
fpByte[61] = endByte[16];
fpByte[62] = endByte[56];
fpByte[63] = endByte[24];
return fpByte;
}
function getBoxBinary(i) {
var binary = "";
switch (i) {
case 0 :binary = "0000";break;
case 1 :binary = "0001";break;
case 2 :binary = "0010";break;
case 3 :binary = "0011";break;
case 4 :binary = "0100";break;
case 5 :binary = "0101";break;
case 6 :binary = "0110";break;
case 7 :binary = "0111";break;
case 8 :binary = "1000";break;
case 9 :binary = "1001";break;
case 10 :binary = "1010";break;
case 11 :binary = "1011";break;
case 12 :binary = "1100";break;
case 13 :binary = "1101";break;
case 14 :binary = "1110";break;
case 15 :binary = "1111";break;
}
return binary;
}
/*
* generate 16 keys for xor
*
*/
function generateKeys(keyByte){
var key = new Array(56);
var keys = new Array();
keys[ 0] = new Array();
keys[ 1] = new Array();
keys[ 2] = new Array();
keys[ 3] = new Array();
keys[ 4] = new Array();
keys[ 5] = new Array();
keys[ 6] = new Array();
keys[ 7] = new Array();
keys[ 8] = new Array();
keys[ 9] = new Array();
keys[10] = new Array();
keys[11] = new Array();
keys[12] = new Array();
keys[13] = new Array();
keys[14] = new Array();
keys[15] = new Array();
var loop = [1,1,2,2,2,2,2,2,1,2,2,2,2,2,2,1];
for(i=0;i<7;i++){
for(j=0,k=7;j<8;j++,k--){
key[i*8+j]=keyByte[8*k+i];
}
}
var i = 0;
for(i = 0;i < 16;i ++){
var tempLeft=0;
var tempRight=0;
for(j = 0; j < loop[i];j ++){
tempLeft = key[0];
tempRight = key[28];
for(k = 0;k < 27 ;k ++){
key[k] = key[k+1];
key[28+k] = key[29+k];
}
key[27]=tempLeft;
key[55]=tempRight;
}
var tempKey = new Array(48);
tempKey[ 0] = key[13];
tempKey[ 1] = key[16];
tempKey[ 2] = key[10];
tempKey[ 3] = key[23];
tempKey[ 4] = key[ 0];
tempKey[ 5] = key[ 4];
tempKey[ 6] = key[ 2];
tempKey[ 7] = key[27];
tempKey[ 8] = key[14];
tempKey[ 9] = key[ 5];
tempKey[10] = key[20];
tempKey[11] = key[ 9];
tempKey[12] = key[22];
tempKey[13] = key[18];
tempKey[14] = key[11];
tempKey[15] = key[ 3];
tempKey[16] = key[25];
tempKey[17] = key[ 7];
tempKey[18] = key[15];
tempKey[19] = key[ 6];
tempKey[20] = key[26];
tempKey[21] = key[19];
tempKey[22] = key[12];
tempKey[23] = key[ 1];
tempKey[24] = key[40];
tempKey[25] = key[51];
tempKey[26] = key[30];
tempKey[27] = key[36];
tempKey[28] = key[46];
tempKey[29] = key[54];
tempKey[30] = key[29];
tempKey[31] = key[39];
tempKey[32] = key[50];
tempKey[33] = key[44];
tempKey[34] = key[32];
tempKey[35] = key[47];
tempKey[36] = key[43];
tempKey[37] = key[48];
tempKey[38] = key[38];
tempKey[39] = key[55];
tempKey[40] = key[33];
tempKey[41] = key[52];
tempKey[42] = key[45];
tempKey[43] = key[41];
tempKey[44] = key[49];
tempKey[45] = key[35];
tempKey[46] = key[28];
tempKey[47] = key[31];
switch(i){
case 0: for(m=0;m < 48 ;m++){ keys[ 0][m] = tempKey[m]; } break;
case 1: for(m=0;m < 48 ;m++){ keys[ 1][m] = tempKey[m]; } break;
case 2: for(m=0;m < 48 ;m++){ keys[ 2][m] = tempKey[m]; } break;
case 3: for(m=0;m < 48 ;m++){ keys[ 3][m] = tempKey[m]; } break;
case 4: for(m=0;m < 48 ;m++){ keys[ 4][m] = tempKey[m]; } break;
case 5: for(m=0;m < 48 ;m++){ keys[ 5][m] = tempKey[m]; } break;
case 6: for(m=0;m < 48 ;m++){ keys[ 6][m] = tempKey[m]; } break;
case 7: for(m=0;m < 48 ;m++){ keys[ 7][m] = tempKey[m]; } break;
case 8: for(m=0;m < 48 ;m++){ keys[ 8][m] = tempKey[m]; } break;
case 9: for(m=0;m < 48 ;m++){ keys[ 9][m] = tempKey[m]; } break;
case 10: for(m=0;m < 48 ;m++){ keys[10][m] = tempKey[m]; } break;
case 11: for(m=0;m < 48 ;m++){ keys[11][m] = tempKey[m]; } break;
case 12: for(m=0;m < 48 ;m++){ keys[12][m] = tempKey[m]; } break;
case 13: for(m=0;m < 48 ;m++){ keys[13][m] = tempKey[m]; } break;
case 14: for(m=0;m < 48 ;m++){ keys[14][m] = tempKey[m]; } break;
case 15: for(m=0;m < 48 ;m++){ keys[15][m] = tempKey[m]; } break;
}
}
return keys;
}
//end-------------------------------------------------------------------------------------------------------------
"""
if __name__ == '__main__':
print des_encode("15070876044")
print type(des_encode("15070876044"))
print des_encode("440678")
print type(des_encode("440678"))
| mit | -5,001,706,511,498,749,000 | 26.17695 | 123 | 0.514072 | false | 2.554158 | false | false | false |
matt-gardner/deep_qa | deep_qa/models/multiple_choice_qa/tuple_inference.py | 1 | 17079 | from typing import Dict, List
import textwrap
from keras.layers import Input
from overrides import overrides
import numpy
from ...data.instances.multiple_choice_qa import TupleInferenceInstance
from ...layers import NoisyOr
from ...layers.attention import MaskedSoftmax
from ...layers.backend import RepeatLike
from ...layers.subtract_minimum import SubtractMinimum
from ...layers.tuple_matchers import tuple_matchers
from ...training import TextTrainer
from ...training.models import DeepQaModel
from ...common.params import Params
class TupleInferenceModel(TextTrainer):
"""
This ``TextTrainer`` implements the TupleEntailment model of Tushar. It takes a set of tuples
from the question and its answer candidates and a set of background knowledge tuples and looks
for entailment between the corresponding tuple slots. The result is a probability distribution
over the answer options based on how well they align with the background tuples, given the
question text. We consider this alignment to be a form of soft inference, hence the model
name.
Parameters
----------
tuple_matcher: Dict[str, Any]
Parameters for selecting and then initializing the inner entailment model, one of the
TupleMatch models.
noisy_or_param_init: str, default='uniform'
The initialization for the noise parameters in the ``NoisyOr`` layers.
num_question_tuples: int, default=10
The number of tuples for each of the answer candidates in the question.
num_background_tuples: int, default=10
The number of tuples for the background knowledge.
num_tuple_slots: int, default=4
The number of slots in each tuple.
num_slot_words: int, default=5
The number of words in each slot of the tuples.
num_options: int, default=4
The number of answer options/candidates.
normalize_tuples_across_answers: bool, default=False
Whether or not to normalize each question tuple's score across the answer options. This
assumes that the tuples are in the same order for all answer options. Normalization is
currently done by subtracting the minimum score for a given tuple "position" from all the
tuples in that position.
display_text_wrap: int, default=150
This is used by the debug output methods to wrap long tuple strings.
display_num_tuples: int, default=5
This is used by the debug output methods. It determines how many background tuples to display for
each of the answer tuples in a given instance when displaying the tuple match scores.
"""
def __init__(self, params: Params):
self.noisy_or_param_init = params.pop('noisy_or_param_init', 'uniform')
self.num_question_tuples = params.pop('num_question_tuples', None)
self.num_background_tuples = params.pop('num_background_tuples', None)
self.num_tuple_slots = params.pop('num_tuple_slots', None)
self.num_slot_words = params.pop('num_slot_words', None)
self.num_options = params.pop('num_answer_options', None)
self.normalize_tuples_across_answers = params.pop('normalize_tuples_across_answers', False)
self.display_text_wrap = params.pop('display_text_wrap', 150)
self.display_num_tuples = params.pop('display_num_tuples', 5)
tuple_matcher_params = params.pop('tuple_matcher', {})
tuple_matcher_choice = tuple_matcher_params.pop_choice("type", list(tuple_matchers.keys()),
default_to_first_choice=True)
tuple_matcher_class = tuple_matchers[tuple_matcher_choice]
self.tuple_matcher = tuple_matcher_class(self, tuple_matcher_params)
self.tuple_matcher.name = "match_layer"
super(TupleInferenceModel, self).__init__(params)
self.name = 'TupleInferenceModel'
@overrides
def _instance_type(self):
return TupleInferenceInstance
@classmethod
@overrides
def _get_custom_objects(cls):
custom_objects = super(TupleInferenceModel, cls)._get_custom_objects()
for tuple_matcher in tuple_matchers.values():
custom_objects.update(tuple_matcher.get_custom_objects())
custom_objects['MaskedSoftmax'] = MaskedSoftmax
custom_objects['NoisyOr'] = NoisyOr
custom_objects['RepeatLike'] = RepeatLike
custom_objects['SubtractMinimum'] = SubtractMinimum
return custom_objects
@overrides
def get_padding_lengths(self) -> Dict[str, int]:
padding_lengths = super(TupleInferenceModel, self).get_padding_lengths()
padding_lengths['num_question_tuples'] = self.num_question_tuples
padding_lengths['num_background_tuples'] = self.num_background_tuples
padding_lengths['num_slots'] = self.num_tuple_slots
padding_lengths['num_sentence_words'] = self.num_slot_words
padding_lengths['num_options'] = self.num_options
return padding_lengths
@overrides
def get_instance_sorting_keys(self) -> List[str]: # pylint: disable=no-self-use
return ['num_sentence_words', 'num_background_tuples', 'num_question_tuples', 'num_slots']
@overrides
def _set_padding_lengths(self, padding_lengths: Dict[str, int]):
super(TupleInferenceModel, self)._set_padding_lengths(padding_lengths)
# The number of tuple slots determines the shape of some of the weights in our model, so we
# need to keep this constant.
if self.num_tuple_slots is None:
self.num_tuple_slots = padding_lengths['num_slots']
if self.data_generator is not None and self.data_generator.dynamic_padding:
return
if self.num_question_tuples is None:
self.num_question_tuples = padding_lengths['num_question_tuples']
if self.num_background_tuples is None:
self.num_background_tuples = padding_lengths['num_background_tuples']
if self.num_slot_words is None:
self.num_slot_words = padding_lengths['num_sentence_words']
if self.num_options is None:
self.num_options = padding_lengths['num_options']
@overrides
def get_padding_memory_scaling(self, padding_lengths: Dict[str, int]) -> int:
num_question_tuples = padding_lengths['num_question_tuples']
num_background_tuples = padding_lengths['num_background_tuples']
num_sentence_words = padding_lengths['num_sentence_words']
num_options = padding_lengths['num_options']
return num_question_tuples * num_background_tuples * num_sentence_words * num_options
@overrides
def _set_padding_lengths_from_model(self):
self.num_background_tuples = self.model.get_input_shape_at(0)[1][1]
self.num_options = self.model.get_input_shape_at(0)[0][1]
self.num_question_tuples = self.model.get_input_shape_at(0)[0][2]
self.num_tuple_slots = self.model.get_input_shape_at(0)[0][3]
self.num_slot_words = self.model.get_input_shape_at(0)[0][4]
self._set_text_lengths_from_model_input = self.model.get_input_shape_at(0)[0][4:]
@overrides
def _build_model(self):
r"""
The basic outline of the model is that the question input, :math:`\mathcal{A}` (which consists of the
inputs for each of the answer choices, i.e., each :math:`A^c \in \mathcal{A}`), and the background input,
:math:`\mathcal{K}`, get tiled to be the same size. They are then aligned tuple-by-tuple: each of the
background tuples, :math:`k_j` is compared to each of the answer tuples, :math:`a_i^c`, to create a
support/entailment score, :math:`s_{ij}^c`. This score is determined using the selected ``TupleMatch``
layer.
Then, for each answer tuple, :math:`a_i^c \in A^c` we combine
the scores for each :math:`k_j \in K` using noisy-or to get the entailment score for the given answer
choice tuple::
:math:`s_i^c = 1 - \prod_{j=1:J}(1 - q_1 * s_{ij}^c)`
where q_1 is the noise parameter for this first noisy-or. Next, we combine these scores for each answer
choice again using the noisy-or to get the entailment score for the answer candidate::
:math:`s^c = 1 - \prod_{i=1:N}(1 - q_2 * s_{i}^c)`
where q_2 is the noise parameter for this second noisy-or. At this point, we have a score for each of
the answer candidates, and so we perform a softmax to determine which is the best answer.
"""
# shape: (batch size, num_options, num_question_tuples, num_tuple_slots, num_slot_words)
slot_shape = self._get_sentence_shape(self.num_slot_words)
question_input_shape = (self.num_options, self.num_question_tuples, self.num_tuple_slots) + slot_shape
question_input = Input(question_input_shape, dtype='int32', name='question_input')
# shape: (batch size, num_background_tuples, num_tuple_slots, num_slot_words)
background_input_shape = (self.num_background_tuples, self.num_tuple_slots) + slot_shape
background_input = Input(background_input_shape, dtype='int32', name='background_input')
# Expand and tile the question input to be:
# shape: (batch size, num_options, num_question_tuples, num_background_tuples, num_tuple_slots,
# num_slot_words)
tiled_question = RepeatLike(axis=3, copy_from_axis=1)([question_input, background_input])
# Expand and tile the background input to match question input.
# shape: (batch size, num_options, num_question_tuples, num_background_tuples, num_tuple_slots,
# num_slot_words)
# First, add num_options.
tiled_background = RepeatLike(axis=1, copy_from_axis=1)([background_input, question_input])
# Next, add num_question_tuples.
tiled_background = RepeatLike(axis=2, copy_from_axis=2)([tiled_background, question_input])
# Find the matches between the question and background tuples.
# shape: (batch size, num_options, num_question_tuples, num_background_tuples)
matches = self.tuple_matcher([tiled_question, tiled_background])
# Find the probability that any given question tuple is entailed by the given background tuples.
# shape: (batch size, num_options, num_question_tuples)
combine_background_evidence = NoisyOr(axis=-1, param_init=self.noisy_or_param_init)
combine_background_evidence.name = "noisy_or_1"
qi_probabilities = combine_background_evidence(matches)
# If desired, peek across the options, and normalize the amount that a given answer tuple template "counts"
# towards a correct answer.
if self.normalize_tuples_across_answers:
normalize_across_options = SubtractMinimum(axis=1)
qi_probabilities = normalize_across_options(qi_probabilities)
# Find the probability that any given option is correct, given the entailement scores of each of its
# question tuples given the set of background tuples.
# shape: (batch size, num_options)
combine_question_evidence = NoisyOr(axis=-1, param_init=self.noisy_or_param_init)
combine_question_evidence.name = "noisy_or_2"
options_probabilities = combine_question_evidence(qi_probabilities)
# Softmax over the options to choose the best one.
final_output = MaskedSoftmax(name="masked_softmax")(options_probabilities)
return DeepQaModel(input=[question_input, background_input], output=[final_output])
@overrides
def _instance_debug_output(self, instance: TupleInferenceInstance, outputs: Dict[str, numpy.array]) -> str:
num_to_display = 5
result = ""
result += "\n====================================================================\n"
result += "Instance: %s\n" % instance.index
result += "Question Text: %s\n" % instance.question_text
result += "Label: %s\n" % instance.label
result += "Num tuples per answer option: %s\n" % [len(answer) for answer in instance.answer_tuples]
result += "(limiting display to top %s at various levels)\n" % num_to_display
result += "====================================================================\n"
answer_scores = []
index_of_chosen = None
softmax_output = outputs.get("masked_softmax", None)
if softmax_output is not None:
answer_scores = list(enumerate(softmax_output))
sorted_answer_scores = sorted(answer_scores, key=lambda tup: tup[1], reverse=True)
# TODO(becky): not handling ties
index_of_chosen = sorted_answer_scores[0][0]
result += "Final scores: %s\n" % answer_scores
if index_of_chosen is None:
result += "ERROR: no answer chosen\n"
elif index_of_chosen == instance.label:
result += " Answered correctly!\n"
else:
result += " Answered incorrectly\n"
result += "====================================================================\n"
# Output of the tuple matcher layer:
# shape: (num_options, num_question_tuples, num_background_tuples)
tuple_matcher_output = outputs.get('match_layer', None)
if tuple_matcher_output is not None:
# correct answer:
# Keep only the first tuples (depending on model setting) because when we padded we set
# truncate_from_right to False.
correct_tuples = instance.answer_tuples[instance.label][:self.num_question_tuples]
background_tuples = instance.background_tuples[:self.num_background_tuples]
result += "-----------------------------------\n"
result += " GOLD ANSWER: (Final score: {0})\n".format(answer_scores[instance.label][1])
result += "-----------------------------------\n"
result += self._render_tuple_match_scores(correct_tuples,
background_tuples,
tuple_matcher_output[instance.label],
instance)
result += "-------------------\n"
result += " Incorrect Answers: \n"
result += "-------------------\n"
# NOTE: that extra padded "options" are added on the right, so this should be fine.
for option in range(len(instance.answer_tuples)):
chosen_status = ""
if option != instance.label:
option_tuples = instance.answer_tuples[option][:self.num_question_tuples]
if option == index_of_chosen:
chosen_status = "(Chosen)"
result += "\nOption {0} {1}: (Final Score: {2})\n".format(option,
chosen_status,
answer_scores[option][1])
result += self._render_tuple_match_scores(option_tuples,
background_tuples,
tuple_matcher_output[option],
instance)
result += "\n"
return result
def _render_tuple_match_scores(self, answer_tuples, background_tuples, tuple_matcher_output, instance):
result = ""
for i, answer_tuple in enumerate(answer_tuples):
answer_tuple_string = "\n\t".join(textwrap.wrap(answer_tuple.display_string(), self.display_text_wrap))
result += "Question (repeated): %s\n" % instance.question_text
result += "Answer_tuple_{0} : \n\t{1}\n\n".format(i, answer_tuple_string)
result += "Top {0} (out of {1}) highest scoring background tuples:\n\n".format(self.display_num_tuples,
len(background_tuples))
tuple_match_scores = []
for j, background_tuple in enumerate(background_tuples):
tuple_match_score = tuple_matcher_output[i, j]
tuple_match_scores.append((tuple_match_score, j, background_tuple))
# Sort descending by tuple match score
sorted_by_score = sorted(tuple_match_scores, key=lambda tup: tup[0],
reverse=True)[:self.display_num_tuples]
for scored in sorted_by_score:
background_tuple_index = scored[1]
background_tuple_string = scored[2].display_string()
wrapped_tuple = "\n\t".join(textwrap.wrap(background_tuple_string, self.display_text_wrap))
result += " (TupleMatch Score %s) " % scored[0]
result += "\tbg_tuple_{0} \n\t{1}\n".format(background_tuple_index, wrapped_tuple)
result += "\n"
return result
| apache-2.0 | -5,671,962,694,802,139,000 | 53.39172 | 115 | 0.619006 | false | 4.087841 | false | false | false |
mayfield/ecmcli | ecmcli/commands/wifi.py | 1 | 3219 | """
WiFi commands.
"""
import collections
import shellish
from . import base
from .. import ui
class AccessPoints(base.ECMCommand):
""" List access points seen by site surveys. """
name = 'aps'
def setup_args(self, parser):
self.add_router_argument('idents', nargs='*')
self.add_argument('-v', '--verbose', action='store_true', help='More '
'verbose display.')
self.inject_table_factory()
super().setup_args(parser)
def run(self, args):
if args.idents:
ids = ','.join(self.api.get_by_id_or_name('routers', x)['id']
for x in args.idents)
filters = {"survey__router__in": ids}
else:
filters = {}
check = '<b>%s</b>' % shellish.beststr('✓', '*')
if args.verbose:
fields = collections.OrderedDict((
('SSID', 'wireless_ap.ssid'),
('BSSID', 'wireless_ap.bssid'),
('Manufacturer', 'wireless_ap.manufacturer'),
('Band', 'survey.band'),
('Mode', 'wireless_ap.mode'),
('Auth', 'wireless_ap.authmode'),
('Channel', 'survey.channel'),
('RSSI', 'survey.rssi'),
('First Seen', lambda x: ui.time_since(x['survey.created'])),
('Last Seen', lambda x: ui.time_since(x['survey.updated'])),
('Seen By', 'survey.router.name'),
('Trusted', lambda x: check if x['trust.trusted'] else ''),
))
else:
fields = collections.OrderedDict((
('SSID', 'wireless_ap.ssid'),
('Manufacturer', 'wireless_ap.manufacturer'),
('Band', 'survey.band'),
('Auth', 'wireless_ap.authmode'),
('Last Seen', lambda x: ui.time_since(x['survey.updated'])),
('Seen By', 'survey.router.name'),
('Trusted', lambda x: check if x['trust.trusted'] else ''),
))
survey = self.api.get_pager('wireless_ap_survey_view',
expand='survey.router,trust,wireless_ap',
**filters)
with self.make_table(headers=fields.keys(),
accessors=fields.values()) as t:
t.print(map(dict, map(base.totuples, survey)))
class Survey(base.ECMCommand):
""" Start a WiFi site survey on connected router(s). """
name = 'survey'
use_pager = False
def setup_args(self, parser):
self.add_router_argument('idents', nargs='*')
def run(self, args):
if args.idents:
ids = [self.api.get_by_id_or_name('routers', x)['id']
for x in args.idents]
else:
ids = [x['id'] for x in self.api.get_pager('routers')]
self.api.post('wireless_site_survey', ids)
class WiFi(base.ECMCommand):
""" WiFi access points info and surveys. """
name = 'wifi'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_subcommand(AccessPoints, default=True)
self.add_subcommand(Survey)
command_classes = [WiFi]
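# Example shell usage (an assumption -- these commands are meant to be run
# from the ecmcli interactive shell):
#
#     wifi aps -v          # verbose table of access points seen by surveys
#     wifi survey router1  # start a site survey on one router by name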
| mit | -2,996,600,358,786,054,700 | 33.591398 | 78 | 0.508237 | false | 3.816133 | false | false | false |
mikhail-gorobets/chipsec | chipsec/modules/tools/vmm/virtio.py | 1 | 4457 | #CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2015, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#chipsec@intel.com
#
"""
QEMU VirtIO info tool
Usage:
``chipsec_main.py -i -m tools.vmm.virtio``
``chipsec_main.py -i -m tools.vmm.virtio -a 0:6.0``
"""
import re
import struct
from chipsec.module_common import *
from chipsec.hal.pci import *
from chipsec.hal.mmio import *
VENDORS[0x1AF4] = 'Red Hat, Inc.'
DEVICES[0x1AF4] = {
0x1000: 'VirtIO Network',
0x1001: 'VirtIO Block',
0x1002: 'VirtIO Baloon',
0x1003: 'VirtIO Console',
0x1004: 'VirtIO SCSI',
0x1005: 'VirtIO RNG',
0x1009: 'VirtIO filesystem',
0x1041: 'VirtIO network (1.0)',
0x1042: 'VirtIO block (1.0)',
0x1043: 'VirtIO console (1.0)',
0x1044: 'VirtIO RNG (1.0)',
0x1045: 'VirtIO memory balloon (1.0)',
0x1046: 'VirtIO SCSI (1.0)',
0x1049: 'VirtIO filesystem (1.0)',
0x1050: 'VirtIO GPU (1.0)',
0x1052: 'VirtIO input (1.0)',
0x1110: 'VirtIO Inter-VM shared memory'
}
VIRTIO_VENDORS = [0x1AF4]
class VirtIO_MMIO_Device(BaseModule):
def __init__(self, b, d, f):
BaseModule.__init__(self)
self.bus = b
self.dev = d
self.fun = f
def get_bars(self):
return [self.cs.pci.read_dword(self.bus, self.dev, self.fun, x) for x in xrange(0x10, 0x28, 4)]
def print_virtio_device(self):
self.logger.log("")
self.logger.log("VirtIO Device %02x:%02x.%01x" % (self.bus, self.dev, self.fun))
bars = self.get_bars()
for i in xrange(len(bars)):
if bars[i] in [0x0, 0xFFFFFFFF]: continue
if bars[i] & 0x1 == 0:
base = bars[i] & 0xFFFFFFF0
data = struct.unpack("<1024L", self.cs.mem.read_physical_mem(base, 0x1000))
else:
base = bars[i] & 0xFFFFFFFC
data = [self.cs.io.read_port_dword(x) for x in xrange(base, base + 0x100, 4)]
self.logger.log(" BAR%d: 0x%08x (assuming size is 4096 bytes)" % (i, base))
for x in xrange(len(data)):
if data[x] in [0x0, 0xFFFFFFFF]: continue
self.logger.log(" BAR + 0x%04x: 0x%08x" % (x * 4, data[x]))
return
class VirtIO(BaseModule):
def __init__(self):
BaseModule.__init__(self)
def get_virtio_devices(self, devices):
virtio_devices = []
for (b, d, f, vid, did) in devices:
if vid in VIRTIO_VENDORS:
virtio_devices.append((b, d, f, vid, did))
return virtio_devices
def run(self, module_argv):
self.logger.start_test("QEMU VirtIO info tool")
pcie_dev = []
if len(module_argv) >= 1:
match = re.search(r"^([0-9a-f]{1,2}):([0-1]?[0-9a-f]{1})\.([0-7]{1})$", module_argv[0])
if match:
_bus = int(match.group(1), 16) & 0xFF
_dev = int(match.group(2), 16) & 0x1F
_fun = int(match.group(3), 16) & 0x07
vid = self.cs.pci.read_word(_bus, _dev, _fun, 0)
did = self.cs.pci.read_word(_bus, _dev, _fun, 2)
dev = (_bus, _dev, _fun, vid, did)
pcie_dev = [dev]
virt_dev = [dev]
else:
self.logger.log("ERROR: Invalid B:D.F (%s)" % module_argv[0])
return ModuleResult.ERROR
else:
self.logger.log("Enumerating available PCI devices..")
pcie_dev = self.cs.pci.enumerate_devices()
virt_dev = self.get_virtio_devices(pcie_dev)
self.logger.log("PCI devices:")
print_pci_devices(virt_dev)
for (b, d, f, vid, did) in virt_dev:
dev = VirtIO_MMIO_Device(b, d, f)
dev.print_virtio_device()
return ModuleResult.PASSED
| gpl-2.0 | 2,480,034,442,802,938,000 | 33.820313 | 103 | 0.578416 | false | 3.078039 | false | false | false |
shishkander/recipes-py | recipe_modules/time/api.py | 1 | 1092 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_api
import datetime
import time
class TimeApi(recipe_api.RecipeApi):
def __init__(self, **kwargs):
super(TimeApi, self).__init__(**kwargs)
self._fake_time = None
self._fake_step = None
if self._test_data.enabled:
self._fake_time = self._test_data.get('seed', 1337000000.0)
self._fake_step = self._test_data.get('step', 1.5)
def time(self):
"""Return current timestamp as a float number of seconds since epoch."""
if self._test_data.enabled:
self._fake_time += self._fake_step
return self._fake_time
else: # pragma: no cover
return time.time()
def utcnow(self):
"""Return current UTC time as a datetime.datetime."""
if self._test_data.enabled:
self._fake_time += self._fake_step
return datetime.datetime.utcfromtimestamp(self._fake_time)
else: # pragma: no cover
return datetime.datetime.utcnow()
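# In tests each call advances the fake clock by `step`, so with the default
# seed (1337000000.0) and step (1.5), two successive api.time.time() calls
# return 1337000001.5 and then 1337000003.0 -- deterministic timestamps.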
| bsd-3-clause | 358,891,948,077,505,340 | 32.090909 | 76 | 0.671245 | false | 3.676768 | false | false | false |
UndeadMastodon/Loltris | Menus.py | 1 | 17793 | #!/usr/bin/python2
#-*- coding: utf-8 -*-
## =====================================================================
## Menus for Loltris
## Copyright (C) 2014 Jonas Møller <jonasmo441@gmail.com>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
## =====================================================================
import Shared
import BlockText
import Core
import Load
import Jobs
import Log
import Factory
import Credits
import Save
import Matrix
import Utils
import webbrowser as Webbrowser
import os.path as Path
from functools import *
from pygame.locals import *
from Globals import *
from DataTypes import *
## Games
import HighscoreExplorer
import TetrisGame
import MakeTetromino
import SandBox
import TwoPlayerTetrisGame
# import LANTetrisGame
class MainMenu(Core.Menu):
def __init__(self, **kwargs):
super(MainMenu, self).__init__(
"MainMenu", onHeaderClick=lambda: Webbrowser.open(PROJECT_SITE),
header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, isroot=True, xcenter=True,
soundtrack=None, #Path.join(Load.MUSICDIR, "jazz_cat_infinite_loop_cut.ogg"),
sound_enabled=SOUND_ENABLED, **kwargs)
self.title_blocks = BlockText.render(TITLE_TEXT, font=Load.loadBlockFont("standard"))
blockwidth = (self.width) // len(self.title_blocks[0])
Log.debug("title_board.blockwidth = {}".format(blockwidth))
self.addJob("title_board",
Jobs.Board(
self,
y=SPACER,
height=len(self.title_blocks),
width=len(self.title_blocks[0]),
blockwidth=blockwidth,
bgcolor=self.bgcolor,
queue=100,
draw_grid=False,
draw_border=False,
)
)
self.jobs.title_board.x = (self.width // 2) - (self.jobs.title_board.width // 2)
for x, y in Matrix.matrixToSet(self.title_blocks):
self.jobs.title_board.blocks[(x, y)] = (0xaa,0xaa,0xaa)
self.options_pos[1] = self.jobs.title_board.y + self.jobs.title_board.height + SPACER*2
self.menu = Factory.textBoxes([
("Single Player", lambda: self.call(TetrisGame.TetrisGame, caption="Loltris")),
("Two Player", lambda: self.call(TwoPlayerTetrisGame.TwoPlayerTetris, caption="Loltris - Two Player")),
#("LAN Play", lambda: self.call(LANTetrisGame.LANMenu, caption="Loltris - LAN play")),
("Create new blocks", lambda: self.call(MakeTetromino.MakeTetromino, caption="Loltris - Creator")),
("Options", lambda: self.call(OptionsMenu, caption="Loltris - Options")),
("Scores", lambda: self.call(HighscoreExplorer.HighscoreList, caption="Loltris - Highscores")),
("Credits", lambda: self.call(Credits.Credits, caption="Loltris - Credits")),
("Homepage", lambda: Webbrowser.open(PROJECT_SITE)),
#("SandBox", lambda: self.call(SandBox.SandBox, caption="Loltris - SandBox")),
("Exit", self.quit),
],
self,
font=MENU_OPTION_FONT,
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
colors={
"background":self.colorscheme["background"],
"font":self.colorscheme["option"],
},
)
self.setupObjects()
#self.loadHighscores()
## XXX: Temporary bugfix, scroll_filler is drawn on every frame while the board is not.
del self.jobs.scroll_filler
def loadHighscores(self):
""" Load scores from disk, then add the highscorelist job to see them """
self.highscores = Load.loadHighscores(top=HIGHSCORES)
Log.debug("Loaded new highscores from disk, displaying below")
Log.dump("".join(["{}: {}\n".format(d["name"], d["score"]) for d in self.highscores]))
if self.highscores:
self.addJob(
"highscorelist",
Jobs.TextBox(
self,
( "Top {} scores\n\n".format(HIGHSCORES) + ## Title
"".join(["{}: {}\n".format(x["name"], x["score"]) for x in self.highscores]) + ## The scores
("\n" * (HIGHSCORES - len(self.highscores))) ## Empty lines
),
y=self.menu[0].y+1,
textfit=True,
colors=HIGHSCORELIST_COLORSCHEME,
font=HIGHSCORELIST_FONT,
border=True,
background=True,
)
)
## The highscore-list should be 5 pixels from the right edge
self.jobs.highscorelist.x = SCREEN_WIDTH - self.jobs.highscorelist.width - 5
def launchTetrisGame(self):
self.call(TetrisGame.TetrisGame, caption="Loltris")
# self.loadHighscores()
def eventHandler(self, event):
super(MainMenu, self).eventHandler(event)
if event.type == KEYDOWN:
if event.key == K_TAB:
self.addJob(
"input",
Jobs.InputBox(self, "Input: ")
)
class PauseMenu(Core.Menu):
def __init__(self, **kwargs):
super(PauseMenu, self).__init__("PauseMenu", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, isroot=True, xcenter=True, **kwargs)
self.header = "Pause"
self.menu = Factory.textBoxes([
("Continue", self.quitGame),
("Exit to main menu", lambda: self.quitGame("MainMenu")),
("Exit Game", self.quit),
], self, font=MENU_OPTION_FONT, colors={"background":self.colorscheme["background"],
"font":self.colorscheme["option"], },
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
)
self.setupObjects()
self.running = self.mainLoop
## Placeholder, need to add sliders and other stuff to the Menu class
## for an option menu to be doable.
class OptionsMenu(Core.Menu):
def __init__(self, **kwargs):
super(OptionsMenu, self).__init__("OptionsMenu", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, xcenter=True, **kwargs)
self.header = "Options"
self.options = Load.loadOptions()
self.menu = Factory.textBoxes([
("Keymaps", lambda: self.call(KeymapMenu, caption=self.caption)),
], self, font=MENU_OPTION_FONT, colors={"background":self.colorscheme["background"],
"font":self.colorscheme["option"], },
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
)
## >inb4 immature jokes
def turnOn(option, options):
Log.debug(option)
            if options.get(option) is None:
Log.warning("Turning on non-existent option: {}".format(repr(option)))
options[option] = True
Save.saveOptions()
def turnOff(option, options):
Log.debug(option)
            if options.get(option) is None:
Log.warning("Turning off non-existent option: {}".format(repr(option)))
options[option] = False
Save.saveOptions()
self.menu.extend(
Factory.basicSwitches([
("Uber-Tetromino", "uber_tetromino"),
("Flip tetromino", "flip_tetromino"),
], self, turnOn, turnOff, Shared.options["gameplay"],
font=MENU_OPTION_FONT,
colors=SWITCH_OPTION_COLORS,
boxwidth=8,
box_center=True,
fill=MENU_3DBORDER_BACKGROUND,
)
)
self.setupObjects()
class Graphics(Core.Menu):
def __init__(self, **kwargs):
        super(OptionsMenu.Graphics, self).__init__("GraphicsMenu", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, xcenter=True, **kwargs)
## >inb4 immature jokes
def turnOn(option, options):
options[option] = True
Save.saveOptions()
def turnOff(option, options):
options[option] = False
Save.saveOptions()
self.menu = \
Factory.basicSwitches([
("Fullscreen", "fullscreen"),
], self, turnOn, turnOff, Shared.options["gameplay"],
font=MENU_OPTION_FONT,
colors=SWITCH_OPTION_COLORS,
boxwidth=8,
box_center=True,
fill=MENU_3DBORDER_BACKGROUND,
)
## Generates a mainloop for getting a single character.
## Used in KeymapMenu.*
def getKeyLoop(self, keys):
if not self.jobs.input_box.update_required:
Log.debug("Setting key {} to activate {}".format(Utils.keyToString(self.jobs.input_box.value), self.getting))
keys[self.getting] = self.jobs.input_box.value
self.removeJob("input_box")
Save.saveKeymap()
## Restore
self.running = self.mainLoop
## Sets the appropriate values for setting a key in a keymap.
def modifyKeymap(self, keys, getting):
self.addJob("input_box", Jobs.GetKeyBox(self, "Press key for {}".format(getting), font=MENU_OPTION_FONT, colors=SWITCH_OPTION_COLORS, queue=self.menu[0].queue+1))
self.getting = getting
self.running = partial(getKeyLoop, self, keys)
class KeymapMenu(Core.Menu):
def __init__(self, **kwargs):
super(KeymapMenu, self).__init__("KeymapMenu", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, xcenter=True, **kwargs)
self.header = "Keymaps"
self.menu = Factory.textBoxes([
("Tetris", lambda: self.call(self.Tetris, caption="Loltris - Tetris keymap")),
("Menu", lambda: self.call(self.Menu, caption="Loltris - Menu keymap")),
], self, font=MENU_OPTION_FONT, colors={"background":self.colorscheme["background"],
"font":self.colorscheme["option"], },
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
)
self.setupObjects()
self.getting = None
class Tetris(Core.Menu):
def __init__(self, **kwargs):
super(KeymapMenu.Tetris, self).__init__("KeymapMenu.Tetris", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, xcenter=True, **kwargs)
self.header = "Tetris-map"
self.menu = Factory.textBoxes(
[("Player 1", lambda: self.call(self.Player1, caption="Loltris - Tetris player 1 keymap")),
("Player 2", lambda: self.call(self.Player2, caption="Loltris - Tetris player 2 keymap")),
],
self,
font=MENU_OPTION_FONT,
colors={"background":self.colorscheme["background"], "font":self.colorscheme["option"]},
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
)
self.menu.extend(Factory.variableTextBoxes(
[( action.replace("_", " ").capitalize() + ": {key}",
## Nested lambdas are used here to cirumvent an issue with python closures. (http://code.activestate.com/recipes/502271/)
## Basically if you don't nest the lambdas, you will end up with every single functions having the last action in
## the list of dictionary keys.
{"key": (lambda action_: lambda _: Utils.keyToString(Shared.keymap["game"][action_]))(action) },
(lambda action_: lambda: modifyKeymap(self, Shared.keymap["game"], action_))(action))
for action in Shared.keymap["game"]
if isinstance(Shared.keymap["game"][action], int) ## Skip the player1 and player2 sub-dictionaries
],
self,
font=MENU_OPTION_FONT,
colors={"background":self.colorscheme["background"], "font":self.colorscheme["option"]},
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
))
self.setupObjects()
class Player1(Core.Menu):
def __init__(self, **kwargs):
super(KeymapMenu.Tetris.Player1, self).__init__(
"KeymapMenu.Tetris.Player1", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, xcenter=True, **kwargs)
self.header = "Player1 Keymaps"
self.menu.extend(Factory.variableTextBoxes(
[( action.replace("_", " ").capitalize() + ": {key}",
                   ## Nested lambdas are used here to circumvent an issue with Python closures. (http://code.activestate.com/recipes/502271/)
                   ## Basically, if you don't nest the lambdas, every single function ends up
                   ## bound to the last action in the dictionary.
{"key": (lambda action_: lambda _: Utils.keyToString(Shared.keymap["game"]["player1"][action_]))(action) },
(lambda action_: lambda: modifyKeymap(self, Shared.keymap["game"]["player1"], action_))(action),
)
for action in Shared.keymap["game"]["player1"]
],
self,
font=MENU_OPTION_FONT,
colors={"background":self.colorscheme["background"], "font":self.colorscheme["option"]},
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
))
self.setupObjects()
class Player2(Core.Menu):
def __init__(self, **kwargs):
super(KeymapMenu.Tetris.Player2, self).__init__(
"KeymapMenu.Tetris.Player2", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, xcenter=True, **kwargs)
self.header = "Player2 Keymaps"
self.menu.extend(Factory.variableTextBoxes(
[( action.replace("_", " ").capitalize() + ": {key}",
                   ## Nested lambdas are used here to circumvent an issue with Python closures. (http://code.activestate.com/recipes/502271/)
                   ## Basically, if you don't nest the lambdas, every single function ends up
                   ## bound to the last action in the dictionary.
{"key": (lambda action_: lambda _: Utils.keyToString(Shared.keymap["game"]["player2"][action_]))(action) },
(lambda action_: lambda: modifyKeymap(self, Shared.keymap["game"]["player2"], action_))(action),
)
for action in Shared.keymap["game"]["player2"]
],
self,
font=MENU_OPTION_FONT,
colors={"background":self.colorscheme["background"], "font":self.colorscheme["option"]},
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
))
self.setupObjects()
class Menu(Core.Menu):
def __init__(self, **kwargs):
super(KeymapMenu.Menu, self).__init__(
"KeymapMenu.Menu", header_font=MENU_HEADER_FONT, option_font=MENU_OPTION_FONT, xcenter=True, **kwargs
)
self.header = "Menu-map"
self.menu = Factory.variableTextBoxes(
[( action.replace("_", " ").capitalize() + ": {key}",
               ## Nested lambdas are used here to circumvent an issue with Python closures. (http://code.activestate.com/recipes/502271/)
               ## Basically, if you don't nest the lambdas, every single function ends up
               ## bound to the last action in the dictionary.
{"key": (lambda action_: lambda _: Utils.keyToString(Shared.keymap["menu"][action_]))(action) },
               (lambda action_: lambda: modifyKeymap(self, Shared.keymap["menu"], action_))(action))
for action in Shared.keymap["menu"] ],
self,
font=MENU_OPTION_FONT,
colors={
"background":self.colorscheme["background"],
"font":self.colorscheme["option"],
},
fill=MENU_3DBORDER_BACKGROUND,
xcenter=True,
)
self.setupObjects()
| gpl-3.0 | -6,757,752,329,227,821,000 | 48.837535 | 166 | 0.536646 | false | 4.25239 | false | false | false |
JaDogg/__py_playground | reference/sketchbook/regex/nfa_failstoploops.py | 1 | 2750 | """
Regular-expression matching by the Thompson construction.
Explained in C at http://swtch.com/~rsc/regexp/regexp1.html
"""
def match(re, s): return run(prepare(re), s)
def run(states, s):
for c in s:
        states = set().union(*[state(c) for state in states])
return accepting_state in states
def accepting_state(c): return set()
def expecting_state(char, k): return lambda c: k() if c == char else set()
def state_node(state): return lambda: set([state])
def alt_node(k1, k2): return lambda: k1() | k2()
def loop_node(k, make_k):
def loop(): return k() | looping()
looping = make_k(loop)
return loop
def prepare((null, re)): return re(state_node(accepting_state))()
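# Representation note (inferred from the combinators below): every regex value
# is a pair (null, builder), where ``null`` records whether the regex can match
# the empty string -- ``many`` checks it to reject nested stars, which would
# otherwise recurse forever -- and ``builder`` maps a continuation to a thunk
# producing the NFA's start states.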
def lit(char):
return False, lambda k: state_node(expecting_state(char, k))
def alt((null1, re1), (null2, re2)):
return null1 or null2, lambda k: alt_node(re1(k), re2(k))
def many((null, re)):
assert not null, "I can't handle nested stars"
return True, lambda k: loop_node(k, re)
empty = (True, lambda k: k)
def seq((null1, re1), (null2, re2)):
return null1 and null2, lambda k: re1(re2(k))
## match(empty, '')
#. True
## match(empty, 'A')
#. False
## match(lit('x'), '')
#. False
## match(lit('x'), 'y')
#. False
## match(lit('x'), 'x')
#. True
## match(lit('x'), 'xx')
#. False
## match(seq(lit('a'), lit('b')), '')
#. False
## match(seq(lit('a'), lit('b')), 'ab')
#. True
## match(alt(lit('a'), lit('b')), 'b')
#. True
## match(alt(lit('a'), lit('b')), 'a')
#. True
## match(alt(lit('a'), lit('b')), 'x')
#. False
## match(many(lit('a')), '')
#. True
## match(many(lit('a')), 'a')
#. True
## match(many(lit('a')), 'x')
#. False
## match(many(lit('a')), 'aa')
#. True
## match(many(lit('a')), 'ax')
#. False
## complicated = seq(many(alt(seq(lit('a'), lit('b')), seq(lit('a'), seq(lit('x'), lit('y'))))), lit('z'))
## match(complicated, '')
#. False
## match(complicated, 'z')
#. True
## match(complicated, 'abz')
#. True
## match(complicated, 'ababaxyab')
#. False
## match(complicated, 'ababaxyabz')
#. True
## match(complicated, 'ababaxyaxz')
#. False
# N.B. infinite recursion, like Thompson's original code:
## match(many(many(lit('x'))), 'xxxx')
#. Traceback (most recent call last):
#. File "nfa_failstoploops.py", line 30, in many
#. assert not null, "I can't handle nested stars"
#. AssertionError: I can't handle nested stars
## match(many(many(lit('x'))), 'xxxxy')
#. Traceback (most recent call last):
#. File "nfa_failstoploops.py", line 30, in many
#. assert not null, "I can't handle nested stars"
#. AssertionError: I can't handle nested stars
# Had a bug: empty forced a match regardless of the continuation.
## match(seq(empty, lit('x')), '')
#. False
## match(seq(empty, lit('x')), 'x')
#. True
| mit | 6,375,176,381,203,522,000 | 26.5 | 106 | 0.609455 | false | 2.817623 | false | false | false |
RecursiveGreen/spradio-django | savepointradio/api/permissions.py | 1 | 1401 | from rest_framework import permissions
class IsAdminOrOwner(permissions.BasePermission):
message = 'Only an admin user or owner can access this.'
def has_object_permission(self, request, view, obj):
if request.user.is_authenticated():
return request.user.is_staff or request.user == obj.user
else:
return False
class IsAdminOrReadOnly(permissions.BasePermission):
message = 'Only an admin user can make changes.'
def has_permission(self, request, view):
if request.method in permissions.SAFE_METHODS:
return True
else:
return request.user.is_authenticated() and request.user.is_staff
class IsAdminOwnerOrReadOnly(permissions.BasePermission):
message = 'Only an admin user or the owner can change this object.'
def has_object_permission(self, request, view, obj):
if request.method in permissions.SAFE_METHODS:
return True
else:
if request.user.is_authenticated():
return request.user.is_staff or request.user == obj.user
else:
return False
class IsDJ(permissions.BasePermission):
message = 'Only the DJ can request the next song.'
def has_permission(self, request, view):
if request.user.is_authenticated():
return request.user.is_dj
else:
return False
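# Illustrative usage sketch (not part of this module; ``PlaylistViewSet`` and
# its model are hypothetical names invented for this example):
#
#     from rest_framework import viewsets
#     from api.permissions import IsAdminOwnerOrReadOnly
#
#     class PlaylistViewSet(viewsets.ModelViewSet):
#         permission_classes = (IsAdminOwnerOrReadOnly,)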
| mit | 1,276,768,808,405,144,600 | 30.840909 | 76 | 0.653819 | false | 4.447619 | false | false | false |
glynjackson/ec2-deploy | ec2_deploy/utilities.py | 1 | 3698 | import time
import os
from fabric.api import *
from fabric.contrib.files import upload_template
from git import Repo
from ec2_deploy.notifications import Notification
def _run_task(task, start_message, finished_message):
"""
    Takes a task from tasks.py and runs through its commands on the server
"""
start = time.time()
Notification(start_message).info()
# Run the task items
for item in task:
try:
Notification("-" + item['message']).info()
except KeyError:
pass
globals()["_" + item['action']](item['params'])
Notification("%s in %.2fs" % (finished_message, time.time() - start)).success()
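# Illustrative task structure (keys inferred from the loop above; the values
# are invented for this example):
#
#     task = [
#         {"message": "Updating apt cache", "action": "sudo", "params": "apt-get update"},
#         {"action": "pip", "params": ["flask", "gunicorn"]},
#     ]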
def _sudo(params):
"""
Run command as root.
"""
command = _render(params)
sudo(command)
def _local(params):
"""
Run command on local machine.
"""
command = _render(params)
local(command)
def _pip(params):
"""
Run pip install command.
"""
for item in params:
command = _render(item)
_sudo("pip install %s" % command)
def _upload_template(params):
"""
Run command to render and upload a template text file to a remote host.
"""
upload_template(filename=_render(params['filename']),
destination=_render(params['destination']), use_sudo=True)
def _render(template, context=env):
"""
Does variable replacement %(variable)s
"""
return template % context
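# Illustrative example: _render("git clone %(repo_url)s", {"repo_url": "..."})
# fills every %(key)s placeholder from the context mapping (Fabric's ``env``
# by default) and returns the rendered string.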
def add_to_hosts(path, instance):
"""
    Takes an instance ID and appends it to the host list in config/hosts.py
"""
list_string = get_hosts_list(path)
list_string.append(instance)
with open(path + '/hosts.py', 'w') as f:
f.write(str(list_string))
def get_hosts_list(path, staging=False):
"""
Reads the hosts.py file and returns the list.
"""
if staging:
filepath = path + '/hosts_staging.py'
else:
filepath = path + '/hosts.py'
if os.path.isfile(filepath):
with open(filepath, 'r') as f:
list_string = eval(f.readline())
else:
list_string = []
return list_string
def run_sanity_checks(env):
Notification("Running sanity checks...").info()
# Check for git branches master and develop.
repo = Repo(env.local_repo)
if repo.bare:
Notification("No 'git' repo setup.").error_exit()
if "develop" not in repo.branches:
Notification("Please create a `develop` branch in git for the staging environment.").error_exit()
# Check for requirements.text.
if not os.path.isfile(os.path.expanduser("{}/requirements.txt".format(env.local_repo))):
Notification("Your local repo does not appear to have a 'requirements.txt'. Please create one in your root.").error_exit()
# Check for environment vars.
for var_file in ['vars_production.env', 'vars_staging.env']:
if not os.path.isfile(
os.path.expanduser("{}/server_templates/{}/{}".format(env.local_repo, env.template, var_file))):
Notification("Cannot find environments variable file in server template.").error_exit()
d = {}
with open("{}/server_templates/{}/{}".format(env.local_repo, env.template, var_file)) as f:
for line in f:
(key, val) = line.split("=")
d[key] = val
        if len(d) == 0:
            Notification("You have not set any environment variables for {} ".format(var_file)).error_exit()
        if "EC2_DEPLOY_SERVER_REPO" not in d:
            Notification("Please set 'EC2_DEPLOY_SERVER_REPO' in {} ".format(var_file)).error_exit()
Notification("Passed all checks").success()
| mit | -393,535,179,141,996,800 | 22.1125 | 130 | 0.607896 | false | 3.921527 | false | false | false |
mleinart/launchpad2github | launchpad2github.py | 1 | 3950 | #!/usr/bin/env python
import os
import sys
import time
from getpass import getpass
from optparse import OptionParser
from termcolor import colored
from launchpadlib.launchpad import Launchpad
from github3 import login as github_login
from github3 import GitHubError
ACTIVE_STATUSES = [
"New",
"Confirmed",
"Triaged",
"In Progress"
]
IMPORTED_FIELDS = [
"owner",
"web_link",
"date_created",
"date_last_updated",
"tags",
]
def main(args):
usage = """%s: <lp project> <gh project>\n""" % (sys.argv[0],)
parser = OptionParser(usage=usage)
options, args = parser.parse_args(args=args)
if len(args) != 2:
parser.print_usage()
return 1
lp_project_name = args[0]
gh_project_name = args[1]
try:
gh_owner, gh_repo = gh_project_name.split('/')
    except ValueError:
        print "Unable to parse target Github repo: '%s'" % gh_project_name
        print "Repo should be specified as <owner>/<repo>"
        return 1
print "Authenticating with Launchpad"
launchpad = Launchpad.login_with(os.path.basename(sys.argv[0]), 'production')
print "Authenticating with Github"
github_user = raw_input("Github username: ")
github_pass = getpass("Github password: ")
try:
github = github_login(github_user, github_pass)
github.user()
except GitHubError:
raise SystemExit("Invalid Github login or problem contacting server")
# Validate launchpad project
try:
lp_project = launchpad.projects[lp_project_name]
except KeyError:
raise SystemExit("Unable to find project named '%s' on Launchpad" % lp_project_name)
# Validate github project
if github.repository(gh_owner, gh_repo) is None:
raise SystemExit("Unable to find Github project %s/%s" % (gh_owner, gh_repo))
# Begin migration
open_tasks = lp_project.searchTasks(status=ACTIVE_STATUSES)
for bug_task in open_tasks:
for field in IMPORTED_FIELDS:
print colored(field + ':', 'cyan') + colored(bug_task.bug.__getattr__(field), 'yellow')
print colored(bug_task.bug.description, 'yellow')
print
if confirm_or_exit(colored("Import?", 'cyan')):
title = bug_task.bug.title
description = format_description(bug_task.bug)
issue = github.create_issue(owner=gh_owner, repository=gh_repo, title=title, body=description)
for i, message in enumerate(bug_task.bug.messages):
if i == 0: continue # repeat of description
time.sleep(0.5)
comment = format_comment(message)
issue.create_comment(body=comment)
issue.add_labels('launchpad_import')
print colored("Created issue %d: %s" % (issue.number, issue.html_url), 'yellow')
if confirm_or_exit(colored("Close and update original?", 'cyan')):
bug_task.bug.newMessage(content="Migrated to Github: %s" % issue.html_url)
bug_task.status = "Won't Fix"
bug_task.bug.lp_save()
bug_task.lp_save()
def format_description(bug):
output = """#### Imported from %(web_link)s
|||
|----|----|
|Reported by|%(owner)s|
|Date Created|%(date_created)s|
""" % {
'web_link': bug.web_link,
'owner': format_user(bug.owner),
'date_created': bug.date_created.strftime("%b %d, %Y")
}
    if bug.tags:
        output += "|Tags|%s|\n" % bug.tags
    output += "\n" + bug.description.replace("Original description:\n", "")
return output
def format_user(user):
return "[%s](%s)" % (user.name, user.web_link)
def format_comment(message):
output = "#### Comment by %s on %s:\n" % \
(format_user(message.owner), message.date_created.strftime("%b %d, %Y"))
output += message.content
return output
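# Illustrative output (user name and date invented):
#
#   #### Comment by [jdoe](https://launchpad.net/~jdoe) on Mar 04, 2013:
#   <original comment body>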
def confirm_or_exit(prompt):
options = ['y','n','q']
option_prompt = '/'.join(options)
choice = None
while choice not in options:
choice = raw_input("%s (%s): " % (prompt, option_prompt)).lower()
if choice == 'y':
return True
if choice == 'n':
return False
if choice == 'q':
raise SystemExit(0)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| mit | 7,041,412,145,390,806,000 | 27.014184 | 100 | 0.654937 | false | 3.35599 | false | false | false |
powellc/findhistory | findhistory/apps/artifacts/riak_crud.py | 1 | 3048 | import riak
import uuid
# For regular HTTP...
# client = riak.RiakClient()
# For Protocol Buffers (go faster!)
client = riak.RiakClient(port=10018, transport_class=riak.RiakPbcTransport)
artifact_bucket = client.bucket('artifact')
def create_artifact(artifact_dict):
# ``artifact_dict`` should look something like:
# {
# 'title': 'Bangor Fire House circa 1908',
# 'description': 'A description of our bold artifact',
# 'slug': 'bangor-fire-house-circa-1908',
# 'address': '102 Broadway Street',
# 'city': 'Bangor',
# 'state': 'Maine',
# 'zipcode': '04401',
# 'image': 'path/to/image.jpg'
# 'created': time.time(),
# 'updated': time.time(),
# 'created_by': 'username',
# }
artifact = artifact_bucket.new(artifact_dict['slug'], data=artifact_dict)
artifact.store()
def get_artifact(artifact_slug):
artifact = artifact_bucket.get(artifact_slug)
return {
'artifact': artifact.get_data(),
}
'''
def create_comment(entry_slug, comment_dict):
# ``comment_dict`` should look something like:
# {
# 'author': 'Daniel',
# 'url': 'http://pragmaticbadger.com/',
# 'posted': time.time(),
# 'content': 'IS IT WEBSCALE? I HEARD /DEV/NULL IS WEBSCALE.',
# }
# Error handling omitted for brevity...
entry = artifact_bucket.get(entry_slug)
# Give it a UUID for the key.
comment = comment_bucket.new(str(uuid.uuid1()), data=comment_dict)
comment.store()
# Add the link.
entry.add_link(comment)
entry.store()
'''
'''
def get_entry_and_comments(entry_slug):
entry = artifact_bucket.get(entry_slug)
comments = []
# They come out in the order you added them, so there's no
# sorting to be done.
for comment_link in entry.get_links():
# Gets the related object, then the data out of it's value.
comments.append(comment_link.get().get_data())
return {
'entry': entry.get_data(),
'comments': comments,
}
'''
'''
# To test:
if __name__ == '__main__':
create_entry({
'title': 'First Post!',
'author': 'Daniel',
'slug': 'first-post',
'posted': time.time(),
'tease': 'A test post to my new Riak-powered blog.',
'content': 'Hmph. The tease kinda said it all...',
})
create_comment('first-post', {
'author': 'Matt',
'url': 'http://pragmaticbadger.com/',
'posted': time.time(),
'content': 'IS IT WEBSCALE? I HEARD /DEV/NULL IS WEBSCALE.',
})
create_comment('first-post', {
'author': 'Daniel',
'url': 'http://pragmaticbadger.com/',
'posted': time.time(),
'content': 'You better believe it!',
})
data = get_entry_and_comments('first-post')
print "Entry:"
print data['entry']['title']
print data['entry']['tease']
print
print "Comments:"
for comment in data['comments']:
print "%s - %s" % (comment['author'], comment['content'])
''' | bsd-3-clause | -8,908,347,754,971,077,000 | 26.718182 | 77 | 0.576772 | false | 3.356828 | false | false | false |
sohaibfarooqi/flask-stargate | stargate/exception.py | 1 | 5899 | """Application exceptions. Base Exception class for this app is `StargateException`.
All application exceptions are caught here and sent back to the client in a prescribed format.
Exceptions are further grouped so that we can locate the part of the code causing a specific
exception. Werkzeug exceptions are also mapped here.
"""
from flask import jsonify
from werkzeug.exceptions import NotAcceptable, Conflict, BadRequest, NotFound, InternalServerError, UnsupportedMediaType, UnprocessableEntity
from werkzeug.http import HTTP_STATUS_CODES
############################--MAIN APPLICATION ERROR CLASS--##################################
class StargateException(Exception):
werkzeug_exception = InternalServerError
def __init__(self, msg=None):
self.msg = msg
@property
def status_code(self):
return self.werkzeug_exception.code
def as_dict(self):
return {
'status': self.status_code,
'message': self.msg if self.msg else HTTP_STATUS_CODES.get(self.status_code, ''),
'details': {'_exception_class': type(self).__name__}
}
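    # Illustrative payload produced by ``as_dict`` for a 404 (values depend on
    # the concrete subclass):
    #
    #     {"status": 404,
    #      "message": "Not Found",
    #      "details": {"_exception_class": "ResourceNotFound"}}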
def get_response(self):
response = jsonify(self.as_dict())
response.status_code = self.status_code
return response
############################--NOT-FOUND ERRORS--############################################
class ResourceNotFound(StargateException):
werkzeug_exception = NotFound
def __init__(self, resource, id = None, msg = None):
super(ResourceNotFound, self).__init__()
self.resource = resource
self.msg = msg
self.id = id
def as_dict(self):
dct = super(ResourceNotFound, self).as_dict()
dct['details'].update({'resource' : self.resource, 'primary_key' : self.id})
return dct
############################--CONFLICT ERRORS--############################################
class ConflictException(StargateException):
werkzeug_exception = Conflict
def __init__(self, msg, **kwargs):
super(ConflictException, self).__init__()
self.msg = msg
############################--MEDIATYPE ERRORS--############################################
class MediaTypeNotSupported(StargateException):
werkzeug_exception = UnsupportedMediaType
class NotAcceptable(StargateException):
werkzeug_exception = NotAcceptable
############################--VALIDATION ERRORS--############################################
class ValidationError(StargateException):
werkzeug_exception = BadRequest
def __init__(self, msg, **kwargs):
super(ValidationError, self).__init__()
self.msg = msg
class ComparisonToNull(ValidationError):
def __init__(self, msg, **kwargs):
super(ComparisonToNull, self).__init__()
self.msg = msg
class UnknownField(ValidationError):
def __init__(self, field, resource):
self.field = field
self.resource = resource
self.msg = "Unknown field {0} in model {1}".format(field, resource)
super(UnknownField, self).__init__(self.msg)
def as_dict(self):
dct = super(UnknownField, self).as_dict()
dct['details'].update({'field' : self.field, 'resource': self.resource})
return dct
class UnknownRelation(ValidationError):
def __init__(self, relation, resource):
        self.relation = relation
        self.resource = resource
        self.msg = "Unknown relation {0} in model {1}".format(relation, resource)
        super(UnknownRelation, self).__init__(self.msg)
def as_dict(self):
dct = super(UnknownRelation, self).as_dict()
dct['details'].update({'relation' : self.relation, 'resource': self.resource})
return dct
class IllegalArgumentError(ValidationError):
def __init__(self, msg, **kwargs):
super(IllegalArgumentError, self).__init__()
self.msg = msg
class UnknownOperator(ValidationError):
def __init__(self, msg, **kwargs):
self.__name__ = 'UnknownOperator'
super(UnknownOperator, self).__init__(msg)
############################--PROCESSING ERRORS--############################################
class ProcessingException(StargateException):
werkzeug_exception = UnprocessableEntity
class MissingData(ProcessingException):
def __init__(self, model, *args, **kw):
super(MissingData, self).__init__(*args, **kw)
self.msg = "Missing `data` key for model {0}".format(model)
class MissingPrimaryKey(ProcessingException):
def __init__(self, model, *args, **kw):
super(MissingPrimaryKey, self).__init__(*args, **kw)
self.msg = "Missing `id` key for model {0}".format(model)
class DatabaseError(ProcessingException):
def __init__(self, msg, *args, **kw):
super(DatabaseError, self).__init__(*args, **kw)
self.msg = msg
class SerializationException(ProcessingException):
def __init__(self, instance, message=None, *args, **kw):
super(SerializationException, self).__init__(*args, **kw)
self.instance = instance
        DEFAULT_MSG = "Failed to Serialize Object"
self.msg = message if message is not None else DEFAULT_MSG
def as_dict(self):
dct = super(SerializationException, self).as_dict()
dct['details'].update({'instance' : self.instance})
return dct
class DeserializationException(ProcessingException):
def __init__(self, instance, message = None, *args, **kw):
        super(DeserializationException, self).__init__(*args, **kw)
        self.instance = instance
        DEFAULT_MSG = "Failed to Deserialize Object"
        self.msg = message if message is not None else DEFAULT_MSG

    def as_dict(self):
        dct = super(DeserializationException, self).as_dict()
dct['details'].update({'instance' : self.instance})
return dct
############################################################################################### | agpl-3.0 | -6,817,977,582,220,370,000 | 37.815789 | 141 | 0.596033 | false | 4.405527 | false | false | false |
astrofrog/vispy-multivol | medical_rgba.py | 1 | 3214 | # This file is an example of using the multivol code, and is derived from an
# original example in vispy which is releaed under a BSD license included here:
#
# ===========================================================================
# Vispy is licensed under the terms of the (new) BSD license:
#
# Copyright (c) 2015, authors of Vispy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Vispy Development Team nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ===========================================================================
#
# This modified version is released under the BSD license given in the LICENSE
# file in this repository.
from itertools import cycle
import numpy as np
from vispy import app, scene, io
from vispy.scene import visuals
import vispy.visuals as impl_visuals
from vispy.color import get_colormaps, BaseColormap
from multivol import RGBAVolume
# Read volume
vol = np.load(io.load_data_file('volume/stent.npz'))['arr_0']
# Prepare canvas
canvas = scene.SceneCanvas(keys='interactive', size=(800, 600), show=True)
canvas.measure_fps()
# Set up a viewbox to display the image with interactive pan/zoom
view = canvas.central_widget.add_view()
# Set up RGBA cube with the data we want
vol = (vol - vol.min()) / (vol.max() - vol.min())
data = np.zeros(vol.shape + (4,))
data[..., 0] = vol
data[..., 2] = vol[::-1,::-1,::-1]
data[..., 3] = vol + vol[::-1,::-1,::-1]
data /= 2.
volume = RGBAVolume(data, parent=view.scene)
volume.transform = scene.STTransform(translate=(64, 64, 0))
# Create a turntable camera
fov = 60.
cam2 = scene.cameras.TurntableCamera(parent=view.scene, fov=fov,
name='Turntable')
view.camera = cam2 # Select turntable at first
canvas.update()
if __name__ == '__main__':
print(__doc__)
app.run()
| bsd-2-clause | -5,038,781,253,724,071,000 | 37.261905 | 79 | 0.700373 | false | 3.933905 | false | false | false |
audiolabs/mdct | mdct/windows.py | 1 | 1335 | """ Module for windowing functions not found in SciPy
"""
from __future__ import division
import numpy as np
from scipy.signal import kaiser
__all__ = [
'kaiser_derived',
]
def kaiser_derived(M, beta):
""" Return a Kaiser-Bessel derived window.
Parameters
----------
M : int
Number of points in the output window. If zero or less, an empty
array is returned.
beta : float
Kaiser-Bessel window shape parameter.
Returns
-------
w : ndarray
The window, normalized to fulfil the Princen-Bradley condition.
Notes
-----
This window is only defined for an even number of taps.
References
----------
.. [1] Wikipedia, "Kaiser window",
https://en.wikipedia.org/wiki/Kaiser_window
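
    Examples
    --------
    Illustrative check of the Princen-Bradley condition,
    ``w[n]**2 + w[n + M/2]**2 == 1``:

    >>> w = kaiser_derived(128, beta=4.)
    >>> np.allclose(w[:64] ** 2 + w[64:] ** 2, 1.)
    True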
"""
M = int(M)
try:
from scipy.signal import kaiser_derived as scipy_kd
return scipy_kd(M, beta)
except ImportError:
pass
if M < 1:
return np.array([])
if M % 2:
raise ValueError(
"Kaiser Bessel Derived windows are only defined for even number "
"of taps"
)
w = np.zeros(M)
kaiserw = kaiser(M // 2 + 1, beta)
csum = np.cumsum(kaiserw)
halfw = np.sqrt(csum[:-1] / csum[-1])
w[:M//2] = halfw
w[-M//2:] = halfw[::-1]
return w
| mit | -1,062,535,999,254,185,200 | 20.190476 | 77 | 0.568539 | false | 3.541114 | false | false | false |
kanaka/spacewar | python/stars.py | 1 | 2141 | #player ship class
import pygame
from pygame.locals import *
import var, gfx, math
from random import randint
import gameinit
class Stars:
def __init__(self):
stars = []
scrwide, scrhigh = gfx.rect.size
self.maxstars = 800
for x in range(self.maxstars):
val = randint(1, 3)
color = val*40+60, val*35+50, val*22+100
speed = -val, val
rect = Rect(randint(0, scrwide), randint(0, scrhigh), 1, 1)
stars.append([rect, speed, color])
half = self.maxstars / 2
self.stars = stars[:half], stars[half:]
self.numstars = 50
self.dead = 0
self.odd = 0
def recalc_num_stars(self, fps):
if isinstance(var.handler, gameinit.GameInit):
#don't change stars while loading resources
return
change = int((fps - 35.0) * 1.8)
change = min(change, 12) #limit how quickly they can be added
numstars = self.numstars + change
numstars = max(min(numstars, self.maxstars/2), 0)
if numstars < self.numstars:
DIRTY, BGD = gfx.dirty, self.last_background
for rect, vel, col in self.stars[self.odd][numstars:self.numstars]:
DIRTY(BGD(rect))
self.numstars = numstars
#print 'STAR:', numstars, fps, change
def erase_tick_draw(self, background, gfx):
R, B = gfx.rect.bottomright
FILL, DIRTY = gfx.surface.fill, gfx.dirty
for s in self.stars[self.odd][:self.numstars]:
DIRTY(background(s[0]))
self.odd = not self.odd
for rect, (xvel, yvel), col in self.stars[self.odd][:self.numstars]:
rect.left = (rect.left + xvel) % R
rect.top = (rect.top + yvel) % B
DIRTY(FILL(col, rect))
self.last_background = background
def eraseall(self, background, gfx): #only on fullscreen switch
R, B = gfx.rect.bottomright
FILL = gfx.surface.fill
for s in self.stars[0][:self.numstars]:
background(s[0])
for s in self.stars[1][:self.numstars]:
background(s[0])
| lgpl-2.1 | -2,821,837,309,235,326,500 | 32.984127 | 79 | 0.573097 | false | 3.298921 | false | false | false |
MrTrustworthy/game_of_life | gol/mtAntenna.py | 1 | 1086 | __author__ = 'MrTrustworthy'
import inspect
class Antenna:
def __init__(self):
self.listeners = {}
def add_listener(self, channel, callback):
if len(inspect.signature(callback).parameters) == 0:
raise TypeError("Callback Function needs at least 1 parameter")
if channel not in self.listeners.keys():
self.listeners[channel] = []
self.listeners[channel].append(callback)
def remove_listener(self, channel, callback):
if channel in self.listeners.keys():
if callback in self.listeners[channel]:
self.listeners[channel].remove(callback)
if len(self.listeners[channel]) == 0:
del self.listeners[channel]
def dispatch_message(self, channel, info=None, fail_when_empty=False):
if channel not in self.listeners.keys():
if fail_when_empty:
raise KeyError("No listener on this channel")
else:
return
for callback in self.listeners[channel]:
callback(info)
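# --- Illustrative usage (a minimal sketch; the channel name and callback are
# invented for this example, not part of the game code) ---
if __name__ == "__main__":
    antenna = Antenna()

    def on_score(info):
        # Listener callbacks must accept one parameter (enforced above).
        print("score changed:", info)

    antenna.add_listener("score", on_score)
    antenna.dispatch_message("score", info=42)   # -> score changed: 42
    antenna.remove_listener("score", on_score)
    antenna.dispatch_message("score", info=1)    # no listeners left: ignored
    # dispatch_message(..., fail_when_empty=True) would raise KeyError instead.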
| mit | -2,694,550,715,857,001,000 | 29.166667 | 75 | 0.597606 | false | 4.487603 | false | false | false |
strummerTFIU/TFG-IsometricMaps | src/read_lidar.py | 1 | 2782 | from laspy.file import File
import numpy as np
import colorsys, random, math, os, load_info
laszip = "../LAStools/bin/laszip"
def generate_spheres(lidar_list, areas_list, c1, c2):
"""
    Create a string with the definition of spheres representing the points of the
    LiDAR files that fall within the coordinates passed as parameters.
"""
print("Generating spheres...")
spheres = ""
for lidar_file in lidar_list:
lidar_file = lidar_file[0]
print("Generating spheres from " + lidar_file)
os.system(laszip + " -i " + lidar_file + " -o " + lidar_file[:-3] + "LAS")
inFile = File(lidar_file[:-3] + "LAS", mode='r')
point_records = inFile.points
x_scale = inFile.header.scale[0]
x_offset = inFile.header.offset[0]
y_scale = inFile.header.scale[1]
y_offset = inFile.header.offset[1]
z_scale = inFile.header.scale[2]
z_offset = inFile.header.offset[2]
final_points = []
count = 0
total = 0
number_points = len(point_records)
max_points = int(number_points / 3)
if max_points > 1000000:
max_points = 1000000
print("Reading all points...")
while(count < max_points and total < number_points):
rand = random.randint(0, number_points - 1)
point = point_records[rand]
# Take point coordinates
point = point[0]
x_coordinate = point[0] * x_scale + x_offset
y_coordinate = point[1] * y_scale + y_offset
z_coordinate = point[2] * z_scale + z_offset
total += 1
# In interesting zone?
interest = False
for area in areas_list:
if load_info.is_collision(float(area[0]), float(area[1]), float(area[2]), float(area[3]),
x_coordinate, y_coordinate, x_coordinate, y_coordinate):
if load_info.is_collision(float(c1[0]), float(c1[1]), float(c2[0]), float(c2[1]) - 500,
x_coordinate, y_coordinate, x_coordinate, y_coordinate):
interest = True
break
if interest == True:
red = str(point[10] / 65535)
green = str(point[11] / 65535)
blue = str(point[12] / 65535)
z_coordinate *= 1.85
z_coordinate -= 18
final_points.append([str(x_coordinate), str(z_coordinate), str(y_coordinate), red, green, blue])
count += 1
inFile.close()
os.system("rm " + lidar_file[:-3] + "LAS")
number_points = len(final_points)
#max_points = int(number_points / 3)
#max_points = number_points
if max_points > 1000000:
max_points = 1000000
count = 0
print("Taking " + str(number_points) + " points...")
for point in final_points:
#rand = random.randint(0, number_points - 1)
#point = final_points[rand]
spheres += ("sphere {\n<" + point[0] + ", " + point[1] + ", " + point[2] + ">, 2\ntexture {\npigment { color rgb <"
+ point[3] + ", " + point[4] + ", " + point[5] + "> }\n}\nno_shadow\n}\n")
count += 1
return spheres
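# Each sampled point becomes one POV-Ray ``sphere`` block; the generated text
# looks like this (coordinates and colors invented for illustration):
#
#   sphere {
#   <527300.0, 35.2, 4671200.0>, 2
#   texture {
#   pigment { color rgb <0.43, 0.47, 0.39> }
#   }
#   no_shadow
#   }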
| mit | 5,776,592,121,527,576,000 | 27.387755 | 119 | 0.627965 | false | 2.798793 | false | false | false |
ader1990/cloudbase-init | cloudbaseinit/conf/azure.py | 4 | 1594 | # Copyright 2017 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Config options available for the Azure metadata service."""
from oslo_config import cfg
from cloudbaseinit.conf import base as conf_base
class AzureOptions(conf_base.Options):
"""Config options available for the Azure metadata service."""
def __init__(self, config):
super(AzureOptions, self).__init__(config, group="azure")
self._options = [
cfg.StrOpt(
"transport_cert_store_name",
default="Windows Azure Environment",
help="Certificate store name for metadata certificates"),
]
def register(self):
"""Register the current options to the global ConfigOpts object."""
group = cfg.OptGroup(self.group_name, title='Azure Options')
self._config.register_group(group)
self._config.register_opts(self._options, group=group)
def list(self):
"""Return a list which contains all the available options."""
return self._options
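# Illustrative wiring (a sketch; the ``CONF`` object below is a hypothetical
# oslo.config ConfigOpts instance, not defined in this module):
#
#     AzureOptions(CONF).register()
#     CONF.azure.transport_cert_store_name  # -> "Windows Azure Environment"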
| apache-2.0 | -1,838,495,556,245,789,400 | 36.069767 | 78 | 0.675659 | false | 4.403315 | true | false | false |
matthew-brett/draft-statsmodels | scikits/statsmodels/tests/results/results_discrete.py | 1 | 11493 | """
Test Results for discrete models from Stata
"""
import numpy as np
#### Discrete Model Tests ####
# Note that there is a slight refactor of the classes, so that one dataset
# might be used for more than one model
class Anes():
def __init__(self):
"""
Results are from Stata 11 (checked vs R nnet package).
"""
self.nobs = 944
def mnlogit_basezero(self):
params = [-.01153598, .29771435, -.024945, .08249144, .00519655,
-.37340167, -.08875065, .39166864, -.02289784, .18104276,
.04787398, -2.2509132, -.1059667, .57345051, -.01485121,
-.00715242, .05757516, -3.6655835, -.0915567, 1.2787718,
-.00868135, .19982796, .08449838, -7.6138431, -.0932846,
1.3469616, -.01790407, .21693885, .08095841, -7.0604782,
-.14088069, 2.0700801, -.00943265, .3219257, .10889408,
-12.105751]
self.params = np.reshape(params, (6,-1))
bse = [.0342823657, .093626795, .0065248584, .0735865799,
.0176336937, .6298376313, .0391615553, .1082386919,
.0079144618, .0852893563, .0222809297, .7631899491,
.0570382292, .1585481337, .0113313133, .1262913234,
.0336142088, 1.156541492, .0437902764, .1288965854,
.0084187486, .0941250559, .0261963632, .9575809602,
.0393516553, .1171860107, .0076110152, .0850070091,
.0229760791, .8443638283, .042138047, .1434089089,
.0081338625, .0910979921, .025300888, 1.059954821]
self.bse = np.reshape(bse, (6,-1))
self.cov_params = None
self.llf = -1461.922747312
self.llnull = -1750.34670999
self.llr = 576.8479253554
self.llr_pvalue = 1.8223179e-102
self.prsquared = .1647810465387
self.df_model = 30
self.df_resid = 944 - 36
self.J = 7
self.K = 6
self.aic = 2995.84549462
self.bic = 3170.45003661
z = [-.3364988051, 3.179798597, -3.823070772, 1.121012042,
.2946945327, -.5928538661, -2.266269864, 3.618564069,
-2.893164162, 2.122688754, 2.148652536, -2.949348555,
-1.857818873, 3.616885888, -1.310634214, -.0566342868,
1.712822091, -3.169435381, -2.090799808, 9.920912816,
-1.031191864, 2.123004903, 3.225576554, -7.951122047,
-2.370538224, 11.49421878, -2.352389066, 2.552011323,
3.523595639, -8.361890935, -3.34331327, 14.43480847,
-1.159676452, 3.533839715, 4.303962885, -11.42100649]
self.z = np.reshape(z, (6,-1))
pvalues = [0.7364947525, 0.0014737744, 0.0001317999, 0.2622827367,
0.7682272401, 0.5532789548, 0.0234348654, 0.0002962422,
0.0038138191, 0.0337799420, 0.0316619538, 0.0031844460,
0.0631947400, 0.0002981687, 0.1899813744, 0.9548365214,
0.0867452747, 0.0015273542, 0.0365460134, 3.37654e-23,
0.3024508550, 0.0337534410, 0.0012571921, 1.84830e-15,
0.0177622072, 1.41051e-30, 0.0186532528, 0.0107103038,
0.0004257334, 6.17209e-17, 0.0008278439, 3.12513e-47,
0.2461805610, 0.0004095694, 0.0000167770, 3.28408e-30]
self.pvalues = np.reshape(pvalues, (6,-1))
self.conf_int = [[[-0.0787282, 0.0556562], [0.1142092, 0.4812195],
[-0.0377335, -0.0121565], [-0.0617356, 0.2267185], [-0.0293649,
0.0397580], [-1.6078610, 0.8610574]], [[-0.1655059, -0.0119954],
[0.1795247, 0.6038126], [-0.0384099, -0.0073858], [0.0138787,
0.3482068], [0.0042042, 0.0915438], [-3.7467380, -0.7550884]],
[[-0.2177596, 0.0058262], [0.2627019, 0.8841991], [-0.0370602,
0.0073578], [-0.2546789, 0.2403740], [-0.0083075, 0.1234578],
[-5.9323630,-1.3988040]],[[-0.1773841, -0.0057293], [1.0261390,
1.5314040], [-0.0251818, 0.0078191], [0.0153462, 0.3843097],
[0.0331544, 0.1358423], [-9.4906670, -5.7370190]], [[-0.1704124,
-0.0161568], [1.1172810, 1.5766420], [-0.0328214, -0.0029868],
[0.0503282, 0.3835495], [0.0359261, 0.1259907], [-8.7154010,
-5.4055560]], [[-0.2234697, -0.0582916], [1.7890040, 2.3511560],
[-0.0253747, 0.0065094], [0.1433769, 0.5004745], [0.0593053,
0.1584829], [-14.1832200, -10.0282800]]]
class Spector():
"""
Results are from Stata 11
"""
def __init__(self):
self.nobs = 32
def logit(self):
self.params = [2.82611297201, .0951576702557, 2.37868772835,
-13.0213483201]
self.cov_params = [[1.59502033639, -.036920566629, .427615725153,
-4.57347950298], [-.036920566629, .0200375937069,
.0149126464275, -.346255757562], [.427615725153 ,
.0149126464275, 1.13329715236, -2.35916128427],
[-4.57347950298, -.346255757562, -2.35916128427,
24.3179625937]]
self.bse = [1.26294114526, .141554207662, 1.06456430165, 4.93132462871]
self.llf = -12.8896334653335
self.llnull = -20.5917296966173
self.df_model = 3
self.df_resid = 32 - 4 #TODO: is this right? not reported in stata
self.llr = 15.4041924625676
self.prsquared = .374038332124624
self.llr_pvalue = .00150187761112892
self.aic = 33.779266930667
self.bic = 39.642210541866
self.z = [2.237723415, 0.6722348408, 2.234423721, -2.640537645]
self.conf_int = [[.3507938,5.301432],[-.1822835,.3725988],[.29218,
4.465195],[-22.68657,-3.35613]]
self.pvalues = [.0252390974, .5014342039, .0254552063, .0082774596]
self.margeff_nodummy_dydx = [.36258084688424,.01220841099085,
.30517768382304]
self.margeff_nodummy_dydxmean = [.53385885781692,.01797548988961,
.44933926079386]
self.margeff_nodummy_dydxmedian = [.25009492465091,.00842091261329,
.2105003352955]
self.margeff_nodummy_dydxzero = [6.252993785e-06,2.105437138e-07,
5.263030788e-06]
self.margeff_nodummy_dyex = [1.1774000792198,.27896245178384,
.16960002159996]
self.margeff_nodummy_dyexmean = [1.6641381583512,.39433730945339,
.19658592659731]
self.margeff_nodummy_dyexmedian = [.76654095836557,.18947053379898,0]
self.margeff_nodummy_dyexzero = [0,0,0]
self.margeff_nodummy_eydx = [1.8546366266779,.06244722072812,
1.5610138123033]
self.margeff_nodummy_eydxmean = [2.1116143062702,.0710998816585,
1.7773072368626]
self.margeff_nodummy_eydxmedian = [2.5488082240624,.0858205793373,
2.1452853812126]
self.margeff_nodummy_eydxzero = [2.8261067189993,.0951574597115,
2.3786824653103]
self.margeff_nodummy_eyex = [5.4747106798973,1.3173389907576,
.44600395466634]
self.margeff_nodummy_eyexmean = [6.5822977203268,1.5597536538833,
.77757191612739]
self.margeff_nodummy_eyexmedian = [7.8120973525952,1.9309630350892,0]
self.margeff_nodummy_eyexzero = [0,0,0]
# for below GPA = 2.0, psi = 1
self.margeff_nodummy_atexog1 = [.1456333017086,.00490359933927,
.12257689308426]
# for below GPA at mean, tuce = 21, psi = 0
self.margeff_nodummy_atexog2 = [.25105129214546,.00845311433473,
.2113052923675]
self.margeff_dummy_dydx = [.36258084688424,.01220841099085,
.35751515254729]
self.margeff_dummy_dydxmean = [.53385885781692,.01797548988961,
.4564984096959]
# self.margeff_dummy_dydxmedian
# self.margeff_dummy_dydxzero
self.margeff_dummy_eydx = [1.8546366266779,.06244722072812,
1.5549034398832]
self.margeff_dummy_eydxmean = [2.1116143062702,.0710998816585,
1.6631775707188]
# self.margeff_dummy_eydxmedian
# self.margeff_dummy_eydxzero
# Factor variables not allowed in below
# self.margeff_dummy_dyex
# self.margeff_dummy_dyexmean
# self.margeff_dummy_dyexmedian
# self.margeff_dummy_dyexzero
# self.margeff_dummy_eyex
# self.margeff_dummy_eyex
# self.margeff_dummy_eyex
# self.margeff_dummy_eyex
# for below GPA = 2.0, psi = 1
self.margeff_dummy_atexog1 = [.1456333017086,.00490359933927,
.0494715429937]
# for below GPA at mean, tuce = 21, psi = 0
self.margeff_dummy_atexog2 = [.25105129214546,.00845311433473,
.44265645632553]
def probit(self):
self.params = [1.62581025407, .051728948442, 1.42633236818,
-7.45232041607]
self.cov_params = [[.481472955383, -.01891350017, .105439226234,
-1.1696681354], [-.01891350017, .00703757594, .002471864882,
-.101172838897], [.105439226234, .002471864882, .354070126802,
-.594791776765], [-1.1696681354, -.101172838897, -.594791776765,
6.46416639958]]
self.bse = [.693882522754, .083890261293, .595037920474, 2.54247249731]
self.llf = -12.8188033249334
self.llnull = -20.5917296966173
self.df_model = 3
self.df_resid = 32 - 4
self.llr = 15.5458527433678
self.prsquared = .377478069409622
self.llr_pvalue = .00140489496775855
self.aic = 33.637606649867
self.bic = 39.500550261066
self.z = [ 2.343062695, .6166263836, 2.397044489, -2.931131182]
self.conf_int = [[.2658255,2.985795],[-.1126929,.2161508],[.2600795,
2.592585],[-12.43547,-2.469166]]
self.pvalues = [.0191261688, .537481188, .0165279168, .0033773013]
class RandHIE():
"""
Results obtained from Stata 11
"""
def __init__(self):
self.nobs = 20190
def poisson(self):
self.params = [-.052535114675, -.247086797633, .035290201794,
-.03457750643, .271713973711, .033941474461, -.012635035534,
.054056326828, .206115121809, .700352877227]
self.cov_params = None
self.bse = [.00288398915279, .01061725196728, .00182833684966,
.00161284852954, .01223913844387, .00056476496963,
.00925061122826, .01530987068312, .02627928267502,
.01116266712362]
self.llf = -62419.588535018
self.llnull = -66647.181687959
self.df_model = 9
self.df_resid = self.nobs - self.df_model - 1
self.llr = 8455.186305881856
self.prsquared = .0634324369893758
self.llr_pvalue = 0
self.aic = 124859.17707
self.bic = 124938.306497
self.z = [-18.21612769, -23.27219872, 19.30180524, -21.43878101,
22.20041672, 60.09840604, -1.36585953, 3.53081538, 7.84325525,
62.74063980]
self.conf_int = [[ -.0581876, -.0468826],[-0.2678962, -0.2262774],
[0.0317067, 0.0388737],[-0.0377386, -0.0314164],
[0.2477257, 0.2957022], [0.0328346, 0.0350484],[-0.0307659,
0.0054958], [0.0240495, 0.0840631],[0.1546087, 0.2576216],
[0.6784745, 0.7222313]]
self.pvalues = [3.84415e-74, 8.4800e-120, 5.18652e-83, 5.8116e-102,
3.4028e-109, 0, .1719830562, .0004142808, 4.39014e-15, 0]
| bsd-3-clause | 7,447,180,080,470,792,000 | 47.699153 | 79 | 0.591577 | false | 2.564257 | false | false | false |
MarcoVigelini/crashmanager | crashmanagergui.py | 1 | 28284 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'crashmanager-gui.ui'
#
# Created: Tue Apr 22 07:51:21 2014
# by: pyside-uic 0.2.13 running on PySide 1.1.0
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1219, 564)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setGeometry(QtCore.QRect(9, 9, 631, 321))
self.tabWidget.setToolTip("")
self.tabWidget.setObjectName("tabWidget")
self.tab = QtGui.QWidget()
self.tab.setObjectName("tab")
self.gridLayoutWidget = QtGui.QWidget(self.tab)
self.gridLayoutWidget.setGeometry(QtCore.QRect(0, 30, 621, 231))
self.gridLayoutWidget.setObjectName("gridLayoutWidget")
self.gridLayout = QtGui.QGridLayout(self.gridLayoutWidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.radioButton_SYSTEMTBS = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_SYSTEMTBS.setObjectName("radioButton_SYSTEMTBS")
self.gridLayout.addWidget(self.radioButton_SYSTEMTBS, 5, 1, 1, 1)
self.radioButton_ALLDATA = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_ALLDATA.setObjectName("radioButton_ALLDATA")
self.gridLayout.addWidget(self.radioButton_ALLDATA, 4, 0, 1, 1)
self.radioButton_NONSYSTEMDATA = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_NONSYSTEMDATA.setObjectName("radioButton_NONSYSTEMDATA")
self.gridLayout.addWidget(self.radioButton_NONSYSTEMDATA, 0, 0, 1, 1)
self.radioButton_UNDODATA = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_UNDODATA.setObjectName("radioButton_UNDODATA")
self.gridLayout.addWidget(self.radioButton_UNDODATA, 3, 0, 1, 1)
self.radioButton_10 = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_10.setEnabled(True)
self.radioButton_10.setObjectName("radioButton_10")
self.gridLayout.addWidget(self.radioButton_10, 1, 1, 1, 1)
self.radioButton_SYSTEMDATA = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_SYSTEMDATA.setObjectName("radioButton_SYSTEMDATA")
self.gridLayout.addWidget(self.radioButton_SYSTEMDATA, 2, 0, 1, 1)
self.radioButton_SPFILE = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_SPFILE.setObjectName("radioButton_SPFILE")
self.gridLayout.addWidget(self.radioButton_SPFILE, 0, 2, 1, 1)
self.radioButton_NONSYSTEMTBS = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_NONSYSTEMTBS.setObjectName("radioButton_NONSYSTEMTBS")
self.gridLayout.addWidget(self.radioButton_NONSYSTEMTBS, 3, 1, 1, 1)
self.radioButton_21 = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_21.setEnabled(False)
self.radioButton_21.setObjectName("radioButton_21")
self.gridLayout.addWidget(self.radioButton_21, 2, 1, 1, 1)
self.radioButton_TEMPORARYTBS = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_TEMPORARYTBS.setObjectName("radioButton_TEMPORARYTBS")
self.gridLayout.addWidget(self.radioButton_TEMPORARYTBS, 4, 1, 1, 1)
self.radioButton_TEMPORARYDATA = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_TEMPORARYDATA.setObjectName("radioButton_TEMPORARYDATA")
self.gridLayout.addWidget(self.radioButton_TEMPORARYDATA, 1, 0, 1, 1)
self.radioButton_READONLYTBS = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_READONLYTBS.setObjectName("radioButton_READONLYTBS")
self.gridLayout.addWidget(self.radioButton_READONLYTBS, 0, 1, 1, 1)
self.radioButton_UNDOTBS = QtGui.QRadioButton(self.gridLayoutWidget)
self.radioButton_UNDOTBS.setObjectName("radioButton_UNDOTBS")
self.gridLayout.addWidget(self.radioButton_UNDOTBS, 6, 1, 1, 1)
self.tabWidget.addTab(self.tab, "")
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName("tab_2")
self.gridLayoutWidget_2 = QtGui.QWidget(self.tab_2)
self.gridLayoutWidget_2.setGeometry(QtCore.QRect(0, 20, 621, 231))
self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2")
self.gridLayout_2 = QtGui.QGridLayout(self.gridLayoutWidget_2)
self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.radioButton_22 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_22.setObjectName("radioButton_22")
self.gridLayout_2.addWidget(self.radioButton_22, 5, 1, 1, 1)
self.radioButton_23 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_23.setObjectName("radioButton_23")
self.gridLayout_2.addWidget(self.radioButton_23, 4, 0, 1, 1)
self.radioButton_24 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_24.setObjectName("radioButton_24")
self.gridLayout_2.addWidget(self.radioButton_24, 0, 0, 1, 1)
self.radioButton_25 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_25.setObjectName("radioButton_25")
self.gridLayout_2.addWidget(self.radioButton_25, 3, 0, 1, 1)
self.radioButton_26 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_26.setObjectName("radioButton_26")
self.gridLayout_2.addWidget(self.radioButton_26, 1, 1, 1, 1)
self.radioButton_27 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_27.setObjectName("radioButton_27")
self.gridLayout_2.addWidget(self.radioButton_27, 2, 0, 1, 1)
self.radioButton_28 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_28.setObjectName("radioButton_28")
self.gridLayout_2.addWidget(self.radioButton_28, 0, 2, 1, 1)
self.radioButton_29 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_29.setObjectName("radioButton_29")
self.gridLayout_2.addWidget(self.radioButton_29, 3, 1, 1, 1)
self.radioButton_30 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_30.setObjectName("radioButton_30")
self.gridLayout_2.addWidget(self.radioButton_30, 2, 1, 1, 1)
self.radioButton_31 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_31.setObjectName("radioButton_31")
self.gridLayout_2.addWidget(self.radioButton_31, 4, 1, 1, 1)
self.radioButton_32 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_32.setObjectName("radioButton_32")
self.gridLayout_2.addWidget(self.radioButton_32, 1, 0, 1, 1)
self.radioButton_33 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_33.setObjectName("radioButton_33")
self.gridLayout_2.addWidget(self.radioButton_33, 0, 1, 1, 1)
self.radioButton_34 = QtGui.QRadioButton(self.gridLayoutWidget_2)
self.radioButton_34.setObjectName("radioButton_34")
self.gridLayout_2.addWidget(self.radioButton_34, 6, 1, 1, 1)
self.tabWidget.addTab(self.tab_2, "")
self.tab_3 = QtGui.QWidget()
self.tab_3.setObjectName("tab_3")
self.gridLayoutWidget_3 = QtGui.QWidget(self.tab_3)
self.gridLayoutWidget_3.setGeometry(QtCore.QRect(0, 20, 621, 231))
self.gridLayoutWidget_3.setObjectName("gridLayoutWidget_3")
self.gridLayout_3 = QtGui.QGridLayout(self.gridLayoutWidget_3)
self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
self.gridLayout_3.setObjectName("gridLayout_3")
self.radioButton_35 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_35.setObjectName("radioButton_35")
self.gridLayout_3.addWidget(self.radioButton_35, 5, 1, 1, 1)
self.radioButton_36 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_36.setObjectName("radioButton_36")
self.gridLayout_3.addWidget(self.radioButton_36, 4, 0, 1, 1)
self.radioButton_37 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_37.setObjectName("radioButton_37")
self.gridLayout_3.addWidget(self.radioButton_37, 0, 0, 1, 1)
self.radioButton_38 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_38.setObjectName("radioButton_38")
self.gridLayout_3.addWidget(self.radioButton_38, 3, 0, 1, 1)
self.radioButton_39 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_39.setObjectName("radioButton_39")
self.gridLayout_3.addWidget(self.radioButton_39, 1, 1, 1, 1)
self.radioButton_40 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_40.setObjectName("radioButton_40")
self.gridLayout_3.addWidget(self.radioButton_40, 2, 0, 1, 1)
self.radioButton_41 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_41.setObjectName("radioButton_41")
self.gridLayout_3.addWidget(self.radioButton_41, 0, 2, 1, 1)
self.radioButton_42 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_42.setObjectName("radioButton_42")
self.gridLayout_3.addWidget(self.radioButton_42, 3, 1, 1, 1)
self.radioButton_43 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_43.setObjectName("radioButton_43")
self.gridLayout_3.addWidget(self.radioButton_43, 2, 1, 1, 1)
self.radioButton_44 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_44.setObjectName("radioButton_44")
self.gridLayout_3.addWidget(self.radioButton_44, 4, 1, 1, 1)
self.radioButton_45 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_45.setObjectName("radioButton_45")
self.gridLayout_3.addWidget(self.radioButton_45, 1, 0, 1, 1)
self.radioButton_46 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_46.setObjectName("radioButton_46")
self.gridLayout_3.addWidget(self.radioButton_46, 0, 1, 1, 1)
self.radioButton_47 = QtGui.QRadioButton(self.gridLayoutWidget_3)
self.radioButton_47.setObjectName("radioButton_47")
self.gridLayout_3.addWidget(self.radioButton_47, 6, 1, 1, 1)
self.tabWidget.addTab(self.tab_3, "")
self.tab_4 = QtGui.QWidget()
self.tab_4.setObjectName("tab_4")
self.radioButton_7 = QtGui.QRadioButton(self.tab_4)
self.radioButton_7.setGeometry(QtCore.QRect(40, 40, 147, 18))
self.radioButton_7.setObjectName("radioButton_7")
self.radioButton_8 = QtGui.QRadioButton(self.tab_4)
self.radioButton_8.setGeometry(QtCore.QRect(40, 70, 171, 18))
self.radioButton_8.setObjectName("radioButton_8")
self.tabWidget.addTab(self.tab_4, "")
self.tab_5 = QtGui.QWidget()
self.tab_5.setObjectName("tab_5")
self.gridLayoutWidget_5 = QtGui.QWidget(self.tab_5)
self.gridLayoutWidget_5.setGeometry(QtCore.QRect(0, 10, 670, 151))
self.gridLayoutWidget_5.setObjectName("gridLayoutWidget_5")
self.gridLayout_5 = QtGui.QGridLayout(self.gridLayoutWidget_5)
self.gridLayout_5.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.gridLayout_5.setContentsMargins(0, 0, 0, 0)
self.gridLayout_5.setObjectName("gridLayout_5")
self.radioButton_18 = QtGui.QRadioButton(self.gridLayoutWidget_5)
self.radioButton_18.setObjectName("radioButton_18")
self.gridLayout_5.addWidget(self.radioButton_18, 0, 0, 1, 1)
self.radioButton_20 = QtGui.QRadioButton(self.gridLayoutWidget_5)
self.radioButton_20.setObjectName("radioButton_20")
self.gridLayout_5.addWidget(self.radioButton_20, 0, 1, 1, 1)
self.radioButton_15 = QtGui.QRadioButton(self.gridLayoutWidget_5)
self.radioButton_15.setObjectName("radioButton_15")
self.gridLayout_5.addWidget(self.radioButton_15, 1, 0, 1, 1)
self.radioButton_17 = QtGui.QRadioButton(self.gridLayoutWidget_5)
self.radioButton_17.setObjectName("radioButton_17")
self.gridLayout_5.addWidget(self.radioButton_17, 2, 0, 1, 1)
self.radioButton_19 = QtGui.QRadioButton(self.gridLayoutWidget_5)
self.radioButton_19.setObjectName("radioButton_19")
self.gridLayout_5.addWidget(self.radioButton_19, 1, 1, 1, 1)
self.radioButton_16 = QtGui.QRadioButton(self.gridLayoutWidget_5)
self.radioButton_16.setObjectName("radioButton_16")
self.gridLayout_5.addWidget(self.radioButton_16, 2, 1, 1, 1)
self.tabWidget.addTab(self.tab_5, "")
self.verticalLayoutWidget_3 = QtGui.QWidget(self.centralwidget)
self.verticalLayoutWidget_3.setGeometry(QtCore.QRect(10, 330, 631, 137))
self.verticalLayoutWidget_3.setObjectName("verticalLayoutWidget_3")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.verticalLayoutWidget_3)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.label = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label.setObjectName("label")
self.verticalLayout_3.addWidget(self.label)
self.textBrowser = QtGui.QTextBrowser(self.verticalLayoutWidget_3)
self.textBrowser.setObjectName("textBrowser")
self.verticalLayout_3.addWidget(self.textBrowser)
self.horizontalLayoutWidget = QtGui.QWidget(self.centralwidget)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(30, 470, 188, 41))
self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.pushButton = QtGui.QPushButton(self.horizontalLayoutWidget)
self.pushButton.setObjectName("pushButton")
self.horizontalLayout.addWidget(self.pushButton)
self.pushButton_2 = QtGui.QPushButton(self.horizontalLayoutWidget)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout.addWidget(self.pushButton_2)
self.verticalLayoutWidget = QtGui.QWidget(self.centralwidget)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(649, 9, 561, 461))
self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.label_2 = QtGui.QLabel(self.verticalLayoutWidget)
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.label_2)
self.textBrowser_ALERTLOG = QtGui.QTextBrowser(self.verticalLayoutWidget)
self.textBrowser_ALERTLOG.setObjectName("textBrowser_ALERTLOG")
self.verticalLayout.addWidget(self.textBrowser_ALERTLOG)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1219, 24))
self.menubar.setNativeMenuBar(False)
self.menubar.setObjectName("menubar")
self.menuAbout = QtGui.QMenu(self.menubar)
self.menuAbout.setObjectName("menuAbout")
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName("menuFile")
self.menuMode = QtGui.QMenu(self.menubar)
self.menuMode.setObjectName("menuMode")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionClose = QtGui.QAction(MainWindow)
self.actionClose.setObjectName("actionClose")
self.actionAbout = QtGui.QAction(MainWindow)
self.actionAbout.setObjectName("actionAbout")
self.actionComplete_Recovery = QtGui.QAction(MainWindow)
self.actionComplete_Recovery.setObjectName("actionComplete_Recovery")
self.actionIncomplete_Recovery = QtGui.QAction(MainWindow)
self.actionIncomplete_Recovery.setObjectName("actionIncomplete_Recovery")
self.actionFlashback_Recovery = QtGui.QAction(MainWindow)
self.actionFlashback_Recovery.setObjectName("actionFlashback_Recovery")
self.actionContent = QtGui.QAction(MainWindow)
self.actionContent.setObjectName("actionContent")
self.menuAbout.addAction(self.actionContent)
self.menuAbout.addAction(self.actionAbout)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionClose)
self.menuMode.addAction(self.actionComplete_Recovery)
self.menuMode.addAction(self.actionIncomplete_Recovery)
self.menuMode.addAction(self.actionFlashback_Recovery)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuMode.menuAction())
self.menubar.addAction(self.menuAbout.menuAction())
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(0)
        # QAction emits triggered(), not the Qt3-era activated()
        QtCore.QObject.connect(self.actionClose, QtCore.SIGNAL("triggered()"), MainWindow.close)
        QtCore.QObject.connect(self.actionComplete_Recovery, QtCore.SIGNAL("triggered()"), self.tabWidget.show)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_SYSTEMTBS.setText(QtGui.QApplication.translate("MainWindow", "Loss of the SYSTEM tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_ALLDATA.setText(QtGui.QApplication.translate("MainWindow", "Loss of all datafiles", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_NONSYSTEMDATA.setText(QtGui.QApplication.translate("MainWindow", "Loss of a non-system datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_UNDODATA.setText(QtGui.QApplication.translate("MainWindow", "Loss of an UNDO datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_10.setText(QtGui.QApplication.translate("MainWindow", "Loss of an Index tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_SYSTEMDATA.setText(QtGui.QApplication.translate("MainWindow", "Loss of a SYSTEM datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_SPFILE.setText(QtGui.QApplication.translate("MainWindow", "Loss of the spfile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_NONSYSTEMTBS.setText(QtGui.QApplication.translate("MainWindow", "Loss of a non-system tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_21.setText(QtGui.QApplication.translate("MainWindow", "Loss of all indexes in USERS tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_TEMPORARYTBS.setText(QtGui.QApplication.translate("MainWindow", "Loss of a temporary tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_TEMPORARYDATA.setText(QtGui.QApplication.translate("MainWindow", "Loss of a temporary datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_READONLYTBS.setText(QtGui.QApplication.translate("MainWindow", "Loss of a Read-Only tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_UNDOTBS.setText(QtGui.QApplication.translate("MainWindow", "Loss of the UNDO tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), QtGui.QApplication.translate("MainWindow", "Complete Recovery", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_22.setText(QtGui.QApplication.translate("MainWindow", "Loss of the SYSTEM tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_23.setText(QtGui.QApplication.translate("MainWindow", "Loss of all datafiles", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_24.setText(QtGui.QApplication.translate("MainWindow", "Loss of a non-system datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_25.setText(QtGui.QApplication.translate("MainWindow", "Loss of an UNDO datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_26.setText(QtGui.QApplication.translate("MainWindow", "Loss of an Index tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_27.setText(QtGui.QApplication.translate("MainWindow", "Loss of a SYSTEM datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_28.setText(QtGui.QApplication.translate("MainWindow", "Loss of the spfile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_29.setText(QtGui.QApplication.translate("MainWindow", "Loss of a non-system tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_30.setText(QtGui.QApplication.translate("MainWindow", "Loss of all indexes in USERS tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_31.setText(QtGui.QApplication.translate("MainWindow", "Loss of a temporary tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_32.setText(QtGui.QApplication.translate("MainWindow", "Loss of a temporary datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_33.setText(QtGui.QApplication.translate("MainWindow", "Loss of a Read-Only tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_34.setText(QtGui.QApplication.translate("MainWindow", "Loss of the UNDO tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), QtGui.QApplication.translate("MainWindow", "Incomplete Recovery", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_35.setText(QtGui.QApplication.translate("MainWindow", "Loss of the SYSTEM tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_36.setText(QtGui.QApplication.translate("MainWindow", "Loss of all datafiles", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_37.setText(QtGui.QApplication.translate("MainWindow", "Loss of a non-system datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_38.setText(QtGui.QApplication.translate("MainWindow", "Loss of an UNDO datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_39.setText(QtGui.QApplication.translate("MainWindow", "Loss of an Index tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_40.setText(QtGui.QApplication.translate("MainWindow", "Loss of a SYSTEM datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_41.setText(QtGui.QApplication.translate("MainWindow", "Loss of the spfile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_42.setText(QtGui.QApplication.translate("MainWindow", "Loss of a non-system tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_43.setText(QtGui.QApplication.translate("MainWindow", "Loss of all indexes in USERS tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_44.setText(QtGui.QApplication.translate("MainWindow", "Loss of a temporary tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_45.setText(QtGui.QApplication.translate("MainWindow", "Loss of a temporary datafile", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_46.setText(QtGui.QApplication.translate("MainWindow", "Loss of a Read-Only tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_47.setText(QtGui.QApplication.translate("MainWindow", "Loss of the UNDO tablespace", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), QtGui.QApplication.translate("MainWindow", "Flashback Recovery", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_7.setText(QtGui.QApplication.translate("MainWindow", "Loss of a control file", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_8.setText(QtGui.QApplication.translate("MainWindow", "Loss of all control files", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), QtGui.QApplication.translate("MainWindow", "Control file", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_18.setText(QtGui.QApplication.translate("MainWindow", "Loss of a redo log file group member", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_20.setText(QtGui.QApplication.translate("MainWindow", "Loss of all redo log members of an INACTIVE group", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_15.setText(QtGui.QApplication.translate("MainWindow", "Loss of a redo log file group", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_17.setText(QtGui.QApplication.translate("MainWindow", "Loss of redo log member of a multiplexed group", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_19.setText(QtGui.QApplication.translate("MainWindow", "Loss of all redo log members of an ACTIVE group", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_16.setText(QtGui.QApplication.translate("MainWindow", "Loss of all redo log members of CURRENT group", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_5), QtGui.QApplication.translate("MainWindow", "Redo log", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("MainWindow", "Description:", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton.setToolTip(QtGui.QApplication.translate("MainWindow", "<html><head/><body><p>Press this button to crash your instance according to the selected radio button. You will then have to recover the instance, so make sure you have at least one usable backup of your database.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.pushButton.setText(QtGui.QApplication.translate("MainWindow", "Crash it !", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_2.setToolTip(QtGui.QApplication.translate("MainWindow", "<html><head/><body><p>Press this button to clear the radio button selection. With nothing selected, pushing the &quot;Crash it !&quot; button will not crash your instance.</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.pushButton_2.setText(QtGui.QApplication.translate("MainWindow", "Cancel", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("MainWindow", "Alert log:", None, QtGui.QApplication.UnicodeUTF8))
self.menuAbout.setTitle(QtGui.QApplication.translate("MainWindow", "Help", None, QtGui.QApplication.UnicodeUTF8))
self.menuFile.setTitle(QtGui.QApplication.translate("MainWindow", "File", None, QtGui.QApplication.UnicodeUTF8))
self.menuMode.setTitle(QtGui.QApplication.translate("MainWindow", "Mode", None, QtGui.QApplication.UnicodeUTF8))
self.actionClose.setText(QtGui.QApplication.translate("MainWindow", "Quit", None, QtGui.QApplication.UnicodeUTF8))
self.actionAbout.setText(QtGui.QApplication.translate("MainWindow", "About", None, QtGui.QApplication.UnicodeUTF8))
self.actionComplete_Recovery.setText(QtGui.QApplication.translate("MainWindow", "Complete Recovery", None, QtGui.QApplication.UnicodeUTF8))
self.actionIncomplete_Recovery.setText(QtGui.QApplication.translate("MainWindow", "Incomplete Recovery", None, QtGui.QApplication.UnicodeUTF8))
self.actionFlashback_Recovery.setText(QtGui.QApplication.translate("MainWindow", "Flashback Recovery", None, QtGui.QApplication.UnicodeUTF8))
self.actionContent.setText(QtGui.QApplication.translate("MainWindow", "Content", None, QtGui.QApplication.UnicodeUTF8))
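# Illustrative launcher (not part of the pyuic4 output; assumes the
# enclosing class is named Ui_MainWindow, as pyuic4 typically emits):
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    window = QtGui.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())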
| gpl-3.0 | -8,960,481,038,980,291,000 | 78.898305 | 376 | 0.735151 | false | 3.825781 | false | false | false |
langkilde/epic | src/main/python/moveBatch.py | 1 | 2766 | #import pymongo
import sys
import os
##from pymongo import MongoClient
from makeConllFromDBOutput import makeConll
from getJustSentences import getJustSentences
# python will convert \n to os.linesep
def moveBatch(randomIds,noise):
pathToEpic = os.getcwd()
pathToEpic = pathToEpic[0:pathToEpic.rfind("epic")+4]
returnString = "Tmp file: "
print "Inside moveBatch"
# Move Batch between databases
#client = MongoClient('mon-entity-event-r13-2.recfut.com:27016')
#db = client.rf_entity_curation
#labeled = db.malware_labeled
#unlabeled = db.malware_unlabeled
batch = open(os.path.expanduser(pathToEpic + "/data/PoolData/batch.txt"),'w')
readUnlabeled = open(os.path.expanduser(pathToEpic + "/data/PoolData/unlabeledPool.txt"), 'r')
lines = readUnlabeled.readlines()
readUnlabeled.close()
writeUnlabeled = open(os.path.expanduser(pathToEpic + "/data/PoolData/unlabeledPool.txt"), 'w')
print "Unlabeled openened for writing"
#print "randomIds " + str(randomIds)
################## Batch moved in database #############
#for oneId in randomIds:
# tmpId = unlabeled.find({"random" : oneId})
# labeled.insert(tmpId)
# unlabeled.remove({"random" : oneId})
# tmpId = labeled.find({"random" : oneId})
# batch.write(str(tmpId[0]))
# batch.write("\n")
#print "Starting to remove id from textfile"
for line in lines:
idFound = False
for oneID in randomIds:
if not (line.find(str(oneID)[0:len(str(oneID))-2])==-1):
idFound = True
#print str(idFound)+" " +str(oneID)[0:len(str(oneID))-2] +"\n"+line
if not idFound:
#print "Write \""+line+"\" to unlabeled"
writeUnlabeled.write(line)
else:
#print "Write \""+line+"\" to batch"
batch.write(line)
#print line + " does not include " +oneId
#print str(idFound)+" " + +"\n"+line
#returnString += str(idFound) + " " + line + "\n"
writeUnlabeled.close()
batch.close()
# Get Conll of the batches and add these to all conll's of labeled pool
makeConll(pathToEpic + "/data/PoolData/batch.txt", pathToEpic + "/data/PoolData/batchConll.conll", noise)
labeledOrig = open(os.path.expanduser(pathToEpic + "/data/PoolData/labeledPool.txt"), 'a')
labeledOrigConll = open(os.path.expanduser(pathToEpic + "/data/PoolData/labeledPool.conll"),'a')
batch = open(os.path.expanduser(pathToEpic + "/data/PoolData/batch.txt"),'r')
batchConll = open(os.path.expanduser(pathToEpic + "/data/PoolData/batchConll.conll"),'r')
labeledOrig.write(batch.read())
labeledOrigConll.write(batchConll.read())
labeledOrig.close()
labeledOrigConll.close()
batch.close()
batchConll.close()
#os.remove(os.path.expanduser(pathToEpic + "/data/PoolData/batch.txt"))
#os.remove(os.path.expanduser(pathToEpic + "/data/PoolData/batchConll.conll"))
return returnString
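# Illustrative invocation (hypothetical IDs and noise value; assumes the
# PoolData files referenced above already exist):
if __name__ == '__main__':
    sample_ids = [0.1234567890, 0.9876543210]
    print moveBatch(sample_ids, 0)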
| apache-2.0 | 2,568,398,231,805,329,000 | 27.22449 | 106 | 0.702097 | false | 2.810976 | false | false | false |
arcivanov/pybuilder | src/unittest/python/plugins/python/pylint_plugin_tests.py | 3 | 4800 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2020 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from pybuilder.core import Project, Logger
from pybuilder.errors import BuildFailedException
from pybuilder.plugins.python.pylint_plugin import (check_pylint_availability,
init_pylint,
execute_pylint,
DEFAULT_PYLINT_OPTIONS)
from test_utils import Mock, patch, ANY
PYLINT_ERROR_OUTPUT = [
'************* Module mode.file',
'src/main/python/module/file.py:34:0: C0301: Line too long (365/100) (line-too-long)',
'src/main/python/module/file.py:34:0: R1705: Unnecessary "else" after "return" (no-else-return)',
'Your code has been rated at 9.79/10 (previous run: 9.79/10, +0.00)',
''
]
PYLINT_NORMAL_OUTPUT = [
'Your code has been rated at 9.79/10 (previous run: 9.79/10, +0.00)',
''
]
class PylintPluginTests(TestCase):
def setUp(self):
self.project = Project("basedir")
self.project.set_property("dir_source_main_python", "source")
self.project.set_property("dir_reports", "reports")
self.reactor = Mock()
self.reactor.python_env_registry = {}
self.reactor.python_env_registry["pybuilder"] = pyb_env = Mock()
pyb_env.environ = {}
self.reactor.pybuilder_venv = pyb_env
def test_should_check_that_pylint_can_be_executed(self):
mock_logger = Mock(Logger)
check_pylint_availability(self.project, mock_logger, self.reactor)
self.reactor.pybuilder_venv.verify_can_execute.assert_called_with(['pylint'], 'pylint', 'plugin python.pylint')
def test_should_run_pylint_with_default_options(self):
init_pylint(self.project)
execute_pylint(self.project, Mock(Logger), self.reactor)
self.reactor.pybuilder_venv.execute_command.assert_called_with(["pylint"] + DEFAULT_PYLINT_OPTIONS, ANY,
env=ANY)
def test_should_run_pylint_with_custom_options(self):
init_pylint(self.project)
self.project.set_property("pylint_options", ["--test", "-f", "--x=y"])
execute_pylint(self.project, Mock(Logger), self.reactor)
self.reactor.pybuilder_venv.execute_command.assert_called_with(["pylint", "--test", "-f", "--x=y"], ANY,
env=ANY)
@patch('pybuilder.plugins.python.pylint_plugin.read_file', return_value=PYLINT_ERROR_OUTPUT)
@patch('pybuilder.plugins.python.pylint_plugin.execute_tool_on_modules')
def test_should_break_build_when_warnings_and_set(self, *_):
init_pylint(self.project)
self.project.set_property("pylint_break_build", True)
with self.assertRaises(BuildFailedException):
execute_pylint(self.project, Mock(Logger), self.reactor)
@patch('pybuilder.plugins.python.pylint_plugin.read_file', return_value=PYLINT_ERROR_OUTPUT)
@patch('pybuilder.plugins.python.pylint_plugin.execute_tool_on_modules')
def test_should_not_break_build_when_warnings_and_not_set(self, *_):
init_pylint(self.project)
self.project.set_property("pylint_break_build", False)
execute_pylint(self.project, Mock(Logger), self.reactor)
@patch('pybuilder.plugins.python.pylint_plugin.read_file', return_value=PYLINT_NORMAL_OUTPUT)
@patch('pybuilder.plugins.python.pylint_plugin.execute_tool_on_modules')
def test_should_not_break_build_when_no_warnings_and_set(self, *_):
init_pylint(self.project)
self.project.set_property("pylint_break_build", True)
execute_pylint(self.project, Mock(Logger), self.reactor)
@patch('pybuilder.plugins.python.pylint_plugin.read_file', return_value=PYLINT_NORMAL_OUTPUT)
@patch('pybuilder.plugins.python.pylint_plugin.execute_tool_on_modules')
def test_should_not_break_build_when_no_warnings_and_not_set(self, *_):
init_pylint(self.project)
self.project.set_property("pylint_break_build", False)
execute_pylint(self.project, Mock(Logger), self.reactor)
| apache-2.0 | 1,745,801,525,207,836,000 | 42.636364 | 119 | 0.652292 | false | 3.633611 | true | false | false |
brad999/nikita-client | client/conversation.py | 2 | 2068 | # -*- coding: utf-8-*-
import logging
from notifier import Notifier
from brain import Brain
class Conversation(object):
def __init__(self, persona, mic, profile):
self._logger = logging.getLogger(__name__)
self.persona = persona
self.mic = mic
self.profile = profile
self.brain = Brain(mic, profile)
# Initialize notifier if server
if self.profile['server'] == 'Y':
self.notifier = Notifier(profile)
def handleForever(self):
"""
Delegates user input to the handling function when activated.
"""
self._logger.info("Starting to handle conversation with keyword '%s'.",
self.persona)
while True:
            # If server, handle passive modules (scheduled background tasks)
if self.profile['server'] == 'Y':
# Print notifications until empty
notifications = self.notifier.getAllNotifications()
for notif in notifications:
self._logger.info("Received notification: '%s'", str(notif))
self._logger.debug("Started listening for keyword '%s'",
self.persona)
threshold, transcribed = self.mic.passiveListen(self.persona)
self._logger.debug("Stopped listening for keyword '%s'",
self.persona)
if not transcribed or not threshold:
self._logger.info("Nothing has been said or transcribed.")
continue
self._logger.info("Keyword '%s' has been said!", self.persona)
self._logger.debug("Started to listen actively with threshold: %r",
threshold)
input = self.mic.activeListenToAllOptions(threshold)
self._logger.debug("Stopped to listen actively with threshold: %r",
threshold)
if input:
self.brain.query(input)
else:
self.mic.say('A', "Pardon?")
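# Sketch of the mic interface handleForever() relies on (method names are
# inferred from the calls above; this stub is illustrative only):
class EchoMic(object):
    def passiveListen(self, persona):
        return 0.5, persona              # (threshold, transcription)
    def activeListenToAllOptions(self, threshold):
        return "WHAT TIME IS IT"
    def say(self, voice, phrase):
        print(phrase)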
| mit | -7,085,864,070,392,753,000 | 38.018868 | 80 | 0.554159 | false | 4.809302 | false | false | false |
openregister/country-data | bin/countries.py | 1 | 1041 | #!/usr/bin/env python3
"""Import iso country codes"""
import csv
import io
import sys
column_to_field = [['The two-letter ISO 3166-1 code', 'country'],
['start-date', 'start-date'],
['end-date', 'end-date'],
['Country', 'name'],
['Official Name', 'official-name'],
['Name for Citizen', 'citizen-names']]
if __name__ == '__main__':
# the countries csv seems to be in cp1252, not utf-8
input_stream = io.TextIOWrapper(sys.stdin.buffer, encoding='cp1252')
reader = csv.DictReader(input_stream)
print (*[field for column, field in column_to_field], sep='\t')
# GB isn't included in the csv for some reason
print ('GB', '', '', 'United Kingdom', 'The United Kingdom of Great Britain and Northern Ireland', 'Briton, British citizen', sep='\t')
for num, row in enumerate(reader):
row['start-date'] = ''
row['end-date'] = ''
print (*[row[column] for column, field in column_to_field], sep='\t')
| mit | -4,535,095,615,233,608,000 | 34.896552 | 139 | 0.574448 | false | 3.481605 | false | false | false |
DosAmp/radiosteuerung | radiosteuerung.py | 1 | 1498 | #!/usr/bin/python3
import subprocess
import bottle as b
DEAD = 0
PLAYING = 1
PAUSED = 2
STOPPED = 3
UNKNOWN = 4
def _run_mocp(switch):
p = subprocess.Popen(['mocp', switch], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
(output, dummy) = p.communicate()
return (output, p.returncode)
def get_state():
r = _run_mocp('-i')
if r[1] == 2:
return DEAD
else:
for e in filter(lambda b: b.startswith(b'State: '), r[0].split(b'\n')):
# iterating over one element at most is such a 2AM solution
verb = e[7:]
if verb == b'PLAY':
return PLAYING
elif verb == b'PAUSE':
return PAUSED
elif verb == b'STOP':
return STOPPED
return UNKNOWN
def cmd_start():
_run_mocp('-S')
def cmd_play():
_run_mocp('-p')
def cmd_stop():
_run_mocp('-s')
@b.route('/')
def index():
playstate = get_state() == PLAYING
return b.template('radiosteuerung', playstate=playstate)
@b.route('/do/<cmd>')
def command(cmd):
method = 'cmd_' + cmd
# XXX: wrap these functions in a class, use getattr() instead
if method in globals():
globals()[method]()
return 'OK'
else:
b.abort(404, 'Method not found')
if get_state() == DEAD:
cmd_start()
b.TEMPLATE_PATH.insert(0, '/insertpathhere/') # location of template
#b.run(host='localhost', port=8088, debug=True) # not recommended
b.run(host='0.0.0.0', port=8088)
| mit | 5,292,195,454,860,712,000 | 23.16129 | 93 | 0.578104 | false | 3.242424 | false | false | false |
colloquium/spacewalk | backend/server/rhnMapping.py | 1 | 1548 | #
# Copyright (c) 2008--2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
from spacewalk.server import rhnSQL
# maps a funky product release to something that is canonical
# and has a chance of hitting some channels
def real_version(version):
version = str(version)
h = rhnSQL.prepare("""
select canon_version
from rhnRedHatCanonVersion rhcv
where rhcv.version = :version
""")
h.execute(version = version)
ret = h.fetchone_dict()
if not ret:
return version
return ret["canon_version"]
# checks if an arch is for real
def check_package_arch(name):
name = str(name)
if name is None or len(name) == 0:
return None
h = rhnSQL.prepare("select id from rhnPackageArch where label = :label")
h.execute(label=name)
ret = h.fetchone_dict()
if not ret:
return None
return name
if __name__ == '__main__':
"""Test code.
"""
rhnSQL.initDB()
print real_version('7.1sbe')
print check_package_arch('i386')
| gpl-2.0 | 8,066,852,062,321,704,000 | 28.769231 | 76 | 0.68863 | false | 3.650943 | false | false | false |
dreamhost/akanda-ceilometer | setup.py | 1 | 1286 | # Copyright 2014 DreamHost, LLC
#
# Author: DreamHost, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from setuptools import setup, find_packages
setup(
name='akanda-ceilometer',
version='0.1.0',
description='Ceilometer plugin for processing Akanda notifications',
author='DreamHost',
author_email='dev-community@dreamhost.com',
url='http://github.com/dreamhost/akanda',
license='BSD',
install_requires=['ceilometer'],
namespace_packages=['akanda'],
packages=find_packages(exclude=['test', 'smoke']),
entry_points={
'ceilometer.collector': [
'akanda_bandwidth = akanda.ceilometer.notifications'
':NetworkBandwidthNotification',
],
},
include_package_data=True,
zip_safe=False,
)
| apache-2.0 | -3,929,586,663,161,005,000 | 31.974359 | 75 | 0.700622 | false | 3.944785 | false | false | false |
ipapi/ipap | ipap/core/filter.py | 1 | 1870 | from math import sqrt, exp
import numpy as np
def distance(u, v, dft):
return sqrt((u - (dft.shape[0] / 2))**2 + (v - (dft.shape[1] / 2))**2)
def ideal(value, cutoff, ftype='lowpass', bwidth=1.0):
if ftype == 'lowpass':
return 1 if value <= cutoff else 0
elif ftype == 'highpass':
return 0 if value <= cutoff else 1
elif ftype == 'bandreject':
min = cutoff - (bwidth / 2)
max = cutoff + (bwidth / 2)
return 0 if min <= value <= max else 1
elif ftype == 'bandpass':
min = cutoff - (bwidth / 2)
max = cutoff + (bwidth / 2)
return 1 if min <= value <= max else 0
def gauss(value, cutoff, ftype='lowpass', bwidth=1.0):
if ftype == 'lowpass':
return exp(-(value**2) / (2 * cutoff**2))
elif ftype == 'highpass':
return 1 - exp(-(value**2 / (2 * cutoff**2)))
elif ftype == 'bandreject':
return 1 - exp(-((value**2 - cutoff**2) / ((1+value) * bwidth))**2)
elif ftype == 'bandpass':
return exp(-((value**2 - cutoff**2) / ((1+value) * bwidth))**2)
def butterworth(value, cutoff, n, ftype='lowpass', bwidth=1.0):
if ftype == 'lowpass':
return 1 / (1 + (value / cutoff)**(2*n))
elif ftype == 'highpass':
return 1 / (1 + (cutoff / (1+value))**(2*n))
elif ftype == 'bandreject':
return 1 / (1 + ((value * bwidth) / (1+(value**2 - cutoff**2)))**(2*n))
elif ftype == 'bandpass':
return 1 / (1 + ((value**2 - cutoff**2) / ((1+value) * bwidth))**(2*n))
def apply_filter(data, filter_fn):
    # data: (channels, rows, cols) complex spectrum; filter_fn maps the
    # distance from the spectrum centre to a gain (e.g. one of the
    # functions above with its cutoff arguments bound)
    filtered = np.empty(data.shape, dtype=complex)
    for c in range(0, data.shape[0]):
        for u in range(0, data.shape[1]):
            for v in range(0, data.shape[2]):
                value = distance(u, v, data[c])
                filtered[c][u][v] = data[c][u][v] * filter_fn(value)
    return filtered
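# Illustrative use (assumed workflow: centre the spectrum with fftshift so
# that distance() measures from the middle, then apply a Gaussian low-pass
# with an arbitrary cutoff of 30):
if __name__ == '__main__':
    from functools import partial
    rgb = np.random.rand(3, 64, 64)
    dft = np.fft.fftshift(np.fft.fft2(rgb), axes=(1, 2))
    lowpass = partial(gauss, cutoff=30.0, ftype='lowpass')
    print(apply_filter(dft, lowpass).shape)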
| gpl-3.0 | -8,124,891,274,376,917,000 | 33 | 79 | 0.532086 | false | 3.096026 | false | false | false |
zamattiac/SHARE | tests/share/models/test_rawdata.py | 1 | 2632 | import pytest
import hashlib
from django.core import exceptions
from django.db.utils import IntegrityError
from share.models import RawData
@pytest.mark.django_db
class TestRawData:
def test_doesnt_mangle_data(self, share_source):
rd = RawData(source=share_source, app_label='foo', data=b'This is just some data')
rd.save()
assert RawData.objects.first().data == 'This is just some data'
def test_must_have_data(self, share_source):
rd = RawData(source=share_source, app_label='foo')
with pytest.raises(exceptions.ValidationError) as e:
rd.clean_fields()
rd.save()
assert 'This field cannot be blank.' == e.value.message_dict['data'][0]
def test_must_have_source(self):
rd = RawData(data='SomeData', app_label='foo')
with pytest.raises(IntegrityError) as e:
rd.save()
assert 'null value in column "source_id" violates not-null constraint' in e.value.args[0]
def test_store_data(self, share_source):
rd = RawData.objects.store_data('myid', b'mydatums', share_source, 'applabel')
assert rd.date_seen is not None
assert rd.date_harvested is not None
assert rd.data == b'mydatums'
assert rd.source == share_source
assert rd.app_label == 'applabel'
assert rd.provider_doc_id == 'myid'
assert rd.sha256 == hashlib.sha256(b'mydatums').hexdigest()
def test_store_data_dedups_simple(self, share_source):
rd1 = RawData.objects.store_data('myid', b'mydatums', share_source, 'applabel')
rd2 = RawData.objects.store_data('myid', b'mydatums', share_source, 'applabel')
assert rd1.pk == rd2.pk
assert rd1.date_seen < rd2.date_seen
assert rd1.date_harvested == rd2.date_harvested
def test_store_data_dedups_complex(self, share_source):
data = b'{"providerUpdatedDateTime":"2016-08-25T11:37:40Z","uris":{"canonicalUri":"https://provider.domain/files/7d2792031","providerUris":["https://provider.domain/files/7d2792031"]},"contributors":[{"name":"Person1","email":"one@provider.domain"},{"name":"Person2","email":"two@provider.domain"},{"name":"Person3","email":"three@provider.domain"},{"name":"Person4","email":"dxm6@psu.edu"}],"title":"ReducingMorbiditiesinNeonatesUndergoingMRIScannig"}'
rd1 = RawData.objects.store_data('myid', data, share_source, 'applabel')
rd2 = RawData.objects.store_data('myid', data, share_source, 'applabel')
assert rd1.pk == rd2.pk
assert rd1.date_seen < rd2.date_seen
assert rd1.date_harvested == rd2.date_harvested
| apache-2.0 | -1,687,409,344,621,288,400 | 40.777778 | 461 | 0.661474 | false | 3.340102 | true | false | false |
david-shu/lxml-mate | lxmlmate.py | 1 | 14915 | # -*- coding: utf-8 -*-
'''
lxml mate.
'''
__ver_major__ = 0
__ver_minor__ = 5
__ver_patch__ = 2
__ver_sub__ = ""
__version__ = "%d.%d.%d%s" % (__ver_major__,__ver_minor__,__ver_patch__,__ver_sub__)
from lxml import etree, objectify
import types
class ObjectifiedElementProxy( object ):
u'''Proxy class for objectify.ObjectifiedElement instance which can be created by objectify.Element() or SubElement() or XML() or fromstring().
main purpose is to intercept AttributeException when access a non-existent tag.
How to access xml tag
---------------------
.
such as root.element.name
[]
such as root['element']['name']
hybrid
such as root['element'].name
note
The tag can only be accessed by [] when it is one of the reserved keywords.
Tag not in the xml tree can be accessed directly. A new tag will be created. No exceptions will be raised.
How to access xml tag's attributes
----------------------------------
[]
        .attrib['style'], an exception will be raised when style doesn't exist.
.attrib['style'] = 'big'
.get/set
.attrib.get( 'style', None )
.attrib.set( 'style', 'big' )
How to access class attributes and methods
------------------------------------------
.
attributes are reserved keywords and they can only be accessed by this way, for example
.pyval
.text
.insert etc.
or they are considered xml tags rather than attributes.
Reserved keywords
-----------------
The following keywords are used as methods or attributes' names.
**pyval** : returns the python value carried by leaf tag. read-only.
**text** : returns leaf tag's text content. read-only.
**obj** : returns ObjectifiedElement object referenced by this class instance. read-only.
**tag** : returns tag names. can be modified by . way such as \*.tag='newTagName'. readable and writable.
**attrib** : returns tag attributes dict. readable and writeable.
**parent** : returns parent node. read-only.
**children** : returns all children's list. read-only.
**len** : returns the number of children.
**insert** : insert a child node at the specified position.
**remove** : remove a child node at the specified position.
**index** : returns the position of the specified object.
**swap** : swap two nodes' position.
Examples
--------
create a brand new xml:
>>> p = ObjectifiedElmentProxy( rootag='Person' )
>>> p.name = 'peter'
>>> p.age = 13
>>> print( p )
<Person>
<name>peter</name>
<age>13</age>
</Person>
create from xml string:
>>> p = ObjectifiedElementProxy( xmlStr="<Person><name>peter</name><age>13</age></Person>" )
>>> print( p )
<Person>
<name>peter</name>
<age>13</age>
</Person>
multiple levels examples:
>>> r = ObjectifiedElementProxy()
>>> r.person.name = 'jack'
>>> r.person.age = 10
>>> print( r )
<root>
<person>
<name>jack</name>
<age>10</age>
</person>
</root>
to insert child like '<person><name>peter</name><age>13</age></person>':
>>> r.insert( 'person' )('name','peter')('age',13)
>>> p = r('person').person[-1]
>>> p.name = 'david'
>>> p.age = 16
>>> print( r )
<root>
<person>
<name>jack</name>
<age>10</age>
</person>
<person>
<name>peter</name>
<age>13</age>
</person>
<person>
<name>david</name>
<age>16</age>
</person>
</root>
>>> print( r.station[1].name.pyval )
peter
Notes
-----
    xml attrib names and tag names are case sensitive, as in any xml document.
    Nodes with a text attribute are called leaf nodes. In principle leaf nodes should not have children, but this is not enforced.
'''
def __init__( self, objectifiedElement=None, xmlFileName=None, xmlStr=None, rootag='root', attrib=None, nsmap=None, **kwargs ):
u'''
initialize from ObjectifiedElement or xml file or xml string or create a brand new.
Arguments
---------
objectifiedElement : ObjectifiedElement, optional
an ObjectifiedElement object.
xmlFileName : str, optional
xml's filename.
xmlStr : str, optional
xml's content.
rootag : str, optional
create ObjectifiedElement instance which root tag's name is rootag.
attrib, nsmap, kwargs : optional
refer to objectify.Element()
'''
self._____o____ = None
if objectifiedElement is not None:
self._____o____ = objectifiedElement
elif xmlFileName:
self._____o____ = objectify.XML( xmlFileName )
elif xmlStr:
self._____o____ = objectify.fromstring( xmlStr )
else:
self._____o____ = objectify.Element( rootag, attrib=attrib, nsmap=nsmap, **kwargs )
def __call__( self, tag, pyval=None, attrib=None, nsmap=None, **kwargs ):
u'''Insert a new child node.
insert a new child node to the end.
Arguments
---------
e : str
the new tag to be inserted.
pyval : legal python data type
tag's python value.
attrib,nsmap,kwargs : optional
attribs for the new tag. see also objectify.Element() or SubElement().
Returns
-------
ObjectifiedElementProxy instance
See Also
--------
insert
note the difference between the two methods' return values.
Examples
--------
>>> p=ObjectifiedElementProxy( rootag='Person' )
>>> p( 'name', pyval='jack' )('age', pyval=13 )
>>> print( p )
<Person>
<name py:pytype="str">jack</name>
<age py:pytype="int">13</age>
</Person>
'''
self.insert( tag, None, attrib, nsmap, **kwargs )
self [ tag ][-1] = pyval
return self
def __getattr__( self, name ):
if name == '_____o____':
            return object.__getattribute__(self, name)
if hasattr( self._____o____, name ):
e = getattr( self._____o____, name )
if name in ( 'tag','pyval','text', 'attrib' ) or isinstance( e, ( types.FunctionType, types.BuiltinFunctionType ) ):
return e
else:
#if has no attr named name, created a new one.
e = objectify.SubElement( self._____o____, name )
return ObjectifiedElementProxy( e )
def __setattr__( self, name, value ):
if name == '_____o____':
object.__setattr__( self, name, value )
return
setattr( self._____o____, name, value )
def __delattr__( self, e ):
self._____o____.__delattr__( e )
def __len__( self ):
u'''children's number'''
return len( self.children )
def __getitem__( self, name ):
if isinstance( name, int ):
return ObjectifiedElementProxy( self._____o____[name] )
if isinstance( name, slice ):
return [ ObjectifiedElementProxy( o ) for o in self._____o____[name] ]
if isinstance( name, str ):
if name == '_____o____':
                return object.__getattribute__( self, name )
o = self._____o____
try:
e = o.__getitem__( name )
except:
e = objectify.SubElement( self._____o____, name )
return ObjectifiedElementProxy( e )
        raise TypeError( 'unsupported key type: %r' % type( name ) )
def __setitem__( self, e, v ):
if e == '_____o____':
            object.__setattr__( self, e, v )
return
self._____o____[e] = v
def __delitem__( self, e ):
if isinstance( e, ObjectifiedElementProxy ):
self._____o____.__delattr__( e.tag )
else:
self._____o____.__delattr__( e )
def insert( self, e, i=None, attrib=None, nsmap=None, **kwargs ):
u'''Insert a new child node.
insert a new child node at the specified position.
Arguments
---------
e : str
the new tag to be inserted.
i : int, optional
            if i is an integer, the new tag is inserted at position i; otherwise it is appended at the end.
attrib,nsmap,kwargs : optional
attribs for the new tag. see also objectify.Element() or SubElement().
'''
v = objectify.SubElement( self._____o____, e, attrib=attrib, nsmap=nsmap, **kwargs )
s = ObjectifiedElementProxy( v )
if i is not None:
self._____o____.insert( i, v )
return s
def swap( self, i, j ):
u'''swap two child nodes' position.
Arguments
---------
i,j : int
position of the child nodes to be swapped.
'''
self._____o____[i] = self._____o____[j]
def remove( self, i ):
u'''remove the child node.
Arguments
---------
i : int or ObjectifiedElement or ObjectifiedElementProxy or list
position of the child node or Element which will be removed.
'''
if isinstance( i, list ):
for k in i:
self.remove( k )
elif isinstance( i, int ):
return self.obj.remove( self.children[i].obj )
elif isinstance( i, objectify.ObjectifiedElement ):
return self.obj.remove( i )
elif isinstance( i, ObjectifiedElementProxy ):
return self.obj.remove( i.obj )
def index( self, o ):
u'''return the position of o.
Arguments
---------
o : ObjectifiedElementProxy
the ObjectifiedElementProxy instance to be positioned.
Returns
-------
int
'''
return self._____o____.index( o.obj )
def xpath( self, path ):
u'''find elements list in accordance with path.
Arguments
---------
path : str
please refer to lxml.objectify.ObjectifiedElement.xpath.
Returns
-------
list
a list of ObjectifiedElementProxy instance.
Xpath grammer review
--------------------
========== ===========
expression description
========== ===========
nodename to select all children of the node name
/ select from root node.
// select from current node
. select the current code.
.. select the parent node of the current node.
@ select attrib.
[] condition
text() tag text
* arbitrary node
========== ============
'''
return [ ObjectifiedElementProxy(k) for k in self._____o____.xpath( path ) ]
@property
def children( self, **kwargs ):
return [ ObjectifiedElementProxy( e ) for e in self._____o____.getchildren( **kwargs ) ]
@property
def parent( self ):
return ObjectifiedElementProxy( self._____o____.getparent() )
@property
def root( self):
parent = self._____o____.getparent()
        while parent is not None:
parent1 = parent.getparent()
if parent1 is None:
break
parent = parent1
return ObjectifiedElementProxy( parent )
@property
def obj( self ):
return self._____o____
@property
def pyval( self ):
if hasattr( self._____o____, 'pyval' ):
if isinstance( self._____o____, objectify.StringElement ):
return self._____o____.pyval.strip()
return self._____o____.pyval
    def __nonzero__( self ):
        # truth value follows Python convention: a node with content is truthy
        return not self.is_empty()
def is_empty( self ):
        u'''Determine whether this is a null node:
        no text, no attributes, no non-empty children.
'''
o = self._____o____
if o.text and o.text.strip():
return False
n = 0
for k in o.attrib:
if k[0] != '{':
n += 1
if n > 0:
return False
n = 0
for c in self.children:
if not c.is_empty():
n += 1
if n > 0:
return False
return True
def clean( self ):
u'''clean all null nodes.
'''
for c in self.children:
if c.is_empty():
c.parent.obj.__delattr__( c.tag )
else:
c.clean()
def tostring( self, encoding='utf-8', xml_declaration=True, standalone=None, with_comments=True,
pytype=False, xsi=True, xsi_nil=True, cleanup_namespaces=True, doctype=None,
with_tail=True, exclusive=False, inclusive_ns_prefixes=None ):
#self.clean()
objectify.deannotate( self._____o____, pytype=pytype, xsi=xsi, xsi_nil=xsi_nil, cleanup_namespaces=cleanup_namespaces )
s = etree.tostring( self._____o____, encoding=encoding, pretty_print= True,
xml_declaration=xml_declaration, with_tail=with_tail,
standalone=standalone, doctype=doctype,
exclusive=exclusive, with_comments=with_comments,
inclusive_ns_prefixes=inclusive_ns_prefixes )
return s
def __str__( self ):
#s = self.tostring( pytype=True, xml_declaration=False , encoding='unicode' )
s = self.tostring( pytype=True, xml_declaration=False ).decode()
return s
def dump( self, xmlFile, encoding='utf-8' ):
'''save xml to file.
Arguments
---------
xmlFile : str
xml's filename.
'''
f = open( xmlFile, 'w' )
s = self.tostring( encoding=encoding ).decode()
f.write( s )
f.close()
if __name__ == '__main__':
r = ObjectifiedElementProxy()
r.person.name = 'jack'
r.person.age = 10
r.insert( 'person' )('name','peter')('age',13)
p = r('person').person[-1]
p.name = 'david'
p.age = 16
print( r )
print( r.tostring().decode() )
print( r.person[1].name.pyval )
r.dump( 'x.xml' )
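    # Illustrative xpath query over the tree built above; proxies are
    # returned in document order:
    for node in r.xpath( '//person/name' ):
        print( node.pyval )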
| mit | -8,272,102,064,105,168,000 | 25.258803 | 147 | 0.4942 | false | 4.267525 | false | false | false |
swiftstack/swift | swift/common/bufferedhttp.py | 2 | 12433 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Monkey Patch httplib.HTTPResponse to buffer reads of headers. This can improve
performance when making large numbers of small HTTP requests. This module
also provides helper functions to make HTTP connections using
BufferedHTTPResponse.
.. warning::
If you use this, be sure that the libraries you are using do not access
the socket directly (xmlrpclib, I'm looking at you :/), and instead
make all calls through httplib.
"""
from swift.common import constraints
import logging
import time
import socket
import eventlet
from eventlet.green.httplib import CONTINUE, HTTPConnection, HTTPMessage, \
HTTPResponse, HTTPSConnection, _UNKNOWN
from six.moves.urllib.parse import quote, parse_qsl, urlencode
import six
if six.PY2:
httplib = eventlet.import_patched('httplib')
from eventlet.green import httplib as green_httplib
else:
httplib = eventlet.import_patched('http.client')
from eventlet.green.http import client as green_httplib
# Apparently http.server uses this to decide when/whether to send a 431.
# Give it some slack, so the app is more likely to get the chance to reject
# with a 400 instead.
httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
green_httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
class BufferedHTTPResponse(HTTPResponse):
"""HTTPResponse class that buffers reading of headers"""
def __init__(self, sock, debuglevel=0, strict=0,
method=None): # pragma: no cover
self.sock = sock
# sock is an eventlet.greenio.GreenSocket
if six.PY2:
# sock.fd is a socket._socketobject
# sock.fd._sock is a _socket.socket object, which is what we want.
self._real_socket = sock.fd._sock
else:
# sock.fd is a socket.socket, which should have a _real_close
self._real_socket = sock.fd
self.fp = sock.makefile('rb')
self.debuglevel = debuglevel
self.strict = strict
self._method = method
self.headers = self.msg = None
# from the Status-Line of the response
self.version = _UNKNOWN # HTTP-Version
self.status = _UNKNOWN # Status-Code
self.reason = _UNKNOWN # Reason-Phrase
self.chunked = _UNKNOWN # is "chunked" being used?
self.chunk_left = _UNKNOWN # bytes left to read in current chunk
self.length = _UNKNOWN # number of bytes left in response
self.will_close = _UNKNOWN # conn will close at end of response
self._readline_buffer = b''
if not six.PY2:
def begin(self):
HTTPResponse.begin(self)
header_payload = self.headers.get_payload()
if isinstance(header_payload, list) and len(header_payload) == 1:
header_payload = header_payload[0].get_payload()
if header_payload:
# This shouldn't be here. We must've bumped up against
# https://bugs.python.org/issue37093
for line in header_payload.rstrip('\r\n').split('\n'):
if ':' not in line or line[:1] in ' \t':
# Well, we're no more broken than we were before...
# Should we support line folding?
# How can/should we handle a bad header line?
break
header, value = line.split(':', 1)
value = value.strip(' \t\n\r')
self.headers.add_header(header, value)
def expect_response(self):
if self.fp:
self.fp.close()
self.fp = None
self.fp = self.sock.makefile('rb', 0)
version, status, reason = self._read_status()
if status != CONTINUE:
self._read_status = lambda: (version, status, reason)
self.begin()
else:
self.status = status
self.reason = reason.strip()
self.version = 11
if six.PY2:
# Under py2, HTTPMessage.__init__ reads the headers
# which advances fp
self.msg = HTTPMessage(self.fp, 0)
# immediately kill msg.fp to make sure it isn't read again
self.msg.fp = None
else:
# py3 has a separate helper for it
self.headers = self.msg = httplib.parse_headers(self.fp)
def read(self, amt=None):
if not self._readline_buffer:
return HTTPResponse.read(self, amt)
if amt is None:
# Unbounded read: send anything we have buffered plus whatever
# is left.
buffered = self._readline_buffer
self._readline_buffer = b''
return buffered + HTTPResponse.read(self, amt)
elif amt <= len(self._readline_buffer):
# Bounded read that we can satisfy entirely from our buffer
res = self._readline_buffer[:amt]
self._readline_buffer = self._readline_buffer[amt:]
return res
else:
# Bounded read that wants more bytes than we have
smaller_amt = amt - len(self._readline_buffer)
buf = self._readline_buffer
self._readline_buffer = b''
return buf + HTTPResponse.read(self, smaller_amt)
def readline(self, size=1024):
# You'd think Python's httplib would provide this, but it doesn't.
# It does, however, provide a comment in the HTTPResponse class:
#
# # XXX It would be nice to have readline and __iter__ for this,
# # too.
#
# Yes, it certainly would.
while (b'\n' not in self._readline_buffer
and len(self._readline_buffer) < size):
read_size = size - len(self._readline_buffer)
chunk = HTTPResponse.read(self, read_size)
if not chunk:
break
self._readline_buffer += chunk
line, newline, rest = self._readline_buffer.partition(b'\n')
self._readline_buffer = rest
return line + newline
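    # Usage sketch: callers may mix the two entry points; readline() fills
    # an internal buffer and read() always drains that buffer first:
    #   line = resp.readline()   # up to `size` bytes, newline included
    #   rest = resp.read(4096)   # consumes buffered bytes before the fp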
def nuke_from_orbit(self):
"""
Terminate the socket with extreme prejudice.
Closes the underlying socket regardless of whether or not anyone else
has references to it. Use this when you are certain that nobody else
you care about has a reference to this socket.
"""
if self._real_socket:
if six.PY2:
# this is idempotent; see sock_close in Modules/socketmodule.c
# in the Python source for details.
self._real_socket.close()
else:
# Hopefully this is equivalent?
# TODO: verify that this does everything ^^^^ does for py2
self._real_socket._real_close()
self._real_socket = None
self.close()
def close(self):
HTTPResponse.close(self)
self.sock = None
self._real_socket = None
class BufferedHTTPConnection(HTTPConnection):
"""HTTPConnection class that uses BufferedHTTPResponse"""
response_class = BufferedHTTPResponse
def connect(self):
self._connected_time = time.time()
ret = HTTPConnection.connect(self)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
return ret
def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
'''Send a request to the server.
:param method: specifies an HTTP request method, e.g. 'GET'.
:param url: specifies the object being requested, e.g. '/index.html'.
:param skip_host: if True does not add automatically a 'Host:' header
:param skip_accept_encoding: if True does not add automatically an
'Accept-Encoding:' header
'''
self._method = method
self._path = url
return HTTPConnection.putrequest(self, method, url, skip_host,
skip_accept_encoding)
def putheader(self, header, value):
if not isinstance(header, bytes):
header = header.encode('latin-1')
HTTPConnection.putheader(self, header, value)
def getexpect(self):
kwargs = {'method': self._method}
if hasattr(self, 'strict'):
kwargs['strict'] = self.strict
response = BufferedHTTPResponse(self.sock, **kwargs)
response.expect_response()
return response
def getresponse(self):
response = HTTPConnection.getresponse(self)
logging.debug("HTTP PERF: %(time).5f seconds to %(method)s "
"%(host)s:%(port)s %(path)s)",
{'time': time.time() - self._connected_time,
'method': self._method, 'host': self.host,
'port': self.port, 'path': self._path})
return response
def http_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
"""
Helper function to create an HTTPConnection object. If ssl is set True,
HTTPSConnection will be used. However, if ssl=False, BufferedHTTPConnection
will be used, which is buffered for backend Swift services.
:param ipaddr: IPv4 address to connect to
:param port: port to connect to
:param device: device of the node to query
:param partition: partition on the device
:param method: HTTP method to request ('GET', 'PUT', 'POST', etc.)
:param path: request path
:param headers: dictionary of headers
:param query_string: request query string
:param ssl: set True if SSL should be used (default: False)
:returns: HTTPConnection object
"""
if isinstance(path, six.text_type):
path = path.encode("utf-8")
if isinstance(device, six.text_type):
device = device.encode("utf-8")
if isinstance(partition, six.text_type):
partition = partition.encode('utf-8')
elif isinstance(partition, six.integer_types):
partition = str(partition).encode('ascii')
path = quote(b'/' + device + b'/' + partition + path)
return http_connect_raw(
ipaddr, port, method, path, headers, query_string, ssl)
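# Worked example of the path construction above (illustrative values):
# http_connect('127.0.0.1', 6200, 'sda1', 3, 'GET', '/AUTH_test/c/o')
# quotes and joins the pieces into '/sda1/3/AUTH_test/c/o' before
# delegating to http_connect_raw().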
def http_connect_raw(ipaddr, port, method, path, headers=None,
query_string=None, ssl=False):
"""
Helper function to create an HTTPConnection object. If ssl is set True,
HTTPSConnection will be used. However, if ssl=False, BufferedHTTPConnection
will be used, which is buffered for backend Swift services.
:param ipaddr: IPv4 address to connect to
:param port: port to connect to
:param method: HTTP method to request ('GET', 'PUT', 'POST', etc.)
:param path: request path
:param headers: dictionary of headers
:param query_string: request query string
:param ssl: set True if SSL should be used (default: False)
:returns: HTTPConnection object
"""
if not port:
port = 443 if ssl else 80
if ssl:
conn = HTTPSConnection('%s:%s' % (ipaddr, port))
else:
conn = BufferedHTTPConnection('%s:%s' % (ipaddr, port))
if query_string:
# Round trip to ensure proper quoting
if six.PY2:
query_string = urlencode(parse_qsl(
query_string, keep_blank_values=True))
else:
query_string = urlencode(
parse_qsl(query_string, keep_blank_values=True,
encoding='latin1'),
encoding='latin1')
path += '?' + query_string
conn.path = path
conn.putrequest(method, path, skip_host=(headers and 'Host' in headers))
if headers:
for header, value in headers.items():
conn.putheader(header, str(value))
conn.endheaders()
return conn
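# Illustrative round trip for the helper above (hypothetical host and port;
# any plain-HTTP endpoint will do):
def _example_round_trip(ipaddr='127.0.0.1', port=8080):
    conn = http_connect_raw(ipaddr, port, 'GET', '/info')
    resp = conn.getresponse()
    try:
        return resp.status, resp.read()
    finally:
        resp.close()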
| apache-2.0 | 6,961,866,339,083,148,000 | 38.722045 | 79 | 0.609185 | false | 4.299101 | false | false | false |
leifj/PyKCS11 | PyKCS11/__init__.py | 1 | 33528 | # Copyright (C) 2006-2010 Ludovic Rousseau (ludovic.rousseau@free.fr)
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# $Id$
import PyKCS11.LowLevel
import os
# redefine PKCS#11 constants
CK_TRUE = PyKCS11.LowLevel.CK_TRUE
CK_FALSE = PyKCS11.LowLevel.CK_FALSE
CK_UNAVAILABLE_INFORMATION = PyKCS11.LowLevel.CK_UNAVAILABLE_INFORMATION
CK_EFFECTIVELY_INFINITE = PyKCS11.LowLevel.CK_EFFECTIVELY_INFINITE
CK_INVALID_HANDLE = PyKCS11.LowLevel.CK_INVALID_HANDLE
CKM = {}
CKR = {}
CKA = {}
CKO = {}
CKU = {}
CKK = {}
CKC = {}
CKF = {}
CKS = {}
# redefine PKCS#11 constants using well known prefixes
for x in PyKCS11.LowLevel.__dict__.keys():
if x[:4] == 'CKM_' \
or x[:4] == 'CKR_' \
or x[:4] == 'CKA_' \
or x[:4] == 'CKO_' \
or x[:4] == 'CKU_' \
or x[:4] == 'CKK_' \
or x[:4] == 'CKC_' \
or x[:4] == 'CKF_' \
or x[:4] == 'CKS_':
a = "%s=PyKCS11.LowLevel.%s" % (x, x)
exec(a)
if x[3:] != "_VENDOR_DEFINED":
eval(x[:3])[eval(x)] = x # => CKM[CKM_RSA_PKCS] = 'CKM_RSA_PKCS'
eval(x[:3])[x] = eval(x) # => CKM['CKM_RSA_PKCS'] = CKM_RSA_PKCS
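# After the loop the CK* dictionaries map both ways; for example
# CKM['CKM_RSA_PKCS'] gives the numeric mechanism code (0x1 in the
# PKCS#11 headers) and CKM[CKM_RSA_PKCS] gives back 'CKM_RSA_PKCS'.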
# special CKR[] values
CKR[-2] = "Unkown PKCS#11 type"
CKR[-1] = "Load"
class ckbytelist(PyKCS11.LowLevel.ckbytelist):
"""
add a __repr__() method to the LowLevel equivalent
"""
def __repr__(self):
"""
        return the representation of a list;
        the __str__ method will also use it
"""
rep = [elt for elt in self]
return repr(rep)
class byteArray(PyKCS11.LowLevel.byteArray):
"""
add a __repr__() method to the LowLevel equivalent
"""
def __repr__(self):
"""
        return the representation of a list;
        the __str__ method will also use it
"""
rep = [elt for elt in self]
return repr(rep)
class CkClass(object):
"""
Base class for CK_* classes
"""
    # dictionary of integer_value: text_value for the flags bits
flags_dict = dict()
    # dictionary of field names and types
# type can be "pair", "flags" or "text"
fields = dict()
flags = 0
def flags2text(self):
"""
parse the L{self.flags} field and create a list of "CKF_*" strings
corresponding to bits set in flags
@return: a list of strings
@rtype: list
"""
r = []
for v in self.flags_dict.keys():
if self.flags & v:
r.append(self.flags_dict[v])
return r
def to_dict(self):
"""
        convert the fields of the object into a dictionary
"""
dico = dict()
for field in self.fields.keys():
if field == "flags":
dico[field] = self.flags2text()
else:
dico[field] = eval("self." + field)
return dico
def __str__(self):
"""
text representation of the object
"""
dico = self.to_dict()
lines = list()
for key in sorted(dico.keys()):
            field_type = self.fields[key]
            if field_type == "flags":
                lines.append("%s: %s" % (key, ", ".join(dico[key])))
            elif field_type == "pair":
lines.append("%s: " % key + "%d.%d" % dico[key])
else:
lines.append("%s: %s" % (key, dico[key]))
return "\n".join(lines)
class CK_SLOT_INFO(CkClass):
"""
matches the PKCS#11 CK_SLOT_INFO structure
@ivar slotDescription: blank padded
@type slotDescription: string
@ivar manufacturerID: blank padded
@type manufacturerID: string
@ivar flags: See L{flags2text}
@type flags: integer
@ivar hardwareVersion: 2 elements list
@type hardwareVersion: list
@ivar firmwareVersion: 2 elements list
@type firmwareVersion: list
"""
flags_dict = {
CKF_TOKEN_PRESENT: "CKF_TOKEN_PRESENT",
CKF_REMOVABLE_DEVICE: "CKF_REMOVABLE_DEVICE",
CKF_HW_SLOT: "CKF_HW_SLOT"}
fields = {"slotDescription": "text",
"manufacturerID": "text",
"flags": "flags",
"hardwareVersion": "text",
"firmwareVersion": "text"}
class CK_INFO(CkClass):
"""
matches the PKCS#11 CK_INFO structure
@ivar cryptokiVersion: Cryptoki interface version
@type cryptokiVersion: integer
@ivar manufacturerID: blank padded
@type manufacturerID: string
@ivar flags: must be zero
@type flags: integer
@ivar libraryDescription: blank padded
@type libraryDescription: string
@ivar libraryVersion: 2 elements list
@type libraryVersion: list
"""
fields = {"cryptokiVersion": "pair",
"manufacturerID": "text",
"flags": "flags",
"libraryDescription": "text",
"libraryVersion": "pair"}
class CK_SESSION_INFO(CkClass):
"""
matches the PKCS#11 CK_SESSION_INFO structure
@ivar slotID: ID of the slot that interfaces with the token
@type slotID: integer
@ivar state: state of the session
@type state: integer
@ivar flags: bit flags that define the type of session
@type flags: integer
@ivar ulDeviceError: an error code defined by the cryptographic token
@type ulDeviceError: integer
"""
flags_dict = {
CKF_RW_SESSION: "CKF_RW_SESSION",
CKF_SERIAL_SESSION: "CKF_SERIAL_SESSION",
}
def state2text(self):
"""
parse the L{self.state} field and return a "CKS_*" string
corresponding to the state
@return: a string
@rtype: string
"""
return CKS[self.state]
fields = {"slotID": "text",
"state": "text",
"flags": "flags",
"ulDeviceError": "text"}
class CK_TOKEN_INFO(CkClass):
"""
matches the PKCS#11 CK_TOKEN_INFO structure
@ivar label: blank padded
@type label: string
@ivar manufacturerID: blank padded
@type manufacturerID: string
@ivar model: string blank padded
@type model: string
@ivar serialNumber: string blank padded
@type serialNumber: string
@ivar flags:
@type flags: integer
@ivar ulMaxSessionCount:
@type ulMaxSessionCount: integer
@ivar ulSessionCount:
@type ulSessionCount: integer
@ivar ulMaxRwSessionCount:
@type ulMaxRwSessionCount: integer
@ivar ulRwSessionCount:
@type ulRwSessionCount: integer
@ivar ulMaxPinLen:
@type ulMaxPinLen: integer
@ivar ulMinPinLen:
@type ulMinPinLen: integer
@ivar ulTotalPublicMemory:
@type ulTotalPublicMemory: integer
@ivar ulFreePublicMemory:
@type ulFreePublicMemory: integer
@ivar ulTotalPrivateMemory:
@type ulTotalPrivateMemory: integer
@ivar ulFreePrivateMemory:
@type ulFreePrivateMemory: integer
@ivar hardwareVersion: 2 elements list
@type hardwareVersion: list
@ivar firmwareVersion: 2 elements list
@type firmwareVersion: list
@ivar utcTime: string
@type utcTime: string
"""
flags_dict = {
CKF_RNG: "CKF_RNG",
CKF_WRITE_PROTECTED: "CKF_WRITE_PROTECTED",
CKF_LOGIN_REQUIRED: "CKF_LOGIN_REQUIRED",
CKF_USER_PIN_INITIALIZED: "CKF_USER_PIN_INITIALIZED",
CKF_RESTORE_KEY_NOT_NEEDED: "CKF_RESTORE_KEY_NOT_NEEDED",
CKF_CLOCK_ON_TOKEN: "CKF_CLOCK_ON_TOKEN",
CKF_PROTECTED_AUTHENTICATION_PATH: "CKF_PROTECTED_AUTHENTICATION_PATH",
CKF_DUAL_CRYPTO_OPERATIONS: "CKF_DUAL_CRYPTO_OPERATIONS",
CKF_TOKEN_INITIALIZED: "CKF_TOKEN_INITIALIZED",
CKF_SECONDARY_AUTHENTICATION: "CKF_SECONDARY_AUTHENTICATION",
CKF_USER_PIN_COUNT_LOW: "CKF_USER_PIN_COUNT_LOW",
CKF_USER_PIN_FINAL_TRY: "CKF_USER_PIN_FINAL_TRY",
CKF_USER_PIN_LOCKED: "CKF_USER_PIN_LOCKED",
CKF_USER_PIN_TO_BE_CHANGED: "CKF_USER_PIN_TO_BE_CHANGED",
CKF_SO_PIN_COUNT_LOW: "CKF_SO_PIN_COUNT_LOW",
CKF_SO_PIN_FINAL_TRY: "CKF_SO_PIN_FINAL_TRY",
CKF_SO_PIN_LOCKED: "CKF_SO_PIN_LOCKED",
CKF_SO_PIN_TO_BE_CHANGED: "CKF_SO_PIN_TO_BE_CHANGED",
}
fields = {"label": "text",
"manufacturerID": "text",
"model": "text",
"serialNumber": "text",
"flags": "flags",
"ulMaxSessionCount": "text",
"ulSessionCount": "text",
"ulMaxRwSessionCount": "text",
"ulRwSessionCount": "text",
"ulMaxPinLen": "text",
"ulMinPinLen": "text",
"ulTotalPublicMemory": "text",
"ulFreePublicMemory": "text",
"ulTotalPrivateMemory": "text",
"ulFreePrivateMemory": "text",
"hardwareVersion": "pair",
"firmwareVersion": "pair",
"utcTime": "text"}
class CK_MECHANISM_INFO(CkClass):
"""
matches the PKCS#11 CK_MECHANISM_INFO structure
@ivar ulMinKeySize: minimum size of the key
@type ulMinKeySize: integer
@ivar ulMaxKeySize: maximum size of the key
@type ulMaxKeySize: integer
@ivar flags: bit flags specifying mechanism capabilities
@type flags: integer
"""
flags_dict = {
CKF_HW: "CKF_HW",
CKF_ENCRYPT: "CKF_ENCRYPT",
CKF_DECRYPT: "CKF_DECRYPT",
CKF_DIGEST: "CKF_DIGEST",
CKF_SIGN: "CKF_SIGN",
CKF_SIGN_RECOVER: "CKF_SIGN_RECOVER",
CKF_VERIFY: "CKF_VERIFY",
CKF_VERIFY_RECOVER: "CKF_VERIFY_RECOVER",
CKF_GENERATE: "CKF_GENERATE",
CKF_GENERATE_KEY_PAIR: "CKF_GENERATE_KEY_PAIR",
CKF_WRAP: "CKF_WRAP",
CKF_UNWRAP: "CKF_UNWRAP",
CKF_DERIVE: "CKF_DERIVE",
CKF_EXTENSION: "CKF_EXTENSION",
}
fields = {"ulMinKeySize": "text",
"ulMaxKeySize": "text",
"flags": "flags"}
class PyKCS11Error(Exception):
""" define the possible PKCS#11 error codes """
def __init__(self, value, text=""):
self.value = value
self.text = text
def __str__(self):
"""
The text representation of a PKCS#11 error is something like:
"CKR_DEVICE_ERROR (0x00000030)"
"""
if (self.value < 0):
return CKR[self.value] + " (%s)" % self.text
else:
return CKR[self.value] + " (0x%08X)" % self.value
class PyKCS11Lib(object):
""" high level PKCS#11 binding """
def __init__(self):
self.lib = PyKCS11.LowLevel.CPKCS11Lib()
def __del__(self):
self.lib.Unload()
def load(self, pkcs11dll_filename=None, *init_string):
"""
load a PKCS#11 library
@type pkcs11dll_filename: string
@param pkcs11dll_filename: the library name. If this parameter
is not set the environment variable PYKCS11LIB is used instead
@return: a L{PyKCS11Lib} object
@raise PyKCS11Error(-1): when the load fails
"""
if pkcs11dll_filename == None:
pkcs11dll_filename = os.getenv("PYKCS11LIB")
if pkcs11dll_filename == None:
raise PyKCS11Error(-1, "No PKCS11 library specified (set PYKCS11LIB env variable)")
rv = self.lib.Load(pkcs11dll_filename, 1)
if rv == 0:
raise PyKCS11Error(-1, pkcs11dll_filename)
def initToken(self, slot, pin, label):
"""
C_InitToken
"""
rv = self.lib.C_InitToken(slot, pin, label)
if rv != CKR_OK:
raise PyKCS11Error(rv)
def getInfo(self):
"""
C_GetInfo
@return: a L{CK_INFO} object
"""
info = PyKCS11.LowLevel.CK_INFO()
rv = self.lib.C_GetInfo(info)
if rv != CKR_OK:
raise PyKCS11Error(rv)
i = CK_INFO()
i.cryptokiVersion = (info.cryptokiVersion.major, info.cryptokiVersion.minor)
i.manufacturerID = info.GetManufacturerID()
i.flags = info.flags
i.libraryDescription = info.GetLibraryDescription()
i.libraryVersion = (info.libraryVersion.major, info.libraryVersion.minor)
return i
def getSlotList(self):
"""
C_GetSlotList
@return: a list of available slots
@rtype: list
"""
slotList = PyKCS11.LowLevel.ckintlist()
rv = self.lib.C_GetSlotList(0, slotList)
if rv != CKR_OK:
raise PyKCS11Error(rv)
s = []
for x in xrange(len(slotList)):
s.append(slotList[x])
return s
def getSlotInfo(self, slot):
"""
C_GetSlotInfo
@param slot: slot number returned by L{getSlotList}
@type slot: integer
@return: a L{CK_SLOT_INFO} object
"""
slotInfo = PyKCS11.LowLevel.CK_SLOT_INFO()
rv = self.lib.C_GetSlotInfo(slot, slotInfo)
if rv != CKR_OK:
raise PyKCS11Error(rv)
s = CK_SLOT_INFO()
s.slotDescription = slotInfo.GetSlotDescription()
s.manufacturerID = slotInfo.GetManufacturerID()
s.flags = slotInfo.flags
s.hardwareVersion = slotInfo.GetHardwareVersion()
s.firmwareVersion = slotInfo.GetFirmwareVersion()
return s
def getTokenInfo(self, slot):
"""
C_GetTokenInfo
@param slot: slot number returned by L{getSlotList}
@type slot: integer
@return: a L{CK_TOKEN_INFO} object
"""
tokeninfo = PyKCS11.LowLevel.CK_TOKEN_INFO()
rv = self.lib.C_GetTokenInfo(slot, tokeninfo)
if rv != CKR_OK:
raise PyKCS11Error(rv)
t = CK_TOKEN_INFO()
t.label = tokeninfo.GetLabel()
t.manufacturerID = tokeninfo.GetManufacturerID()
t.model = tokeninfo.GetModel()
t.serialNumber = tokeninfo.GetSerialNumber()
t.flags = tokeninfo.flags
t.ulMaxSessionCount = tokeninfo.ulMaxSessionCount
if t.ulMaxSessionCount == CK_UNAVAILABLE_INFORMATION:
t.ulMaxSessionCount = -1
t.ulSessionCount = tokeninfo.ulSessionCount
if t.ulSessionCount == CK_UNAVAILABLE_INFORMATION:
t.ulSessionCount = -1
t.ulMaxRwSessionCount = tokeninfo.ulMaxRwSessionCount
if t.ulMaxRwSessionCount == CK_UNAVAILABLE_INFORMATION:
t.ulMaxRwSessionCount = -1
t.ulRwSessionCount = tokeninfo.ulRwSessionCount
if t.ulRwSessionCount == CK_UNAVAILABLE_INFORMATION:
t.ulRwSessionCount = -1
t.ulMaxPinLen = tokeninfo.ulMaxPinLen
t.ulMinPinLen = tokeninfo.ulMinPinLen
t.ulTotalPublicMemory = tokeninfo.ulTotalPublicMemory
if t.ulTotalPublicMemory == CK_UNAVAILABLE_INFORMATION:
t.ulTotalPublicMemory = -1
t.ulFreePublicMemory = tokeninfo.ulFreePublicMemory
if t.ulFreePublicMemory == CK_UNAVAILABLE_INFORMATION:
t.ulFreePublicMemory = -1
t.ulTotalPrivateMemory = tokeninfo.ulTotalPrivateMemory
if t.ulTotalPrivateMemory == CK_UNAVAILABLE_INFORMATION:
t.ulTotalPrivateMemory = -1
t.ulFreePrivateMemory = tokeninfo.ulFreePrivateMemory
if t.ulFreePrivateMemory == CK_UNAVAILABLE_INFORMATION:
t.ulFreePrivateMemory = -1
t.hardwareVersion = (tokeninfo.hardwareVersion.major, tokeninfo.hardwareVersion.minor)
t.firmwareVersion = (tokeninfo.firmwareVersion.major, tokeninfo.firmwareVersion.minor)
t.utcTime = tokeninfo.GetUtcTime()
return t
def openSession(self, slot, flags=0):
"""
C_OpenSession
@param slot: slot number returned by L{getSlotList}
@type slot: integer
@param flags: 0 (default), CKF_RW_SESSION for RW session
@type flags: integer
@return: a L{Session} object
"""
se = PyKCS11.LowLevel.CK_SESSION_HANDLE()
flags |= CKF_SERIAL_SESSION
rv = self.lib.C_OpenSession(slot, flags, se)
if rv != CKR_OK:
raise PyKCS11Error(rv)
s = Session()
s.lib = self.lib
s.slot = slot
s.session = se
return s
def getMechanismList(self, slot):
"""
C_GetMechanismList
@return: the list of available mechanisms for a slot
@rtype: list
"""
mechanismList = PyKCS11.LowLevel.ckintlist()
rv = self.lib.C_GetMechanismList(slot, mechanismList)
if rv != CKR_OK:
raise PyKCS11Error(rv)
m = []
for x in xrange(len(mechanismList)):
m.append(CKM[mechanismList[x]])
return m
def getMechanismInfo(self, slot, type):
"""
C_GetMechanismInfo
@return: information about a mechanism
@rtype: a L{CK_MECHANISM_INFO} object
"""
info = PyKCS11.LowLevel.CK_MECHANISM_INFO()
rv = self.lib.C_GetMechanismInfo(slot, CKM[type], info)
if rv != CKR_OK:
raise PyKCS11Error(rv)
i = CK_MECHANISM_INFO()
i.ulMinKeySize = info.ulMinKeySize
i.ulMaxKeySize = info.ulMaxKeySize
i.flags = info.flags
return i
def waitForSlotEvent(self, flags=0):
"""
C_WaitForSlotEvent
@param flags: 0 (default) or CKF_DONT_BLOCK
@type flags: integer
@return: slot
@rtype: integer
"""
tmp = 0
(rv, slot) = self.lib.C_WaitForSlotEvent(flags, tmp)
if rv != CKR_OK:
raise PyKCS11Error(rv)
return slot
class Mechanism(object):
"""Wraps CK_MECHANISM"""
def __init__(self, mechanism, param):
"""
@param mechanism: the mechanism to be used
@type mechanism: integer, any CKM_* value
@param param: data to be used as crypto operation parameter
(i.e. the IV for some agorithms)
@type param: string or list/tuple of bytes
@see: L{Session.decrypt}, L{Session.sign}
"""
self.mechanism = mechanism
self.param = param
MechanismRSAPKCS1 = Mechanism(CKM_RSA_PKCS, None)
class Session(object):
""" Manage L{PyKCS11Lib.openSession} objects """
def closeSession(self):
"""
C_CloseSession
"""
rv = self.lib.C_CloseSession(self.session)
if rv != CKR_OK:
raise PyKCS11Error(rv)
def closeAllSession(self):
"""
C_CloseAllSession
"""
rv = self.lib.C_CloseAllSession(self.slot)
if rv != CKR_OK:
raise PyKCS11Error(rv)
def getSessionInfo(self):
"""
C_GetSessionInfo
@return: a L{CK_SESSION_INFO} object
"""
sessioninfo = PyKCS11.LowLevel.CK_SESSION_INFO()
rv = self.lib.C_GetSessionInfo(self.session, sessioninfo)
if rv != CKR_OK:
raise PyKCS11Error(rv)
s = CK_SESSION_INFO()
s.slotID = sessioninfo.slotID
s.state = sessioninfo.state
s.flags = sessioninfo.flags
s.ulDeviceError = sessioninfo.ulDeviceError
return s
def login(self, pin, user_type=CKU_USER):
"""
C_Login
@param pin: the user's PIN
@type pin: string
@param user_type: the user type. The default value is
CKU_USER. You may also use CKU_SO
@type user_type: integer
"""
rv = self.lib.C_Login(self.session, user_type, pin)
if rv != CKR_OK:
raise PyKCS11Error(rv)
def logout(self):
"""
C_Logout
"""
rv = self.lib.C_Logout(self.session)
if rv != CKR_OK:
raise PyKCS11Error(rv)
del self
def initPin(self, new_pin):
"""
C_InitPIN
"""
rv = self.lib.C_InitPIN(self.session, new_pin)
if rv != CKR_OK:
raise PyKCS11Error(rv)
def setPin(self, old_pin, new_pin):
"""
C_SetPIN
"""
rv = self.lib.C_SetPIN(self.session, old_pin, new_pin)
if rv != CKR_OK:
raise PyKCS11Error(rv)
def sign(self, key, data, mecha=MechanismRSAPKCS1):
"""
C_SignInit/C_Sign
@param key: a key handle, obtained calling L{findObjects}.
@type key: integer
@param data: the data to be signed
@type data: (binary) sring or list/tuple of bytes
@param mecha: the signing mechanism to be used
@type mecha: L{Mechanism} instance or L{MechanismRSAPKCS1}
for CKM_RSA_PKCS
@return: the computed signature
@rtype: list of bytes
@note: the returned value is an istance of L{ckbytelist}.
You can easly convert it to a binary string with::
''.join(chr(i) for i in ckbytelistSignature)
"""
m = PyKCS11.LowLevel.CK_MECHANISM()
signature = ckbytelist()
ba = None # must be declared here or may be deallocated too early
m.mechanism = mecha.mechanism
if (mecha.param):
ba = PyKCS11.LowLevel.byteArray(len(mecha.param))
if type(mecha.param) is type(''):
for c in xrange(len(mecha.param)):
ba[c] = ord(mecha.param[c])
else:
for c in xrange(len(mecha.param)):
ba[c] = mecha.param[c]
# with cast() the ba object continue to own internal pointer
# (avoids a leak).
# pParameter is an opaque pointer, never garbage collected.
m.pParameter = ba.cast()
m.ulParameterLen = len(mecha.param)
data1 = ckbytelist()
data1.reserve(len(data))
if type(data) is type(''):
for x in data:
data1.append(ord(x))
else:
for c in xrange(len(data)):
data1.append(data[c])
rv = self.lib.C_SignInit(self.session, m, key)
if (rv != 0):
raise PyKCS11Error(rv)
#first call get signature size
rv = self.lib.C_Sign(self.session, data1, signature)
if (rv != 0):
raise PyKCS11Error(rv)
#second call get actual signature data
rv = self.lib.C_Sign(self.session, data1, signature)
if (rv != 0):
raise PyKCS11Error(rv)
return signature
def decrypt(self, key, data, mecha=MechanismRSAPKCS1):
"""
C_DecryptInit/C_Decrypt
@param key: a key handle, obtained calling L{findObjects}.
@type key: integer
@param data: the data to be decrypted
@type data: (binary) sring or list/tuple of bytes
@param mecha: the decrypt mechanism to be used
@type mecha: L{Mechanism} instance or L{MechanismRSAPKCS1}
for CKM_RSA_PKCS
@return: the decrypted data
@rtype: list of bytes
@note: the returned value is an istance of L{ckbytelist}.
You can easly convert it to a binary string with::
''.join(chr(i) for i in ckbytelistData)
"""
m = PyKCS11.LowLevel.CK_MECHANISM()
decrypted = ckbytelist()
ba = None # must be declared here or may be deallocated too early
m.mechanism = mecha.mechanism
if (mecha.param):
ba = PyKCS11.LowLevel.byteArray(len(mecha.param))
if type(mecha.param) is type(''):
for c in xrange(len(mecha.param)):
ba[c] = ord(mecha.param[c])
else:
for c in xrange(len(mecha.param)):
ba[c] = mecha.param[c]
# with cast() the ba object continue to own internal pointer
# (avoids a leak).
# pParameter is an opaque pointer, never garbage collected.
m.pParameter = ba.cast()
m.ulParameterLen = len(mecha.param)
data1 = ckbytelist()
data1.reserve(len(data))
if type(data) is type(''):
for x in data:
data1.append(ord(x))
else:
for c in xrange(len(data)):
data1.append(data[c])
rv = self.lib.C_DecryptInit(self.session, m, key)
if (rv != 0):
raise PyKCS11Error(rv)
#first call get decrypted size
rv = self.lib.C_Decrypt(self.session, data1, decrypted)
if (rv != 0):
raise PyKCS11Error(rv)
#second call get actual decrypted data
rv = self.lib.C_Decrypt(self.session, data1, decrypted)
if (rv != 0):
raise PyKCS11Error(rv)
return decrypted
def isNum(self, type):
if type in (CKA_CERTIFICATE_TYPE,
CKA_CLASS,
CKA_KEY_GEN_MECHANISM,
CKA_KEY_TYPE,
CKA_MODULUS_BITS,
CKA_VALUE_BITS,
CKA_VALUE_LEN):
return True
return False
def isString(self, type):
if type in (CKA_LABEL,
CKA_APPLICATION):
return True
return False
def isBool(self, type):
if type in (CKA_ALWAYS_SENSITIVE,
CKA_DECRYPT,
CKA_ENCRYPT,
CKA_HAS_RESET,
CKA_LOCAL,
CKA_MODIFIABLE,
CKA_NEVER_EXTRACTABLE,
CKA_PRIVATE,
CKA_RESET_ON_INIT,
CKA_SECONDARY_AUTH,
CKA_SENSITIVE,
CKA_SIGN,
CKA_SIGN_RECOVER,
CKA_TOKEN,
CKA_TRUSTED,
CKA_UNWRAP,
CKA_VERIFY,
CKA_VERIFY_RECOVER,
CKA_WRAP):
return True
return False
def isBin(self, type):
return (not self.isBool(type)) and (not self.isString(type)) and (not self.isNum(type))
def findObjects(self, template=()):
"""
find the objects matching the template pattern
@param template: list of attributes tuples (attribute,value).
The default value is () and all the objects are returned
@type template: list
@return: a list of object ids
@rtype: list
"""
t = PyKCS11.LowLevel.ckattrlist(len(template))
for x in xrange(len(template)):
attr = template[x]
if self.isNum(attr[0]):
t[x].SetNum(attr[0], attr[1])
elif self.isString(attr[0]):
t[x].SetString(attr[0], attr[1])
elif self.isBool(attr[0]):
t[x].SetBool(attr[0], attr[1])
elif self.isBin(attr[0]):
t[x].SetBin(attr[0], attr[1])
else:
raise PyKCS11Error(-2)
# we search for 10 objects by default. speed/memory tradeoff
result = PyKCS11.LowLevel.ckobjlist(10)
self.lib.C_FindObjectsInit(self.session, t)
res = []
while True:
self.lib.C_FindObjects(self.session, result)
for x in result:
# make a copy of the handle: the original value get
# corrupted (!!)
a = PyKCS11.LowLevel.CK_OBJECT_HANDLE()
a.assign(x.value())
res.append(a)
if len(result) == 0:
break
self.lib.C_FindObjectsFinal(self.session)
return res
def getAttributeValue(self, obj_id, attr, allAsBinary=False):
"""
C_GetAttributeValue
@param obj_id: object ID returned by L{findObjects}
@type obj_id: integer
@param attr: list of attributes
@type attr: list
@param allAsBinary: return all values as binary data; default is False.
@type allAsBinary: Boolean
@return: a list of values corresponding to the list of attributes
@rtype: list
@see: L{getAttributeValue_fragmented}
@note: if allAsBinary is True the function do not convert results to
Python types (i.e.: CKA_TOKEN to Bool, CKA_CLASS to int, ...).
Binary data is returned as L{ckbytelist} type, usable
as a list containing only bytes.
You can easly convert it to a binary string with::
''.join(chr(i) for i in ckbytelistVariable)
"""
valTemplate = PyKCS11.LowLevel.ckattrlist(len(attr))
for x in xrange(len(attr)):
valTemplate[x].SetType(attr[x])
# first call to get the attribute size and reserve the memory
rv = self.lib.C_GetAttributeValue(self.session, obj_id, valTemplate)
if rv == CKR_ATTRIBUTE_TYPE_INVALID \
or rv == CKR_ATTRIBUTE_SENSITIVE:
return self.getAttributeValue_fragmented(obj_id, attr, allAsBinary)
if rv != CKR_OK:
raise PyKCS11Error(rv)
# second call to get the attribute value
rv = self.lib.C_GetAttributeValue(self.session, obj_id, valTemplate)
if rv != CKR_OK:
raise PyKCS11Error(rv)
res = []
for x in xrange(len(attr)):
if (allAsBinary):
res.append(valTemplate[x].GetBin())
elif valTemplate[x].IsNum():
res.append(valTemplate[x].GetNum())
elif valTemplate[x].IsBool():
res.append(valTemplate[x].GetBool())
elif valTemplate[x].IsString():
res.append(valTemplate[x].GetString())
elif valTemplate[x].IsBin():
res.append(valTemplate[x].GetBin())
else:
raise PyKCS11Error(-2)
return res
def getAttributeValue_fragmented(self, obj_id, attr, allAsBinary=False):
"""
Same as L{getAttributeValue} except that when some attribute
is sensitive or unknown an empty value (None) is returned.
Note: this is achived by getting attributes one by one.
@see: L{getAttributeValue}
"""
# some attributes does not exists or is sensitive
# but we don't know which ones. So try one by one
valTemplate = PyKCS11.LowLevel.ckattrlist(1)
res = []
for x in xrange(len(attr)):
valTemplate[0].Reset()
valTemplate[0].SetType(attr[x])
# first call to get the attribute size and reserve the memory
rv = self.lib.C_GetAttributeValue(self.session, obj_id, valTemplate)
if rv == CKR_ATTRIBUTE_TYPE_INVALID \
or rv == CKR_ATTRIBUTE_SENSITIVE:
# append an empty value
res.append(None)
continue
if rv != CKR_OK:
raise PyKCS11Error(rv)
# second call to get the attribute value
rv = self.lib.C_GetAttributeValue(self.session, obj_id, valTemplate)
if rv != CKR_OK:
raise PyKCS11Error(rv)
if (allAsBinary):
res.append(valTemplate[0].GetBin())
elif valTemplate[0].IsNum():
res.append(valTemplate[0].GetNum())
elif valTemplate[0].IsBool():
res.append(valTemplate[0].GetBool())
elif valTemplate[0].IsString():
res.append(valTemplate[0].GetString())
elif valTemplate[0].IsBin():
res.append(valTemplate[0].GetBin())
else:
raise PyKCS11Error(-2)
return res
def seedRandom(self, seed):
"""
C_SeedRandom
@param seed: seed material
@type seed: iterable
"""
low_seed = ckbytelist(len(seed))
for c in xrange(len(seed)):
low_seed.append(seed[c])
rv = self.lib.C_SeedRandom(self.session, low_seed)
if rv != CKR_OK:
raise PyKCS11Error(rv)
def generateRandom(self, size=16):
"""
C_GenerateRandom
@param size: number of random bytes to get
@type size: integer
@note: the returned value is an istance of L{ckbytelist}.
You can easly convert it to a binary string with::
''.join(chr(i) for i in random)
"""
low_rand = ckbytelist(size)
rv = self.lib.C_GenerateRandom(self.session, low_rand)
if rv != CKR_OK:
raise PyKCS11Error(rv)
return low_rand
if __name__ == "__main__":
# sample test/debug code
p = PyKCS11Lib()
p.load()
print "getInfo"
print p.getInfo()
print
print "getSlotList"
s = p.getSlotList()
print "slots:", s
slot = s[1]
print "using slot:", slot
print
print "getSlotInfo"
print p.getSlotInfo(slot)
print
print "getTokenInfo"
print p.getTokenInfo(slot)
print
print "openSession"
se = p.openSession(slot)
print
print "sessionInfo"
print se.getSessionInfo()
print
print "seedRandom"
try:
se.seedRandom([1, 2, 3, 4])
except PyKCS11Error, e:
print e
print "generateRandom"
print se.generateRandom()
print
print "login"
se.login(pin="0000")
print
print "sessionInfo"
print se.getSessionInfo()
print
print "findObjects"
objs = se.findObjects([(CKA_CLASS, CKO_CERTIFICATE)])
print objs
print
print "getAttributeValue"
for o in objs:
attr = se.getAttributeValue(o, [CKA_LABEL, CKA_CLASS])
print attr
print
print "logout"
se.logout()
print
print "closeSession"
se.closeSession()
| gpl-2.0 | -1,112,641,178,927,531,400 | 29.958449 | 99 | 0.578263 | false | 3.593954 | false | false | false |
yunojuno/django-test | trello_webhooks/migrations/0001_initial.py | 2 | 1837 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='CallbackEvent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField()),
('event_type', models.CharField(max_length=50)),
('event_payload', jsonfield.fields.JSONField(default=dict)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Webhook',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('trello_model_id', models.CharField(help_text='The id of the model being watched.', max_length=24)),
('trello_id', models.CharField(help_text='Webhook id returned from Trello API.', max_length=24, blank=True)),
('description', models.CharField(help_text='Description of the webhook.', max_length=500, blank=True)),
('auth_token', models.CharField(help_text='The Trello API user auth token.', max_length=64)),
('created_at', models.DateTimeField(blank=True)),
('last_updated_at', models.DateTimeField(blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='callbackevent',
name='webhook',
field=models.ForeignKey(to='trello_webhooks.Webhook'),
preserve_default=True,
),
]
| mit | -2,390,449,551,675,246,000 | 38.085106 | 125 | 0.559608 | false | 4.405276 | false | false | false |
jamesacampbell/python-examples | multi-categorization-tweets-example.py | 1 | 6583 | # author: James Campbell
# what: example three+ categorization of tweets using nltk
# date created: November 23 2015
import nltk
import sys
from sys import exit
pos_tweets = [('I love this car', 'positive'),
('This view is amazing', 'positive'),
('I feel great this morning', 'positive'),
('I am so excited about the concert', 'positive'),
('He is my best friend', 'positive'),
('Going well', 'positive'),
('Thank you', 'positive'),
('Hope you are doing well', 'positive'),
('I am very happy', 'positive'),
('Good for you', 'positive'),
('all good. I know about it and I accept it.', 'positive'),
('This is really good!', 'positive'),
('Tomorrow is going to be fun.', 'positive'),
('Smiling all around.', 'positive'),
('These are great apples today.', 'positive'),
('How about them apples? Thomas is a happy boy.', 'positive'),
('Thomas is very zen. He is well-mannered.', 'positive'),
('happy and good lots of light!', 'positive'),
('I like this new iphone very much', 'positive')]
neg_tweets = [('I do not like this car', 'negative'),
('This view is horrible', 'negative'),
('I feel tired this morning', 'negative'),
('I am not looking forward to the concert', 'negative'),
('He is my enemy', 'negative'),
('I am a bad boy', 'negative'),
('This is not good', 'negative'),
('I am bothered by this', 'negative'),
('I am not connected with this', 'negative'),
('Sadistic creep you ass. Die.', 'negative'),
('All sorts of crazy and scary as hell.', 'negative'),
('Not his emails, no.', 'negative'),
('His father is dead. Returned obviously.', 'negative'),
('He has a bomb.', 'negative'),
('Too fast to be on foot. We cannot catch them.', 'negative'),
('Feeling so stupid stoopid stupid!', 'negative'),
(':-(( :-(', 'negative'),
('This is the worst way imaginable, all of this traffic', 'negative')]
rain_tweets = [('this rain is craze today', 'rain'),
('Nov 23 17:30 Temperature 3C no or few clouds Wind SW 6 km/h Humidity 70% France', 'rain'),
('missin climbing mountains in the rain', 'rain'),
('There are days in live broadcasting Torrential rain in Paris ', 'rain'),
('Heavy Rain today in!', 'rain'),
('Woman in the boulangerie started complaining about the rain. I said, "its better than terrorists". Need to finesse my jovial patter', 'rain'),
('Light to moderate rain over NCR', 'rain'),
('After a cold night last night, tonight will be milder and mainly frost-free, with this band of rain. Jo', 'rain'),
('But I love the rain. And it rains frequently these days~ So it makes me feel rather good', 'rain'),
('With 1000 mm rain already and more rain forecasted 4 Chennai, Nov 2015 will overtake Oct 2005 and Nov 1918 to become the Wettest Month EVER!', 'rain'),
('It is raining today. Wet!', 'rain'),
('Lots of rain today. Raining!', 'rain'),
('Why is it raining?', 'rain'),
('So much rain!', 'rain'),
('it always rains this time of year', 'rain'),
('raining', 'rain'),
('raining outside today, rained yesterday too', 'rain'),
('rainy weather today! jeez', 'rain'),
('Rain has finally extinguished a #wildfire in Olympic National Park that had been burning since May', 'rain'),
('The rain had us indoors for Thursdays celebration', 'rain'),
('Rain (hourly) 0.0 mm, Pressure: 1012 hPa, falling slowly', 'rain'),
('That aspiration yours outfit make ends meet spite of the rainy weather this midsummer?: Edb', 'rain'),
('Glasgow\'s bright lights of Gordon st tonight #rain #Glasgow', 'rain'),
('Why is it raining? Because it always rains this time of year', 'rain'),
('The forecast for this week\'s weather includes lots of rain!', 'rain'),
('Morning Has Broken: Morning has BrokenAs I sit in my warm car in between rain squalls I am looking out', 'rain'),
('Wind 2.0 mph SW. Barometer 1021.10 mb, Falling. Temperature 5.5 °C. Rain today 0.2 mm. Humidity 78%', 'rain')]
tweets = []
for (words, sentiment) in pos_tweets + neg_tweets + rain_tweets:
words_filtered = [e.lower() for e in words.split() if len(e) >= 2]
tweets.append((words_filtered, sentiment))
def get_words_in_tweets(tweets):
all_words = []
for (words, sentiment) in tweets:
all_words.extend(words)
return all_words
def get_word_features(wordlist):
wordlist = nltk.FreqDist(wordlist)
word_features = wordlist.keys()
return word_features
def extract_features(document):
document_words = set(document)
features = {}
for word in word_features:
features['contains(%s)' % word] = (word in document_words)
return features
word_features = get_word_features(get_words_in_tweets(tweets))
training_set = nltk.classify.apply_features(extract_features, tweets)
classifier = nltk.NaiveBayesClassifier.train(training_set)
runtweets = [] # setup to import a list of tweets here if you wish into a python list
if len(sys.argv) > 1: # if param passed 4 name of text file w/ list of tweets
tweetfile = sys.argv[1]
with open(tweetfile, "r") as ins:
for line in ins:
runtweets.append(line)
runtweets.append('I am a bad boy') # should be negative
runtweets.append('rain today') # should be rain
runtweets.append('so stupid') # should be negative
runtweets.append('it is raining outside') # should be rain
runtweets.append('I love it') # should be positive
runtweets.append('so good') # should be positive
poscount = 0
negcount = 0
raincount = 0
for tweett in runtweets:
valued = classifier.classify(extract_features(tweett.split()))
print(valued)
if valued == 'negative':
negcount = negcount + 1
if valued == 'positive':
poscount = poscount + 1
if valued == 'rain':
raincount = raincount + 1
print('Positive count: %s \nNegative count: %s \nRain count: %s' % (poscount, negcount, raincount))
exit()
| mit | -4,959,251,909,662,075,000 | 48.863636 | 168 | 0.590702 | false | 3.666852 | false | false | false |
karel-murgas/Shootris | classes.py | 1 | 9808 | """Defines classes for Shootris"""
# Copyright (C) 2016 Karel "laird Odol" Murgas
# karel.murgas@gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############
# Libraries #
#############
from collections import deque
from utilities import *
#####################
# Class definitions #
#####################
class Blob:
"""Defines blob - now only for main blob - to be expanded later"""
def __init__(self, screen, field, event, speed):
self.cols = MAXCOL
self.max_rows = MAXROW
self.top = 0
self.left = 0
self.ls = LEFTSTICK
self.bs = BOTTOMSTICK
self.content = deque([])
self.generated_rows = 0
self.append_row()
self.screen = screen
self.field = field
self.speed = speed
self.timer = pyg.time.set_timer(event, speed)
self.event = event
self.row_fraction = 0
# oveř prádzný spodek
# zahoď spodek
def get_rect(self):
"""Gets rectangle containing the blob"""
top_line = 0 if self.top == 0 else self.top * CELLSIZE - rest_of_cell(self.row_fraction)
bottom_len = len(self.content) * CELLSIZE
if self.top == 0:
bottom_len -= rest_of_cell(self.row_fraction)
return pyg.Rect(self.left * CELLSIZE, top_line, self.cols * CELLSIZE, bottom_len)
def get_bottom(self):
"""Returns row index of bottom row"""
return self.top + len(self.content) - 1 # index of top row + number of rows - correction
def create_cell(self, r, c):
"""Creates content of the cell - colors the cell regarding left and bottom neighbours"""
if c > 0 and roll(self.ls) and self.content[r][c - 1] is not None:
return self.content[r][c - 1] # color by left cell
elif r < len(self.content) - 1 and roll(self.bs) and self.content[r + 1][c] is not None:
return self.content[r + 1][c] # color by bottom cell
else:
return get_random_color() # random color
def append_row(self):
"""Appends new row to the start of the blob"""
self.content.appendleft([])
self.generated_rows += 1
for c in range(self.cols):
self.content[0].append(self.create_cell(0, c))
def clearRow(self, row):
"""Draws row as if the color was None"""
draw_blob(self.screen, self.field, [[None]*self.cols], row)
def destroy(self):
"""Ends the game and deletes the blob instance"""
self.timer = pyg.time.set_timer(self.event, 0)
pyg.event.post(pyg.event.Event(LOOSE_EVENT))
del self
def win(self):
"""Ends the game winning and deletes the blob instance"""
self.timer = pyg.time.set_timer(self.event, 0)
pyg.event.post(pyg.event.Event(WIN_EVENT))
del self
def move(self):
"""Moves the blob one pixel down, checks for blob boundaries"""
if self.get_bottom() >= FIELDLENGTH - 1 and self.row_fraction == 0:
self.destroy()
else:
if self.generated_rows < self.max_rows: # there is new line in the buffer
if self.row_fraction == 0:
self.append_row()
else: # clear the top line
self.screen.blit(pyg.Surface((self.cols * CELLSIZE, 1)), (0, self.top * CELLSIZE - rest_of_cell(self.row_fraction), self.cols * CELLSIZE, 1))
if self.row_fraction == 0:
self.top += 1
self.row_fraction = (self.row_fraction + 1) % CELLSIZE
draw_blob(self.screen, self.field, self.content, self.top, self.row_fraction)
def damage(self, r, c, color):
"""Deletes content of this cell and all direct neighbours"""
score = 0
if self.content[r][c] == color:
self.content[r][c] = None
score += 1
if c > 0 and self.content[r][c - 1] == color: # left
score += self.damage(r, c - 1, color)
if c < self.cols - 1 and self.content[r][c + 1] == color: # right
score += self.damage(r, c + 1, color)
if r > 0 and self.content[r - 1][c] == color: # top
score += self.damage(r - 1, c, color)
if r < len(self.content) - 1 and self.content[r + 1][c] == color: # bottom
score += self.damage(r + 1, c, color)
return score
def hit(self, c, r, color):
"""Determines, if hit was success. If it was, deletes cells and checks and pop out empty bottom rows"""
if self.content[r][c] == color:
if SOUND_EFFECTS_ON:
sound_hit_success.play()
score = self.damage(r, c, color)
draw_blob(self.screen, self.field, self.content, self.top, self.row_fraction)
while len(self.content) > 0 and self.content[len(self.content) - 1] == [None] * self.cols:
self.content.pop()
if len(self.content) == 0 and self.max_rows == self.generated_rows:
self.win()
return score
else:
if SOUND_EFFECTS_ON:
if self.content[r][c] != None:
sound_hit_fail.play()
else:
sound_miss.play()
return 0
class Infopanel:
def __init__(self, screen):
self.position = INFO_FIELD
self.score_position =INFO_FIELD[1] + 5 * CELLSIZE
self.highscore_position = INFO_FIELD[1] + 7 * CELLSIZE
self.text_position = INFO_FIELD[1] + FIELDLENGTH * CELLSIZE / 2
self.text_flash_position = INFO_FIELD[1] + (FIELDLENGTH + 2) * CELLSIZE / 2
self.tips_header_position = INFO_FIELD[1] + (FIELDLENGTH + 8) * CELLSIZE / 2
self.tips_text_position = INFO_FIELD[1] + (FIELDLENGTH + 10) * CELLSIZE / 2
self.text_flesh_visible = True
self.score = 0
self.highscore = 0
self.screen = screen
self.flash_timer = pyg.time.set_timer(FLASH_EVENT, TEXT_FLESH_TIME)
self.tips_timer = pyg.time.set_timer(TIPS_EVENT, TIPS_TIME)
def write(self, text, surf_top, surf_left=INFO_FIELD[0] + CELLSIZE, surf_size=((INFOWIDTH - 1) * CELLSIZE, CELLSIZE), color=WHITE, size=CELLSIZE):
font = pyg.font.SysFont(pyg.font.get_default_font(), size)
surf_start = (surf_left, surf_top)
self.screen.blit(pyg.Surface(surf_size), surf_start)
self.screen.blit(font.render(text, 1, color), surf_start)
pyg.display.update(pyg.Rect(surf_start, surf_size))
def message(self, text):
self.write(text, self.text_position)
def message_flash(self, text):
self.write(text, self.text_flash_position)
def message_tips_header(self, text):
self.write(text, self.tips_header_position)
def message_tips(self, text):
self.write(text, self.tips_text_position, size=(CELLSIZE * 4) // 5)
def add_score(self, score):
self.score += score
col = WHITE if self.score < self.highscore else GREEN
self.write('SCORE: ' + str(self.score), self.score_position, color=col)
def resetscore(self):
if self.score >= self.highscore:
self.write('HIGHSCORE: ' + str(self.score), self.highscore_position, color=RED)
self.highscore = self.score
self.score = 0
self.write('SCORE: 0', self.score_position)
class Magazine:
def __init__(self, screen, max_ammo=MAXAMMO, event=ADD_AMMO_EVENT, speed=AMMO_REPLENISH_SPEED):
self.maxammo = max_ammo
self.screen = screen
self.position = pyg.Rect(INFO_FIELD[0] + CELLSIZE, INFO_FIELD[1] + CELLSIZE, (INFOWIDTH - 1) * CELLSIZE, 2 * CELLSIZE)
self.content = deque([])
self.add_ammo()
self.event = event
self.timer = pyg.time.set_timer(event, speed)
def add_ammo(self):
if len(self.content) < self.maxammo:
self.content.append(get_random_color())
self.draw()
def color_bullet(self, cell, color):
"""Colors one 'bullet' cell"""
cell.fill(color)
return cell
def draw(self):
self.screen.blit(pyg.Surface(((INFOWIDTH - 1) * CELLSIZE, 2 * CELLSIZE)), self.position)
cell = pyg.Surface((2 * CELLSIZE, 2 * CELLSIZE))
for i, color in enumerate(self.content):
self.screen.blit(self.color_bullet(cell, color), (INFO_FIELD[0] + (1 + 2 * i) * CELLSIZE, INFO_FIELD[1] + CELLSIZE))
pyg.display.update(self.position)
def shoot(self):
if not self.is_empty():
bullet = self.content.popleft()
self.draw()
return bullet
else:
if SOUND_EFFECTS_ON:
sound_empty.play()
return None
def destroy(self):
self.timer = pyg.time.set_timer(self.event, 0)
self.content = deque([])
self.draw()
del self
def is_empty(self):
return len(self.content) == 0
def reload(self):
if not self.is_empty():
if SOUND_EFFECTS_ON:
sound_reload.play()
bullet = self.content.popleft()
self.content.append(bullet)
self.draw() | gpl-3.0 | 912,791,536,594,842,800 | 38.22 | 157 | 0.580681 | false | 3.49021 | false | false | false |
Ebag333/Pyfa | gui/crestFittings.py | 1 | 15015 | import time
import webbrowser
import json
import wx
import requests
from service.port import Port
from service.fit import Fit
from eos.types import Cargo
from eos.db import getItem
from gui.display import Display
import gui.globalEvents as GE
if 'wxMac' not in wx.PlatformInfo or ('wxMac' in wx.PlatformInfo and wx.VERSION >= (3, 0)):
from service.crest import Crest, CrestModes
class CrestFittings(wx.Frame):
def __init__(self, parent):
wx.Frame.__init__(self, parent, id=wx.ID_ANY, title="Browse EVE Fittings", pos=wx.DefaultPosition,
size=wx.Size(550, 450), style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL)
self.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE))
self.mainFrame = parent
mainSizer = wx.BoxSizer(wx.VERTICAL)
sCrest = Crest.getInstance()
characterSelectSizer = wx.BoxSizer(wx.HORIZONTAL)
if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
self.stLogged = wx.StaticText(self, wx.ID_ANY, "Currently logged in as %s" % sCrest.implicitCharacter.name,
wx.DefaultPosition, wx.DefaultSize)
self.stLogged.Wrap(-1)
characterSelectSizer.Add(self.stLogged, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
else:
self.charChoice = wx.Choice(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, [])
characterSelectSizer.Add(self.charChoice, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
self.updateCharList()
self.fetchBtn = wx.Button(self, wx.ID_ANY, u"Fetch Fits", wx.DefaultPosition, wx.DefaultSize, 5)
characterSelectSizer.Add(self.fetchBtn, 0, wx.ALL, 5)
mainSizer.Add(characterSelectSizer, 0, wx.EXPAND, 5)
self.sl = wx.StaticLine(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL)
mainSizer.Add(self.sl, 0, wx.EXPAND | wx.ALL, 5)
contentSizer = wx.BoxSizer(wx.HORIZONTAL)
browserSizer = wx.BoxSizer(wx.VERTICAL)
self.fitTree = FittingsTreeView(self)
browserSizer.Add(self.fitTree, 1, wx.ALL | wx.EXPAND, 5)
contentSizer.Add(browserSizer, 1, wx.EXPAND, 0)
fitSizer = wx.BoxSizer(wx.VERTICAL)
self.fitView = FitView(self)
fitSizer.Add(self.fitView, 1, wx.ALL | wx.EXPAND, 5)
btnSizer = wx.BoxSizer(wx.HORIZONTAL)
self.importBtn = wx.Button(self, wx.ID_ANY, u"Import to pyfa", wx.DefaultPosition, wx.DefaultSize, 5)
self.deleteBtn = wx.Button(self, wx.ID_ANY, u"Delete from EVE", wx.DefaultPosition, wx.DefaultSize, 5)
btnSizer.Add(self.importBtn, 1, wx.ALL, 5)
btnSizer.Add(self.deleteBtn, 1, wx.ALL, 5)
fitSizer.Add(btnSizer, 0, wx.EXPAND)
contentSizer.Add(fitSizer, 1, wx.EXPAND, 0)
mainSizer.Add(contentSizer, 1, wx.EXPAND, 5)
self.fetchBtn.Bind(wx.EVT_BUTTON, self.fetchFittings)
self.importBtn.Bind(wx.EVT_BUTTON, self.importFitting)
self.deleteBtn.Bind(wx.EVT_BUTTON, self.deleteFitting)
self.mainFrame.Bind(GE.EVT_SSO_LOGOUT, self.ssoLogout)
self.mainFrame.Bind(GE.EVT_SSO_LOGIN, self.ssoLogin)
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.statusbar = wx.StatusBar(self)
self.statusbar.SetFieldsCount()
self.SetStatusBar(self.statusbar)
self.cacheTimer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.updateCacheStatus, self.cacheTimer)
self.SetSizer(mainSizer)
self.Layout()
self.Centre(wx.BOTH)
def ssoLogin(self, event):
self.updateCharList()
event.Skip()
def updateCharList(self):
sCrest = Crest.getInstance()
chars = sCrest.getCrestCharacters()
if len(chars) == 0:
self.Close()
self.charChoice.Clear()
for char in chars:
self.charChoice.Append(char.name, char.ID)
self.charChoice.SetSelection(0)
def updateCacheStatus(self, event):
t = time.gmtime(self.cacheTime - time.time())
if t < 0:
self.cacheTimer.Stop()
else:
sTime = time.strftime("%H:%M:%S", t)
self.statusbar.SetStatusText("Cached for %s" % sTime, 0)
def ssoLogout(self, event):
if event.type == CrestModes.IMPLICIT:
self.Close()
else:
self.updateCharList()
event.Skip() # continue event
def OnClose(self, event):
self.mainFrame.Unbind(GE.EVT_SSO_LOGOUT, handler=self.ssoLogout)
self.mainFrame.Unbind(GE.EVT_SSO_LOGIN, handler=self.ssoLogin)
event.Skip()
def getActiveCharacter(self):
sCrest = Crest.getInstance()
if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
return sCrest.implicitCharacter.ID
selection = self.charChoice.GetCurrentSelection()
return self.charChoice.GetClientData(selection) if selection is not None else None
def fetchFittings(self, event):
sCrest = Crest.getInstance()
try:
waitDialog = wx.BusyInfo("Fetching fits, please wait...", parent=self)
fittings = sCrest.getFittings(self.getActiveCharacter())
self.cacheTime = fittings.get('cached_until')
self.updateCacheStatus(None)
self.cacheTimer.Start(1000)
self.fitTree.populateSkillTree(fittings)
except requests.exceptions.ConnectionError:
self.statusbar.SetStatusText("Connection error, please check your internet connection")
finally:
del waitDialog
def importFitting(self, event):
selection = self.fitView.fitSelection
if not selection:
return
data = self.fitTree.fittingsTreeCtrl.GetPyData(selection)
sPort = Port.getInstance()
fits = sPort.importFitFromBuffer(data)
self.mainFrame._openAfterImport(fits)
def deleteFitting(self, event):
sCrest = Crest.getInstance()
selection = self.fitView.fitSelection
if not selection:
return
data = json.loads(self.fitTree.fittingsTreeCtrl.GetPyData(selection))
dlg = wx.MessageDialog(self,
"Do you really want to delete %s (%s) from EVE?" % (data['name'], data['ship']['name']),
"Confirm Delete", wx.YES | wx.NO | wx.ICON_QUESTION)
if dlg.ShowModal() == wx.ID_YES:
try:
sCrest.delFitting(self.getActiveCharacter(), data['fittingID'])
except requests.exceptions.ConnectionError:
self.statusbar.SetStatusText("Connection error, please check your internet connection")
class ExportToEve(wx.Frame):
def __init__(self, parent):
wx.Frame.__init__(self, parent, id=wx.ID_ANY, title="Export fit to EVE", pos=wx.DefaultPosition,
size=(wx.Size(350, 100)), style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL)
self.mainFrame = parent
self.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE))
sCrest = Crest.getInstance()
mainSizer = wx.BoxSizer(wx.VERTICAL)
hSizer = wx.BoxSizer(wx.HORIZONTAL)
if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
self.stLogged = wx.StaticText(self, wx.ID_ANY, "Currently logged in as %s" % sCrest.implicitCharacter.name,
wx.DefaultPosition, wx.DefaultSize)
self.stLogged.Wrap(-1)
hSizer.Add(self.stLogged, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
else:
self.charChoice = wx.Choice(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, [])
hSizer.Add(self.charChoice, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
self.updateCharList()
self.charChoice.SetSelection(0)
self.exportBtn = wx.Button(self, wx.ID_ANY, u"Export Fit", wx.DefaultPosition, wx.DefaultSize, 5)
hSizer.Add(self.exportBtn, 0, wx.ALL, 5)
mainSizer.Add(hSizer, 0, wx.EXPAND, 5)
self.exportBtn.Bind(wx.EVT_BUTTON, self.exportFitting)
self.statusbar = wx.StatusBar(self)
self.statusbar.SetFieldsCount(2)
self.statusbar.SetStatusWidths([100, -1])
self.mainFrame.Bind(GE.EVT_SSO_LOGOUT, self.ssoLogout)
self.mainFrame.Bind(GE.EVT_SSO_LOGIN, self.ssoLogin)
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.SetSizer(hSizer)
self.SetStatusBar(self.statusbar)
self.Layout()
self.Centre(wx.BOTH)
def updateCharList(self):
sCrest = Crest.getInstance()
chars = sCrest.getCrestCharacters()
if len(chars) == 0:
self.Close()
self.charChoice.Clear()
for char in chars:
self.charChoice.Append(char.name, char.ID)
self.charChoice.SetSelection(0)
def ssoLogin(self, event):
self.updateCharList()
event.Skip()
def ssoLogout(self, event):
if event.type == CrestModes.IMPLICIT:
self.Close()
else:
self.updateCharList()
event.Skip() # continue event
def OnClose(self, event):
self.mainFrame.Unbind(GE.EVT_SSO_LOGOUT, handler=self.ssoLogout)
event.Skip()
def getActiveCharacter(self):
sCrest = Crest.getInstance()
if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
return sCrest.implicitCharacter.ID
selection = self.charChoice.GetCurrentSelection()
return self.charChoice.GetClientData(selection) if selection is not None else None
def exportFitting(self, event):
sPort = Port.getInstance()
fitID = self.mainFrame.getActiveFit()
self.statusbar.SetStatusText("", 0)
if fitID is None:
self.statusbar.SetStatusText("Please select an active fitting in the main window", 1)
return
self.statusbar.SetStatusText("Sending request and awaiting response", 1)
sCrest = Crest.getInstance()
try:
sFit = Fit.getInstance()
data = sPort.exportCrest(sFit.getFit(fitID))
res = sCrest.postFitting(self.getActiveCharacter(), data)
self.statusbar.SetStatusText("%d: %s" % (res.status_code, res.reason), 0)
try:
text = json.loads(res.text)
self.statusbar.SetStatusText(text['message'], 1)
except ValueError:
self.statusbar.SetStatusText("", 1)
except requests.exceptions.ConnectionError:
self.statusbar.SetStatusText("Connection error, please check your internet connection", 1)
class CrestMgmt(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title="CREST Character Management", pos=wx.DefaultPosition,
size=wx.Size(550, 250), style=wx.DEFAULT_DIALOG_STYLE)
self.mainFrame = parent
mainSizer = wx.BoxSizer(wx.HORIZONTAL)
self.lcCharacters = wx.ListCtrl(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_REPORT)
self.lcCharacters.InsertColumn(0, heading='Character')
self.lcCharacters.InsertColumn(1, heading='Refresh Token')
self.popCharList()
mainSizer.Add(self.lcCharacters, 1, wx.ALL | wx.EXPAND, 5)
btnSizer = wx.BoxSizer(wx.VERTICAL)
self.addBtn = wx.Button(self, wx.ID_ANY, u"Add Character", wx.DefaultPosition, wx.DefaultSize, 0)
btnSizer.Add(self.addBtn, 0, wx.ALL | wx.EXPAND, 5)
self.deleteBtn = wx.Button(self, wx.ID_ANY, u"Revoke Character", wx.DefaultPosition, wx.DefaultSize, 0)
btnSizer.Add(self.deleteBtn, 0, wx.ALL | wx.EXPAND, 5)
mainSizer.Add(btnSizer, 0, wx.EXPAND, 5)
self.addBtn.Bind(wx.EVT_BUTTON, self.addChar)
self.deleteBtn.Bind(wx.EVT_BUTTON, self.delChar)
self.mainFrame.Bind(GE.EVT_SSO_LOGIN, self.ssoLogin)
self.SetSizer(mainSizer)
self.Layout()
self.Centre(wx.BOTH)
def ssoLogin(self, event):
self.popCharList()
event.Skip()
def popCharList(self):
sCrest = Crest.getInstance()
chars = sCrest.getCrestCharacters()
self.lcCharacters.DeleteAllItems()
for index, char in enumerate(chars):
self.lcCharacters.InsertStringItem(index, char.name)
self.lcCharacters.SetStringItem(index, 1, char.refresh_token)
self.lcCharacters.SetItemData(index, char.ID)
self.lcCharacters.SetColumnWidth(0, wx.LIST_AUTOSIZE)
self.lcCharacters.SetColumnWidth(1, wx.LIST_AUTOSIZE)
def addChar(self, event):
sCrest = Crest.getInstance()
uri = sCrest.startServer()
webbrowser.open(uri)
def delChar(self, event):
item = self.lcCharacters.GetFirstSelected()
if item > -1:
charID = self.lcCharacters.GetItemData(item)
sCrest = Crest.getInstance()
sCrest.delCrestCharacter(charID)
self.popCharList()
class FittingsTreeView(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent, id=wx.ID_ANY)
self.parent = parent
pmainSizer = wx.BoxSizer(wx.VERTICAL)
tree = self.fittingsTreeCtrl = wx.TreeCtrl(self, wx.ID_ANY, style=wx.TR_DEFAULT_STYLE | wx.TR_HIDE_ROOT)
pmainSizer.Add(tree, 1, wx.EXPAND | wx.ALL, 0)
self.root = tree.AddRoot("Fits")
self.populateSkillTree(None)
self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.displayFit)
self.SetSizer(pmainSizer)
self.Layout()
def populateSkillTree(self, data):
if data is None:
return
root = self.root
tree = self.fittingsTreeCtrl
tree.DeleteChildren(root)
dict = {}
fits = data['items']
for fit in fits:
if fit['ship']['name'] not in dict:
dict[fit['ship']['name']] = []
dict[fit['ship']['name']].append(fit)
for name, fits in dict.iteritems():
shipID = tree.AppendItem(root, name)
for fit in fits:
fitId = tree.AppendItem(shipID, fit['name'])
tree.SetPyData(fitId, json.dumps(fit))
tree.SortChildren(root)
def displayFit(self, event):
selection = self.fittingsTreeCtrl.GetSelection()
fit = json.loads(self.fittingsTreeCtrl.GetPyData(selection))
list = []
for item in fit['items']:
try:
cargo = Cargo(getItem(item['type']['id']))
cargo.amount = item['quantity']
list.append(cargo)
except:
pass
self.parent.fitView.fitSelection = selection
self.parent.fitView.update(list)
class FitView(Display):
DEFAULT_COLS = ["Base Icon",
"Base Name"]
def __init__(self, parent):
Display.__init__(self, parent, style=wx.LC_SINGLE_SEL)
self.fitSelection = None
| gpl-3.0 | 6,150,813,481,155,787,000 | 34.75 | 119 | 0.622111 | false | 3.601583 | false | false | false |
ombt/analytics | books/programming_in_python_3/book_examples/py31eg/IndentedList.py | 2 | 5068 | #!/usr/bin/env python3
# Copyright (c) 2008-11 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
def indented_list_sort(indented_list, indent=" "):
"""Returns an alphabetically sorted copy of the given list
The indented list is assumed to be a list of strings in a
hierarchy with indentation used to indicate child items.
The indent parameter specifies the characters that constitute
one level of indent.
The function copies the list, and returns it sorted in
case-insensitive alphabetical order, with child items sorted
underneath their parent items, and so on with grandchild items,
and so on recursively to any level of depth.
>>> indented_list = ["M", " MX", " MG", "D", " DA", " DF",\
" DFX", " DFK", " DFB", " DC", "K", "X", "H", " HJ",\
" HB", "A"]
>>>
>>> indented_list = indented_list_sort(indented_list, " ")
>>> indented_list[:8]
['A', 'D', ' DA', ' DC', ' DF', ' DFB', ' DFK', ' DFX']
>>> indented_list[8:]
['H', ' HB', ' HJ', 'K', 'M', ' MG', ' MX', 'X']
"""
KEY, ITEM, CHILDREN = range(3)
def add_entry(level, key, item, children):
if level == 0:
children.append((key, item, []))
else:
add_entry(level - 1, key, item, children[-1][CHILDREN])
def update_indented_list(entry):
indented_list.append(entry[ITEM])
for subentry in sorted(entry[CHILDREN]):
update_indented_list(subentry)
entries = []
for item in indented_list:
level = 0
i = 0
while item.startswith(indent, i):
i += len(indent)
level += 1
key = item.strip().lower()
add_entry(level, key, item, entries)
indented_list = []
for entry in sorted(entries):
update_indented_list(entry)
return indented_list
def indented_list_sort_local(indented_list, indent=" "):
"""
Given an indented list, i.e., a list of items with indented
subitems, sorts the items, and the subitems within each item (and so
on recursively) in case-insensitive alphabetical order.
>>> indented_list = ["M", " MX", " MG", "D", " DA", " DF", " DFX", \
" DFK", " DFB", " DC", "K", "X", "H", " HJ", " HB", "A"]
>>>
>>> indented_list = indented_list_sort_local(indented_list, " ")
>>> indented_list[:8]
['A', 'D', ' DA', ' DC', ' DF', ' DFB', ' DFK', ' DFX']
>>> indented_list[8:]
['H', ' HB', ' HJ', 'K', 'M', ' MG', ' MX', 'X']
"""
KEY, ITEM, CHILDREN = range(3)
def add_entry(key, item, children):
nonlocal level
if level == 0:
children.append((key, item, []))
else:
level -= 1
add_entry(key, item, children[-1][CHILDREN])
def update_indented_list(entry):
indented_list.append(entry[ITEM])
for subentry in sorted(entry[CHILDREN]):
update_indented_list(subentry)
entries = []
for item in indented_list:
level = 0
i = 0
while item.startswith(indent, i):
i += len(indent)
level += 1
key = item.strip().lower()
add_entry(key, item, entries)
indented_list = []
for entry in sorted(entries):
update_indented_list(entry)
return indented_list
if __name__ == "__main__":
before = ["Nonmetals",
" Hydrogen",
" Carbon",
" Nitrogen",
" Oxygen",
"Inner Transitionals",
" Lanthanides",
" Cerium",
" Europium",
" Actinides",
" Uranium",
" Curium",
" Plutonium",
"Alkali Metals",
" Lithium",
" Sodium",
" Potassium"]
result1 = indented_list_sort(before)
result2 = indented_list_sort_local(before)
after = ["Alkali Metals",
" Lithium",
" Potassium",
" Sodium",
"Inner Transitionals",
" Actinides",
" Curium",
" Plutonium",
" Uranium",
" Lanthanides",
" Cerium",
" Europium",
"Nonmetals",
" Carbon",
" Hydrogen",
" Nitrogen",
" Oxygen"]
assert result1 == result2 == after
import doctest
doctest.testmod()
| mit | -7,855,108,976,740,129,000 | 32.562914 | 74 | 0.525257 | false | 3.632975 | false | false | false |
miguelotemagno/imagestion | imagestion_1.0/Imagen.py | 1 | 14445 | # +-----------------------------------------------------------------------+
# | IMAGESTION |
# | |
# | Copyright (C) 2010-Today, GNUCHILE.CL - Santiago de Chile |
# | Licensed under the GNU GPL |
# | |
# | Redistribution and use in source and binary forms, with or without |
# | modification, are permitted provided that the following conditions |
# | are met: |
# | |
# | o Redistributions of source code must retain the above copyright |
# | notice, this list of conditions and the following disclaimer. |
# | o Redistributions in binary form must reproduce the above copyright |
# | notice, this list of conditions and the following disclaimer in the |
# | documentation and/or other materials provided with the distribution.|
# | o The names of the authors may not be used to endorse or promote |
# | products derived from this software without specific prior written |
# | permission. |
# | |
# | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
# | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
# | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
# | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
# | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
# | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
# | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
# | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
# | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
# | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
# | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
# | |
# +-----------------------------------------------------------------------+
# | Author: Miguel Vargas Welch <miguelote@gmail.com> |
# +-----------------------------------------------------------------------+
#import Image
from PIL import Image
import thread
from datetime import datetime
import multiprocessing as mp
import ctypes as c
import numpy as np
# pip install image
# pip install Pillow
## Referencias apoyo:
## http://www.pythonware.com/library/pil/handbook/introduction.htm
## http://www.pythonware.com/library/pil/handbook/image.htm
## http://www.tutorialspoint.com/python/python_multithreading.htm
## http://ostermiller.org/dilate_and_erode.html
class Imagen(object):
def __init__(self,ruta):
self.path = ruta
self.busy = 0
self.reload()
self.manager = mp.Manager()
pass
def reload(self):
self.RGB = Image.open(self.path)
self.ancho, self.alto = self.RGB.size
self.R, self.G, self.B = self.RGB.split()
pass
def dilate(self):
self.busy = 1
try:
R = [self.R, self.R.copy()]
G = [self.G, self.G.copy()]
B = [self.B, self.B.copy()]
lsArgs = [
(R, 0, 0, self.alto, self.ancho),
(G, 0, 0, self.alto, self.ancho),
(B, 0, 0, self.alto, self.ancho)
]
            processes = [mp.Process(target=self._dilate, args=lsArgs[x]) for x in range(0, 3)]  # one process per RGB channel
for p in processes:
p.start()
# Exit the completed processes
for p in processes:
p.join()
# thread.start_new_thread( self._dilate, (R, 0, 0, self.alto, self.ancho) )
# thread.start_new_thread( self._dilate, (G, 0, 0, self.alto, self.ancho) )
# thread.start_new_thread( self._dilate, (B, 0, 0, self.alto, self.ancho) )
except:
print "Error: unable to start thread dilate"
self.busy = 0
# while self.busy > 0:
# pass
self.R = R[1]
self.G = G[1]
self.B = B[1]
print self.busy
def _dilate(self, lst, y1, x1, y2, x2):
"""
@return :
@author Miguelote
"""
id = self.busy
ancho = x2 - x1
alto = y2 - y1
if ancho > 100 and alto > 100:
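            # Large region: split into four quadrants and recurse, one process each.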
difX = ancho % 2
difY = alto % 2
width = ancho // 2 if(difX > 0) else ancho / 2
height = alto // 2 if(difY > 0) else alto / 2
#print [id, '-', alto, ancho, '-', y1,x1, y2,x2]
lsArgs = [
(lst, y1, x1, y2 - height, x2 - width),
(lst, y1, x1 + width, y2 - height, x2),
(lst, y1 + height, x1, y2, x2 - width),
(lst, y1 + height, x1 + width, y2, x2)
]
            processes = [mp.Process(target=self._dilate, args=lsArgs[x]) for x in range(0, 4)]  # one process per quadrant
for p in processes:
p.start()
# Exit the completed processes
for p in processes:
p.join()
# try:
# thread.start_new_thread( self._dilate, (lst, y1, x1, y2-height, x2-width) )
# thread.start_new_thread( self._dilate, (lst, y1, x1+width, y2-height, x2) )
# thread.start_new_thread( self._dilate, (lst, y1+height, x1, y2, x2-width) )
# thread.start_new_thread( self._dilate, (lst, y1+height, x1+width, y2, x2) )
# except:
# print "Error: unable to start thread _dilate"
# print [id, alto, ancho]
# print [y1, x1, y2-height, x2-width]
# print [y1, x1+width, y2-height, x2]
# print [y1+height, x1, y2, x2-width]
# print [y1+height, x1+width, y2, x2]
# print self.busy
else:
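            # Small region: dilate pixel-by-pixel; each pixel overwrites any
            # darker value in its 8-neighbourhood on the output copy.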
img, copia = lst
self.busy = self.busy + 1
start = datetime.now()
print [id, '-' ,self.busy, '_dilate' , alto, ancho]
for y in xrange(y1,y2):
for x in xrange(x1,x2):
punto = img.getpixel((x,y))
##norte = im.getpixel((x,y-1))
##sur = im.getpixel((x,y+1))
##este = im.getpixel((x+1,y))
##oeste = im.getpixel((x-1,y))
if y>0 and punto>img.getpixel((x,y-1)):
lst[1].putpixel((x,y-1),punto)
if x>0 and punto>img.getpixel((x-1,y)):
lst[1].putpixel((x-1,y),punto)
if y<self.alto-1 and punto>img.getpixel((x,y+1)):
lst[1].putpixel((x,y+1),punto)
if x<self.ancho-1 and punto>img.getpixel((x+1,y)):
lst[1].putpixel((x+1,y),punto)
if y>0 and x>0 and punto>img.getpixel((x-1,y-1)):
lst[1].putpixel((x-1,y-1),punto)
if y<self.alto-1 and x>0 and punto>img.getpixel((x-1,y+1)):
lst[1].putpixel((x-1,y+1),punto)
if y>0 and x<self.ancho-1 and punto>img.getpixel((x+1,y-1)):
lst[1].putpixel((x+1,y-1),punto)
if y<self.alto-1 and x<self.ancho-1 and punto>img.getpixel((x+1,y+1)):
lst[1].putpixel((x+1,y+1),punto)
stop = datetime.now()
delay = stop - start
print [id, '-' ,self.busy, "fin", delay]
self.busy = self.busy -1
if self.busy == 1:
self.busy = 0
def erode(self):
self.busy = 1
        # NOTE: PIL Image objects cannot be stored in a ctypes shared array,
        # so use [original, copy] pairs per channel, mirroring dilate().
        R = [self.R, self.R.copy()]
        G = [self.G, self.G.copy()]
        B = [self.B, self.B.copy()]
lsArgs = [
(R, 0, 0, self.alto, self.ancho),
(G, 0, 0, self.alto, self.ancho),
(B, 0, 0, self.alto, self.ancho)
]
        processes = [mp.Process(target=self._erode, args=lsArgs[x]) for x in range(0, 3)]  # one process per RGB channel
for p in processes:
p.start()
# Exit the completed processes
for p in processes:
p.join()
# try:
# R = [self.R, self.R.copy()]
# G = [self.G, self.G.copy()]
# B = [self.B, self.B.copy()]
# thread.start_new_thread( self._erode, (R, 0, 0, self.alto, self.ancho) )
# thread.start_new_thread( self._erode, (G, 0, 0, self.alto, self.ancho) )
# thread.start_new_thread( self._erode, (B, 0, 0, self.alto, self.ancho) )
# except:
# print "Error: unable to start thread erode"
# self.busy = 0
#
# while self.busy > 0:
# pass
self.R = R[1]
self.G = G[1]
self.B = B[1]
print self.busy
def _erode(self, lst, y1, x1, y2, x2):
"""
@return :
@author Miguelote
"""
id = self.busy
ancho = x2 - x1
alto = y2 - y1
if ancho > 100 and alto > 100:
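            # Large region: split into four quadrants and recurse, one process each.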
difX = ancho % 2
difY = alto % 2
width = ancho // 2 if(difX > 0) else ancho / 2
height = alto // 2 if(difY > 0) else alto / 2
#print [id, '-', alto, ancho, '-', y1,x1, y2,x2]
lsArgs = [
(lst, y1, x1, y2 - height, x2 - width),
(lst, y1, x1 + width, y2 - height, x2),
(lst, y1 + height, x1, y2, x2 - width),
(lst, y1 + height, x1 + width, y2, x2)
]
            processes = [mp.Process(target=self._erode, args=lsArgs[x]) for x in range(0, 4)]  # one process per quadrant
for p in processes:
p.start()
# Exit the completed processes
for p in processes:
p.join()
# try:
# thread.start_new_thread( self._erode, (lst, y1, x1, y2-height, x2-width) )
# thread.start_new_thread( self._erode, (lst, y1, x1+width, y2-height, x2) )
# thread.start_new_thread( self._erode, (lst, y1+height, x1, y2, x2-width) )
# thread.start_new_thread( self._erode, (lst, y1+height, x1+width, y2, x2) )
# except:
# print "Error: unable to start thread _erode"
# print [id, alto, ancho]
# print [y1, x1, y2-height, x2-width]
# print [y1, x1+width, y2-height, x2]
# print [y1+height, x1, y2, x2-width]
# print [y1+height, x1+width, y2, x2]
# print self.busy
else:
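            # Small region: erode pixel-by-pixel; a pixel is replaced by a
            # darker 8-neighbour when one exists (approximate grayscale erosion).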
img, copia = lst
self.busy = self.busy + 1
start = datetime.now()
print [id, '-' ,self.busy, '_erode' , alto, ancho]
for y in xrange(y1,y2):
for x in xrange(x1,x2):
punto = img.getpixel((x,y))
##norte = im.getpixel((x,y-1))
##sur = im.getpixel((x,y+1))
##este = im.getpixel((x+1,y))
##oeste = im.getpixel((x-1,y))
if y>0 and punto>img.getpixel((x,y-1)):
lst[1].putpixel((x,y),img.getpixel((x,y-1)))
if x>0 and punto>img.getpixel((x-1,y)):
lst[1].putpixel((x,y),img.getpixel((x-1,y)))
if y<self.alto-1 and punto>img.getpixel((x,y+1)):
lst[1].putpixel((x,y),img.getpixel((x,y+1)))
if x<self.ancho-1 and punto>img.getpixel((x+1,y)):
lst[1].putpixel((x,y),img.getpixel((x+1,y)))
if y>0 and x>0 and punto>img.getpixel((x-1,y-1)):
lst[1].putpixel((x,y),img.getpixel((x-1,y-1)))
if y>0 and x<self.ancho-1 and punto>img.getpixel((x+1,y-1)):
lst[1].putpixel((x,y),img.getpixel((x+1,y-1)))
if y<self.alto-1 and x>0 and punto>img.getpixel((x-1,y+1)):
lst[1].putpixel((x,y),img.getpixel((x-1,y+1)))
if y<self.alto-1 and x<self.ancho-1 and punto>img.getpixel((x+1,y+1)):
lst[1].putpixel((x,y),img.getpixel((x+1,y+1)))
stop = datetime.now()
delay = stop - start
print [id, '-' ,self.busy, "fin", delay]
self.busy = self.busy -1
if self.busy == 1:
self.busy = 0
def rgb2gray(self):
"""
@return :
@author Miguelote
"""
pass
    def substract(self, img):
        # Left unimplemented in the original; a channel-wise subtraction
        # (e.g. via PIL.ImageChops.subtract on each channel of img) would fit here.
        pass
def getR(self):
"""
@return int[][] :
@author Miguelote
"""
return self.R
pass
def getG(self):
"""
@return int[][] :
@author Miguelote
"""
return self.G
pass
def getB(self):
"""
@return int[][] :
@author Miguelote
"""
return self.B
pass
def getRGB(self):
"""
@return int[][][3] :
@author Miguelote
"""
self.RGB = Image.merge("RGB", (self.R, self.G, self.B))
return self.RGB
pass
def getAlto(self):
return self.alto
pass
def getAncho(self):
return self.ancho
pass
def getPath(self):
return self.path
pass
| gpl-2.0 | 3,082,745,213,248,005,000 | 34.666667 | 94 | 0.451506 | false | 3.453263 | false | false | false |
sdispater/orator | orator/dbal/platforms/postgres_platform.py | 1 | 14126 | # -*- coding: utf-8 -*-
from .platform import Platform
from .keywords.postgresql_keywords import PostgreSQLKeywords
from ..table import Table
from ..column import Column
from ..identifier import Identifier
class PostgresPlatform(Platform):
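    # Maps native PostgreSQL type names to orator's portable column types.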
INTERNAL_TYPE_MAPPING = {
"smallint": "smallint",
"int2": "smallint",
"serial": "integer",
"serial4": "integer",
"int": "integer",
"int4": "integer",
"integer": "integer",
"bigserial": "bigint",
"serial8": "bigint",
"bigint": "bigint",
"int8": "bigint",
"bool": "boolean",
"boolean": "boolean",
"text": "text",
"tsvector": "text",
"varchar": "string",
"interval": "string",
"_varchar": "string",
"char": "string",
"bpchar": "string",
"inet": "string",
"date": "date",
"datetime": "datetime",
"timestamp": "datetime",
"timestamptz": "datetimez",
"time": "time",
"timetz": "time",
"float": "float",
"float4": "float",
"float8": "float",
"double": "float",
"double precision": "float",
"real": "float",
"decimal": "decimal",
"money": "decimal",
"numeric": "decimal",
"year": "date",
"uuid": "guid",
"bytea": "blob",
"json": "json",
}
def get_list_table_columns_sql(self, table):
sql = """SELECT
a.attnum,
quote_ident(a.attname) AS field,
t.typname AS type,
format_type(a.atttypid, a.atttypmod) AS complete_type,
(SELECT t1.typname FROM pg_catalog.pg_type t1 WHERE t1.oid = t.typbasetype) AS domain_type,
(SELECT format_type(t2.typbasetype, t2.typtypmod) FROM
pg_catalog.pg_type t2 WHERE t2.typtype = 'd' AND t2.oid = a.atttypid) AS domain_complete_type,
a.attnotnull AS isnotnull,
(SELECT 't'
FROM pg_index
WHERE c.oid = pg_index.indrelid
AND pg_index.indkey[0] = a.attnum
AND pg_index.indisprimary = 't'
) AS pri,
(SELECT pg_get_expr(adbin, adrelid)
FROM pg_attrdef
WHERE c.oid = pg_attrdef.adrelid
AND pg_attrdef.adnum=a.attnum
) AS default,
(SELECT pg_description.description
FROM pg_description WHERE pg_description.objoid = c.oid AND a.attnum = pg_description.objsubid
) AS comment
FROM pg_attribute a, pg_class c, pg_type t, pg_namespace n
WHERE %s
AND a.attnum > 0
AND a.attrelid = c.oid
AND a.atttypid = t.oid
AND n.oid = c.relnamespace
ORDER BY a.attnum""" % self.get_table_where_clause(
table
)
return sql
def get_list_table_indexes_sql(self, table):
sql = """
SELECT quote_ident(relname) as relname, pg_index.indisunique, pg_index.indisprimary,
pg_index.indkey, pg_index.indrelid,
pg_get_expr(indpred, indrelid) AS where
FROM pg_class, pg_index
WHERE oid IN (
SELECT indexrelid
FROM pg_index si, pg_class sc, pg_namespace sn
WHERE %s
AND sc.oid=si.indrelid AND sc.relnamespace = sn.oid
) AND pg_index.indexrelid = oid"""
sql = sql % self.get_table_where_clause(table, "sc", "sn")
return sql
def get_list_table_foreign_keys_sql(self, table):
return (
"SELECT quote_ident(r.conname) as conname, "
"pg_catalog.pg_get_constraintdef(r.oid, true) AS condef "
"FROM pg_catalog.pg_constraint r "
"WHERE r.conrelid = "
"("
"SELECT c.oid "
"FROM pg_catalog.pg_class c, pg_catalog.pg_namespace n "
"WHERE "
+ self.get_table_where_clause(table)
+ " AND n.oid = c.relnamespace"
")"
" AND r.contype = 'f'"
)
def get_table_where_clause(self, table, class_alias="c", namespace_alias="n"):
where_clause = (
namespace_alias
+ ".nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND "
)
if table.find(".") >= 0:
split = table.split(".")
schema, table = split[0], split[1]
schema = "'%s'" % schema
else:
schema = (
"ANY(string_to_array((select replace(replace(setting, '\"$user\"', user), ' ', '')"
" from pg_catalog.pg_settings where name = 'search_path'),','))"
)
where_clause += "%s.relname = '%s' AND %s.nspname = %s" % (
class_alias,
table,
namespace_alias,
schema,
)
return where_clause
def get_advanced_foreign_key_options_sql(self, foreign_key):
query = ""
if foreign_key.has_option("match"):
query += " MATCH %s" % foreign_key.get_option("match")
query += super(PostgresPlatform, self).get_advanced_foreign_key_options_sql(
foreign_key
)
deferrable = (
foreign_key.has_option("deferrable")
and foreign_key.get_option("deferrable") is not False
)
if deferrable:
query += " DEFERRABLE"
else:
query += " NOT DEFERRABLE"
query += " INITIALLY"
deferred = (
foreign_key.has_option("deferred")
and foreign_key.get_option("deferred") is not False
)
if deferred:
query += " DEFERRED"
else:
query += " IMMEDIATE"
return query
def get_alter_table_sql(self, diff):
"""
Get the ALTER TABLE SQL statement
:param diff: The table diff
:type diff: orator.dbal.table_diff.TableDiff
:rtype: list
"""
sql = []
for column_diff in diff.changed_columns.values():
if self.is_unchanged_binary_column(column_diff):
continue
old_column_name = column_diff.get_old_column_name().get_quoted_name(self)
column = column_diff.column
if any(
[
column_diff.has_changed("type"),
column_diff.has_changed("precision"),
column_diff.has_changed("scale"),
column_diff.has_changed("fixed"),
]
):
query = (
"ALTER "
+ old_column_name
+ " TYPE "
+ self.get_sql_type_declaration(column.to_dict())
)
sql.append(
"ALTER TABLE "
+ diff.get_name(self).get_quoted_name(self)
+ " "
+ query
)
if column_diff.has_changed("default") or column_diff.has_changed("type"):
if column.get_default() is None:
default_clause = " DROP DEFAULT"
else:
default_clause = " SET" + self.get_default_value_declaration_sql(
column.to_dict()
)
query = "ALTER " + old_column_name + default_clause
sql.append(
"ALTER TABLE "
+ diff.get_name(self).get_quoted_name(self)
+ " "
+ query
)
if column_diff.has_changed("notnull"):
op = "DROP"
if column.get_notnull():
op = "SET"
query = "ALTER " + old_column_name + " " + op + " NOT NULL"
sql.append(
"ALTER TABLE "
+ diff.get_name(self).get_quoted_name(self)
+ " "
+ query
)
if column_diff.has_changed("autoincrement"):
if column.get_autoincrement():
seq_name = self.get_identity_sequence_name(
diff.name, old_column_name
)
sql.append("CREATE SEQUENCE " + seq_name)
sql.append(
"SELECT setval('" + seq_name + "', "
"(SELECT MAX(" + old_column_name + ") FROM " + diff.name + "))"
)
query = (
"ALTER "
+ old_column_name
+ " SET DEFAULT nextval('"
+ seq_name
+ "')"
)
sql.append(
"ALTER TABLE "
+ diff.get_name(self).get_quoted_name(self)
+ " "
+ query
)
else:
query = "ALTER " + old_column_name + " DROP DEFAULT"
sql.append(
"ALTER TABLE "
+ diff.get_name(self).get_quoted_name(self)
+ " "
+ query
)
if column_diff.has_changed("length"):
query = (
"ALTER "
+ old_column_name
+ " TYPE "
+ self.get_sql_type_declaration(column.to_dict())
)
sql.append(
"ALTER TABLE "
+ diff.get_name(self).get_quoted_name(self)
+ " "
+ query
)
for old_column_name, column in diff.renamed_columns.items():
sql.append(
"ALTER TABLE " + diff.get_name(self).get_quoted_name(self) + " "
"RENAME COLUMN "
+ Identifier(old_column_name).get_quoted_name(self)
+ " TO "
+ column.get_quoted_name(self)
)
return sql
def is_unchanged_binary_column(self, column_diff):
column_type = column_diff.column.get_type()
if column_type not in ["blob", "binary"]:
return False
if isinstance(column_diff.from_column, Column):
from_column = column_diff.from_column
else:
from_column = None
if from_column:
from_column_type = self.INTERNAL_TYPE_MAPPING[from_column.get_type()]
if from_column_type in ["blob", "binary"]:
return False
return (
len(
[
x
for x in column_diff.changed_properties
if x not in ["type", "length", "fixed"]
]
)
== 0
)
if column_diff.has_changed("type"):
return False
return (
len(
[
x
for x in column_diff.changed_properties
if x not in ["length", "fixed"]
]
)
== 0
)
def convert_booleans(self, item):
if isinstance(item, list):
for i, value in enumerate(item):
if isinstance(value, bool):
item[i] = str(value).lower()
elif isinstance(item, bool):
item = str(item).lower()
return item
def get_boolean_type_declaration_sql(self, column):
return "BOOLEAN"
def get_integer_type_declaration_sql(self, column):
if column.get("autoincrement"):
return "SERIAL"
return "INT"
def get_bigint_type_declaration_sql(self, column):
if column.get("autoincrement"):
return "BIGSERIAL"
return "BIGINT"
def get_smallint_type_declaration_sql(self, column):
return "SMALLINT"
def get_guid_type_declaration_sql(self, column):
return "UUID"
def get_datetime_type_declaration_sql(self, column):
return "TIMESTAMP(0) WITHOUT TIME ZONE"
def get_datetimetz_type_declaration_sql(self, column):
return "TIMESTAMP(0) WITH TIME ZONE"
def get_date_type_declaration_sql(self, column):
return "DATE"
def get_time_type_declaration_sql(self, column):
return "TIME(0) WITHOUT TIME ZONE"
def get_string_type_declaration_sql(self, column):
length = column.get("length", "255")
fixed = column.get("fixed")
if fixed:
return "CHAR(%s)" % length
else:
return "VARCHAR(%s)" % length
def get_binary_type_declaration_sql(self, column):
return "BYTEA"
def get_blob_type_declaration_sql(self, column):
return "BYTEA"
def get_clob_type_declaration_sql(self, column):
return "TEXT"
def get_text_type_declaration_sql(self, column):
return "TEXT"
def get_json_type_declaration_sql(self, column):
return "JSON"
def get_decimal_type_declaration_sql(self, column):
if "precision" not in column or not column["precision"]:
column["precision"] = 10
if "scale" not in column or not column["scale"]:
column["precision"] = 0
return "DECIMAL(%s, %s)" % (column["precision"], column["scale"])
def get_float_type_declaration_sql(self, column):
return "DOUBLE PRECISION"
def supports_foreign_key_constraints(self):
return True
def has_native_json_type(self):
return True
def _get_reserved_keywords_class(self):
return PostgreSQLKeywords
| mit | 5,858,059,935,755,748,000 | 31.548387 | 118 | 0.467719 | false | 4.289705 | false | false | false |
rdio/sentry | src/sentry/models/alert.py | 1 | 3798 | """
sentry.models.alert
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from sentry.constants import (
STATUS_RESOLVED, STATUS_UNRESOLVED, MINUTE_NORMALIZATION
)
from sentry.db.models import (
Model, GzippedDictField, BoundedPositiveIntegerField, sane_repr
)
from sentry.utils.db import has_trending
from sentry.utils.http import absolute_uri
class Alert(Model):
project = models.ForeignKey('sentry.Project')
group = models.ForeignKey('sentry.Group', null=True)
datetime = models.DateTimeField(default=timezone.now)
message = models.TextField()
data = GzippedDictField(null=True)
related_groups = models.ManyToManyField('sentry.Group', through='sentry.AlertRelatedGroup', related_name='related_alerts')
status = BoundedPositiveIntegerField(default=0, choices=(
(STATUS_UNRESOLVED, _('Unresolved')),
(STATUS_RESOLVED, _('Resolved')),
), db_index=True)
class Meta:
app_label = 'sentry'
db_table = 'sentry_alert'
__repr__ = sane_repr('project_id', 'group_id', 'datetime')
# TODO: move classmethods to manager
@classmethod
def get_recent_for_project(cls, project_id):
return cls.objects.filter(
project=project_id,
group_id__isnull=True,
datetime__gte=timezone.now() - timedelta(minutes=60),
status=STATUS_UNRESOLVED,
).order_by('-datetime')
@classmethod
def maybe_alert(cls, project_id, message, group_id=None):
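        """Create an alert for the project unless one fired recently.

        Returns the new Alert, or None when rate-limited.
        """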
from sentry.models import Group
now = timezone.now()
manager = cls.objects
# We only create an alert based on:
        # - no alert for the project has been created in the last 60 minutes
        # - no alert for the same group has been created in the last 60 minutes
# TODO: there is a race condition if we're calling this function for the same project
if manager.filter(
project=project_id, datetime__gte=now - timedelta(minutes=60)).exists():
return
if manager.filter(
project=project_id, group=group_id,
datetime__gte=now - timedelta(minutes=60)).exists():
return
alert = manager.create(
project_id=project_id,
group_id=group_id,
datetime=now,
message=message,
)
if not group_id and has_trending():
# Capture the top 5 trending events at the time of this error
related_groups = Group.objects.get_accelerated([project_id], minutes=MINUTE_NORMALIZATION)[:5]
for group in related_groups:
AlertRelatedGroup.objects.create(
group=group,
alert=alert,
)
return alert
@property
def team(self):
return self.project.team
@property
def is_resolved(self):
return (self.status == STATUS_RESOLVED
or self.datetime < timezone.now() - timedelta(minutes=60))
def get_absolute_url(self):
return absolute_uri(reverse('sentry-alert-details', args=[
self.team.slug, self.project.slug, self.id]))
class AlertRelatedGroup(Model):
group = models.ForeignKey('sentry.Group')
alert = models.ForeignKey(Alert)
data = GzippedDictField(null=True)
class Meta:
app_label = 'sentry'
db_table = 'sentry_alertrelatedgroup'
unique_together = (('group', 'alert'),)
__repr__ = sane_repr('group_id', 'alert_id')
| bsd-3-clause | 34,375,455,835,411,132 | 31.741379 | 126 | 0.633755 | false | 4.07074 | false | false | false |
tomaszpiotro/ntr | dziekanat/dziekanat/settings.py | 1 | 1315 | import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
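# SECURITY WARNING: keep the secret key used in production secret!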
SECRET_KEY = '+k*kdcv4ut*bd99nb(ox$%j_9(1#8@_)!aa4oy2%iwsg&!tt15'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['localhost',
'127.0.0.1']
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'dziekanat.urls'
WSGI_APPLICATION = 'dziekanat.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'ntr',
'USER': 'postgres',
'PASSWORD': 'qwerty1asd',
'HOST': 'localhost',
'PORT': '5432',
},
}
LANGUAGE_CODE = 'pl-pl'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
| mit | -8,697,183,733,747,685,000 | 22.070175 | 69 | 0.668441 | false | 3.207317 | false | true | false |
rcfduarte/nmsat | projects/examples/scripts/single_neuron_pattern_input.py | 1 | 8325 | __author__ = 'duarte'
from modules.parameters import ParameterSet, ParameterSpace, extract_nestvalid_dict
from modules.input_architect import EncodingLayer, InputSignalSet, InputNoise, InputSignal
from modules.net_architect import Network
from modules.io import set_storage_locations
from modules.signals import iterate_obj_list
from modules.analysis import single_neuron_responses
from modules.visualization import InputPlots, set_global_rcParams
import cPickle as pickle
import numpy as np
import scipy.stats as stats
import nest
"""
:param parameter_set: must be consistent with the computation
:param plot: plot results - either show them or save to file
:param display: show figures/reports
:param save: save results
:return results_dictionary:
"""
# ######################################################################################################################
# Experiment options
# ======================================================================================================================
plot = True
display = True
save = True
debug = False
online = True
# ######################################################################################################################
# Extract parameters from file and build global ParameterSet
# ======================================================================================================================
params_file = '../parameters/single_neuron_patterned_synaptic_input.py'
parameter_set = ParameterSpace(params_file)[0]
parameter_set = parameter_set.clean(termination='pars')
if not isinstance(parameter_set, ParameterSet):
if isinstance(parameter_set, basestring) or isinstance(parameter_set, dict):
parameter_set = ParameterSet(parameter_set)
else:
raise TypeError("parameter_set must be ParameterSet, string with full path to parameter file or dictionary")
# ######################################################################################################################
# Setup extra variables and parameters
# ======================================================================================================================
if plot:
set_global_rcParams(parameter_set.kernel_pars['mpl_path'])
paths = set_storage_locations(parameter_set, save)
np.random.seed(parameter_set.kernel_pars['np_seed'])
results = dict()
# ######################################################################################################################
# Set kernel and simulation parameters
# ======================================================================================================================
print('\nRunning ParameterSet {0}'.format(parameter_set.label))
nest.ResetKernel()
nest.set_verbosity('M_WARNING')
nest.SetKernelStatus(extract_nestvalid_dict(parameter_set.kernel_pars.as_dict(), param_type='kernel'))
# ######################################################################################################################
# Build network
# ======================================================================================================================
net = Network(parameter_set.net_pars)
# ######################################################################################################################
# Randomize initial variable values
# ======================================================================================================================
for idx, n in enumerate(list(iterate_obj_list(net.populations))):
if hasattr(parameter_set.net_pars, "randomize_neuron_pars"):
randomize = parameter_set.net_pars.randomize_neuron_pars[idx]
for k, v in randomize.items():
n.randomize_initial_states(k, randomization_function=v[0], **v[1])
########################################################################################################################
# Build Input Signal Sets
# ======================================================================================================================
assert hasattr(parameter_set, "input_pars")
# Current input (need to build 2 separate noise signals for the 2 input channels)
total_stimulation_time = parameter_set.kernel_pars.sim_time + parameter_set.kernel_pars.transient_t
input_noise_ch1 = InputNoise(parameter_set.input_pars.noise, stop_time=total_stimulation_time)
input_noise_ch1.generate()
input_noise_ch1.re_seed(parameter_set.kernel_pars.np_seed)
input_noise_ch2 = InputNoise(parameter_set.input_pars.noise, stop_time=total_stimulation_time)
input_noise_ch2.generate()
input_noise_ch2.re_seed(parameter_set.kernel_pars.np_seed)
if plot:
inp_plot = InputPlots(stim_obj=None, input_obj=None, noise_obj=input_noise_ch1)
inp_plot.plot_noise_component(display=display, save=paths['figures'] + "/InputNoise_CH1")
inp_plot = InputPlots(stim_obj=None, input_obj=None, noise_obj=input_noise_ch2)
inp_plot.plot_noise_component(display=display, save=paths['figures'] + "/InputNoise_CH2")
# ######################################################################################################################
# Build and connect input
# ======================================================================================================================
enc_layer_ch1 = EncodingLayer(parameter_set.encoding_ch1_pars, signal=input_noise_ch1)
enc_layer_ch1.connect(parameter_set.encoding_ch1_pars, net)
enc_layer_ch2 = EncodingLayer(parameter_set.encoding_ch2_pars, signal=input_noise_ch2)
enc_layer_ch2.connect(parameter_set.encoding_ch2_pars, net)
# ######################################################################################################################
# Connect Devices
# ======================================================================================================================
net.connect_devices()
# ######################################################################################################################
# Simulate
# ======================================================================================================================
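# Run the transient period first and discard its activity before the main simulation.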
if parameter_set.kernel_pars.transient_t:
net.simulate(parameter_set.kernel_pars.transient_t)
net.flush_records()
net.simulate(parameter_set.kernel_pars.sim_time + nest.GetKernelStatus()['resolution'])
# ######################################################################################################################
# Extract and store data
# ======================================================================================================================
net.extract_population_activity(
t_start=parameter_set.kernel_pars.transient_t, # + nest.GetKernelStatus()['resolution'],
t_stop=parameter_set.kernel_pars.sim_time + parameter_set.kernel_pars.transient_t)
net.extract_network_activity()
# ######################################################################################################################
# Analyse / plot data
# ======================================================================================================================
results = dict()
analysis_interval = [parameter_set.kernel_pars.transient_t,
parameter_set.kernel_pars.transient_t + parameter_set.kernel_pars.sim_time]
for idd, nam in enumerate(net.population_names):
results.update({nam: {}})
results[nam] = single_neuron_responses(net.populations[idd],
parameter_set, pop_idx=idd,
start=analysis_interval[0],
stop=analysis_interval[1],
plot=plot, display=display,
save=paths['figures'] + paths['label'])
if results[nam]['rate']:
print('Output Rate [{0}] = {1} spikes/s'.format(str(nam), str(results[nam]['rate'])))
# ######################################################################################################################
# Save data
# ======================================================================================================================
if save:
with open(paths['results'] + 'Results_' + parameter_set.label, 'w') as f:
pickle.dump(results, f)
parameter_set.save(paths['parameters'] + 'Parameters_' + parameter_set.label)
| gpl-2.0 | -234,421,545,666,139,140 | 53.058442 | 120 | 0.448529 | false | 5.154799 | false | false | false |
j-carpentier/nova | nova/objects/aggregate.py | 29 | 7877 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.compute import utils as compute_utils
from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class Aggregate(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: String attributes updated to support unicode
VERSION = '1.1'
fields = {
'id': fields.IntegerField(),
'name': fields.StringField(),
'hosts': fields.ListOfStringsField(nullable=True),
'metadata': fields.DictOfStringsField(nullable=True),
}
obj_extra_fields = ['availability_zone']
@staticmethod
def _from_db_object(context, aggregate, db_aggregate):
for key in aggregate.fields:
if key == 'metadata':
db_key = 'metadetails'
else:
db_key = key
aggregate[key] = db_aggregate[db_key]
aggregate._context = context
aggregate.obj_reset_changes()
return aggregate
def _assert_no_hosts(self, action):
if 'hosts' in self.obj_what_changed():
raise exception.ObjectActionError(
action=action,
reason='hosts updated inline')
@base.remotable_classmethod
def get_by_id(cls, context, aggregate_id):
db_aggregate = db.aggregate_get(context, aggregate_id)
return cls._from_db_object(context, cls(), db_aggregate)
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
self._assert_no_hosts('create')
updates = self.obj_get_changes()
payload = dict(updates)
if 'metadata' in updates:
# NOTE(danms): For some reason the notification format is weird
payload['meta_data'] = payload.pop('metadata')
compute_utils.notify_about_aggregate_update(self._context,
"create.start",
payload)
metadata = updates.pop('metadata', None)
db_aggregate = db.aggregate_create(self._context, updates,
metadata=metadata)
self._from_db_object(self._context, self, db_aggregate)
payload['aggregate_id'] = self.id
compute_utils.notify_about_aggregate_update(self._context,
"create.end",
payload)
@base.remotable
def save(self):
self._assert_no_hosts('save')
updates = self.obj_get_changes()
payload = {'aggregate_id': self.id}
if 'metadata' in updates:
payload['meta_data'] = updates['metadata']
compute_utils.notify_about_aggregate_update(self._context,
"updateprop.start",
payload)
updates.pop('id', None)
db_aggregate = db.aggregate_update(self._context, self.id, updates)
compute_utils.notify_about_aggregate_update(self._context,
"updateprop.end",
payload)
self._from_db_object(self._context, self, db_aggregate)
@base.remotable
def update_metadata(self, updates):
payload = {'aggregate_id': self.id,
'meta_data': updates}
compute_utils.notify_about_aggregate_update(self._context,
"updatemetadata.start",
payload)
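        # A value of None deletes the key; everything else is queued for a bulk add.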
to_add = {}
for key, value in updates.items():
if value is None:
try:
db.aggregate_metadata_delete(self._context, self.id, key)
except exception.AggregateMetadataNotFound:
pass
try:
self.metadata.pop(key)
except KeyError:
pass
else:
to_add[key] = value
self.metadata[key] = value
db.aggregate_metadata_add(self._context, self.id, to_add)
compute_utils.notify_about_aggregate_update(self._context,
"updatemetadata.end",
payload)
self.obj_reset_changes(fields=['metadata'])
@base.remotable
def destroy(self):
db.aggregate_delete(self._context, self.id)
@base.remotable
def add_host(self, host):
db.aggregate_host_add(self._context, self.id, host)
if self.hosts is None:
self.hosts = []
self.hosts.append(host)
self.obj_reset_changes(fields=['hosts'])
@base.remotable
def delete_host(self, host):
db.aggregate_host_delete(self._context, self.id, host)
self.hosts.remove(host)
self.obj_reset_changes(fields=['hosts'])
@property
def availability_zone(self):
return self.metadata.get('availability_zone', None)
@base.NovaObjectRegistry.register
class AggregateList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Added key argument to get_by_host()
# Aggregate <= version 1.1
# Version 1.2: Added get_by_metadata_key
VERSION = '1.2'
fields = {
'objects': fields.ListOfObjectsField('Aggregate'),
}
# NOTE(danms): Aggregate was at 1.1 before we added this
obj_relationships = {
'objects': [('1.0', '1.1'), ('1.1', '1.1'), ('1.2', '1.1')],
}
@classmethod
def _filter_db_aggregates(cls, db_aggregates, hosts):
if not isinstance(hosts, set):
hosts = set(hosts)
filtered_aggregates = []
for db_aggregate in db_aggregates:
for host in db_aggregate['hosts']:
if host in hosts:
filtered_aggregates.append(db_aggregate)
break
return filtered_aggregates
@base.remotable_classmethod
def get_all(cls, context):
db_aggregates = db.aggregate_get_all(context)
return base.obj_make_list(context, cls(context), objects.Aggregate,
db_aggregates)
@base.remotable_classmethod
def get_by_host(cls, context, host, key=None):
db_aggregates = db.aggregate_get_by_host(context, host, key=key)
return base.obj_make_list(context, cls(context), objects.Aggregate,
db_aggregates)
@base.remotable_classmethod
def get_by_metadata_key(cls, context, key, hosts=None):
db_aggregates = db.aggregate_get_by_metadata_key(context, key=key)
if hosts is not None:
db_aggregates = cls._filter_db_aggregates(db_aggregates, hosts)
return base.obj_make_list(context, cls(context), objects.Aggregate,
db_aggregates)
| apache-2.0 | -245,690,764,181,820,060 | 38.189055 | 78 | 0.563666 | false | 4.373681 | false | false | false |
poifra/Creamuspython2 | test.py | 1 | 1509 |
from Chordbook import transpose, durations
from Sequencer import Sequence, Note
from Synth import ClassicSynth, BassWalkSynth, PianoSynth
from random import choice
from itertools import cycle
from pyo import *
s = Server().boot()
noteCount = 1
totalCount = 0
tempo = 70
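# ii-V-I progression in C major (Dm7 -> G7 -> CMaj7), looped indefinitely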
chords = cycle(
[
transpose(target='m7',key='D',octave=5),
transpose(target='7',key='G',octave=5),
transpose(target='maj7', key='C',octave=5)
]
)
chordName = cycle(['Dm7','G7','CMaj7'])
currentChord = next(chords)
currentChordName = next(chordName)
duree = durations['half']
realNotes = [Note(n, duree) for n in currentChord]
seqs = [Sequence([n],tempo) for n in realNotes]
for seq in seqs:
seq.play()
synths = [PianoSynth(seq) for seq in seqs]
for syn in synths:
syn.get_out().out()
def changeChord():
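    """Pattern callback: once noteCount exceeds 4, stop the running
    sequences and rebuild them on the next chord of the cycle."""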
global currentChord, seqs, synths
global noteCount, currentChordName, totalCount
if noteCount > 4:
print "changing chord"
noteCount = 1
currentChord = next(chords)
currentChordName = next(chordName)
newNotes = [Note(n, duree) for n in currentChord]
for seq in seqs:
if seq.isPlaying():
seq.stop()
seqs = [Sequence([n],tempo) for n in newNotes]
synths = [PianoSynth(seq) for seq in seqs]
for seq in seqs:
if not(seq.isPlaying()):
seq.play()
for syn in synths:
syn.get_out().out()
print "Current="+currentChordName+" Total count="+str(totalCount)
noteCount += 1
totalCount += 1
pat = Pattern(changeChord, time=60/(tempo / durations['quarter'] / 4))
pat.play()
s.gui(locals())
| mit | 6,184,060,515,430,927,000 | 22.578125 | 70 | 0.700464 | false | 2.680284 | false | false | false |
agarwal-karan/mongo-connector | mongo_connector/doc_managers/elastic_doc_manager.py | 1 | 9182 | # Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Elasticsearch implementation of the DocManager interface.
Receives documents from an OplogThread and takes the appropriate actions on
Elasticsearch.
"""
import logging
from threading import Timer
import bson.json_util
from elasticsearch import Elasticsearch, exceptions as es_exceptions
from elasticsearch.helpers import scan, streaming_bulk
from mongo_connector import errors
from mongo_connector.constants import (DEFAULT_COMMIT_INTERVAL,
DEFAULT_MAX_BULK)
from mongo_connector.util import retry_until_ok
from mongo_connector.doc_managers import DocManagerBase, exception_wrapper
from mongo_connector.doc_managers.formatters import DefaultDocumentFormatter
wrap_exceptions = exception_wrapper({
es_exceptions.ConnectionError: errors.ConnectionFailed,
es_exceptions.TransportError: errors.OperationFailed})
class DocManager(DocManagerBase):
"""Elasticsearch implementation of the DocManager interface.
Receives documents from an OplogThread and takes the appropriate actions on
Elasticsearch.
"""
def __init__(self, url, auto_commit_interval=DEFAULT_COMMIT_INTERVAL,
unique_key='_id', chunk_size=DEFAULT_MAX_BULK,
meta_index_name="mongodb_meta", meta_type="mongodb_meta",
**kwargs):
self.elastic = Elasticsearch(hosts=[url])
self.auto_commit_interval = auto_commit_interval
self.doc_type = 'string' # default type is string, change if needed
self.meta_index_name = meta_index_name
self.meta_type = meta_type
self.unique_key = unique_key
self.chunk_size = chunk_size
if self.auto_commit_interval not in [None, 0]:
self.run_auto_commit()
self._formatter = DefaultDocumentFormatter()
def stop(self):
"""Stop the auto-commit thread."""
self.auto_commit_interval = None
def apply_update(self, doc, update_spec):
if "$set" not in update_spec and "$unset" not in update_spec:
# Don't try to add ns and _ts fields back in from doc
return update_spec
return super(DocManager, self).apply_update(doc, update_spec)
@wrap_exceptions
def update(self, doc, update_spec):
"""Apply updates given in update_spec to the document whose id
matches that of doc.
"""
document = self.elastic.get(index=doc['ns'],
id=str(doc['_id']))
updated = self.apply_update(document['_source'], update_spec)
# _id is immutable in MongoDB, so won't have changed in update
updated['_id'] = document['_id']
# Add metadata fields back into updated, for the purposes of
# calling upsert(). Need to do this until these become separate
# arguments in 2.x
updated['ns'] = doc['ns']
updated['_ts'] = doc['_ts']
self.upsert(updated)
# upsert() strips metadata, so only _id + fields in _source still here
return updated
@wrap_exceptions
def upsert(self, doc):
"""Insert a document into Elasticsearch."""
doc_type = self.doc_type
index = doc.pop('ns')
# No need to duplicate '_id' in source document
doc_id = str(doc.pop("_id"))
metadata = {
"ns": index,
"_ts": doc.pop("_ts")
}
# Index the source document
self.elastic.index(index=index, doc_type=doc_type,
body=self._formatter.format_document(doc), id=doc_id,
refresh=(self.auto_commit_interval == 0))
# Index document metadata
self.elastic.index(index=self.meta_index_name, doc_type=self.meta_type,
body=bson.json_util.dumps(metadata), id=doc_id,
refresh=(self.auto_commit_interval == 0))
# Leave _id, since it's part of the original document
doc['_id'] = doc_id
@wrap_exceptions
def bulk_upsert(self, docs):
"""Insert multiple documents into Elasticsearch."""
def docs_to_upsert():
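            # Yield two bulk actions per source document: the document body
            # and a companion metadata record in the meta index.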
doc = None
for doc in docs:
# Remove metadata and redundant _id
index = doc.pop("ns")
doc_id = str(doc.pop("_id"))
timestamp = doc.pop("_ts")
document_action = {
"_index": index,
"_type": self.doc_type,
"_id": doc_id,
"_source": self._formatter.format_document(doc)
}
document_meta = {
"_index": self.meta_index_name,
"_type": self.meta_type,
"_id": doc_id,
"_source": {
"_ns": index,
"ts": timestamp
}
}
yield document_action
yield document_meta
if not doc:
raise errors.EmptyDocsError(
"Cannot upsert an empty sequence of "
"documents into Elastic Search")
try:
kw = {}
if self.chunk_size > 0:
kw['chunk_size'] = self.chunk_size
responses = streaming_bulk(client=self.elastic,
actions=docs_to_upsert(),
**kw)
for ok, resp in responses:
if not ok:
logging.error(
"Could not bulk-upsert document "
"into ElasticSearch: %r" % resp)
if self.auto_commit_interval == 0:
self.commit()
except errors.EmptyDocsError:
# This can happen when mongo-connector starts up, there is no
# config file, but nothing to dump
pass
@wrap_exceptions
def remove(self, doc):
"""Remove a document from Elasticsearch."""
# self.elastic.delete(index=doc['ns'], doc_type=self.doc_type,
# id=str(doc["_id"]),
# refresh=(self.auto_commit_interval == 0))
# self.elastic.delete(index=self.meta_index_name, doc_type=self.meta_type,
# id=str(doc["_id"]),
# refresh=(self.auto_commit_interval == 0))
pass
@wrap_exceptions
def _stream_search(self, *args, **kwargs):
"""Helper method for iterating over ES search results."""
for hit in scan(self.elastic, query=kwargs.pop('body', None),
scroll='10m', **kwargs):
hit['_source']['_id'] = hit['_id']
yield hit['_source']
def search(self, start_ts, end_ts):
"""Query Elasticsearch for documents in a time range.
This method is used to find documents that may be in conflict during
a rollback event in MongoDB.
"""
return self._stream_search(
index=self.meta_index_name,
body={
"query": {
"filtered": {
"filter": {
"range": {
"_ts": {"gte": start_ts, "lte": end_ts}
}
}
}
}
})
def commit(self):
"""Refresh all Elasticsearch indexes."""
retry_until_ok(self.elastic.indices.refresh, index="")
def run_auto_commit(self):
"""Periodically commit to the Elastic server."""
self.elastic.indices.refresh()
if self.auto_commit_interval not in [None, 0]:
Timer(self.auto_commit_interval, self.run_auto_commit).start()
@wrap_exceptions
def get_last_doc(self):
"""Get the most recently modified document from Elasticsearch.
This method is used to help define a time window within which documents
may be in conflict after a MongoDB rollback.
"""
try:
result = self.elastic.search(
index=self.meta_index_name,
body={
"query": {"match_all": {}},
"sort": [{"_ts": "desc"}],
},
size=1
)["hits"]["hits"]
for r in result:
r['_source']['_id'] = r['_id']
return r['_source']
except es_exceptions.RequestError:
# no documents so ES returns 400 because of undefined _ts mapping
return None
| apache-2.0 | -7,876,939,161,302,815,000 | 37.742616 | 82 | 0.549771 | false | 4.485589 | false | false | false |
LA-Toth/sitegen | sitegen/siteloader/pages.py | 1 | 3137 | import os
import markdown
from sitegen.siteloader.base import FinalHtmlAction, FSDependencyObserver
from sitegen.siteloader.dependency import Action
class MarkdownObserver(FSDependencyObserver):
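    """Registers build and install dependencies for each Markdown/HTML
    source file reported through notify()."""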
def notify(self, directory: str, entry: str):
is_md = entry.endswith('.md')
is_html = entry.endswith('.html')
if is_md or is_html:
name = os.path.splitext(entry)[0]
path = os.path.join(directory, entry)
name_path = os.path.join(directory, name)
sub_path_items = name_path.split(os.path.sep)[1:]
build_target_path = os.sep.join(['_build'] + sub_path_items) + '.middle'
yaml_target_path = build_target_path + '.yml'
install_target_path = os.sep.join(['_install'] + sub_path_items) + '.html'
action_class = MarkdownAction if is_md else HtmlAction
self._dependency_collector.add_site_dependency([install_target_path])
self._dependency_collector.add_dependency(install_target_path,
[build_target_path, yaml_target_path],
FinalHtmlAction)
self._dependency_collector.add_dependency(build_target_path, [path], action_class)
class _PageAction(Action):
max_deps_count = 1
def __get_input_text(self, path: str):
with open(path, 'rt') as f:
input_text = f.read()
lines = input_text.splitlines()
if lines[0] == '--':
end = lines[1:].index('--') + 1
yaml_text = '\n'.join(lines[1:end])
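            # NOTE: end + 2 assumes a blank separator line follows the
            # closing '--'; the body starts after that line.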
input_text = '\n'.join(lines[(end + 2):])
else:
yaml_text = "title: " + os.path.basename(path).rsplit('.', 1)[0]
return input_text, yaml_text
def run(self):
path, target_path, yaml_target_path = self.__get_full_paths()
if not os.path.exists(os.path.dirname(target_path)):
os.makedirs(os.path.dirname(target_path))
print("Compiling", self.target_path)
input_text, yaml_text = self.__get_input_text(path)
output_text = self._format_text(input_text)
self.__write_output_files(output_text, target_path, yaml_target_path, yaml_text)
def __get_full_paths(self):
path = os.path.join(self._site_root, self.dependencies[0])
target_path = os.path.join(self._site_root, self.target_path)
yaml_target_path = target_path + '.yml'
return path, target_path, yaml_target_path
def _format_text(self, input_text: str):
raise NotImplementedError("Cannot generate output text")
def __write_output_files(self, output_text, target_path, yaml_target_path, yaml_text):
with open(target_path, 'wt') as f:
f.write(output_text)
with open(yaml_target_path, 'wt') as f:
f.write(yaml_text)
class MarkdownAction(_PageAction):
def _format_text(self, input_text: str):
return markdown.markdown(input_text, output_format='html5')
class HtmlAction(_PageAction):
def _format_text(self, input_text: str):
return input_text
| gpl-3.0 | -8,737,573,106,826,761,000 | 34.247191 | 94 | 0.597386 | false | 3.66472 | false | false | false |