signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
---|---|
def consistent_with(self, state):
    """Make the query consistent with one or more previous mutations.

    :param state: the mutation state this query should observe
    :type state: :class:`~.couchbase.mutation_state.MutationState`
    :raises TypeError: if a conflicting consistency mode is already set,
        or if *state* is empty/falsy.
    """
    compatible_modes = (UNBOUNDED, NOT_BOUNDED, 'at_plus')
    if self.consistency not in compatible_modes:
        raise TypeError('consistent_with not valid with other consistency options')
    if not state:
        raise TypeError('Passed empty or invalid state', state)
    self.consistency = 'at_plus'
    self._body['scan_vectors'] = state._sv
def expire(self, key, timeout):
    """Set a time-to-live on *key*.

    A ``float`` timeout is converted to milliseconds and delegated to
    ``pexpire``; an ``int`` is sent via the ``EXPIRE`` command.

    :raises TypeError: if *timeout* is neither ``int`` nor ``float``.
    """
    if isinstance(timeout, float):
        # Sub-second resolution goes through PEXPIRE (milliseconds).
        return self.pexpire(key, int(timeout * 1000))
    if isinstance(timeout, int):
        return wait_convert(self.execute(b'EXPIRE', key, timeout), bool)
    raise TypeError(
        "timeout argument must be int, not {!r}".format(timeout))
def convert_text_to_rouge_format(text, title="dummy title"):
    """Convert a text to the HTML format ROUGE understands.

    The text is assumed to contain one sentence per line.

    text: The text to convert, containing one sentence per line.
    title: Optional title for the text. The title will appear in the
        converted file, but doesn't seem to have any other relevance.

    Returns: The converted text as string.
    """
    anchors = []
    # Sentences are numbered from 1 and wrapped in named anchor tags.
    for num, sentence in enumerate(text.split("\n"), start=1):
        anchors.append(
            '<a name="{i}">[{i}]</a> <a href="#{i}" id={i}>{text}</a>'.format(
                i=num, text=sentence))
    return (
        "<html>\n"
        "<head>\n"
        "<title>{title}</title>\n"
        "</head>\n"
        '<body bgcolor="white">\n'
        "{elems}\n"
        "</body>\n"
        "</html>"
    ).format(title=title, elems="\n".join(anchors))
def __validate1(property):
    """Exit with error if *property* is not valid.

    Free features accept arbitrary values, so only non-free features have
    their value checked against the feature's declared value set.
    """
    assert isinstance(property, Property)
    msg = None
    if not property.feature.free:
        # NOTE(review): `feature` here is presumably a sibling module (not the
        # shadowed `property.feature` attribute) -- confirm against imports.
        feature.validate_value_string(property.feature, property.value)
def main():
    """The main entry point of the program.

    Parses CLI arguments, collects documentation from the given path,
    renders it with the selected template, and either prints the result
    or writes it to the requested output file. Exits with status 1 on
    rendering errors.
    """
    # Parse command line arguments
    argp = _cli_argument_parser()
    args = argp.parse_args()
    # setup logging
    logging.basicConfig(level=args.loglevel, format="%(levelname)s %(message)s")
    console.display("Collecting documentation from files")
    collector_metrics = metrics.Metrics()
    docs = collector.parse(args.path, args.trace_parser, metrics=collector_metrics)
    collector_metrics.display()
    console.display("Rendering documentation")
    try:
        if args.output:
            # Choose the template from the output filename's extension.
            template = renderer.template_from_filename(args.output)
        else:
            # No output file: default to machine-readable JSON on stdout.
            template = "json"
        out = renderer.render(docs, template)
    except ValueError as err:
        logging.error(err)
        sys.exit(1)
    except TemplateNotFound as err:
        logging.error("Template `{}` not found. Available templates are: {}".format(err.name, renderer.list_templates()))
        sys.exit(1)
    if not args.output:
        print(out)
    else:
        console.display("Writing documentation to", args.output)
        with io.open(args.output, "w", encoding="utf-8") as fp:
            fp.write(out)
def get_scan_results_xml(self, scan_id, pop_res):
    """Collect the results of scan *scan_id* into a ``<results>`` XML element.

    :param scan_id: identifier of the scan whose results are gathered.
    :param pop_res: passed through to the results iterator -- presumably
        whether to pop (consume) results while iterating; confirm upstream.
    :return: an XML ``Element`` containing one child per result.
        NOTE(review): the original docstring claimed a string is returned,
        but the code returns the Element itself.
    """
    results = Element('results')
    for result in self.scan_collection.results_iterator(scan_id, pop_res):
        results.append(get_result_xml(result))
    # len() of an Element is its number of children, i.e. the result count.
    logger.info('Returning %d results', len(results))
    return results
def format_py3o_val(value):
    """Format a value to fit py3o's context.

    The value is coerced to unicode, escaped, and newlines are turned
    into ODF ``<text:line-break/>`` elements.
    """
    escaped = escape(force_unicode(value))
    with_breaks = escaped.replace(u'\n', u'<text:line-break/>')
    return Markup(with_breaks)
def locateChild(self, context, segments):
    """Delegate dispatch to a sharing resource if the request is for a user
    subdomain, otherwise fall back to the wrapped resource's
    ``locateChild`` implementation.
    """
    request = IRequest(context)
    hostname = request.getHeader('host')
    # subdomain() returns (username, domain) for user subdomains, else None.
    info = self.subdomain(hostname)
    if info is not None:
        username, domain = info
        index = UserIndexPage(IRealm(self.siteStore), self.webViewer)
        # Look up the user's resource; remaining segments are passed through.
        resource = index.locateChild(None, [username])[0]
        return resource, segments
    return self.wrapped.locateChild(context, segments)
def sort_with_heap(sequence):
    """Return the elements of *sequence* in ascending order using a heap.

    The sequence is turned into a heap and the smallest element is
    successively removed, yielding a sorted list (heapsort).

    Example:
        sort_with_heap([1, 3, 5, 7, 9, 2, 4, 6, 8, 0])
        Returns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        sort_with_heap([25, 35, 22, 85, 14, 65, 75, 25, 58])
        Returns: [14, 22, 25, 25, 35, 58, 65, 75, 85]
        sort_with_heap([7, 1, 9, 5])
        Returns: [1, 5, 7, 9]
    """
    import heapq as hq
    heap = list(sequence)
    # heapify is O(n), cheaper than n individual heappush calls (O(n log n)).
    hq.heapify(heap)
    # range(len(heap)) is evaluated once, before the pops shrink the heap.
    return [hq.heappop(heap) for _ in range(len(heap))]
def env_int(name, required=False, default=empty):
    """Pull an environment variable out of the environment and cast it to an
    integer.

    If the name is not present in the environment and no default is
    specified, a ``ValueError`` is raised. Similarly, if the environment
    value is not castable to an integer, a ``ValueError`` is raised.

    :param name: The name of the environment variable to be pulled
    :type name: str
    :param required: Whether the environment variable is required. If ``True``
        and the variable is not present, a ``KeyError`` is raised.
    :type required: bool
    :param default: The value to return if the environment variable is not
        present. (Providing a default alongside setting ``required=True``
        will raise a ``ValueError``.)
    """
    raw = get_env_value(name, required=required, default=default)
    if raw is empty:
        raise ValueError(
            "`env_int` requires either a default value to be specified, or for "
            "the variable to be present in the environment")
    return int(raw)
def _kernel_shape ( self , input_shape ) :
"""Helper to calculate the kernel shape .""" | kernel_size_iter = iter ( self . _kernel_size )
return [ self . _filters if c == 'O' else input_shape [ self . _lhs_spec . index ( 'C' ) ] if c == 'I' else next ( kernel_size_iter ) for c in self . _rhs_spec ] |
def list_build_records_for_set(id=None, name=None, page_size=200, page_index=0, sort="", q=""):
    """List all build records for a BuildConfigurationSet.

    Returns the formatted JSON list when the raw call yields content,
    otherwise ``None``.
    """
    content = list_build_records_for_set_raw(id, name, page_size, page_index, sort, q)
    if not content:
        return None
    return utils.format_json_list(content)
def create_pipeline(self, onetime=None):
    """Create the spinnaker pipeline(s).

    Selects the pipeline class from the configured pipeline type (or the
    one-time variant when *onetime* is given) and triggers its creation.

    :raises NotImplementedError: when the configured type is not allowed.
    """
    utils.banner("Creating Pipeline")
    kwargs = {
        'app': self.app,
        'trigger_job': self.trigger_job,
        'prop_path': self.json_path,
        'base': None,
        'runway_dir': self.runway_dir,
    }
    pipeline_type = self.configs['pipeline']['type']
    if pipeline_type not in consts.ALLOWED_TYPES:
        raise NotImplementedError('Pipeline type "{0}" not permitted.'.format(pipeline_type))
    if onetime:
        spinnakerpipeline = pipeline.SpinnakerPipelineOnetime(onetime=onetime, **kwargs)
    else:
        constructors = {
            'lambda': pipeline.SpinnakerPipelineLambda,
            's3': pipeline.SpinnakerPipelineS3,
            'datapipeline': pipeline.SpinnakerPipelineDataPipeline,
            'manual': pipeline.SpinnakerPipelineManual,
        }
        # Any other permitted type falls back to the generic pipeline.
        pipeline_cls = constructors.get(pipeline_type, pipeline.SpinnakerPipeline)
        spinnakerpipeline = pipeline_cls(**kwargs)
    spinnakerpipeline.create_pipeline()
def OnLineWidth(self, event):
    """Line width choice event handler.

    Reads the selected width from the combobox that fired the event and
    posts a border-width command for the currently selected borders.
    """
    linewidth_combobox = event.GetEventObject()
    idx = event.GetInt()
    # The combobox entries are numeric strings; convert to an int width.
    width = int(linewidth_combobox.GetString(idx))
    borders = self.bordermap[self.borderstate]
    post_command_event(self, self.BorderWidthMsg, width=width, borders=borders)
def simple_profile(self, annual_demand, **kwargs):
    """Create an industrial load profile scaled to *annual_demand*.

    Parameters
    ----------
    annual_demand : float
        Total demand.

    Other Parameters
    ----------------
    am : datetime.time
        beginning of workday
    pm : datetime.time
        end of workday
    week : list
        list of weekdays
    weekend : list
        list of weekend days
    profile_factors : dictionary
        dictionary with scaling factors for night and day of weekdays and
        weekend days
    """
    # Day (am to pm), night (pm to am), week day (week),
    # weekend day (weekend)
    am = kwargs.get('am', settime(7, 00, 0))
    pm = kwargs.get('pm', settime(23, 30, 0))
    week = kwargs.get('week', [1, 2, 3, 4, 5])
    weekend = kwargs.get('weekend', [0, 6, 7])
    default_factors = {'week': {'day': 0.8, 'night': 0.6}, 'weekend': {'day': 0.9, 'night': 0.7}}
    profile_factors = kwargs.get('profile_factors', default_factors)
    self.dataframe['ind'] = 0
    # mask(cond, value, True): the third positional argument is pandas'
    # inplace flag, so each call writes the factor into 'ind' directly.
    self.dataframe['ind'].mask(self.dataframe['weekday'].between_time(am, pm).isin(week), profile_factors['week']['day'], True)
    self.dataframe['ind'].mask(self.dataframe['weekday'].between_time(pm, am).isin(week), profile_factors['week']['night'], True)
    self.dataframe['ind'].mask(self.dataframe['weekday'].between_time(am, pm).isin(weekend), profile_factors['weekend']['day'], True)
    self.dataframe['ind'].mask(self.dataframe['weekday'].between_time(pm, am).isin(weekend), profile_factors['weekend']['night'], True)
    if self.dataframe['ind'].isnull().any(axis=0):
        logging.error('NAN value found in industrial load profile')
    # Index frequency in hours (nanoseconds / 3.6e12).
    time_interval = self.dataframe.index.freq.nanos / 3.6e12
    # Normalize the profile and scale it to the requested annual demand.
    return (self.dataframe['ind'] / self.dataframe['ind'].sum() * annual_demand / time_interval)
def assert_has_attr(obj, attribute, msg_fmt="{msg}"):
    """Fail if an object does not have an attribute.

    >>> assert_has_attr([], "index")
    >>> assert_has_attr([], "i_do_not_have_this")
    Traceback (most recent call last):
        ...
    AssertionError: [] does not have attribute 'i_do_not_have_this'

    The following msg_fmt arguments are supported:
    * msg - the default error message
    * obj - object to test
    * attribute - name of the attribute to check
    """
    if hasattr(obj, attribute):
        return
    msg = "{!r} does not have attribute '{}'".format(obj, attribute)
    fail(msg_fmt.format(msg=msg, obj=obj, attribute=attribute))
def save_configuration_to_hdf5(register, configuration_file, name=''):
    '''Save a configuration from a register object into an HDF5 file.

    Parameters
    ----------
    register : pybar.fei4.register object
        Source of the calibration parameters, miscellaneous values and
        global/pixel registers written out.
    configuration_file : string or tb.file.File
        Filename of the HDF5 configuration file or an already open file object.
    name : string
        Additional identifier (subgroup). Useful when storing more than one
        configuration inside a HDF5 file.

    NOTE(review): uses dict.iteritems()/itervalues(), so Python 2 only.
    '''
    def save_conf():
        # Writes all configuration tables/arrays below /configuration[/name],
        # replacing any nodes of the same name.
        logging.info("Saving configuration: %s" % h5_file.filename)
        register.configuration_file = h5_file.filename
        try:
            configuration_group = h5_file.create_group(h5_file.root, "configuration")
        except tb.NodeError:
            # Group already exists; reuse it.
            configuration_group = h5_file.root.configuration
        if name:
            try:
                configuration_group = h5_file.create_group(configuration_group, name)
            except tb.NodeError:
                # NOTE(review): this accesses the literal attribute `name`,
                # not the subgroup named by the `name` variable -- confirm.
                configuration_group = h5_file.root.configuration.name
        # calibration_parameters
        try:
            h5_file.remove_node(configuration_group, name='calibration_parameters')
        except tb.NodeError:
            pass
        calibration_data_table = h5_file.create_table(configuration_group, name='calibration_parameters', description=NameValue, title='calibration_parameters')
        calibration_data_row = calibration_data_table.row
        for key, value in register.calibration_parameters.iteritems():
            calibration_data_row['name'] = key
            calibration_data_row['value'] = str(value)
            calibration_data_row.append()
        calibration_data_table.flush()
        # miscellaneous
        try:
            h5_file.remove_node(configuration_group, name='miscellaneous')
        except tb.NodeError:
            pass
        miscellaneous_data_table = h5_file.create_table(configuration_group, name='miscellaneous', description=NameValue, title='miscellaneous')
        miscellaneous_data_row = miscellaneous_data_table.row
        miscellaneous_data_row['name'] = 'Flavor'
        miscellaneous_data_row['value'] = register.flavor
        miscellaneous_data_row.append()
        miscellaneous_data_row['name'] = 'Chip_ID'
        miscellaneous_data_row['value'] = register.chip_id
        miscellaneous_data_row.append()
        for key, value in register.miscellaneous.iteritems():
            miscellaneous_data_row['name'] = key
            miscellaneous_data_row['value'] = value
            miscellaneous_data_row.append()
        miscellaneous_data_table.flush()
        # global
        try:
            h5_file.remove_node(configuration_group, name='global_register')
        except tb.NodeError:
            pass
        global_data_table = h5_file.create_table(configuration_group, name='global_register', description=NameValue, title='global_register')
        global_data_table_row = global_data_table.row
        global_regs = register.get_global_register_objects(readonly=False)
        for global_reg in sorted(global_regs, key=itemgetter('name')):
            global_data_table_row['name'] = global_reg['name']
            global_data_table_row['value'] = global_reg['value']
            # TODO: some function that converts to bin, hex
            global_data_table_row.append()
        global_data_table.flush()
        # pixel
        for pixel_reg in register.pixel_registers.itervalues():
            try:
                h5_file.remove_node(configuration_group, name=pixel_reg['name'])
            except tb.NodeError:
                pass
            # Transposed so the on-disk layout matches the storage convention.
            data = pixel_reg['value'].T
            atom = tb.Atom.from_dtype(data.dtype)
            ds = h5_file.create_carray(configuration_group, name=pixel_reg['name'], atom=atom, shape=data.shape, title=pixel_reg['name'])
            ds[:] = data
    if isinstance(configuration_file, tb.file.File):
        # Caller manages the open file's lifetime.
        h5_file = configuration_file
        save_conf()
    else:
        with tb.open_file(configuration_file, mode="a", title='') as h5_file:
            save_conf()
def yesno(message, default='yes', suffix=' '):
    """Prompt the user to answer yes or no.

    Returns True when the chosen answer matches *default*, otherwise
    False. An empty response selects the default; invalid input
    re-prompts until a recognizable answer is given.

    :raises ValueError: if *default* is neither 'yes' nor 'no'.
    """
    if default == 'yes':
        yesno_prompt = '[Y/n]'
    elif default == 'no':
        yesno_prompt = '[y/N]'
    else:
        raise ValueError("default must be 'yes' or 'no'.")
    if message != '':
        prompt_text = "{0} {1}{2}".format(message, yesno_prompt, suffix)
    else:
        prompt_text = "{0}{1}".format(yesno_prompt, suffix)
    while True:
        response = get_input(prompt_text).strip()
        if response == '':
            # Empty input means the default was accepted.
            return True
        if re.match('^(y)(es)?$', response, re.IGNORECASE):
            return default == 'yes'
        if re.match('^(n)(o)?$', response, re.IGNORECASE):
            return default == 'no'
def requirements(self, requires):
    '''Set the requirements for the package.

    Accepts either a valid path to a requirements file or a list of
    requirements. When *requires* is falsy, falls back to the default
    requirements file if it exists, otherwise clears both fields.

    NOTE(review): uses `basestring`, so Python 2 only.
    '''
    if requires:
        if isinstance(requires, basestring) and os.path.isfile(os.path.abspath(requires)):
            # A valid file path wins: record the file path only.
            self._requirements_file = os.path.abspath(requires)
        else:
            # NOTE(review): this tests the type of the *existing*
            # self._requirements rather than of `requires`; looks like it
            # was meant to split a string `requires` -- confirm.
            if isinstance(self._requirements, basestring):
                requires = requires.split()
            self._requirements_file = None
            self._requirements = requires
    else:
        # If the default requirements file is found use that
        if os.path.isfile(self._requirements_file):
            return
        self._requirements, self._requirements_file = None, None
async def send_notification(self, method, args=()):
    '''Send an RPC notification over the network.

    :param method: name of the remote method to invoke
    :param args: positional arguments for the notification; a
        notification expects no response from the peer
    '''
    message = self.connection.send_notification(Notification(method, args))
    await self._send_message(message)
def _create_activity2(self, parent, name, activity_type=ActivityType.TASK):
    """Create a new activity.

    .. important::
        This function creates activities for KE-chain versions later than
        2.9.0-135, i.e. where the internal 'wim' module has version
        '>=2.0.0'. The version of 'wim' in KE-chain can be found in the
        property :attr:`Client.app_versions`. In WIM2 the type of the
        activity is called activity_type.

    :param parent: parent under which to create the activity
    :type parent: basestring or :class:`models.Activity2`
    :param name: new activity name
    :type name: basestring
    :param activity_type: type of activity: TASK (default) or PROCESS
    :type activity_type: basestring
    :return: the created :class:`models.Activity2`
    :raises APIError: When the object could not be created
    :raises IllegalArgumentError: When an incorrect activity_type or parent is provided
    """
    # WIM1: activity_class, WIM2: activity_type
    if self.match_app_version(label='wim', version='<2.0.0', default=True):
        raise APIError('This method is only compatible with versions of KE-chain where the internal `wim` module ' 'has a version >=2.0.0. Use the `Client.create_activity()` method.')
    if activity_type and activity_type not in ActivityType.values():
        raise IllegalArgumentError("Please provide accepted activity_type (provided:{} accepted:{})".format(activity_type, ActivityType.values()))
    if isinstance(parent, (Activity, Activity2)):
        # Accept an activity object and use its id.
        parent = parent.id
    elif is_uuid(parent):
        parent = parent
    else:
        raise IllegalArgumentError("Please provide either an activity object or a UUID")
    data = {"name": name, "parent_id": parent, "activity_type": activity_type}
    response = self._request('POST', self._build_url('activities'), data=data, params=API_EXTRA_PARAMS['activities'])
    if response.status_code != requests.codes.created:  # pragma: no cover
        raise APIError("Could not create activity")
    data = response.json()
    return Activity2(data['results'][0], client=self)
def post(self, request, bot_id, format=None):
    """Add a new hook.

    Delegates to the parent list view's ``post`` implementation.

    serializer: HookSerializer
    responseMessages:
        - code: 401
          message: Not authenticated
        - code: 400
          message: Not valid request
    """
    return super(HookList, self).post(request, bot_id, format)
async def wait_tasks(tasks, flatten=True):
    '''Gather a list of asynchronous tasks and wait for their completion.

    :param list tasks:
        A list of *asyncio* tasks wrapped in :func:`asyncio.ensure_future`.
    :param bool flatten:
        If ``True`` the returned results are flattened into one list when
        every task returns an iterable object; otherwise the parameter
        has no effect.
    :returns:
        The results of the tasks as a list (flattened when requested).
    '''
    results = await asyncio.gather(*tasks)
    if flatten and all(hasattr(result, '__iter__') for result in results):
        results = list(itertools.chain.from_iterable(results))
    return results
def get(self, model_class, strict=True, returnDict=False, fetchOne=False, **where):
    '''Query rows from the table backing *model_class*.

    :param model_class: The queried model class; its lowercased name is
        used as the table name.
    :param strict: bool -> If True, queries are run with the EQUAL (=)
        operator. If False, queries are run with the RLIKE keyword.
    :param returnDict: bool -> Return dictionaries (field_names: values)
        instead of raw tuples.
    :param fetchOne: bool -> cursor.fetchone() else cursor.fetchall().
    :param where: **kwargs for the query's WHERE condition; when empty,
        all rows in the table are returned.

    Usage:
        print(Session().get(Employee, id=1, returnDict=True))
    '''
    self.typeassert(model_class, strict, returnDict, where)
    table = model_class.__name__.lower()
    with Session(self.settings) as conn:
        if not where:
            query = f'SELECT * FROM {table}'
        else:
            # WARNING(review): keys and values are interpolated directly
            # into the SQL string -- vulnerable to SQL injection; should
            # use parameterized queries.
            query = f'SELECT * FROM {table} WHERE'
            index = 1
            operator = '=' if strict else 'RLIKE'
            for key, value in where.items():
                if index == 1:
                    query += " %s %s '%s' " % (key, operator, value)
                else:
                    query += " AND %s %s '%s' " % (key, operator, value)
                index += 1
        try:
            cursor = conn.cursor()
            cursor.execute(query)
        except mysql.Error as e:
            if e.errno == 1146:
                # Unknown table: report and return no rows instead of raising.
                print(f"The table {table} does not exist")
                return []
            else:
                raise e
        else:
            if fetchOne:
                colnames = [d[0] for d in cursor.description]
                results = cursor.fetchone()
                if returnDict:
                    # Empty dict when the query matched no row.
                    return {col: val for col, val in zip(colnames, results)} if results else {}
                return results
            return self.handleResult(cursor, returnDict)
def key_binding(self, keydef, mode='force'):
    """Function decorator to register a low-level key binding.

    The callback function signature is ``fun(key_state, key_name)`` where
    ``key_state`` is either ``'U'`` for "key up" or ``'D'`` for "key down".

    The keydef format is: ``[Shift+][Ctrl+][Alt+][Meta+]<key>`` where
    ``<key>`` is either the literal character the key produces (ASCII or
    Unicode character), or a symbolic name (as printed by
    ``mpv --input-keylist``).

    To unregister the callback function, call its
    ``unregister_mpv_key_bindings`` attribute::

        player = mpv.MPV()
        @player.key_binding('Q')
        def binding(state, name):
            print('blep')
        binding.unregister_mpv_key_bindings()

    WARNING: For a single keydef only a single callback/command can be
    registered at the same time. Registering a binding multiple times
    overwrites older bindings and may leak references, so don't do that.

    BIG FAT WARNING: mpv's key binding mechanism essentially gives
    arbitrary code execution through key bindings. This interface makes
    some limited effort to sanitize the keydef given in the first
    parameter, but YOU SHOULD NOT RELY ON THIS FOR SECURITY. Config-file
    input is fine; double-check before passing untrusted input here.
    """
    def decorate(fun):
        # Accumulate keydefs on the function so they can all be unregistered.
        fun.mpv_key_bindings = getattr(fun, 'mpv_key_bindings', []) + [keydef]

        def unregister_all():
            for binding_def in fun.mpv_key_bindings:
                self.unregister_key_binding(binding_def)
        fun.unregister_mpv_key_bindings = unregister_all

        self.register_key_binding(keydef, fun, mode)
        return fun
    return decorate
def FileEntryExistsByPathSpec(self, path_spec):
    """Determine if a file entry for a path specification exists.

    Args:
        path_spec (PathSpec): a path specification.

    Returns:
        bool: True if the file entry exists.
    """
    location = getattr(path_spec, 'location', None)
    if location is None:
        return False
    if not location.startswith(self.LOCATION_ROOT):
        return False
    if len(location) == 1:
        # The location is the root itself, which always exists.
        return True
    # Strip the leading root separator before querying the archive.
    return self._cpio_archive_file.FileEntryExistsByPath(location[1:])
def index(args):
    """%prog index database.fasta

    Wrapper for `bwa index`. Same interface.
    """
    p = OptionParser(index.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        # Wrong argument count: print usage and exit non-zero.
        sys.exit(not p.print_help())
    dbfile, = args
    check_index(dbfile)
def _tot_unhandled_services_by_state ( self , state ) :
"""Generic function to get the number of unhandled problem services in the specified state
: param state : state to filter on
: type state :
: return : number of service in state * state * and which are not acknowledged problems
: rtype : int""" | return sum ( 1 for s in self . services if s . state == state and s . is_problem and not s . problem_has_been_acknowledged ) |
def _add_to_download_queue(self, lpath, rfile):
    # type: (Downloader, pathlib.Path, blobxfer.models.azure.StorageEntity) -> None
    """Add remote file to download queue.

    :param Downloader self: this
    :param pathlib.Path lpath: local path
    :param blobxfer.models.azure.StorageEntity rfile: remote file
    """
    # prepare remote file for download
    dd = blobxfer.models.download.Descriptor(lpath, rfile, self._spec.options, self._general_options, self._resume)
    with self._transfer_lock:
        # Initialize the concurrent-chunk counter for this entity.
        self._transfer_cc[dd.entity.path] = 0
    if dd.entity.is_encrypted:
        # Track encrypted downloads so finalization can find the descriptor.
        self._dd_map[str(dd.final_path)] = dd
    # add download descriptor to queue
    self._transfer_queue.put(dd)
    if self._download_start_time is None:
        # Double-checked under the lock so only the first download sets it.
        with self._transfer_lock:
            if self._download_start_time is None:
                self._download_start_time = blobxfer.util.datetime_now()
def report_many(self, event_list, metadata=None, block=None):
    """Report all the given events by formatting them properly and placing
    them in the buffer to be sent by the Sender instance.

    :param event_list: A list of dicts/strings representing events
    :param metadata: (Optional) A dict with extra metadata to be attached
        to the event
    :param block: (Optional) If True, block the thread until the event
        buffer has space; if False, discard events when the queue is
        full. Defaults to None, which uses the global `block` parameter
        given in the `init`.
    :return: A list of tuples, each containing a failed event and its
        original index. An empty list means success.
    """
    failed = []
    for position, event in enumerate(event_list):
        if not self.report(event, metadata, block):
            failed.append((position, event))
    return failed
def strip_tweet(text, remove_url=True):
    """Strip a tweet message.

    Removes mention strings and, optionally, URLs; HTML entities are
    unescaped and surrounding whitespace trimmed.

    :param text: tweet message
    :type text: :class:`str`
    :param remove_url: remove URLs entirely; when ``False`` they are
        expanded instead. default: :const:`True`
    :type remove_url: :class:`boolean`
    :returns: stripped tweet message
    :rtype: :class:`str`
    """
    cleaned = url_pattern.sub('', text) if remove_url else expand_url(text)
    cleaned = mention_pattern.sub('', cleaned)
    cleaned = html_parser.unescape(cleaned)
    return cleaned.strip()
def remove_foothills(self, q_data, marked, bin_num, bin_lower, centers, foothills):
    """Mark points determined to be foothills as globbed, so that they are
    not included in future searches. Also searches neighboring points to
    foothill points to determine if they should also be considered
    foothills.

    Args:
        q_data: Quantized data
        marked: Marked
        bin_num: Current bin being searched
        bin_lower: Next bin being searched
        centers: dictionary of local maxima considered to be object centers
        foothills: List of foothill points being removed.
    """
    hills = []
    for foot in foothills:
        center = foot[0]
        # Seed the worklist with this foothill's points (copied in place).
        hills[:] = foot[1][:]
        # remove all foothills
        while len(hills) > 0:
            # mark this point
            pt = hills.pop(-1)
            marked[pt] = self.GLOBBED
            # Examine the 3x3 neighborhood around pt.
            for s_index, val in np.ndenumerate(marked[pt[0] - 1:pt[0] + 2, pt[1] - 1:pt[1] + 2]):
                index = (s_index[0] - 1 + pt[0], s_index[1] - 1 + pt[1])
                # is neighbor part of peak or part of mountain?
                if val == self.UNMARKED:
                    # will let in even minor peaks
                    if (q_data[index] >= 0) and (q_data[index] < bin_lower) and ((q_data[index] <= q_data[pt]) or self.is_closest(index, center, centers, bin_num)):
                        hills.append(index)
    # All foothills processed: clear the caller's list in place.
    del foothills[:]
def sort(self, key, *get_patterns, by=None, offset=None, count=None, asc=None, alpha=False, store=None):
    """Sort the elements in a list, set or sorted set.

    Builds the Redis ``SORT`` argument list from the given options
    (BY, LIMIT, GET..., ASC/DESC, ALPHA, STORE) and executes it.
    """
    args = []
    if by is not None:
        args.extend([b'BY', by])
    if offset is not None and count is not None:
        args.extend([b'LIMIT', offset, count])
    for pattern in get_patterns:
        args.extend([b'GET', pattern])
    if asc is not None:
        # Only the exact value True selects ASC; anything else means DESC.
        args.append(b'ASC' if asc is True else b'DESC')
    if alpha:
        args.append(b'ALPHA')
    if store is not None:
        args.extend([b'STORE', store])
    return self.execute(b'SORT', key, *args)
def query(self, coords):
    """Return the map value at the given sky coordinates.

    Args:
        coords (`astropy.coordinates.SkyCoord`): The coordinates to query.

    Returns:
        A float array of the value of the map at the given coordinates.
        The shape of the output is the same as the shape of the
        coordinates stored by `coords`.
    """
    # Convert coordinates to HEALPix pixel indices in this map's frame.
    pix_idx = coord2healpix(coords, self._frame, self._nside, nest=self._nest)
    return self._pix_val[pix_idx]
def init():
    '''Return the list of svn remotes and their configuration information.'''
    bp_ = os.path.join(__opts__['cachedir'], 'svnfs')
    new_remote = False
    repos = []
    # Global svnfs_* options serve as defaults for each remote.
    per_remote_defaults = {}
    for param in PER_REMOTE_OVERRIDES:
        per_remote_defaults[param] = six.text_type(__opts__['svnfs_{0}'.format(param)])
    for remote in __opts__['svnfs_remotes']:
        repo_conf = copy.deepcopy(per_remote_defaults)
        if isinstance(remote, dict):
            # Remote given as a mapping: {url: [{param: value}, ...]}.
            repo_url = next(iter(remote))
            per_remote_conf = dict([(key, six.text_type(val)) for key, val in six.iteritems(salt.utils.data.repack_dictlist(remote[repo_url]))])
            if not per_remote_conf:
                log.error('Invalid per-remote configuration for remote %s. If no ' 'per-remote parameters are being specified, there may be ' 'a trailing colon after the URL, which should be removed. ' 'Check the master configuration file.', repo_url)
                _failhard()
            per_remote_errors = False
            for param in (x for x in per_remote_conf if x not in PER_REMOTE_OVERRIDES):
                log.error('Invalid configuration parameter \'%s\' for remote %s. ' 'Valid parameters are: %s. See the documentation for ' 'further information.', param, repo_url, ', '.join(PER_REMOTE_OVERRIDES))
                per_remote_errors = True
            if per_remote_errors:
                _failhard()
            repo_conf.update(per_remote_conf)
        else:
            repo_url = remote
        if not isinstance(repo_url, six.string_types):
            log.error('Invalid svnfs remote %s. Remotes must be strings, you may ' 'need to enclose the URL in quotes', repo_url)
            _failhard()
        try:
            repo_conf['mountpoint'] = salt.utils.url.strip_proto(repo_conf['mountpoint'])
        except TypeError:
            # mountpoint not specified
            pass
        # Each remote gets its own cache directory keyed by the URL hash.
        hash_type = getattr(hashlib, __opts__.get('hash_type', 'md5'))
        repo_hash = hash_type(repo_url).hexdigest()
        rp_ = os.path.join(bp_, repo_hash)
        if not os.path.isdir(rp_):
            os.makedirs(rp_)
        if not os.listdir(rp_):
            # Only attempt a new checkout if the directory is empty.
            try:
                CLIENT.checkout(repo_url, rp_)
                # NOTE(review): this appends the bare path, while the loop's
                # end appends the full repo_conf dict -- mixed entry types;
                # confirm whether this is intended.
                repos.append(rp_)
                new_remote = True
            except pysvn._pysvn.ClientError as exc:
                log.error('Failed to initialize svnfs remote \'%s\': %s', repo_url, exc)
                _failhard()
        else:
            # Confirm that there is an svn checkout at the necessary path by
            # running pysvn.Client().status()
            try:
                CLIENT.status(rp_)
            except pysvn._pysvn.ClientError as exc:
                log.error('Cache path %s (corresponding remote: %s) exists but is ' 'not a valid subversion checkout. You will need to ' 'manually delete this directory on the master to continue ' 'to use this svnfs remote.', rp_, repo_url)
                _failhard()
        repo_conf.update({'repo': rp_, 'url': repo_url, 'hash': repo_hash, 'cachedir': rp_, 'lockfile': os.path.join(rp_, 'update.lk')})
        repos.append(repo_conf)
    if new_remote:
        # Persist the hash -> URL mapping for administrators.
        remote_map = os.path.join(__opts__['cachedir'], 'svnfs/remote_map.txt')
        try:
            with salt.utils.files.fopen(remote_map, 'w+') as fp_:
                timestamp = datetime.now().strftime('%d %b %Y %H:%M:%S.%f')
                fp_.write('# svnfs_remote map as of {0}\n'.format(timestamp))
                for repo_conf in repos:
                    fp_.write(salt.utils.stringutils.to_str('{0} = {1}\n'.format(repo_conf['hash'], repo_conf['url'])))
        except OSError:
            pass
        else:
            log.info('Wrote new svnfs_remote map to %s', remote_map)
    return repos
def plot_csm_and_maps(self, isite, max_csm=8.0):
    """Plot the continuous symmetry measure (CSM) maps for a given site.

    Builds the figure via :meth:`get_csm_and_maps` and displays it with
    matplotlib.

    :param isite: Index of the site for which the plot has to be done
    :param max_csm: Upper bound of the continuous symmetry measure used
        for the color scale of the map.
    :return: Nothing returned, just plot the figure
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        print('Plotting Chemical Environments requires matplotlib ... exiting "plot" function')
        return
    fig = self.get_csm_and_maps(isite=isite, max_csm=max_csm)
    if fig is None:
        # Nothing to plot for this site.
        return
    plt.show()
async def _load_tuple(self, reader, elem_type, params=None, elem=None):
    """Loads tuple of elements from the reader. Supports the tuple ref.
    Returns loaded tuple.

    :param reader: stream to deserialize from
    :param elem_type: tuple message type; provides per-element field specs
        via ``f_specs()``
    :param params: optional parameters; ``params[0]`` may override the field
        specs, the remainder is forwarded to nested field loads
    :param elem: optional pre-allocated container to load into; when given,
        its length must match the serialized element count
    :return: the loaded tuple (``elem`` if given, otherwise a new list)
    """
    c_len = await load_uvarint(reader)
    if elem and c_len != len(elem):
        raise ValueError("Size mismatch")
    if c_len != len(elem_type.f_specs()):
        raise ValueError("Tuple size mismatch")
    elem_fields = params[0] if params else None
    if elem_fields is None:
        elem_fields = elem_type.f_specs()
    res = elem if elem else []
    for i in range(c_len):
        try:
            # Track the element index so errors can report a precise path.
            self.tracker.push_index(i)
            fvalue = await self.load_field(
                reader,
                elem_fields[i],
                params[1:] if params else None,
                eref(res, i) if elem else None,
            )
            self.tracker.pop()
        except Exception as e:
            # Wrap with the tracker so the failing position is preserved.
            raise helpers.ArchiveException(e, tracker=self.tracker) from e
        if not elem:
            res.append(fvalue)
    return res
def main():
    """Use processes and Netmiko to connect to each of the devices. Execute
    'show version' on each device. Record the amount of time required to do this."""
    start_time = datetime.now()
    # Spawn one worker process per device, then start them all.
    workers = [Process(target=show_version, args=(device,)) for device in devices]
    for worker in workers:
        worker.start()
    # Wait for every worker to finish, reporting each one as we go.
    for worker in workers:
        print(worker)
        worker.join()
    print("\nElapsed time: " + str(datetime.now() - start_time))
def refresh(self, url=CONST.PANEL_URL):
    """Refresh the alarm device.

    Fetches fresh state via the base class and mirrors the first response
    entry into the shared Abode panel cache.

    :param url: endpoint to refresh from (defaults to the panel URL)
    :return: the raw response object from the base refresh
    """
    response_object = AbodeDevice.refresh(self, url)
    # pylint: disable=W0212
    # Keep the shared panel state in sync with this device's response.
    self._abode._panel.update(response_object[0])
    return response_object
def rangefinder_send(self, distance, voltage, force_mavlink1=False):
    '''Rangefinder reporting

    distance  : distance in meters (float)
    voltage   : raw voltage if available, zero otherwise (float)
    force_mavlink1 : force the message onto the MAVLink 1 wire format
    '''
    # Encode the RANGEFINDER message and transmit it on this connection.
    return self.send(self.rangefinder_encode(distance, voltage), force_mavlink1=force_mavlink1)
def t_ID(self, t):
    r'[a-zA-Z]+'
    # NOTE: the raw-string "docstring" above is the PLY token regex, not
    # documentation — it must not be edited as prose.
    # Classify an alphabetic token: reserved words take precedence, then
    # information units/categories, then duration units.
    if t.value in self._RESERVED:  # idiom fix: test the dict, not .keys()
        t.type = self._RESERVED[t.value]
        return t
    if Information.is_valid_symbol(t.value) or Information.is_valid_category(t.value):
        t.type = self._INFORMATION_UNIT
        return t
    if Duration.is_valid_symbol(t.value):
        t.type = self._DURATION_UNIT
        return t
    # Anything unrecognised is a lexing error with position information.
    raise LexingError('Unrecognised token or unit \'{0.value}\' at '
                      'position {0.lexpos}'.format(t))
def find_actions(orbit, N_max, force_harmonic_oscillator=False, toy_potential=None):
    r"""Find approximate actions and angles for samples of a phase-space orbit.

    Uses toy potentials with known, analytic action-angle transformations to
    approximate the true coordinates as a Fourier sum.

    This code is adapted from Jason Sanders'
    `genfunc <https://github.com/jlsanders/genfunc>`_

    Parameters
    ----------
    orbit : `~gala.dynamics.Orbit`
    N_max : int
        Maximum integer Fourier mode vector length, :math:`|\boldsymbol{n}|`.
    force_harmonic_oscillator : bool (optional)
        Force using the harmonic oscillator potential as the toy potential.
    toy_potential : Potential (optional)
        Fix the toy potential class.

    Returns
    -------
    aaf : dict
        A Python dictionary containing the actions, angles, frequencies, and
        value of the generating function and derivatives for each integer
        vector. Each value of the dictionary is a :class:`numpy.ndarray` or
        :class:`astropy.units.Quantity`.
    """
    if orbit.norbits == 1:
        return _single_orbit_find_actions(
            orbit, N_max,
            force_harmonic_oscillator=force_harmonic_oscillator,
            toy_potential=toy_potential)
    else:
        norbits = orbit.norbits
        # One 3-vector per orbit, stored column-wise.
        actions = np.zeros((3, norbits))
        angles = np.zeros((3, norbits))
        freqs = np.zeros((3, norbits))
        for n in range(norbits):
            aaf = _single_orbit_find_actions(
                orbit[:, n], N_max,
                force_harmonic_oscillator=force_harmonic_oscillator,
                toy_potential=toy_potential)
            # Bug fix: each per-orbit result is a length-3 vector, so it must
            # fill *column* n. The previous code assigned row n of a
            # (3, norbits) array, which only has length 3 when norbits == 3.
            actions[:, n] = aaf['actions'].value
            angles[:, n] = aaf['angles'].value
            freqs[:, n] = aaf['freqs'].value
        # NOTE(review): the 'Sn'/'dSn' slices look like leftovers from the
        # single-orbit return format and evaluate to empty arrays here —
        # confirm the intended multi-orbit semantics.
        return dict(actions=actions * aaf['actions'].unit,
                    angles=angles * aaf['angles'].unit,
                    freqs=freqs * aaf['freqs'].unit,
                    Sn=actions[3:], dSn=angles[6:],
                    nvecs=aaf['nvecs'])
def add_geo_facet(self, *args, **kwargs):
    """Add a geo distance facet.

    All positional and keyword arguments are forwarded unchanged to the
    :class:`GeoDistanceFacet` constructor; the resulting facet is appended
    to this query's facet list.
    """
    self.facets.append(GeoDistanceFacet(*args, **kwargs))
def adaptive_rejection_sampling(logpdf: callable, a: float, b: float, domain: Tuple[float, float], n_samples: int, random_stream=None):
    """Adaptive rejection sampling samples exactly (all samples are i.i.d) and
    efficiently from any univariate log-concave distribution. The basic idea is
    to successively determine an envelope of straight-line segments to
    construct an increasingly accurate approximation of the logarithm.

    It does not require any normalization of the target distribution.

    Parameters
    ----------
    logpdf : callable
        Univariate function that computes :math:`log(f(u))`
        for a given :math:`u`, where :math:`f(u)` is proportional
        to the target density to sample from.
    a : float
        Lower starting point used to initialize the hulls.
        Must lie in the domain of the logpdf and it
        must hold: :math:`a < b`.
    b : float
        Upper starting point used to initialize the hulls.
        Must lie in the domain of the logpdf and it
        must hold: :math:`a < b`.
    domain : Tuple[float, float]
        Domain of `logpdf`.
        May be unbounded on either or both sides,
        in which case `(float("-inf"), float("inf"))`
        would be passed.
        If this domain is unbounded to the left,
        the derivative of the logpdf for x <= a must be positive.
        If this domain is unbounded to the right the derivative of the logpdf
        for x >= b must be negative.
    n_samples : int
        Number of samples to draw.
    random_stream : RandomState, optional
        Seeded random number generator object with same interface as a NumPy
        RandomState object. Defaults to `None` in which case a NumPy
        RandomState seeded from `/dev/urandom` if available or the clock if
        not will be used.

    Returns
    -------
    samples : list
        A list of samples drawn from the
        target distribution :math:`f`
        with the given `logpdf`.

    Examples
    --------
    Sampling from a simple gaussian, adaptive rejection sampling style.
    We use the logpdf of a standard gaussian and this small code snippet
    demonstrates that our sample approximation accurately approximates the
    mean:

    >>> from math import isclose
    >>> from numpy import log, exp, mean
    >>> gaussian_logpdf = lambda x, sigma=1: log(exp(-x ** 2 / sigma))
    >>> a, b = -2, 2  # a < b must hold
    >>> domain = (float("-inf"), float("inf"))
    >>> n_samples = 10000
    >>> samples = adaptive_rejection_sampling(logpdf=gaussian_logpdf, a=a, b=b, domain=domain, n_samples=n_samples)
    >>> isclose(mean(samples), 0.0, abs_tol=1e-02)
    True
    """
    assert (hasattr(logpdf, "__call__"))
    assert (len(domain) == 2), "Domain must be two-element iterable."
    assert (domain[1] >= domain[0]), "Invalid domain, it must hold: domain[1] >= domain[0]."
    assert (n_samples >= 0), "Number of samples must be >= 0."
    if random_stream is None:
        random_stream = RandomState()
    if a >= b or isinf(a) or isinf(b) or a < domain[0] or b > domain[1]:
        raise ValueError("invalid a and b")
    # Step size used for the finite-difference derivative checks below.
    n_derivative_steps = 1e-3 * (b - a)
    S = (a, a + n_derivative_steps, b - n_derivative_steps, b)
    if domain[0] == float("-inf"):
        # ensure positive derivative at 'a'
        derivative_sign = sign(logpdf(a + n_derivative_steps) - logpdf(a))
        positive_derivative = derivative_sign > 0
        assert (positive_derivative), "derivative at 'a' must be positive, since the domain is unbounded to the left"
    if domain[1] == float("inf"):
        # ensure negative derivative at 'b'
        derivative_sign = sign(logpdf(b) - logpdf(b - n_derivative_steps))
        negative_derivative = derivative_sign < 0
        assert (negative_derivative), "derivative at 'b' must be negative, since the domain is unbounded to the right"
    # initialize a mesh on which to create upper & lower hulls
    n_initial_mesh_points = 3
    S = unique((S[0], *(linspace(S[1], S[2], num=n_initial_mesh_points + 2)), S[3]))
    fS = tuple(logpdf(s) for s in S)
    lower_hull, upper_hull = compute_hulls(S=S, fS=fS, domain=domain)
    samples = []
    while len(samples) < n_samples:
        mesh_changed = False
        # Draw a proposal from the piecewise-exponential upper hull.
        x = sample_upper_hull(upper_hull, random_stream=random_stream)
        lh_val, uh_val = evaluate_hulls(x, lower_hull, upper_hull)
        U = random_stream.rand()
        if log(U) <= lh_val - uh_val:
            # accept: u is below lower bound (squeeze test, no logpdf call)
            samples.append(x)
        elif log(U) <= logpdf(x) - uh_val:
            # accept: u is between lower bound and f
            samples.append(x)
            mesh_changed = True
        else:
            # reject: u is between f and upper_bound
            mesh_changed = True
        if mesh_changed:
            # Refine both hulls with the newly evaluated point.
            S = sorted([*S, x])
            fS = tuple(logpdf(s) for s in S)
            lower_hull, upper_hull = compute_hulls(S=S, fS=fS, domain=domain)
    return samples
def is_all_field_none(self):
    """Check whether every field of this object is unset.

    :rtype: bool
    """
    fields = (
        self._id_,
        self._description,
        self._ean_code,
        self._avatar_attachment,
        self._tab_attachment,
        self._quantity,
        self._amount,
    )
    return all(field is None for field in fields)
def delete(self, ids):
    """Method to delete environments vip by their id's.

    :param ids: Identifiers of environments vip
    :return: None
    """
    # Expand the ids into the bulk-delete URL (e.g. .../environment-vip/1;2;3/).
    url = build_uri_with_ids('api/v3/environment-vip/%s/', ids)
    return super(ApiEnvironmentVip, self).delete(url)
def get(self, node_id):
    """Return the registry Entry for the given node ID.

    An entry whose timestamp is older than ``TIMEOUT`` is treated as gone
    offline: event handlers are notified and the entry is evicted, after
    which the lookup raises KeyError — the same error raised for a node ID
    that was never registered.

    Args:
        node_id: Returns an Entry instance for the given node ID.
                 If the requested node ID does not exist, throws KeyError.
    """
    entry = self._registry[node_id]
    if (entry.monotonic_timestamp + self.TIMEOUT) < time.monotonic():
        # Entry expired: notify listeners, evict, and fall through to the
        # lookup below, which now raises KeyError.
        self._call_event_handlers(self.UpdateEvent(entry, self.UpdateEvent.EVENT_ID_OFFLINE))
        del self._registry[node_id]
    return self._registry[node_id]
def lift_chart(df, col_true=None, col_pred=None, col_scores=None, pos_label=1):
    r"""Compute lift value, true positive rate (TPR) and threshold from predicted DataFrame.

    Note that this method will trigger the defined flow to execute.

    :param df: predicted data frame
    :type df: DataFrame
    :param pos_label: positive label
    :type pos_label: str
    :param col_true: true column
    :type col_true: str
    :param col_pred: predicted column, 'prediction_result' if absent.
    :type col_pred: str
    :param col_scores: score column, 'prediction_score' if absent.
    :type col_scores: str
    :return: lift value, true positive rate and threshold, in numpy array format.

    :Example:

    >>> import matplotlib.pyplot as plt
    >>> depth, lift, thresh = lift_chart(predicted)
    >>> plt.plot(depth, lift)
    """
    if not col_pred:
        col_pred = get_field_name_by_role(df, FieldRole.PREDICTED_CLASS)
    if not col_scores:
        col_scores = get_field_name_by_role(df, FieldRole.PREDICTED_SCORE)
    thresh, tp, fn, tn, fp = _run_roc_node(df, pos_label, col_true, col_pred, col_scores)
    # depth: fraction of samples predicted positive at each threshold
    depth = (tp + fp) * 1.0 / (tp + fp + tn + fn)
    tpr = tp * 1.0 / (tp + fn)
    # lift: how much better positives are captured than by random selection
    lift = tpr / depth
    lift_result = namedtuple('LiftResult', 'depth lift thresh')
    return lift_result(depth=depth, lift=lift, thresh=thresh)
def quoted_or_list(items: List[str]) -> Optional[str]:
    """Given [A, B, C] return "'A', 'B', or 'C'".

    Note: We use single quotes here, since these are also used by repr().
    """
    quoted_items = [f"'{item}'" for item in items]
    return or_list(quoted_items)
def autofix(W, copy=True):
    '''
    Fix a bunch of common problems. More specifically, remove Inf and NaN,
    ensure exact binariness and symmetry (i.e. remove floating point
    instability), and zero diagonal.

    Parameters
    ----------
    W : np.ndarray
        weighted connectivity matrix
    copy : bool
        if True, returns a copy of the matrix. Otherwise, modifies the matrix
        in place. Default value = True.

    Returns
    -------
    W : np.ndarray
        connectivity matrix with fixes applied
    '''
    if copy:
        W = W.copy()
    # zero diagonal
    np.fill_diagonal(W, 0)
    # remove np.inf and np.nan
    # (bug fix: combine the boolean masks directly; the previous code applied
    # np.logical_or to np.where() index tuples, producing nonsense indices)
    W[np.logical_or(np.isinf(W), np.isnan(W))] = 0
    # ensure exact binariness: if every entry is ~0 or ~1, round away noise
    u = np.unique(W)
    if np.all(np.logical_or(np.abs(u) < 1e-8, np.abs(u - 1) < 1e-8)):
        # bug fix: the keyword is 'decimals', not 'decimal' (raised TypeError)
        W = np.around(W, decimals=5)
    # ensure exact symmetry
    if np.allclose(W, W.T):
        W = np.around(W, decimals=5)
    return W
def get_contents(self, path, ref=github.GithubObject.NotSet):
    """:calls: `GET /repos/:owner/:repo/contents/:path <http://developer.github.com/v3/repos/contents>`_

    :param path: string
    :param ref: string
    :rtype: :class:`github.ContentFile.ContentFile`
    """
    # Thin alias kept for backwards compatibility with get_file_contents.
    return self.get_file_contents(path, ref)
def send_script_async(self, conn_id, data, progress_callback, callback):
    """Asynchronously send a script to this IOTile device

    Args:
        conn_id (int): A unique identifer that will refer to this connection
        data (string): the script to send to the device
        progress_callback (callable): A function to be called with status on our progress, called as:
            progress_callback(done_count, total_count)
        callback (callable): A callback for when we have finished sending the script. The callback will be called as
            callback(connection_id, adapter_id, success, failure_reason)
            'connection_id': the connection id
            'adapter_id': this adapter's id
            'success': a bool indicating whether we received a response to our attempted RPC
            'failure_reason': a string with the reason for the failure if success == False
    """
    try:
        context = self.conns.get_context(conn_id)
    except ArgumentError:
        # Unknown connection: report failure immediately through the callback.
        callback(conn_id, self.id, False, "Could not find connection information")
        return
    topics = context['topics']
    context['progress_callback'] = progress_callback
    self.conns.begin_operation(conn_id, 'script', callback, 60.0)
    # Ceiling-divide the payload length by the MTU to get the fragment count.
    chunks = 1
    if len(data) > self.mtu:
        chunks = len(data) // self.mtu
        if len(data) % self.mtu != 0:
            chunks += 1
    # Send the script out possibly in multiple chunks if it's larger than our
    # maximum transmit unit
    for i in range(0, chunks):
        start = i * self.mtu
        chunk = data[start:start + self.mtu]
        # Fragments are base64-encoded so they can travel in a JSON message.
        encoded = base64.standard_b64encode(chunk)
        script_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'send_script', 'script': encoded, 'fragment_count': chunks, 'fragment_index': i}
        self.client.publish(topics.action, script_message)
def all_dbs(self):
    """Retrieves a list of all database names for the current client.

    :returns: List of database names for the client
    """
    resp = self.r_session.get('/'.join((self.server_url, '_all_dbs')))
    # Surface any HTTP error as an exception before decoding.
    resp.raise_for_status()
    return response_to_json_dict(resp)
def get_match_history(start_at_match_id=None, player_name=None, hero_id=None, skill=0, date_min=None, date_max=None, account_id=None, league_id=None, matches_requested=None, game_mode=None, min_players=None, tournament_games_only=None, **kwargs):
    """List of most recent 25 matches before start_at_match_id"""
    # Collect all filter arguments into the request parameter mapping.
    params = dict(
        start_at_match_id=start_at_match_id,
        player_name=player_name,
        hero_id=hero_id,
        skill=skill,
        date_min=date_min,
        date_max=date_max,
        account_id=account_id,
        league_id=league_id,
        matches_requested=matches_requested,
        game_mode=game_mode,
        min_players=min_players,
        tournament_games_only=tournament_games_only,
    )
    return make_request("GetMatchHistory", params, **kwargs)
def _VerifyMethodCall(self):
    """Verify the called method is expected.

    This can be an ordered method, or part of an unordered set.

    Returns:
        The expected mock method.

    Raises:
        UnexpectedMethodCall if the method called was not expected.
    """
    expected = self._PopNextMethod()
    # Loop here, because we might have a MethodGroup followed by another
    # group.
    while isinstance(expected, MethodGroup):
        expected, method = expected.MethodCalled(self)
        if method is not None:
            return method
    # This is a mock method, so just check equality.
    if expected != self:
        raise UnexpectedMethodCallError(self, expected)
    return expected
def check_ndk_api(ndk_api, android_api):
    """Warn if the user's NDK API level is too high or low.

    :param ndk_api: minimum API level supported by the NDK in use
    :param android_api: Android API level targeted by the build
    :raises BuildInterruptingException: if ndk_api exceeds android_api, which
        can never work since ndk_api is a *minimum* supported level
    """
    if ndk_api > android_api:
        raise BuildInterruptingException(
            'Target NDK API is {}, higher than the target Android API {}.'.format(ndk_api, android_api),
            instructions=('The NDK API is a minimum supported API number and must be lower '
                          'than the target Android API'))
    if ndk_api < MIN_NDK_API:
        # A too-old NDK API only warns; the build is allowed to proceed.
        warning(OLD_NDK_API_MESSAGE)
def delete(self):
    """If a dynamic version, delete it the standard way and remove it from the
    inventory, else delete all dynamic versions."""
    if self.dynamic_version_of is None:
        # This is the "template" field: cascade deletion to every dynamic
        # version instead of deleting the template itself.
        self._delete_dynamic_versions()
    else:
        super(DynamicFieldMixin, self).delete()
        # Unregister this dynamic part from the inventory set.
        self._inventory.srem(self.dynamic_part)
def grid(children=[], sizing_mode=None, nrows=None, ncols=None):
    """Conveniently create a grid of layoutable objects.

    Grids are created by using ``GridBox`` model. This gives the most control
    over the layout of a grid, but is also tedious and may result in unreadable
    code in practical applications. ``grid()`` function remedies this by
    reducing the level of control, but in turn providing a more convenient API.

    Supported patterns:

    1. Nested lists of layoutable objects. Assumes the top-level list
       represents a column and alternates between rows and columns in
       subsequent nesting levels. One can use ``None`` for padding purpose.

       >>> grid([p1, [[p2, p3], p4]])
       GridBox(children=[
           (p1, 0, 0, 1, 2),
           (p2, 1, 0, 1, 1),
           (p3, 2, 0, 1, 1),
           (p4, 1, 1, 2, 1),
       ])

    2. Nested ``Row`` and ``Column`` instances. Similar to the first pattern,
       just instead of using nested lists, it uses nested ``Row`` and
       ``Column`` models. This can be much more readable that the former.
       Note, however, that only models that don't have ``sizing_mode`` set
       are used.

       >>> grid(column(p1, row(column(p2, p3), p4)))
       GridBox(children=[
           (p1, 0, 0, 1, 2),
           (p2, 1, 0, 1, 1),
           (p3, 2, 0, 1, 1),
           (p4, 1, 1, 2, 1),
       ])

    3. Flat list of layoutable objects. This requires ``nrows`` and/or
       ``ncols`` to be set. The input list will be rearranged into a 2D array
       accordingly. One can use ``None`` for padding purpose.

       >>> grid([p1, p2, p3, p4], ncols=2)
       GridBox(children=[
           (p1, 0, 0, 1, 1),
           (p2, 0, 1, 1, 1),
           (p3, 1, 0, 1, 1),
           (p4, 1, 1, 1, 1),
       ])
    """
    # NOTE(review): mutable default argument ``children=[]``. It is never
    # mutated in this function, but ``children=None`` would be safer.
    # Lightweight internal tags for "this level is a row/column".
    row = namedtuple("row", ["children"])
    col = namedtuple("col", ["children"])

    def flatten(layout):
        # Convert the nested row/col structure into a flat GridBox. Row/col
        # spans are reconciled by scaling each child's grid to the LCM of the
        # sibling grids, so differently-sized children still align.
        Item = namedtuple("Item", ["layout", "r0", "c0", "r1", "c1"])
        Grid = namedtuple("Grid", ["nrows", "ncols", "items"])

        def gcd(a, b):
            a, b = abs(a), abs(b)
            while b != 0:
                a, b = b, a % b
            return a

        def lcm(a, *rest):
            for b in rest:
                a = (a * b) // gcd(a, b)
            return a

        nonempty = lambda child: child.nrows != 0 and child.ncols != 0

        def _flatten(layout):
            if isinstance(layout, row):
                children = list(filter(nonempty, map(_flatten, layout.children)))
                if not children:
                    return Grid(0, 0, [])
                # Rows: heights are unified to the LCM, widths are summed.
                nrows = lcm(*[child.nrows for child in children])
                ncols = sum([child.ncols for child in children])
                items = []
                offset = 0
                for child in children:
                    factor = nrows // child.nrows
                    for (layout, r0, c0, r1, c1) in child.items:
                        items.append((layout, factor * r0, c0 + offset, factor * r1, c1 + offset))
                    offset += child.ncols
                return Grid(nrows, ncols, items)
            elif isinstance(layout, col):
                children = list(filter(nonempty, map(_flatten, layout.children)))
                if not children:
                    return Grid(0, 0, [])
                # Columns: heights are summed, widths are unified to the LCM.
                nrows = sum([child.nrows for child in children])
                ncols = lcm(*[child.ncols for child in children])
                items = []
                offset = 0
                for child in children:
                    factor = ncols // child.ncols
                    for (layout, r0, c0, r1, c1) in child.items:
                        items.append((layout, r0 + offset, factor * c0, r1 + offset, factor * c1))
                    offset += child.nrows
                return Grid(nrows, ncols, items)
            else:
                # Leaf: a single layoutable occupying one cell.
                return Grid(1, 1, [Item(layout, 0, 0, 1, 1)])

        grid = _flatten(layout)

        children = []
        for (layout, r0, c0, r1, c1) in grid.items:
            # None leaves (padding) are dropped; spans are (row, col, h, w).
            if layout is not None:
                children.append((layout, r0, c0, r1 - r0, c1 - c0))

        return GridBox(children=children)

    if isinstance(children, list):
        if nrows is not None or ncols is not None:
            # Pattern 3: flat list reshaped into nrows x ncols.
            N = len(children)
            if ncols is None:
                ncols = math.ceil(N / nrows)
            layout = col([row(children[i:i + ncols]) for i in range(0, N, ncols)])
        else:
            # Pattern 1: nested lists, alternating column/row per level.
            def traverse(children, level=0):
                if isinstance(children, list):
                    container = col if level % 2 == 0 else row
                    return container([traverse(child, level + 1) for child in children])
                else:
                    return children
            layout = traverse(children)
    elif isinstance(children, LayoutDOM):
        # Pattern 2: nested Row/Column models; only auto-sized, zero-spacing
        # boxes are decomposed, others are kept as opaque leaves.
        def is_usable(child):
            return _has_auto_sizing(child) and child.spacing == 0
        def traverse(item, top_level=False):
            if isinstance(item, Box) and (top_level or is_usable(item)):
                container = col if isinstance(item, Column) else row
                return container(list(map(traverse, item.children)))
            else:
                return item
        layout = traverse(children, top_level=True)
    elif isinstance(children, string_types):
        raise NotImplementedError
    else:
        raise ValueError("expected a list, string or model")

    grid = flatten(layout)

    if sizing_mode is not None:
        grid.sizing_mode = sizing_mode
        # Propagate the sizing mode to children that don't set their own.
        for child in grid.children:
            layout = child[0]
            if _has_auto_sizing(layout):
                layout.sizing_mode = sizing_mode

    return grid
def save_xml(self, doc, element):
    '''Save this message_sending object into an xml.dom.Element object.

    One ``targets`` child element is created per target condition, typed as
    ``rtsExt:condition_ext``, and each condition serializes itself into it.
    '''
    for cond in self._targets:
        new_element = doc.createElementNS(RTS_NS, RTS_NS_S + 'targets')
        new_element.setAttributeNS(XSI_NS, XSI_NS_S + 'type', 'rtsExt:condition_ext')
        cond.save_xml(doc, new_element)
        element.appendChild(new_element)
def get_symbol(num_classes=20, nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Single-shot multi-box detection with VGG 16 layers ConvNet

    This is a modified version, with fc6/fc7 layers replaced by conv layers
    And the network is slightly smaller than original VGG 16 network
    This is the detection network

    Parameters
    ----------
    num_classes : int
        number of object classes not including background
    nms_thresh : float
        threshold of overlap for non-maximum suppression
    force_suppress : boolean
        whether suppress different class objects
    nms_topk : int
        apply NMS to top K detections

    Returns
    -------
    mx.Symbol
    """
    # Reuse the training symbol and tap its internal prediction outputs.
    net = get_symbol_train(num_classes)
    cls_preds = net.get_internals()["multibox_cls_pred_output"]
    loc_preds = net.get_internals()["multibox_loc_pred_output"]
    anchor_boxes = net.get_internals()["multibox_anchors_output"]
    cls_prob = mx.symbol.softmax(data=cls_preds, axis=1, name='cls_prob')
    # Decode boxes and apply NMS in one fused detection op.
    out = mx.symbol.contrib.MultiBoxDetection(*[cls_prob, loc_preds, anchor_boxes], name="detection", nms_threshold=nms_thresh, force_suppress=force_suppress, variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk)
    return out
def _iterate_fields_cond ( self , pkt , val , use_val ) :
"""Internal function used by _ find _ fld _ pkt & _ find _ fld _ pkt _ val""" | # Iterate through the fields
for fld , cond in self . flds :
if isinstance ( cond , tuple ) :
if use_val :
if cond [ 1 ] ( pkt , val ) :
return fld
continue
else :
cond = cond [ 0 ]
if cond ( pkt ) :
return fld
return self . dflt |
def _filter_commands(ctx, commands=None):
    """Return list of used commands."""
    lookup = getattr(ctx.command, 'commands', {})
    if not lookup and isinstance(ctx.command, click.MultiCommand):
        # Multi-commands may populate their subcommands lazily.
        lookup = _get_lazyload_commands(ctx.command)
    if commands is None:
        # No explicit selection: every known command, sorted by name.
        return sorted(lookup.values(), key=lambda item: item.name)
    wanted = (name.strip() for name in commands.split(','))
    return [lookup[name] for name in wanted if name in lookup]
def unlink(self, req, parent, name):
    """Remove a file.

    This filesystem is read-only, so every unlink request is answered
    with EROFS.

    Valid replies:
        reply_err
    """
    self.reply_err(req, errno.EROFS)
def set_weights(self, weights_values: dict, ignore_missing=False):
    """Sets the weights values of the network.

    :param weights_values: dictionary with weights for each layer, keyed by
        layer name, then by parameter name
    :param ignore_missing: if True, parameters with no matching variable in
        the graph are silently skipped; otherwise the ValueError is re-raised
    """
    network_name = self.__class__.__name__.lower()
    with tf.variable_scope(network_name):
        for layer_name in weights_values:
            # reuse=True: look up variables created when the network was built.
            with tf.variable_scope(layer_name, reuse=True):
                for param_name, data in weights_values[layer_name].items():
                    try:
                        var = tf.get_variable(param_name)
                        self._session.run(var.assign(data))
                    except ValueError:
                        # Variable does not exist in this graph.
                        if not ignore_missing:
                            raise
def merge_subtokens(doc, label="subtok"):
    """Merge subtokens into a single token.

    doc (Doc): The Doc object.
    label (unicode): The subtoken dependency label.
    RETURNS (Doc): The Doc object with merged subtokens.

    DOCS: https://spacy.io/api/pipeline-functions#merge_subtokens
    """
    # Match one-or-more consecutive tokens carrying the subtoken dep label.
    merger = Matcher(doc.vocab)
    merger.add("SUBTOK", None, [{"DEP": label, "op": "+"}])
    matches = merger(doc)
    # Extend each match by one token so the head token is merged in as well.
    spans = [doc[start:end + 1] for _, start, end in matches]
    with doc.retokenize() as retokenizer:
        for span in spans:
            retokenizer.merge(span)
    return doc
def save(filename, data, format=None, **kwargs):
    '''save(filename, data) writes the given data to the given filename then yields that filename.
    save(filename, data, format) specifies that the given format should be used; this should be the
    name of the exporter (though a file extension that is recognized also will work).

    Additionally, functions located in save.<format> may be used; so, for example, the following
    are equivalent calls:
      save(filename, image, 'nifti')
      save.nifti(filename, image)
    In fact, the save.nifti function is just the nifti exporter, so help(save.nifti) will also
    yield documentation for the nifti exporter.

    Keyword options may be passed to save; these must match those accepted by the given export
    function.
    '''
    # Expand ~ and environment variables before handing off to an exporter.
    filename = os.path.expanduser(os.path.expandvars(filename))
    if format is None:
        format = guess_export_format(filename, data, **kwargs)
        if format is None:
            raise ValueError('Could not deduce export format for file %s' % filename)
    else:
        format = format.lower()
        if format not in exporters:
            # it might be an extension
            fmt = next((k for (k, (_, es, _)) in six.iteritems(exporters) if format in es), None)
            if fmt is None:
                # okay, no idea what it is
                raise ValueError('Format \'%s\' not recognized by neuropythy' % format)
            format = fmt
    (f, _, _) = exporters[format]
    return f(filename, data, **kwargs)
def system_call(command):
    """Run a command through the shell and return its standard output.

    Would be better to use subprocess.check_output, but this works on 2.6,
    which is still the system Python on CentOS 7.

    :param command: shell command line to execute
    :return: captured standard output (bytes on Python 3, str on Python 2)
    """
    p = subprocess.Popen([command], stdout=subprocess.PIPE, shell=True)
    # communicate() reads stdout to EOF *and* waits for the child, so the
    # process is reaped instead of being left behind as a zombie.
    stdout, _ = p.communicate()
    return stdout
def _make_tasks_unique ( tasks ) :
"""If some tasks of the workflow are the same they are deep copied .""" | unique_tasks = [ ]
prev_tasks = set ( )
for task in tasks :
if task in prev_tasks :
task = copy . deepcopy ( task )
unique_tasks . append ( task )
return unique_tasks |
def get_file_hash(storage, path):
    """Create md5 hash from file contents.

    If settings indicate the file will be served gzipped, the hash of the
    gzipped content is computed (and cached) instead, so it matches the
    ETag the remote store will report. The result is returned quoted, in
    ETag form.
    """
    contents = storage.open(path).read()
    file_hash = hashlib.md5(contents).hexdigest()
    # Check if content should be gzipped and hash gzipped content
    content_type = mimetypes.guess_type(path)[0] or 'application/octet-stream'
    if settings.is_gzipped and content_type in settings.gzip_content_types:
        cache_key = get_cache_key('gzip_hash_%s' % file_hash)
        file_hash = cache.get(cache_key, False)
        if file_hash is False:
            # mtime=0.0 keeps the gzip output (and thus the hash)
            # deterministic across runs.
            buffer = BytesIO()
            zf = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=buffer, mtime=0.0)
            zf.write(force_bytes(contents))
            zf.close()
            file_hash = hashlib.md5(buffer.getvalue()).hexdigest()
            cache.set(cache_key, file_hash)
    return '"%s"' % file_hash
def port(self, container, private_port):
    """Lookup the public-facing port that is NAT-ed to ``private_port``.
    Identical to the ``docker port`` command.

    Args:
        container (str): The container to look up
        private_port (int): The private port to inspect

    Returns:
        (list of dict): The mapping for the host ports

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.

    Example:
        .. code-block:: bash

            $ docker run -d -p 80:80 ubuntu:14.04 /bin/sleep 30
            7174d6347063a83f412fad6124c99cffd25ffe1a0807eb4b7f9cec76ac8cb43b

        .. code-block:: python

            >>> cli.port('7174d6347063', 80)
            [{'HostIp': '0.0.0.0', 'HostPort': '80'}]
    """
    res = self._get(self._url("/containers/{0}/json", container))
    self._raise_for_status(res)
    json_ = res.json()
    private_port = str(private_port)
    h_ports = None
    # Port settings is None when the container is running with
    # network_mode=host.
    port_settings = json_.get('NetworkSettings', {}).get('Ports')
    if port_settings is None:
        return None
    if '/' in private_port:
        # Caller already specified the protocol, e.g. "80/tcp".
        return port_settings.get(private_port)
    # No protocol given: probe each protocol until a mapping is found.
    for protocol in ['tcp', 'udp', 'sctp']:
        h_ports = port_settings.get(private_port + '/' + protocol)
        if h_ports:
            break
    return h_ports
def list_kadastrale_afdelingen(self):
    '''List all `kadastrale afdelingen` in Flanders.

    :rtype: A :class:`list` of :class:`Afdeling`.
    '''
    def load_all():
        # Flatten the afdelingen of every gemeente into a single list.
        afdelingen = []
        for gemeente in self.list_gemeenten():
            afdelingen += self.list_kadastrale_afdelingen_by_gemeente(gemeente)
        return afdelingen

    permanent_cache = self.caches['permanent']
    if permanent_cache.is_configured:
        return permanent_cache.get_or_create('list_afdelingen_rest', load_all)
    return load_all()
def get_remote(self, key, default=None, scope=None):
    """Get data from the remote end(s) of this :class:`Conversation` scope.

    In Python, this is equivalent to::

        relation.conversation(scope).get_remote(key, default)

    See :meth:`conversation` and :meth:`Conversation.get_remote`.
    """
    conversation = self.conversation(scope)
    return conversation.get_remote(key, default)
def generic_visit(self, node):
    """Visit a node for which no type-specific visitor exists.

    Closes the callable (if any) associated with *node* before delegating
    to the default AST traversal.
    """
    # [[[cog
    # cog.out("print(pcolor('Enter generic visitor', 'magenta'))")
    # [[[end]]]
    # A generic visitor that potentially closes callables is needed to
    # close enclosed callables that are not at the end of the enclosing
    # callable, otherwise the ending line of the enclosed callable would
    # be the ending line of the enclosing callable, which would be
    # incorrect
    self._close_callable(node)
    super(_AstTreeScanner, self).generic_visit(node)
def all_slots(self, cls: CLASS_OR_CLASSNAME, *, cls_slots_first: bool = False) -> List[SlotDefinition]:
    """Return all slots that are part of the class definition. This includes all is_a, mixin and apply_to slots
    but does NOT include slot_usage targets. If class B has a slot_usage entry for slot "s", only the slot
    definition for the redefined slot will be included, not its base. Slots are added in the order they appear
    in classes, with recursive is_a's being added first followed by mixins and finally apply_tos

    @param cls: class definition or class definition name
    @param cls_slots_first: True means return class slots at the top of the list
    @return: ordered list of slots in the class with slot usages removed
    """
    # NOTE(review): apply_to classes are not merged anywhere below despite
    # the docstring mentioning them — confirm whether that is intentional.
    def merge_definitions(cls_name: Optional[ClassDefinitionName]) -> None:
        # Recursively pull in the slots of the named ancestor/mixin class,
        # skipping any slot whose aliased name was already collected.
        if cls_name:
            for slot in self.all_slots(cls_name):
                aliased_name = self.aliased_slot_name(slot)
                if aliased_name not in known_slots:
                    known_slots.add(aliased_name)
                    rval.append(slot)

    if not isinstance(cls, ClassDefinition):
        cls = self.schema.classes[cls]

    # Seed the "seen" set with this class's own slot names so inherited
    # duplicates are filtered out in both orderings below.
    known_slots: Set[str] = self.aliased_slot_names(cls.slots)
    rval: List[SlotDefinition] = []
    if cls_slots_first:
        rval += self.cls_slots(cls)
        # Class slots lead the list: merge mixins before is_a.
        for mixin in cls.mixins:
            merge_definitions(mixin)
        merge_definitions(cls.is_a)
    else:
        # Default order: is_a ancestry first, then mixins, then own slots.
        merge_definitions(cls.is_a)
        for mixin in cls.mixins:
            merge_definitions(mixin)
        rval += self.cls_slots(cls)
    return rval
def merge_options_to_dict(options):
    """Merge a collection of Option objects and/or partial option
    dictionaries into one dictionary.
    """
    merged = {}
    for entry in options:
        # Option objects are normalized to a single-key dict before merging.
        as_dict = entry if isinstance(entry, dict) else {entry.key: entry.kwargs}
        merged = merge_option_dicts(merged, as_dict)
    return merged
def _add_ticks(xticks=True, yticks=True):
    """NAME:
       _add_ticks
    PURPOSE:
       add minor axis ticks to a plot
    INPUT:
       (none; works on the current axes)
    OUTPUT:
       (none; works on the current axes)
    HISTORY:
       2009-12-23 - Written - Bovy (NYU)
    """
    axes = pyplot.gca()
    for enabled, axis in ((xticks, axes.xaxis), (yticks, axes.yaxis)):
        if not enabled:
            continue
        majors = axis.get_majorticklocs()
        step = majors[1] - majors[0]
        # Place five minor intervals inside each major interval.
        axis.set_minor_locator(ticker.MultipleLocator(step / 5.))
def shannon_entropy(data, iterator):
    """Return the Shannon entropy (in bits) of *data* over an alphabet.

    Borrowed from
    http://blog.dkbza.org/2007/05/scanning-data-for-entropy-anomalies.html

    Args:
        data: Sequence (typically a string) to analyse.
        iterator: Alphabet of symbols to consider; the frequency of each
            symbol in *data* contributes to the entropy.

    Returns:
        float: Entropy in bits; 0 for empty input.
    """
    if not data:
        return 0
    total = len(data)  # hoisted: invariant across the loop
    entropy = 0
    for symbol in iterator:
        p_x = float(data.count(symbol)) / total
        if p_x > 0:
            entropy += - p_x * math.log(p_x, 2)
    return entropy
def convert_weights(wgts: Weights, stoi_wgts: Dict[str, int], itos_new: Collection[str]) -> Weights:
    "Convert the model `wgts` to go with a new vocabulary."
    # Tokens absent from the old vocab get the mean embedding (and bias).
    dec_bias, enc_wgts = wgts.get('1.decoder.bias', None), wgts['0.encoder.weight']
    wgts_m = enc_wgts.mean(0)
    if dec_bias is not None:
        bias_m = dec_bias.mean(0)
    # new_zeros already returns zero-filled tensors; the original chained a
    # redundant .zero_() on top of it.
    new_w = enc_wgts.new_zeros((len(itos_new), enc_wgts.size(1)))
    if dec_bias is not None:
        new_b = dec_bias.new_zeros((len(itos_new),))
    for i, w in enumerate(itos_new):
        # Single lookup instead of `stoi_wgts[w] if w in stoi_wgts else -1`.
        r = stoi_wgts.get(w, -1)
        new_w[i] = enc_wgts[r] if r >= 0 else wgts_m
        if dec_bias is not None:
            new_b[i] = dec_bias[r] if r >= 0 else bias_m
    # Encoder, tied decoder (and its dropout copy) all share the new matrix.
    wgts['0.encoder.weight'] = new_w
    if '0.encoder_dp.emb.weight' in wgts:
        wgts['0.encoder_dp.emb.weight'] = new_w.clone()
    wgts['1.decoder.weight'] = new_w.clone()
    if dec_bias is not None:
        wgts['1.decoder.bias'] = new_b
    return wgts
def SetConsoleTextAttribute(stream_id, attrs):
    """Set the text attributes of the console behind *stream_id*."""
    # Resolve the win32 handle for the stream, then call straight through.
    return windll.kernel32.SetConsoleTextAttribute(handles[stream_id], attrs)
def initial_finall_mass_relation(self, marker='o', linestyle='--'):
    '''Plot the initial-to-final mass relation for all runs.

    For each run, the initial mass is read from the "mini" attribute and
    the final mass is taken at the last-but-one cycle as the mass
    coordinate where the H-1 abundance first exceeds 0.1 (scanning
    outward from index 0).

    :param marker: matplotlib marker style for the plotted points.
    :param linestyle: matplotlib line style connecting the points.
    '''
    final_m = []
    ini_m = []
    for i in range(len(self.runs_H5_surf)):
        sefiles = se(self.runs_H5_out[i])
        ini_m.append(sefiles.get("mini"))
        # Profiles at the last-but-one cycle of the run.
        h1 = sefiles.get(int(sefiles.se.cycles[-2]), 'H-1')
        mass = sefiles.get(int(sefiles.se.cycles[-2]), 'mass')
        idx = -1  # falls back to the outermost zone if no H-rich zone found
        for k in range(len(h1)):
            if h1[k] > 0.1:  # first zone where hydrogen dominates
                idx = k
                break
        final_m.append(mass[idx])
        # NOTE(review): label keeps only the metallicity of the last run
        # processed — presumably all runs share one Z; confirm.
        label = 'Z=' + str(sefiles.get('zini'))
    plt.plot(ini_m, final_m, label=label, marker=marker, linestyle=linestyle)
    plt.xlabel('$M_{Initial} [M_{\odot}]$', size=23)
    plt.ylabel('$M_{Final} [M_{\odot}]$', size=23)
def set_vars(env):
    """Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars

    MWCW_VERSIONS is set to a list of objects representing installed versions

    MWCW_VERSION is set to the version object that will be used for building.
    MWCW_VERSION can be set to a string during Environment
    construction to influence which version is chosen, otherwise
    the latest one from MWCW_VERSIONS is used.

    Returns true if at least one version is found, false otherwise
    """
    desired = env.get('MWCW_VERSION', '')
    # return right away if the variables are already set
    if isinstance(desired, MWVersion):
        return 1
    elif desired is None:
        # An explicit None disables CodeWarrior detection entirely.
        return 0
    versions = find_versions()
    version = None
    if desired:
        # Pick the installed version whose string form matches the request.
        for v in versions:
            if str(v) == desired:
                version = v
    elif versions:
        # No preference given: default to the newest installed version.
        version = versions[-1]
    env['MWCW_VERSIONS'] = versions
    env['MWCW_VERSION'] = version
    if version is None:
        return 0
    # Prepending dllpath after clpath leaves dllpath first on PATH.
    env.PrependENVPath('PATH', version.clpath)
    env.PrependENVPath('PATH', version.dllpath)
    ENV = env['ENV']
    ENV['CWFolder'] = version.path
    ENV['LM_LICENSE_FILE'] = version.license
    # CodeWarrior expects each include/lib entry prefixed with '+'.
    plus = lambda x: '+%s' % x
    ENV['MWCIncludes'] = os.pathsep.join(map(plus, version.includes))
    ENV['MWLibraries'] = os.pathsep.join(map(plus, version.libs))
    return 1
def conformPadding(cls, chars):
    """Normalize alternate padding notations to the formats in PAD_MAP.

    Input already expressed in a PAD_MAP format is returned unmodified.

    Example::
        '%04d' -> '#'

    Args:
        chars (str): input padding chars

    Returns:
        str: conformed padding chars

    Raises:
        ValueError: If chars contains invalid padding characters
    """
    if chars and chars[0] not in PAD_MAP:
        # Round-trip through the numeric padding width to canonicalize.
        return cls.getPaddingChars(cls.getPaddingNum(chars))
    return chars
def uncamel(name):
    """Convert a camelCase identifier to snake_case.

    The first character is lowercased as-is; every subsequent uppercase
    character becomes an underscore followed by its lowercase form.

    >>> uncamel('fooBar')
    'foo_bar'
    >>> uncamel('FooBar')
    'foo_bar'
    >>> uncamel('_fooBar')
    '_foo_bar'
    >>> uncamel('_FooBar')
    '__foo_bar'
    """
    # str.join avoids the quadratic cost of repeated string concatenation.
    return name[0].lower() + ''.join(
        '_' + ch.lower() if ch.isupper() else ch for ch in name[1:]
    )
def read_str(delim=',', *lines):
    """Parse already-read text *lines* into a list of records.

    Similar to read_csv, but consumes an in-memory sequence of lines::

        fd = open("foo", "r")
        data = chart_data.read_str(",", fd.readlines())
    """
    return [parse_line(line, delim) for line in lines]
def changes(self):
    """Dumber version of 'patch' method"""
    # Warn at the caller's frame; this accessor goes away in warlock v2.
    warnings.warn('Model.changes will be removed in warlock v2',
                  DeprecationWarning, stacklevel=2)
    return copy.deepcopy(self.__dict__['changes'])
def write_pascal_results(self, all_boxes):
    """Write per-class detection result files in the PASCAL devkit layout.

    Parameters:
        all_boxes : list
            per-image detection arrays of rows
            [class_index, confidence, x1, y1, x2, y2] (relative coords)

    Returns:
        None
    """
    for cls_ind, cls in enumerate(self.classes):
        print('Writing {} VOC results file'.format(cls))
        result_path = self.get_result_file_template().format(cls)
        with open(result_path, 'wt') as out:
            for im_ind, index in enumerate(self.image_set_index):
                dets = all_boxes[im_ind]
                if dets.shape[0] < 1:
                    continue
                h, w = self._get_imsize(self.image_path_from_index(im_ind))
                # the VOCdevkit expects 1-based indices
                for k in range(dets.shape[0]):
                    if int(dets[k, 0]) != cls_ind:
                        continue
                    out.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.format(
                        index, dets[k, 1],
                        int(dets[k, 2] * w) + 1, int(dets[k, 3] * h) + 1,
                        int(dets[k, 4] * w) + 1, int(dets[k, 5] * h) + 1))
def apci_contents(self, use_dict=None, as_class=dict):
    """Return the contents of an object as a dict.

    :param use_dict: existing mapping to update; a new ``as_class()`` is
        created when ``None``.
    :param as_class: mapping factory used when ``use_dict`` is ``None``.
    :returns: the mapping filled with this APCI's source/destination and
        debug-content attributes.
    """
    if _debug:
        APCI._debug("apci_contents use_dict=%r as_class=%r", use_dict, as_class)
    # make/extend the dictionary of content
    if use_dict is None:
        use_dict = as_class()
    # copy the source and destination to make it easier to search
    if self.pduSource:
        use_dict.__setitem__('source', str(self.pduSource))
    if self.pduDestination:
        use_dict.__setitem__('destination', str(self.pduDestination))
    # loop through the elements
    for attr in APCI._debug_contents:
        value = getattr(self, attr, None)
        if value is None:
            continue
        if attr == 'apduType':
            # Map the numeric type to its PDU class name for readability.
            mapped_value = apdu_types[self.apduType].__name__
        elif attr == 'apduService':
            # The service enumeration to use depends on the PDU type.
            if self.apduType in (ConfirmedRequestPDU.pduType, SimpleAckPDU.pduType, ComplexAckPDU.pduType):
                mapped_value = confirmed_request_types[self.apduService].__name__
            elif (self.apduType == UnconfirmedRequestPDU.pduType):
                mapped_value = unconfirmed_request_types[self.apduService].__name__
            elif (self.apduType == ErrorPDU.pduType):
                mapped_value = error_types[self.apduService].__name__
            # NOTE(review): when apduType matches none of these branches,
            # mapped_value keeps the value from a previous loop iteration
            # (or is unbound on the first) — confirm this is intended.
        else:
            mapped_value = value
        # save the mapped value
        use_dict.__setitem__(attr, mapped_value)
    # return what we built/updated
    return use_dict
def parse_nni_function(code):
    """Parse an `nni.function_choice` expression.

    code: annotation string

    Returns the AST node of the rewritten call together with a list of
    dumped representations of the candidate function expressions.
    """
    name, call = parse_annotation_function(code, 'function_choice')
    # Capture each candidate before the call is rewritten in place.
    funcs = [ast.dump(func, False) for func in call.args]
    convert_args_to_dict(call, with_lambda=True)
    # Store the choice's source text as the (first) keyword value.
    call.keywords[0].value = ast.Str(s=astor.to_source(name).strip())
    return call, funcs
def gen_hot_url(hot_index, page=1):
    """Build the URL of a hot-article category on the Sogou WeChat front page.

    Parameters
        hot_index: WechatSogouConst.hot_index
            category constant of the front-page hot articles
            (WechatSogouConst.hot_index.xxx)
        page: int
            page number (1-based)

    Returns
        str
            URL of the hot-article category page
    """
    assert hasattr(WechatSogouConst.hot_index, hot_index)
    assert isinstance(page, int) and page > 0
    # Mapping from category constant to the numeric slot used in the URL.
    index_urls = {WechatSogouConst.hot_index.hot: 0,  # hot
                  WechatSogouConst.hot_index.gaoxiao: 1,  # funny
                  WechatSogouConst.hot_index.health: 2,  # health
                  WechatSogouConst.hot_index.sifanghua: 3,  # private talk
                  WechatSogouConst.hot_index.gossip: 4,  # gossip
                  WechatSogouConst.hot_index.technology: 5,  # technology
                  WechatSogouConst.hot_index.finance: 6,  # finance
                  WechatSogouConst.hot_index.car: 7,  # cars
                  WechatSogouConst.hot_index.life: 8,  # life
                  WechatSogouConst.hot_index.fashion: 9,  # fashion
                  WechatSogouConst.hot_index.mummy: 10,  # parenting
                  WechatSogouConst.hot_index.travel: 11,  # travel
                  WechatSogouConst.hot_index.job: 12,  # career
                  WechatSogouConst.hot_index.food: 13,  # food
                  WechatSogouConst.hot_index.history: 14,  # history
                  WechatSogouConst.hot_index.study: 15,  # education
                  WechatSogouConst.hot_index.constellation: 16,  # horoscope
                  WechatSogouConst.hot_index.sport: 17,  # sports
                  WechatSogouConst.hot_index.military: 18,  # military
                  WechatSogouConst.hot_index.game: 19,  # games
                  WechatSogouConst.hot_index.pet: 20,  # pets
                  }
    # Pages are 1-based for callers but 0-based in the URL.
    return 'http://weixin.sogou.com/wapindex/wap/0612/wap_{}/{}.html'.format(index_urls[hot_index], page - 1)
def get_objective_objective_bank_session(self, proxy):
    """Gets the session for retrieving objective to objective bank mappings.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``ObjectiveObjectiveBankSession``
    :rtype: ``osid.learning.ObjectiveObjectiveBankSession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_objective_objective_bank()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_objective_objective_bank()`` is ``true``.*
    """
    if not self.supports_objective_objective_bank():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    # Conversion happens outside the try so its errors propagate unchanged.
    converted_proxy = self._convert_proxy(proxy)
    try:
        return sessions.ObjectiveObjectiveBankSession(
            proxy=converted_proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
def configure_widget_for_editing(self, widget):
    """Prepare *widget* for use inside the editor.

    Hooks selection handling, enables HTML5 drag-and-drop and assigns a
    unique tab index. Widgets lacking an 'editor_varname' attribute are
    left untouched.
    """
    if not 'editor_varname' in widget.attributes:
        return
    widget.onclick.do(self.on_widget_selection)
    # setup of the on_dropped function of the widget in order to manage the dragNdrop
    widget.__class__.on_dropped = on_dropped
    # drag properties
    # widget.style['resize'] = 'both'
    widget.style['overflow'] = 'auto'
    widget.attributes['draggable'] = 'true'
    widget.attributes['tabindex'] = str(self.tabindex)
    # if not 'position' in widget.style.keys():
    #     widget.style['position'] = 'absolute'
    # if not 'left' in widget.style.keys():
    #     widget.style['left'] = '1px'
    # if not 'top' in widget.style.keys():
    #     widget.style['top'] = '1px'
    self.tabindex += 1
def suck_out_editions(reporters):
    """Build a dictionary mapping edition keys to their root reporter name.

    The dictionary takes the form of:
        {
            "A.": "A.",
            "A.2d": "A.",
            "A.3d": "A.",
            "A.D.": "A.D.",
            ...
        }

    In other words, this lets you go from an edition match to its parent
    key. When the same edition key appears under several reporters, the
    first reporter encountered wins.
    """
    editions_out = {}
    for reporter_key, data_list in reporters.items():  # For each reporter key...
        for data in data_list:  # For each book it maps to...
            for edition_key in data["editions"]:
                # setdefault keeps the first mapping — same first-wins
                # semantics as the old try/except-KeyError, without the
                # exception machinery or the unused .items() values.
                editions_out.setdefault(edition_key, reporter_key)
    return editions_out
def key_pair_name(i, region, project_id, ssh_user):
    """Returns the ith default gcp_key_pair_name.

    Args:
        i: index of the key pair; part of the name so successive indices
            produce distinct names.
        region: GCP region string.
        project_id: GCP project identifier.
        ssh_user: SSH user name embedded in the key name.
    """
    # Bug fix: the old format string had only four placeholders, so
    # str.format silently dropped ``i`` and every index produced the same
    # name, contradicting the "ith" contract above.
    key_name = "{}_gcp_{}_{}_{}_{}".format(RAY, region, project_id, ssh_user, i)
    return key_name
def _op_generic_StoU_saturation(self, value, min_value, max_value):  # pylint:disable=no-self-use
    """Return the unsigned saturated BV for a signed BV.

    min_value and max_value are expected to be unsigned bounds.
    """
    # Clamp the low side first, then wrap with the high-side clamp; this
    # builds the same symbolic If-expression tree as a single nested call.
    low_clamped = claripy.If(claripy.SLT(value, min_value), min_value, value)
    return claripy.If(claripy.SGT(value, max_value), max_value, low_clamped)
def extend(self, key, values, *, section=DataStoreDocumentSection.Data):
    """Extend a list in the data store with the elements of ``values``.

    Args:
        key (str): Key pointing to the list that should be extended.
            Supports MongoDB's dot notation for nested fields.
        values (list): Elements that are appended to the stored list.
        section (DataStoreDocumentSection): Section the list lives in.

    Returns:
        bool: ``True`` when exactly one document was modified, otherwise
        ``False`` (including when ``values`` is not a list).
    """
    if not isinstance(values, list):
        return False
    target_field = '.'.join([section, key])
    push_spec = {
        "$push": {target_field: {"$each": self._encode_value(values)}},
        "$currentDate": {"lastModified": True},
    }
    result = self._collection.update_one(
        {"_id": ObjectId(self._workflow_id)}, push_spec)
    return result.modified_count == 1
def get_connections(self, data=True):
    """Return connections from all the agents in the environment.

    :param bool data:
        If ``True`` return also the dictionary associated with each
        connection

    :returns:
        A list of ``(addr, connections)``-tuples, where ``connections``
        is a list of addresses the agent in ``addr`` is connected to. If
        ``data`` is ``True``, each entry of that list is an
        ``(nb_addr, data)``-pair with ``data`` a dictionary.
    :rtype: dict

    .. note::
        By design, a potential manager agent is excluded from the
        returned list.
    """
    return [(agent.addr, agent.get_connections(data=data))
            for agent in self.get_agents(addr=False)]
def decrypt(keyfile_json, password):
    '''Decrypt a private key that was encrypted using an Ethereum client or
    :meth:`~Account.encrypt`.

    :param keyfile_json: The encrypted key
    :type keyfile_json: dict or str
    :param str password: The password that was used to encrypt the key
    :returns: the raw private key
    :rtype: ~hexbytes.main.HexBytes
    '''
    # Accept either an already-parsed keyfile dict or its JSON text.
    if is_dict(keyfile_json):
        keyfile = keyfile_json
    elif isinstance(keyfile_json, str):
        keyfile = json.loads(keyfile_json)
    else:
        raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
    password_bytes = text_if_str(to_bytes, password)
    return HexBytes(decode_keyfile_json(keyfile, password_bytes))
def removeComments(self, comment=None):
    """Removes line comments from the current selection in the editor. If no
    comment string is supplied, then the comment from the language will be used.

    :param comment | <str> || None

    :return <bool> | success
    """
    if (not comment):
        lang = self.language()
        if (lang):
            comment = lang.lineComment()
    if (not comment):
        return False
    # Remember the selection and cursor so they can be restored afterwards.
    startline, startcol, endline, endcol = self.getSelection()
    len_comment = len(comment)
    line, col = self.getCursorPosition()
    for lineno in range(startline, endline + 1):
        # Only strip text at column 0 that exactly matches the comment prefix.
        self.setSelection(lineno, 0, lineno, len_comment)
        if (self.selectedText() == comment):
            self.removeSelectedText()
    self.setSelection(startline, startcol, endline, endcol)
    self.setCursorPosition(line, col)
    return True
def _send_to_all_rooms ( self , message ) :
"""Send a message to all connected rooms""" | for room in self . _rooms . values ( ) :
room . send_message ( message ) |
def dist0(n, method='lin_square'):
    """Compute standard cost matrices of size (n, n) for OT problems

    Parameters
    ----------
    n : int
        size of the cost matrix
    method : str, optional
        Type of loss matrix chosen from:

        * 'lin_square' : linear sampling between 0 and n-1, quadratic loss

    Returns
    -------
    M : np.array (n1, n2)
        distance matrix computed with given metric
    """
    # Unknown methods fall through to the original default result of 0.
    if method != 'lin_square':
        return 0
    samples = np.arange(n, dtype=np.float64).reshape((n, 1))
    return dist(samples, samples)