src
stringlengths 75
47.4k
| cfg
stringlengths 32
2.5k
| ast
stringlengths 78
54.9k
|
---|---|---|
def _set_min_value(self, min_value):
"""Sets current minimum allowed value"""
# Check that the min value can be transformed if a transformation is present
if self._transformation is not None:
if min_value is not None:
try:
_ = self._transformation.forward(min_value)
except FloatingPointError:
raise ValueError("The provided minimum %s cannot be transformed with the transformation %s which "
"is defined for the parameter %s" % (min_value,
type(self._transformation),
self.path))
# Store the minimum as a pure float
self._external_min_value = min_value
# Check that the current value of the parameter is still within the boundaries. If not, issue a warning
if self._external_min_value is not None and self.value < self._external_min_value:
warnings.warn("The current value of the parameter %s (%s) "
"was below the new minimum %s." % (self.name, self.value, self._external_min_value),
exceptions.RuntimeWarning)
self.value = self._external_min_value | [2][SEP1][If][If][If][Try][None][None][None][SEP2][1,2][3,2][4][5,6][][2][][SEP3][0][0][0][0][1][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_RAISE_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_IF_BOOLOP_AND_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_LT_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_MOD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_ATTRIBUTE_NAME_LOAD_LOAD |
def find_schema_paths(schema_files_path=DEFAULT_SCHEMA_FILES_PATH):
"""Searches the locations in the `SCHEMA_FILES_PATH` to
try to find where the schema SQL files are located.
"""
paths = []
for path in schema_files_path:
if os.path.isdir(path):
paths.append(path)
if paths:
return paths
raise SchemaFilesNotFound("Searched " + os.pathsep.join(schema_files_path)) | [1][SEP1][None][For][If][If][None][Return][None][SEP2][1][2,3][4,1][5,6][1][][][SEP3][0][0][1][0][1][0][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_NAME_LOAD_EXPR_CONSTANT_ASSIGN_NAME_STORE_LIST_LOAD_FOR_NAME_STORE_NAME_LOAD_IF_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_NAME_LOAD_RETURN_NAME_LOAD_RAISE_CALL_NAME_LOAD_BINOP_CONSTANT_ADD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD |
def compress_delete_outdir(outdir):
"""Compress the contents of the passed directory to .tar.gz and delete."""
# Compress output in .tar.gz file and remove raw output
tarfn = outdir + ".tar.gz"
logger.info("\tCompressing output from %s to %s", outdir, tarfn)
with tarfile.open(tarfn, "w:gz") as fh:
fh.add(outdir)
logger.info("\tRemoving output directory %s", outdir)
shutil.rmtree(outdir) | [1][SEP1][None][None][None][SEP2][1][2][][SEP3][2][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_BINOP_NAME_LOAD_ADD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_NAME_LOAD_WITH_WITHITEM_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CONSTANT_NAME_STORE_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD |
def add_coverage(self, qname, sname, qcover, scover=None):
"""Add percentage coverage values to self.alignment_coverage."""
self.alignment_coverage.loc[qname, sname] = qcover
if scover:
self.alignment_coverage.loc[sname, qname] = scover | [5][SEP1][If][None][SEP2][1][][SEP3][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_SUBSCRIPT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_STORE_NAME_LOAD_IF_NAME_LOAD_ASSIGN_SUBSCRIPT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_STORE_NAME_LOAD |
def run_dependency_graph(jobgraph, logger=None, jgprefix="ANIm_SGE_JG",
sgegroupsize=10000, sgeargs=None):
"""Creates and runs GridEngine scripts for jobs based on the passed
jobgraph.
- jobgraph - list of jobs, which may have dependencies.
- verbose - flag for multiprocessing verbosity
- logger - a logger module logger (optional)
- jgprefix - a prefix for the submitted jobs, in the scheduler
- sgegroupsize - the maximum size for an array job submission
- sgeargs - additional arguments to qsub
The strategy here is to loop over each job in the list of jobs (jobgraph),
and create/populate a series of Sets of commands, to be run in
reverse order with multiprocessing_run as asynchronous pools.
The strategy here is to loop over each job in the dependency graph, and
add the job to a new list of jobs, swapping out the Job dependency for
the name of the Job on which it depends.
"""
joblist = build_joblist(jobgraph)
# Try to be informative by telling the user what jobs will run
dep_count = 0 # how many dependencies are there
if logger:
logger.info("Jobs to run with scheduler")
for job in joblist:
logger.info("{0}: {1}".format(job.name, job.command))
if len(job.dependencies):
dep_count += len(job.dependencies)
for dep in job.dependencies:
logger.info("\t[^ depends on: %s]" % dep.name)
logger.info("There are %d job dependencies" % dep_count)
# If there are no job dependencies, we can use an array (or series of
# arrays) to schedule our jobs. This cuts down on problems with long
# job lists choking up the queue.
if dep_count == 0:
logger.info("Compiling jobs into JobGroups")
joblist = compile_jobgroups_from_joblist(joblist, jgprefix,
sgegroupsize)
# Send jobs to scheduler
logger.info("Running jobs with scheduler...")
logger.info("Jobs passed to scheduler in order:")
for job in joblist:
logger.info("\t%s" % job.name)
build_and_submit_jobs(os.curdir, joblist, sgeargs)
logger.info("Waiting for SGE-submitted jobs to finish (polling)")
for job in joblist:
job.wait() | [5][SEP1][If][None][If][For][None][None][If][For][None][None][None][For][For][None][None][SEP2][1,2][3][4,5][6,2][5][7][8,3][9,10][11][7][12][13,3][14][11][12][SEP3][6][1][1][0][2][2][4][0][1][1][2][0][0][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_CONSTANT_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_FOR_NAME_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_AUGASSIGN_NAME_STORE_ADD_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_MOD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_MOD_NAME_LOAD_IF_COMPARE_NAME_LOAD_EQ_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_FOR_NAME_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_MOD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_FOR_NAME_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD |
def path(self):
"tuple: The full Datastore path represented by this key."
prefix = ()
if self.parent:
prefix = self.parent.path
if self.id_or_name:
return prefix + (self.kind, self.id_or_name)
return prefix + (self.kind,) | [1][SEP1][If][None][If][Return][Return][SEP2][1,2][2][3,4][][][SEP3][0][0][0][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_TUPLE_LOAD_IF_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_BINOP_NAME_LOAD_ADD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_RETURN_BINOP_NAME_LOAD_ADD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD |
def colex (listoflists,cnums):
"""
Extracts from listoflists the columns specified in the list 'cnums'
(cnums can be an integer, a sequence of integers, or a string-expression that
corresponds to a slice operation on the variable x ... e.g., 'x[3:]' will colex
columns 3 onward from the listoflists).
Usage: colex (listoflists,cnums)
Returns: a list-of-lists corresponding to the columns from listoflists
specified by cnums, in the order the column numbers appear in cnums
"""
global index
column = 0
if type(cnums) in [ListType,TupleType]: # if multiple columns to get
index = cnums[0]
column = [x[index] for x in listoflists]
for col in cnums[1:]:
index = col
column = abut(column,[x[index] for x in listoflists])
elif type(cnums) == StringType: # if an 'x[3:]' type expr.
evalstring = 'map(lambda x: x'+cnums+', listoflists)'
column = eval(evalstring)
else: # else it's just 1 col to get
index = cnums
column = [x[index] for x in listoflists]
return column | [2][SEP1][If][None][If][For][None][None][None][Return][SEP2][1,2][3][4,5][6,7][7][7][3][][SEP3][2][0][1][0][1][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_GLOBAL_ASSIGN_NAME_STORE_CONSTANT_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_IN_LIST_NAME_LOAD_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_LISTCOMP_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_FOR_NAME_STORE_SUBSCRIPT_NAME_LOAD_SLICE_CONSTANT_LOAD_ASSIGN_NAME_STORE_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_LISTCOMP_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_EQ_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_CONSTANT_ADD_NAME_LOAD_ADD_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_NAME_LOAD_ASSIGN_NAME_STORE_LISTCOMP_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_RETURN_NAME_LOAD |
def lincr(l,cap): # to increment a list up to a max-list of 'cap'
"""
Simulate a counting system from an n-dimensional list.
Usage: lincr(l,cap) l=list to increment, cap=max values for each list pos'n
Returns: next set of values for list l, OR -1 (if overflow)
"""
l[0] = l[0] + 1 # e.g., [0,0,0] --> [2,4,3] (=cap)
for i in range(len(l)):
if l[i] > cap[i] and i < len(l)-1: # if carryover AND not done
l[i] = 0
l[i+1] = l[i+1] + 1
elif l[i] > cap[i] and i == len(l)-1: # overflow past last column, must be finished
l = -1
return l | [2][SEP1][None][For][If][Return][None][If][None][SEP2][1][2,3][4,5][][1][6,1][1][SEP3][0][2][1][0][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_SUBSCRIPT_NAME_LOAD_CONSTANT_STORE_BINOP_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ADD_CONSTANT_FOR_NAME_STORE_CALL_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_IF_BOOLOP_AND_COMPARE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_GT_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_COMPARE_NAME_LOAD_LT_BINOP_CALL_NAME_LOAD_NAME_LOAD_SUB_CONSTANT_ASSIGN_SUBSCRIPT_NAME_LOAD_NAME_LOAD_STORE_CONSTANT_ASSIGN_SUBSCRIPT_NAME_LOAD_BINOP_NAME_LOAD_ADD_CONSTANT_STORE_BINOP_SUBSCRIPT_NAME_LOAD_BINOP_NAME_LOAD_ADD_CONSTANT_LOAD_ADD_CONSTANT_IF_BOOLOP_AND_COMPARE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_GT_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_COMPARE_NAME_LOAD_EQ_BINOP_CALL_NAME_LOAD_NAME_LOAD_SUB_CONSTANT_ASSIGN_NAME_STORE_UNARYOP_USUB_CONSTANT_RETURN_NAME_LOAD |
def as_dict( self, key="id" ):
"""
Return a dictionary containing all remaining motifs, using `key`
as the dictionary key.
"""
rval = {}
for motif in self:
rval[ getattr( motif, key ) ] = motif
return rval | [2][SEP1][None][For][None][Return][SEP2][1][2,3][1][][SEP3][0][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_DICT_FOR_NAME_STORE_NAME_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_STORE_NAME_LOAD_RETURN_NAME_LOAD |
def transformString( self, instring ):
"""Extension to scanString, to modify matching text with modified tokens that may
be returned from a parse action. To use transformString, define a grammar and
attach a parse action to it that modifies the returned token list.
Invoking transformString() on a target string will then scan for matches,
and replace the matched text patterns according to the logic in the parse
action. transformString() returns the resulting transformed string."""
out = []
lastE = 0
# force preservation of <TAB>s, to minimize unwanted transformation of string, and to
# keep string locs straight between transformString and scanString
self.keepTabs = True
for t,s,e in self.scanString( instring ):
out.append( instring[lastE:s] )
if t:
if isinstance(t,ParseResults):
out += t.asList()
elif isinstance(t,list):
out += t
else:
out.append(t)
lastE = e
out.append(instring[lastE:])
return "".join(map(_ustr,out)) | [2][SEP1][None][For][If][Return][If][None][None][If][None][None][SEP2][1][2,3][4,5][][6,7][1][5][8,9][5][5][SEP3][0][1][1][3][1][0][1][1][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_LIST_LOAD_ASSIGN_NAME_STORE_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_FOR_TUPLE_NAME_STORE_NAME_STORE_NAME_STORE_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_NAME_LOAD_SLICE_NAME_LOAD_NAME_LOAD_LOAD_IF_NAME_LOAD_IF_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_AUGASSIGN_NAME_STORE_ADD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_AUGASSIGN_NAME_STORE_ADD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_NAME_LOAD_SLICE_NAME_LOAD_LOAD_RETURN_CALL_ATTRIBUTE_CONSTANT_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD |
def new( self, min, max ):
"""Create an empty index for intervals in the range min, max"""
# Ensure the range will fit given the shifting strategy
assert MIN <= min <= max <= MAX
self.min = min
self.max = max
# Determine offsets to use
self.offsets = offsets_for_max_size( max )
# Determine the largest bin we will actually use
self.bin_count = bin_for_range( max - 1, max, offsets = self.offsets ) + 1
# Create empty bins
self.bins = [ [] for i in range( self.bin_count ) ] | [3][SEP1][None][None][SEP2][1][][SEP3][0][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_EXPR_CONSTANT_ASSERT_COMPARE_NAME_LOAD_LTE_LTE_LTE_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_BINOP_CALL_NAME_LOAD_BINOP_NAME_LOAD_SUB_CONSTANT_NAME_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_ADD_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_LISTCOMP_LIST_LOAD_COMPREHENSION_NAME_STORE_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD |
def to_stormo_scoring_matrix( self, background=None ):
"""
Create a scoring matrix from this count matrix using the method from:
Hertz, G.Z. and G.D. Stormo (1999). Identifying DNA and protein patterns with statistically
significant alignments of multiple sequences. Bioinformatics 15(7): 563-577.
"""
alphabet_size = len( self.alphabet )
if background is None:
background = ones( alphabet_size, float32 ) / alphabet_size
# Row totals as a one column array
totals = numpy.sum( self.values, 1 )[:,newaxis]
values = log2( self.values + background ) \
- log2( totals + 1 ) - log2( background )
return ScoringMatrix.create_from_other( self, values.astype( float32 ) ) | [2][SEP1][If][None][Return][SEP2][1,2][2][][SEP3][1][1][6] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_BINOP_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_DIV_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_TUPLE_SLICE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_CALL_NAME_LOAD_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_NAME_LOAD_SUB_CALL_NAME_LOAD_BINOP_NAME_LOAD_ADD_CONSTANT_SUB_CALL_NAME_LOAD_NAME_LOAD_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD |
def get_ip_report(self, this_ip, timeout=None):
""" Get IP address reports.
:param this_ip: a valid IPv4 address in dotted quad notation, for the time being only IPv4 addresses are
supported.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
"""
params = {'apikey': self.api_key, 'ip': this_ip}
try:
response = requests.get(self.base + 'ip-address/report',
params=params,
proxies=self.proxies,
timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response) | [3][SEP1][Try][None][Return][Return][SEP2][1,2][3][][][SEP3][0][1][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_DICT_CONSTANT_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_CONSTANT_KEYWORD_NAME_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_NAME_LOAD_EXCEPTHANDLER_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_CALL_NAME_LOAD_KEYWORD_CALL_NAME_LOAD_NAME_LOAD_RETURN_CALL_NAME_LOAD_NAME_LOAD |
def web_hooks(self, include_global=True):
"""Get all web hooks for this project. Includes global hooks."""
from fabric_bolt.web_hooks.models import Hook
ors = [Q(project=self)]
if include_global:
ors.append(Q(project=None))
hooks = Hook.objects.filter(reduce(operator.or_, ors))
return hooks | [2][SEP1][If][None][Return][SEP2][1,2][2][][SEP3][1][2][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_CONSTANT_EXPR_CONSTANT_IMPORTFROM_ALIAS_ASSIGN_NAME_STORE_LIST_CALL_NAME_LOAD_KEYWORD_NAME_LOAD_LOAD_IF_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_KEYWORD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def form_valid(self, form):
"""After the form is valid lets let people know"""
ret = super(ProjectCopy, self).form_valid(form)
self.copy_relations()
# Good to make note of that
messages.add_message(self.request, messages.SUCCESS, 'Project %s copied' % self.object.name)
return ret | [2][SEP1][Return][SEP2][][SEP3][4] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_MOD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_RETURN_NAME_LOAD |
def read_all(self, user=None):
"""
Marks all notifications as read for a user (if supplied)
:param user: Notification recipient.
:return: Updates QuerySet as read.
"""
qs = self.unread()
if user:
qs = qs.filter(recipient=user)
qs.update(read=True) | [2][SEP1][If][None][None][SEP2][1,2][2][][SEP3][1][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_CONSTANT |
def fetch_attributes(self, event, channel):
"""Returns attribute list for a given event/channel."""
try:
for sensor in self.event_states[event]:
if sensor[1] == int(channel):
return sensor
except KeyError:
return None | [3][SEP1][Try][For][Return][If][Return][SEP2][1,2][3][][4,1][][SEP3][0][0][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_EXPR_CONSTANT_TRY_FOR_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_LOAD_IF_COMPARE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_EQ_CALL_NAME_LOAD_NAME_LOAD_RETURN_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_RETURN_CONSTANT |
def parse_record(self, raw, indx=0):
"""Parse raw data (that is retrieved by "request") and return pandas.DataFrame.
Returns tuple (data, metadata)
data - pandas.DataFrame with retrieved data.
metadata - pandas.DataFrame with info about symbol, currency, frequency,
displayname and status of given request
"""
suffix = '' if indx == 0 else '_%i' % (indx + 1)
# Parsing status
status = self.status(raw)
# Testing if no errors
if status['StatusType'] != 'Connected':
if self.raise_on_error:
raise DatastreamException('%s (error %i): %s --> "%s"' %
(status['StatusType'], status['StatusCode'],
status['StatusMessage'], status['Request']))
else:
self._test_status_and_warn()
return pd.DataFrame(), {}
record = self.extract_data(raw)
get_field = lambda fldname: record[fldname + suffix]
try:
error = get_field('INSTERROR')
if self.raise_on_error:
raise DatastreamException('Error: %s --> "%s"' %
(error, status['Request']))
else:
self.last_status['StatusMessage'] = error
self.last_status['StatusType'] = 'INSTERROR'
self._test_status_and_warn()
metadata = {'Frequency': '', 'Currency': '', 'DisplayName': '',
'Symbol': '', 'Status': error}
except KeyError:
# Parsing metadata of the symbol
# NB! currency might be returned as symbol thus "unicode" should be used
metadata = {'Frequency': ustr(get_field('FREQUENCY')),
'Currency': ustr(get_field('CCY')),
'DisplayName': ustr(get_field('DISPNAME')),
'Symbol': ustr(get_field('SYMBOL')),
'Status': 'OK'}
# Fields with data
if suffix == '':
fields = [ustr(x) for x in record if '_' not in x]
else:
fields = [ustr(x) for x in record if suffix in x]
# Filter metadata
meta_fields = ['CCY', 'DISPNAME', 'FREQUENCY', 'SYMBOL', 'DATE', 'INSTERROR']
fields = [x.replace(suffix, '') for x in fields
if not any([y in x for y in meta_fields])]
if 'DATE' + suffix in record:
date = record['DATE' + suffix]
elif 'DATE' in record:
date = record['DATE']
else:
date = None
if len(fields) > 0 and date is not None:
# Check if we have a single value or a series
if isinstance(date, dt.datetime):
data = pd.DataFrame({x: [get_field(x)] for x in fields},
index=[date])
else:
data = pd.DataFrame({x: get_field(x)[0] for x in fields},
index=date[0])
else:
data = pd.DataFrame()
metadata = pd.DataFrame(metadata, index=[indx])
metadata = metadata[['Symbol', 'DisplayName', 'Currency', 'Frequency', 'Status']]
return data, metadata | [3][SEP1][If][If][Try][None][Return][If][None][None][None][If][None][None][If][None][If][If][None][None][If][None][None][None][Return][SEP2][1,2][3,4][5,6][][][7,8][9][][9][10,11][12][12][13,14][15][16,17][18,19][15][15][20,21][22][22][22][][SEP3][1][0][1][1][2][1][8][1][1][0][1][1][2][0][0][1][0][0][1][1][2][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_IFEXP_COMPARE_NAME_LOAD_EQ_CONSTANT_CONSTANT_BINOP_CONSTANT_MOD_BINOP_NAME_LOAD_ADD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NOTEQ_CONSTANT_IF_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_TUPLE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_DICT_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_LAMBDA_ARGUMENTS_ARG_SUBSCRIPT_NAME_LOAD_BINOP_NAME_LOAD_ADD_NAME_LOAD_LOAD_TRY_ASSIGN_NAME_STORE_CALL_NAME_LOAD_CONSTANT_IF_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_ASSIGN_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_STORE_NAME_LOAD_ASSIGN_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_STORE_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_DICT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_ASSIGN_NAME_STORE_DICT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CALL_NAME_LOAD_CALL_NAME_LOAD_CONSTANT_CALL_NAME_LOAD_CALL_NAME_LOAD_CONSTANT_CALL_NAME_LOAD_CALL_NAME_LOAD_CONSTANT_CALL_NAME_LOAD_CALL_NAME_LOAD_CONSTANT_CONSTANT_IF_COMPARE_NAME_LOAD_EQ_CONSTANT_ASSIGN_NAME_STORE_LISTCOMP_CALL_NAME_LOAD_NAME_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_COMPARE_CONSTANT_NOTIN_NAME_LOAD_ASSIGN_NAME_STORE_LISTCOMP_CALL_NAME_LOAD_NAME_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_COMPARE_NAME_LOAD_IN_NAME_LOAD_ASSIGN_NAME_STORE_LIST_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_LOAD_ASSIGN_NAME_STORE_LISTCOMP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CONSTANT_COMPREHENSION_NAME_STORE_NAME_LOAD_UNARYOP_NOT_CALL_NAME_LOAD_LISTCOMP_COMPARE_NAME_LOAD_IN_NAME_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_IF_COMPARE_BINOP_CONSTANT_ADD_NAME_LOAD_IN_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_BINOP_CONSTANT_ADD_NAME_LOAD_LOAD_IF_COMPARE_CONSTANT_IN_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_BOOLOP_AND_COMPARE_CALL_NAME_LOAD_NAME_LOAD_GT_CONSTANT_COMPARE_NAME_LOAD_ISNOT_CONSTANT_IF_CALL_NAME_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_DICTCOMP_NAME_LOAD_LIST_CALL_NAME_LOAD_NAME_LOAD_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_KEYWORD_LIST_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_DICTCOMP_NAME_LOAD_SUBSCRIPT_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_LOAD_COMPREHENSION_NAME_STORE_NAME_LOAD_KEYWORD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_KEYWORD_LIST_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_LIST_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_LOAD_LOAD_RETURN_TUPLE_NAME_LOAD_NAME_LOAD_LOAD |
def remove(self, child):
"""
Remove the given child from both child list and child indexes
:type child: :class:`Element <hl7apy.core.Element>`
:param child: an instance of :class:`Element <hl7apy.core.Element>` subclass
"""
try:
if self.element == child.traversal_parent:
self._remove_from_traversal_index(child)
else:
self._remove_from_index(child)
self.list.remove(child)
except:
raise | [2][SEP1][Try][If][None][None][None][SEP2][1,2][3,4][][][][SEP3][0][0][0][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_TRY_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_EXCEPTHANDLER_RAISE |
def prepare(self):
'''Prepare to run the docker command'''
self.__make_scubadir()
if self.is_remote_docker:
'''
Docker is running remotely (e.g. boot2docker on OSX).
We don't need to do any user setup whatsoever.
TODO: For now, remote instances won't have any .scubainit
See:
https://github.com/JonathonReinhart/scuba/issues/17
'''
raise ScubaError('Remote docker not supported (DOCKER_HOST is set)')
# Docker is running natively
self.__setup_native_run()
# Apply environment vars from .scuba.yml
self.env_vars.update(self.context.environment) | [1][SEP1][If][None][None][SEP2][1,2][][][SEP3][1][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CONSTANT_RAISE_CALL_NAME_LOAD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD |
def get_country_long(self, ip):
''' Get country_long '''
rec = self.get_all(ip)
return rec and rec.country_long | [2][SEP1][Return][SEP2][][SEP3][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_BOOLOP_AND_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD |
def rates_for_location(self, postal_code, location_deets=None):
"""Shows the sales tax rates for a given location."""
request = self._get("rates/" + postal_code, location_deets)
return self.responder(request) | [3][SEP1][Return][SEP2][][SEP3][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_ADD_NAME_LOAD_NAME_LOAD_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD |
def rubles(amount, zero_for_kopeck=False):
"""Converts float value to in-words representation (for money)"""
try:
res = numeral.rubles(amount, zero_for_kopeck)
except Exception as err:
# because filter must die silently
res = default_value % {'error': err, 'value': str(amount)}
return res | [2][SEP1][Try][None][None][Return][SEP2][1,2][3][3][][SEP3][0][1][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_CONSTANT_EXPR_CONSTANT_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_NAME_LOAD_MOD_DICT_CONSTANT_CONSTANT_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def translify(text):
"""Translify russian text"""
try:
res = translit.translify(smart_text(text, encoding))
except Exception as err:
# because filter must die silently
res = default_value % {'error': err, 'value': text}
return res | [1][SEP1][Try][None][None][Return][SEP2][1,2][3][3][][SEP3][0][2][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_NAME_LOAD_MOD_DICT_CONSTANT_CONSTANT_NAME_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def fromargskw(argskw, argspecs, slf_or_clsm = False):
"""Turns a linearized list of args into (args, keywords) form
according to given argspecs (like inspect module provides).
"""
res_args = argskw
try:
kwds = argspecs.keywords
except AttributeError:
kwds = argspecs.varkw
if not kwds is None:
res_kw = argskw[-1]
res_args = argskw[:-1]
else:
res_kw = None
if not argspecs.varargs is None:
vargs_pos = (len(argspecs.args)-1) \
if slf_or_clsm else len(argspecs.args)
if vargs_pos > 0:
res_lst = list(argskw[:vargs_pos])
res_lst.extend(argskw[vargs_pos])
res_args = tuple(res_lst)
else:
res_args = argskw[0]
try:
if len(argspecs.kwonlyargs) > 0:
res_kw = {} if res_kw is None else dict(res_kw)
ipos = -len(argspecs.kwonlyargs) - (0 if kwds is None else 1)
for name in argspecs.kwonlyargs:
res_kw[name] = argskw[ipos]
ipos += 1
except AttributeError:
pass
if res_kw is None:
res_kw = {}
return res_args, res_kw | [3][SEP1][Try][None][None][If][None][None][If][If][Try][None][None][If][None][None][If][For][None][Return][None][SEP2][1,2][3][3][4,5][6][6][7,8][9,10][11,12][8][8][13,14][14][15][16,17][18,14][17][][15][SEP3][0][0][0][0][0][0][0][2][0][3][0][1][0][2][0][0][0][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_NAME_LOAD_TRY_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_UNARYOP_NOT_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_UNARYOP_USUB_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_SLICE_UNARYOP_USUB_CONSTANT_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_UNARYOP_NOT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_IFEXP_NAME_LOAD_BINOP_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_SUB_CONSTANT_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_GT_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_SUBSCRIPT_NAME_LOAD_SLICE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_TRY_IF_COMPARE_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_GT_CONSTANT_ASSIGN_NAME_STORE_IFEXP_COMPARE_NAME_LOAD_IS_CONSTANT_DICT_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_UNARYOP_USUB_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_SUB_IFEXP_COMPARE_NAME_LOAD_IS_CONSTANT_CONSTANT_CONSTANT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_NAME_LOAD_STORE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_AUGASSIGN_NAME_STORE_ADD_CONSTANT_EXCEPTHANDLER_NAME_LOAD_PASS_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_DICT_RETURN_TUPLE_NAME_LOAD_NAME_LOAD_LOAD |
def _tp_relfq_name(tp, tp_name=None, assumed_globals=None, update_assumed_globals=None,
implicit_globals=None):
# _type: (type, Optional[Union[Set[Union[type, types.ModuleType]], Mapping[Union[type, types.ModuleType], str]]], Optional[bool]) -> str
"""Provides the fully qualified name of a type relative to a set of
modules and types that is assumed as globally available.
If assumed_globals is None this always returns the fully qualified name.
If update_assumed_globals is True, this will return the plain type name,
but will add the type to assumed_globals (expected to be a set).
This way a caller can query how to generate an appropriate import section.
If update_assumed_globals is False, assumed_globals can alternatively be
a mapping rather than a set. In that case the mapping is expected to be
an alias table, mapping modules or types to their alias names desired for
displaying.
update_assumed_globals can be None (default). In that case this will return the
plain type name if assumed_globals is None as well (default).
This mode is there to have a less involved default behavior.
"""
if tp_name is None:
tp_name = util.get_class_qualname(tp)
if implicit_globals is None:
implicit_globals = _implicit_globals
else:
implicit_globals = implicit_globals.copy()
implicit_globals.update(_implicit_globals)
if assumed_globals is None:
if update_assumed_globals is None:
return tp_name
md = sys.modules[tp.__module__]
if md in implicit_globals:
return tp_name
name = tp.__module__+'.'+tp_name
pck = None
if not (md.__package__ is None or md.__package__ == ''
or name.startswith(md.__package__)):
pck = md.__package__
return name if pck is None else pck+'.'+name
if tp in assumed_globals:
try:
return assumed_globals[tp]
except:
return tp_name
elif hasattr(tp, '__origin__') and tp.__origin__ in assumed_globals:
try:
return assumed_globals[tp.__origin__]
except:
return tp_name
# For some reason Callable does not have __origin__, so we special-case
# it here. Todo: Find a cleaner solution.
elif is_Callable(tp) and typing.Callable in assumed_globals:
try:
return assumed_globals[typing.Callable]
except:
return tp_name
elif update_assumed_globals == True:
if not assumed_globals is None:
if hasattr(tp, '__origin__') and not tp.__origin__ is None:
toadd = tp.__origin__
elif is_Callable(tp):
toadd = typing.Callable
else:
toadd = tp
if not sys.modules[toadd.__module__] in implicit_globals:
assumed_globals.add(toadd)
return tp_name
else:
md = sys.modules[tp.__module__]
if md in implicit_globals:
return tp_name
md_name = tp.__module__
if md in assumed_globals:
try:
md_name = assumed_globals[md]
except:
pass
else:
if not (md.__package__ is None or md.__package__ == ''
or md_name.startswith(md.__package__)):
md_name = md.__package__+'.'+tp.__module__
return md_name+'.'+tp_name | [5][SEP1][If][None][If][None][None][If][If][If][Return][If][Try][If][Return][If][Return][Return][Try][If][None][Return][Return][Return][Try][If][Return][Return][If][If][If][Return][Return][If][None][If][Try][If][If][None][None][None][None][None][Return][None][SEP2][1,2][2][3,4][5][5][6,7][8,9][10,11][][12,13][14,15][16,17][][18,19][][][20,21][22,23][19][][][][24,25][26,27][][][28,29][30,31][32,33][][][34,35][36][37,38][39,40][41,42][43,29][36][36][42][42][42][][29][SEP3][0][1][0][0][2][1][0][0][0][0][0][1][0][1][0][0][0][1][0][0][0][0][0][1][0][0][1][0][1][0][0][0][0][1][0][1][0][0][0][0][0][0][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_CONSTANT_CONSTANT_EXPR_CONSTANT_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_IF_COMPARE_NAME_LOAD_IS_CONSTANT_RETURN_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_NAME_LOAD_RETURN_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_CONSTANT_ADD_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_UNARYOP_NOT_BOOLOP_OR_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IS_CONSTANT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_IFEXP_COMPARE_NAME_LOAD_IS_CONSTANT_NAME_LOAD_BINOP_BINOP_NAME_LOAD_ADD_CONSTANT_ADD_NAME_LOAD_IF_COMPARE_NAME_LOAD_IN_NAME_LOAD_TRY_RETURN_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_EXCEPTHANDLER_RETURN_NAME_LOAD_IF_BOOLOP_AND_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IN_NAME_LOAD_TRY_RETURN_SUBSCRIPT_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXCEPTHANDLER_RETURN_NAME_LOAD_IF_BOOLOP_AND_CALL_NAME_LOAD_NAME_LOAD_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IN_NAME_LOAD_TRY_RETURN_SUBSCRIPT_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXCEPTHANDLER_RETURN_NAME_LOAD_IF_COMPARE_NAME_LOAD_EQ_CONSTANT_IF_UNARYOP_NOT_COMPARE_NAME_LOAD_IS_CONSTANT_IF_BOOLOP_AND_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_UNARYOP_NOT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_NAME_LOAD_IF_UNARYOP_NOT_COMPARE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IN_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_NAME_LOAD_RETURN_NAME_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_NAME_LOAD_TRY_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_EXCEPTHANDLER_PASS_IF_UNARYOP_NOT_BOOLOP_OR_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IS_CONSTANT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_CONSTANT_ADD_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_BINOP_BINOP_NAME_LOAD_ADD_CONSTANT_ADD_NAME_LOAD |
def combine_type(observations):
"""Combines a list of types into one.
Basically these are combined into a Union with some
additional unification effort (e.g. can apply PEP 484 style numeric tower).
"""
assert len(observations) > 0
if len(observations) == 1:
return observations[0]
else:
if simplify:
simplify_for_Union(observations)
return Union[tuple(observations)] | [1][SEP1][None][If][Return][If][None][Return][SEP2][1][2,3][][4,5][5][][SEP3][1][2][0][0][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSERT_COMPARE_CALL_NAME_LOAD_NAME_LOAD_GT_CONSTANT_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_EQ_CONSTANT_RETURN_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_IF_NAME_LOAD_EXPR_CALL_NAME_LOAD_NAME_LOAD_RETURN_SUBSCRIPT_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_LOAD |
def is_no_type_check(memb):
"""Checks if an object was annotated with @no_type_check
(from typing or pytypes.typechecker).
"""
try:
return hasattr(memb, '__no_type_check__') and memb.__no_type_check__ or \
memb in _not_type_checked
except TypeError:
return False | [1][SEP1][Try][Return][Return][SEP2][1,2][][][SEP3][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_TRY_RETURN_BOOLOP_OR_BOOLOP_AND_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_COMPARE_NAME_LOAD_IN_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_RETURN_CONSTANT |
def import_parallel_gateway_to_graph(diagram_graph, process_id, process_attributes, element):
"""
Adds to graph the new element that represents BPMN parallel gateway.
Parallel gateway doesn't have additional attributes. Separate method is used to improve code readability.
:param diagram_graph: NetworkX graph representing a BPMN process diagram,
:param process_id: string object, representing an ID of process element,
:param process_attributes: dictionary that holds attribute values of 'process' element, which is parent of
imported flow node,
:param element: object representing a BPMN XML 'parallelGateway'.
"""
BpmnDiagramGraphImport.import_gateway_to_graph(diagram_graph, process_id, process_attributes, element) | [4][SEP1][None][SEP2][][SEP3][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_EXPR_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD |
def export_child_lane_set(parent_xml_element, child_lane_set, plane_element):
"""
Creates 'childLaneSet' element for exported BPMN XML file.
:param parent_xml_element: an XML element, parent of exported 'childLaneSet' element,
:param child_lane_set: dictionary with exported 'childLaneSet' element attributes and child elements,
:param plane_element: XML object, representing 'plane' element of exported BPMN 2.0 XML.
"""
lane_set_xml = eTree.SubElement(parent_xml_element, consts.Consts.lane_set)
for key, value in child_lane_set[consts.Consts.lanes].items():
BpmnDiagramGraphExport.export_lane(lane_set_xml, key, value, plane_element) | [3][SEP1][None][For][None][SEP2][1][2][1][SEP3][1][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_ATTRIBUTE_SUBSCRIPT_NAME_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD |
def export_element(bpmn_graph, export_elements, node, nodes_classification, order=0, prefix="", condition="",
who="", add_join=False):
"""
Export a node with "Element" classification (task, subprocess or gateway)
:param bpmn_graph: an instance of BpmnDiagramGraph class,
:param export_elements: a dictionary object. The key is a node ID, value is a dictionary of parameters that
will be used in exported CSV document,
:param node: networkx.Node object,
:param nodes_classification: dictionary of classification labels. Key - node id. Value - a list of labels,
:param order: the order param of exported node,
:param prefix: the prefix of exported node - if the task appears after some gateway, the prefix will identify
the branch
:param condition: the condition param of exported node,
:param who: the condition param of exported node,
:param add_join: boolean flag. Used to indicate if "Join" element should be added to CSV.
:return: None or the next node object if the exported node was a gateway join.
"""
node_type = node[1][consts.Consts.type]
node_classification = nodes_classification[node[0]]
outgoing_flows = node[1].get(consts.Consts.outgoing_flow)
if node_type != consts.Consts.parallel_gateway and consts.Consts.default in node[1] \
and node[1][consts.Consts.default] is not None:
default_flow_id = node[1][consts.Consts.default]
else:
default_flow_id = None
if BpmnDiagramGraphCsvExport.classification_join in node_classification and not add_join:
# If the node is a join, then retract the recursion back to the split.
# In case of activity - return current node. In case of gateway - return outgoing node
# (we are making assumption that join has only one outgoing node)
if node_type == consts.Consts.task or node_type == consts.Consts.subprocess:
return node
else:
outgoing_flow_id = outgoing_flows[0]
outgoing_flow = bpmn_graph.get_flow_by_id(outgoing_flow_id)
outgoing_node = bpmn_graph.get_node_by_id(outgoing_flow[2][consts.Consts.target_ref])
return outgoing_node
else:
if node_type == consts.Consts.task:
export_elements.append({"Order": prefix + str(order), "Activity": node[1][consts.Consts.node_name],
"Condition": condition, "Who": who, "Subprocess": "", "Terminated": ""})
elif node_type == consts.Consts.subprocess:
export_elements.append({"Order": prefix + str(order), "Activity": node[1][consts.Consts.node_name],
"Condition": condition, "Who": who, "Subprocess": "yes", "Terminated": ""})
if BpmnDiagramGraphCsvExport.classification_split in node_classification:
next_node = None
alphabet_suffix_index = 0
for outgoing_flow_id in outgoing_flows:
outgoing_flow = bpmn_graph.get_flow_by_id(outgoing_flow_id)
outgoing_node = bpmn_graph.get_node_by_id(outgoing_flow[2][consts.Consts.target_ref])
# This will work only up to 26 outgoing flows
suffix = string.ascii_lowercase[alphabet_suffix_index]
next_prefix = prefix + str(order) + suffix
alphabet_suffix_index += 1
# parallel gateway does not uses conditions
if node_type != consts.Consts.parallel_gateway and consts.Consts.name in outgoing_flow[2] \
and outgoing_flow[2][consts.Consts.name] is not None:
condition = outgoing_flow[2][consts.Consts.name]
else:
condition = ""
if BpmnDiagramGraphCsvExport.classification_join in nodes_classification[outgoing_node[0]]:
export_elements.append(
{"Order": next_prefix + str(1), "Activity": "goto " + prefix + str(order + 1),
"Condition": condition, "Who": who, "Subprocess": "", "Terminated": ""})
elif outgoing_flow_id == default_flow_id:
tmp_next_node = BpmnDiagramGraphCsvExport.export_node(bpmn_graph, export_elements, outgoing_node,
nodes_classification, 1, next_prefix, "else",
who)
if tmp_next_node is not None:
next_node = tmp_next_node
else:
tmp_next_node = BpmnDiagramGraphCsvExport.export_node(bpmn_graph, export_elements, outgoing_node,
nodes_classification, 1, next_prefix,
condition, who)
if tmp_next_node is not None:
next_node = tmp_next_node
if next_node is not None:
return BpmnDiagramGraphCsvExport.export_node(bpmn_graph, export_elements, next_node,
nodes_classification, order=(order + 1), prefix=prefix,
who=who, add_join=True)
elif len(outgoing_flows) == 1:
outgoing_flow_id = outgoing_flows[0]
outgoing_flow = bpmn_graph.get_flow_by_id(outgoing_flow_id)
outgoing_node = bpmn_graph.get_node_by_id(outgoing_flow[2][consts.Consts.target_ref])
return BpmnDiagramGraphCsvExport.export_node(bpmn_graph, export_elements, outgoing_node,
nodes_classification, order=(order + 1), prefix=prefix,
who=who)
else:
return None | [9][SEP1][If][None][None][If][If][If][Return][Return][None][If][If][None][None][If][For][Return][Return][If][If][None][None][Return][If][None][If][If][If][None][None][SEP2][1,2][3][3][4,5][6,7][8,9][][][10][11,10][12,13][10][14][15,16][17,18][][][19,20][21][22][22][][23,24][14][25,26][27,14][28,14][14][14][SEP3][1][0][0][0][0][0][0][2][2][0][9][2][0][1][5][3][0][3][0][0][0][1][0][3][0][1][1][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_BOOLOP_AND_COMPARE_NAME_LOAD_NOTEQ_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_COMPARE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IN_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_COMPARE_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ISNOT_CONSTANT_ASSIGN_NAME_STORE_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_BOOLOP_AND_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IN_NAME_LOAD_UNARYOP_NOT_NAME_LOAD_IF_BOOLOP_OR_COMPARE_NAME_LOAD_EQ_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_COMPARE_NAME_LOAD_EQ_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_RETURN_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_RETURN_NAME_LOAD_IF_COMPARE_NAME_LOAD_EQ_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_DICT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_BINOP_NAME_LOAD_ADD_CALL_NAME_LOAD_NAME_LOAD_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_IF_COMPARE_NAME_LOAD_EQ_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_DICT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_BINOP_NAME_LOAD_ADD_CALL_NAME_LOAD_NAME_LOAD_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IN_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_ASSIGN_NAME_STORE_CONSTANT_FOR_NAME_STORE_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_NAME_LOAD_ADD_CALL_NAME_LOAD_NAME_LOAD_ADD_NAME_LOAD_AUGASSIGN_NAME_STORE_ADD_CONSTANT_IF_BOOLOP_AND_COMPARE_NAME_LOAD_NOTEQ_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_COMPARE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IN_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_COMPARE_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ISNOT_CONSTANT_ASSIGN_NAME_STORE_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IN_SUBSCRIPT_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_DICT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_BINOP_NAME_LOAD_ADD_CALL_NAME_LOAD_CONSTANT_BINOP_BINOP_CONSTANT_ADD_NAME_LOAD_ADD_CALL_NAME_LOAD_BINOP_NAME_LOAD_ADD_CONSTANT_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_IF_COMPARE_NAME_LOAD_EQ_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_CONSTANT_NAME_LOAD_CONSTANT_NAME_LOAD_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_ASSIGN_NAME_STORE_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_CONSTANT_NAME_LOAD_NAME_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_ASSIGN_NAME_STORE_NAME_LOAD_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_KEYWORD_BINOP_NAME_LOAD_ADD_CONSTANT_KEYWORD_NAME_LOAD_KEYWORD_NAME_LOAD_KEYWORD_CONSTANT_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_EQ_CONSTANT_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_KEYWORD_BINOP_NAME_LOAD_ADD_CONSTANT_KEYWORD_NAME_LOAD_KEYWORD_NAME_LOAD_RETURN_CONSTANT |
def set_process_ref(self, value):
"""
Setter for 'process_ref' field.
:param value - a new value of 'process_ref' field. Must be either None (process_ref is optional according to
BPMN 2.0 XML Schema) or String.
"""
if not isinstance(value, str):
raise TypeError("ProcessRef must be set to a String")
self.__process_ref = value | [2][SEP1][If][None][None][SEP2][1,2][][][SEP3][1][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_RAISE_CALL_NAME_LOAD_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD |
def add_exclusive_gateway_to_diagram(self, process_id, gateway_name="", gateway_direction="Unspecified",
default=None, node_id=None):
"""
Adds an exclusiveGateway element to BPMN diagram.
:param process_id: string object. ID of parent process,
:param gateway_name: string object. Name of exclusive gateway,
:param gateway_direction: string object. Accepted values - "Unspecified", "Converging", "Diverging", "Mixed".
Default value - "Unspecified".
:param default: string object. ID of flow node, target of gateway default path. Default value - None,
:param node_id: string object. ID of node. Default value - None.
:return: a tuple, where first value is exculusiveGateway ID, second a reference to created object.
"""
exclusive_gateway_id, exclusive_gateway = self.add_gateway_to_diagram(process_id,
consts.Consts.exclusive_gateway,
gateway_name=gateway_name,
gateway_direction=gateway_direction,
node_id=node_id)
self.diagram_graph.node[exclusive_gateway_id][consts.Consts.default] = default
return exclusive_gateway_id, exclusive_gateway | [6][SEP1][Return][SEP2][][SEP3][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_CONSTANT_CONSTANT_EXPR_CONSTANT_ASSIGN_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_KEYWORD_NAME_LOAD_KEYWORD_NAME_LOAD_KEYWORD_NAME_LOAD_ASSIGN_SUBSCRIPT_SUBSCRIPT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_STORE_NAME_LOAD_RETURN_TUPLE_NAME_LOAD_NAME_LOAD_LOAD |
def _check_random_state(random_state):
"""Checks and processes user input for seeding random numbers.
Parameters
----------
random_state : int, RandomState instance or None
If int, a RandomState instance is created with this integer seed.
If RandomState instance, random_state is returned;
If None, a RandomState instance is created with arbitrary seed.
Returns
-------
scipy.random.RandomState instance
Raises
------
TypeError
If ``random_state`` is not appropriately set.
"""
if random_state is None or isinstance(random_state, int):
return sci.random.RandomState(random_state)
elif isinstance(random_state, sci.random.RandomState):
return random_state
else:
raise TypeError('Seed should be None, int or np.random.RandomState') | [1][SEP1][If][Return][If][Return][None][SEP2][1,2][][3,4][][][SEP3][1][1][1][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_BOOLOP_OR_COMPARE_NAME_LOAD_IS_CONSTANT_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_RETURN_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_IF_CALL_NAME_LOAD_NAME_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_RETURN_NAME_LOAD_RAISE_CALL_NAME_LOAD_CONSTANT |
def check_value(self, value):
"""Check the validity of a value for the field."""
#if self.readonly:
# raise error.Error(
# "'{field_name}' field is readonly".format(
# field_name=self.name))
if value and self.size:
if not is_string(value):
raise ValueError("Value supplied has to be a string")
if len(value) > self.size:
raise ValueError(
"Lenght of the '{0}' is limited to {1}".format(
self.name, self.size))
if not value and self.required:
raise ValueError("'{0}' field is required".format(self.name))
return value | [2][SEP1][If][If][If][None][If][None][Return][None][SEP2][1,2][3,4][5,6][][7,2][][][][SEP3][3][1][0][1][1][2][0][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_BOOLOP_AND_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_UNARYOP_NOT_CALL_NAME_LOAD_NAME_LOAD_RAISE_CALL_NAME_LOAD_CONSTANT_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_GT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_CALL_ATTRIBUTE_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_AND_UNARYOP_NOT_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_CALL_ATTRIBUTE_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_NAME_LOAD |
def get_msi_token_webapp(resource):
"""Get a MSI token from inside a webapp or functions.
Env variable will look like:
- MSI_ENDPOINT = http://127.0.0.1:41741/MSI/token/
- MSI_SECRET = 69418689F1E342DD946CB82994CDA3CB
"""
try:
msi_endpoint = os.environ['MSI_ENDPOINT']
msi_secret = os.environ['MSI_SECRET']
except KeyError as err:
err_msg = "{} required env variable was not found. You might need to restart your app/function.".format(err)
_LOGGER.critical(err_msg)
raise RuntimeError(err_msg)
request_uri = '{}/?resource={}&api-version=2017-09-01'.format(msi_endpoint, resource)
headers = {
'secret': msi_secret
}
err = None
try:
result = requests.get(request_uri, headers=headers)
_LOGGER.debug("MSI: Retrieving a token from %s", request_uri)
if result.status_code != 200:
err = result.text
# Workaround since not all failures are != 200
if 'ExceptionMessage' in result.text:
err = result.text
except Exception as ex: # pylint: disable=broad-except
err = str(ex)
if err:
err_msg = "MSI: Failed to retrieve a token from '{}' with an error of '{}'.".format(
request_uri, err
)
_LOGGER.critical(err_msg)
raise RuntimeError(err_msg)
_LOGGER.debug('MSI: token retrieved')
token_entry = result.json()
return token_entry['token_type'], token_entry['access_token'], token_entry | [1][SEP1][Try][None][None][Try][If][None][None][If][If][None][None][Return][SEP2][1,2][3][][4,5][6,7][8][7][9,8][10,11][8][][][SEP3][0][0][3][1][2][1][0][0][0][0][3][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_TRY_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_EXCEPTHANDLER_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_CONSTANT_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RAISE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_CONSTANT_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_DICT_CONSTANT_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_KEYWORD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_NOTEQ_CONSTANT_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_CONSTANT_IN_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_IF_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_CONSTANT_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RAISE_CALL_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_TUPLE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD |
def remove_done_callback(self, func):
"""Remove a callback from the long running operation.
:param callable func: The function to be removed from the callbacks.
:raises: ValueError if the long running operation has already
completed.
"""
if self._done is None or self._done.is_set():
raise ValueError("Process is complete.")
self._callbacks = [c for c in self._callbacks if c != func] | [2][SEP1][If][None][None][SEP2][1,2][][][SEP3][1][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_BOOLOP_OR_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IS_CONSTANT_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_RAISE_CALL_NAME_LOAD_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_LISTCOMP_NAME_LOAD_COMPREHENSION_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_COMPARE_NAME_LOAD_NOTEQ_NAME_LOAD |
def switch_window(self, window_id: int):
"""
Switches currently active tmux window for given task. 0 is the default window
Args:
window_id: integer id of tmux window to use
"""
# windows are numbered sequentially 0, 1, 2, ...
# create any missing windows and make them point to the same directory
if window_id not in self.tmux_available_window_ids:
for i in range(max(self.tmux_available_window_ids)+1, window_id+1):
self._run_raw(f'tmux new-window -t {self.tmux_session} -d')
tmux_window = self.tmux_session + ':' + str(i)
cmd = shlex.quote(f'cd {self.taskdir}')
tmux_cmd = f'tmux send-keys -t {tmux_window} {cmd} Enter'
self._run_raw(tmux_cmd)
self.tmux_available_window_ids.append(i)
self.tmux_window_id = window_id | [2][SEP1][If][For][None][None][SEP2][1,2][3,2][][1][SEP3][0][2][0][5] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_NAME_LOAD_EXPR_CONSTANT_IF_COMPARE_NAME_LOAD_NOTIN_ATTRIBUTE_NAME_LOAD_LOAD_FOR_NAME_STORE_CALL_NAME_LOAD_BINOP_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ADD_CONSTANT_BINOP_NAME_LOAD_ADD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_JOINEDSTR_CONSTANT_FORMATTEDVALUE_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_CONSTANT_ADD_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_JOINEDSTR_CONSTANT_FORMATTEDVALUE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_JOINEDSTR_CONSTANT_FORMATTEDVALUE_NAME_LOAD_CONSTANT_FORMATTEDVALUE_NAME_LOAD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD |
def validate_aws_name(name):
"""Validate resource name using AWS name restrictions from # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions"""
assert len(name) <= 127
# disallow unicode characters to avoid pain
assert name == name.encode('ascii').decode('ascii')
assert aws_name_regexp.match(name) | [1][SEP1][None][None][None][SEP2][1][2][][SEP3][1][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSERT_COMPARE_CALL_NAME_LOAD_NAME_LOAD_LTE_CONSTANT_ASSERT_COMPARE_NAME_LOAD_EQ_CALL_ATTRIBUTE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_CONSTANT_ASSERT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD |
def pop_ctx():
"""Removes the test context(s) from the current stack(s)
"""
if getattr(_request_ctx_stack.top, 'fixtures_request_context', False):
_request_ctx_stack.pop()
if _app_ctx_stack is not None and getattr(_app_ctx_stack.top, 'fixtures_app_context', False):
_app_ctx_stack.pop() | [0][SEP1][If][None][If][None][SEP2][1,2][2][3][][SEP3][1][1][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_EXPR_CONSTANT_IF_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_AND_COMPARE_NAME_LOAD_ISNOT_CONSTANT_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD |
def parse(self, ddl=None, source_database=None):
"""
Parse DDL script.
:param ddl: DDL script
:return: DdlParseTable, Parsed table define info.
"""
if ddl is not None:
self._ddl = ddl
if source_database is not None:
self.source_database = source_database
if self._ddl is None:
raise ValueError("DDL is not specified")
ret = self._DDL_PARSE_EXPR.parseString(self._ddl)
# print(ret.dump())
if "schema" in ret:
self._table.schema = ret["schema"]
self._table.name = ret["table"]
self._table.is_temp = True if "temp" in ret else False
for ret_col in ret["columns"]:
if ret_col.getName() == "column":
# add column
col = self._table.columns.append(
column_name=ret_col["name"],
data_type_array=ret_col["type"],
array_brackets=ret_col['array_brackets'] if "array_brackets" in ret_col else None)
if "constraint" in ret_col:
col.constraint = ret_col["constraint"]
elif ret_col.getName() == "constraint":
# set column constraint
for col_name in ret_col["constraint_columns"]:
col = self._table.columns[col_name]
if ret_col["type"] == "PRIMARY KEY":
col.not_null = True
col.primary_key = True
elif ret_col["type"] in ["UNIQUE", "UNIQUE KEY"]:
col.unique = True
elif ret_col["type"] == "NOT NULL":
col.not_null = True
return self._table | [3][SEP1][If][None][If][None][If][None][If][None][None][For][If][Return][If][If][None][For][If][None][If][None][If][None][SEP2][1,2][2][3,4][4][5,6][][7,8][8][9][10,11][12,13][][14,9][15,9][9][16,9][17,18][15][19,20][15][21,15][15][SEP3][0][0][0][0][0][1][1][0][0][0][1][0][1][1][0][0][0][0][0][0][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_CONSTANT_CONSTANT_EXPR_CONSTANT_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IS_CONSTANT_RAISE_CALL_NAME_LOAD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_CONSTANT_IN_NAME_LOAD_ASSIGN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_STORE_IFEXP_COMPARE_CONSTANT_IN_NAME_LOAD_CONSTANT_CONSTANT_FOR_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_IF_COMPARE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_KEYWORD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_KEYWORD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_KEYWORD_IFEXP_COMPARE_CONSTANT_IN_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_CONSTANT_IF_COMPARE_CONSTANT_IN_NAME_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_IF_COMPARE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_FOR_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_LOAD_IF_COMPARE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_EQ_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_IF_COMPARE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_IN_LIST_CONSTANT_CONSTANT_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_IF_COMPARE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_EQ_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_RETURN_ATTRIBUTE_NAME_LOAD_LOAD |
def _checks(self, do_checks=False, do_actions=False, poller_tags=None,
reactionner_tags=None, worker_name='none', module_types=None):
"""Get checks from scheduler, used by poller or reactionner when they are
in active mode (passive = False)
This function is not intended for external use. Let the poller and reactionner
manage all this stuff by themselves ;)
:param do_checks: used for poller to get checks
:type do_checks: bool
:param do_actions: used for reactionner to get actions
:type do_actions: bool
:param poller_tags: poller tags to filter on this poller
:type poller_tags: list
:param reactionner_tags: reactionner tags to filter on this reactionner
:type reactionner_tags: list
:param worker_name: Worker name asking (so that the scheduler add it to actions objects)
:type worker_name: str
:param module_types: Module type to filter actions/checks
:type module_types: list
:return: serialized check/action list
:rtype: str
"""
if poller_tags is None:
poller_tags = ['None']
if reactionner_tags is None:
reactionner_tags = ['None']
if module_types is None:
module_types = ['fork']
do_checks = (do_checks == 'True')
do_actions = (do_actions == 'True')
res = self.app.sched.get_to_run_checks(do_checks, do_actions, poller_tags, reactionner_tags,
worker_name, module_types)
return serialize(res, True) | [7][SEP1][If][None][If][None][If][None][Return][SEP2][1,2][2][3,4][4][5,6][6][][SEP3][0][0][0][0][0][0][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_EXPR_CONSTANT_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_LIST_CONSTANT_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_LIST_CONSTANT_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_LIST_CONSTANT_LOAD_ASSIGN_NAME_STORE_COMPARE_NAME_LOAD_EQ_CONSTANT_ASSIGN_NAME_STORE_COMPARE_NAME_LOAD_EQ_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_RETURN_CALL_NAME_LOAD_NAME_LOAD_CONSTANT |
def get_next_invalid_time_from_t(self, timestamp):
"""Get next invalid time for time range
:param timestamp: time we compute from
:type timestamp: int
:return: timestamp of the next invalid time (LOCAL TIME)
:rtype: int
"""
if not self.is_time_valid(timestamp):
return timestamp
# First we search for the day of time range
t_day = self.get_next_invalid_day(timestamp)
# We search for the min of all tr.start > sec_from_morning
# if it's the next day, use a start of the day search for timerange
if timestamp < t_day:
sec_from_morning = self.get_next_future_timerange_invalid(t_day)
else: # it is in this day, so look from t (can be in the evening or so)
sec_from_morning = self.get_next_future_timerange_invalid(timestamp)
# tr can't be valid, or it will be return at the beginning
# sec_from_morning = self.get_next_future_timerange_invalid(t)
# Ok we've got a next invalid day and a invalid possibility in
# timerange, so the next invalid is this day+sec_from_morning
if t_day is not None and sec_from_morning is not None:
return t_day + sec_from_morning + 1
# We've got a day but no sec_from_morning: the timerange is full (0->24h)
# so the next invalid is this day at the day_start
if t_day is not None and sec_from_morning is None:
return t_day
# Then we search for the next day of t
# The sec will be the min of the day
timestamp = get_day(timestamp) + 86400
t_day2 = self.get_next_invalid_day(timestamp)
sec_from_morning = self.get_next_future_timerange_invalid(t_day2)
if t_day2 is not None and sec_from_morning is not None:
return t_day2 + sec_from_morning + 1
if t_day2 is not None and sec_from_morning is None:
return t_day2
# I did not found any valid time
return None | [2][SEP1][If][Return][If][None][None][If][Return][If][Return][If][Return][If][Return][Return][SEP2][1,2][][3,4][5][5][6,7][][8,9][][10,11][][12,13][][][SEP3][1][0][1][1][1][0][0][0][0][3][0][0][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_LT_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_BOOLOP_AND_COMPARE_NAME_LOAD_ISNOT_CONSTANT_COMPARE_NAME_LOAD_ISNOT_CONSTANT_RETURN_BINOP_BINOP_NAME_LOAD_ADD_NAME_LOAD_ADD_CONSTANT_IF_BOOLOP_AND_COMPARE_NAME_LOAD_ISNOT_CONSTANT_COMPARE_NAME_LOAD_IS_CONSTANT_RETURN_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_CALL_NAME_LOAD_NAME_LOAD_ADD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_BOOLOP_AND_COMPARE_NAME_LOAD_ISNOT_CONSTANT_COMPARE_NAME_LOAD_ISNOT_CONSTANT_RETURN_BINOP_BINOP_NAME_LOAD_ADD_NAME_LOAD_ADD_CONSTANT_IF_BOOLOP_AND_COMPARE_NAME_LOAD_ISNOT_CONSTANT_COMPARE_NAME_LOAD_IS_CONSTANT_RETURN_NAME_LOAD_RETURN_CONSTANT |
def change_custom_contact_var(self, contact, varname, varvalue):
"""Change custom contact variable
Format of the line that triggers function call::
CHANGE_CUSTOM_CONTACT_VAR;<contact_name>;<varname>;<varvalue>
:param contact: contact to edit
:type contact: alignak.objects.contact.Contact
:param varname: variable name to change
:type varname: str
:param varvalue: variable new value
:type varvalue: str
:return: None
"""
if varname.upper() in contact.customs:
contact.modified_attributes |= DICT_MODATTR["MODATTR_CUSTOM_VARIABLE"].value
contact.customs[varname.upper()] = varvalue
self.send_an_element(contact.get_update_status_brok()) | [4][SEP1][If][None][SEP2][1][][SEP3][1][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_EXPR_CONSTANT_IF_COMPARE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IN_ATTRIBUTE_NAME_LOAD_LOAD_AUGASSIGN_ATTRIBUTE_NAME_LOAD_STORE_BITOR_ATTRIBUTE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_ASSIGN_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD |
def delay_svc_notification(self, service, notification_time):
"""Modify service first notification delay
Format of the line that triggers function call::
DELAY_SVC_NOTIFICATION;<host_name>;<service_description>;<notification_time>
:param service: service to edit
:type service: alignak.objects.service.Service
:param notification_time: new value to set
:type notification_time:
:return: None
"""
service.first_notification_delay = notification_time
self.send_an_element(service.get_update_status_brok()) | [3][SEP1][None][SEP2][][SEP3][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_EXPR_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD |
def disable_hostgroup_svc_checks(self, hostgroup):
"""Disable service checks for a hostgroup
Format of the line that triggers function call::
DISABLE_HOSTGROUP_SVC_CHECKS;<hostgroup_name>
:param hostgroup: hostgroup to disable
:type hostgroup: alignak.objects.hostgroup.Hostgroup
:return: None
"""
for host_id in hostgroup.get_hosts():
if host_id in self.daemon.hosts:
for service_id in self.daemon.hosts[host_id].services:
if service_id in self.daemon.services:
self.disable_svc_check(self.daemon.services[service_id]) | [2][SEP1][None][For][If][For][If][None][SEP2][1][2][3,1][4,1][5,3][3][SEP3][0][1][0][0][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_FOR_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_SUBSCRIPT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_LOAD |
def disable_service_flap_detection(self, service):
"""Disable flap detection for a service
Format of the line that triggers function call::
DISABLE_SERVICE_FLAP_DETECTION;<host_name>;<service_description>
:param service: service to edit
:type service: alignak.objects.service.Service
:return: None
"""
if service.flap_detection_enabled:
service.modified_attributes |= DICT_MODATTR["MODATTR_FLAP_DETECTION_ENABLED"].value
service.flap_detection_enabled = False
# Maybe the service was flapping, if so, stop flapping
if service.is_flapping:
service.is_flapping = False
service.flapping_changes = []
self.send_an_element(service.get_update_status_brok()) | [2][SEP1][If][If][None][None][SEP2][1][2,3][3][][SEP3][2][0][0][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_ATTRIBUTE_NAME_LOAD_LOAD_AUGASSIGN_ATTRIBUTE_NAME_LOAD_STORE_BITOR_ATTRIBUTE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_IF_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_LIST_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD |
def enable_host_freshness_checks(self):
"""Enable freshness checks (globally)
Format of the line that triggers function call::
ENABLE_HOST_FRESHNESS_CHECKS
:return: None
"""
if not self.my_conf.check_host_freshness:
self.my_conf.modified_attributes |= \
DICT_MODATTR["MODATTR_FRESHNESS_CHECKS_ENABLED"].value
self.my_conf.check_host_freshness = True
self.my_conf.explode_global_conf()
self.daemon.update_program_status() | [1][SEP1][If][None][SEP2][1][][SEP3][0][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_AUGASSIGN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_STORE_BITOR_ATTRIBUTE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_ASSIGN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_STORE_CONSTANT_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD |
def process_host_check_result(self, host, status_code, plugin_output):
"""Process host check result
Format of the line that triggers function call::
PROCESS_HOST_CHECK_RESULT;<host_name>;<status_code>;<plugin_output>
:param host: host to process check to
:type host: alignak.objects.host.Host
:param status_code: exit code of plugin
:type status_code: int
:param plugin_output: plugin output
:type plugin_output: str
:return: None
TODO: say that check is PASSIVE
"""
now = time.time()
cls = host.__class__
# If globally disabled OR host disabled, do not launch..
if not cls.accept_passive_checks or not host.passive_checks_enabled:
return
try:
plugin_output = plugin_output.decode('utf8', 'ignore')
logger.debug('%s > Passive host check plugin output: %s',
host.get_full_name(), plugin_output)
except AttributeError:
# Python 3 will raise an exception
pass
except UnicodeError:
pass
# Maybe the check is just too old, if so, bail out!
if self.current_timestamp < host.last_chk:
logger.debug('%s > Passive host check is too old (%.2f seconds). '
'Ignoring, check output: %s',
host.get_full_name(), self.current_timestamp < host.last_chk,
plugin_output)
return
chk = host.launch_check(now, self.hosts, self.services, self.timeperiods,
self.daemon.macromodulations, self.daemon.checkmodulations,
self.daemon.checks, force=True)
# We will not have a check if an host/service is checked but it has no defined check_command
if not chk:
return
# Now we 'transform the check into a result'
# So exit_status, output and status is eaten by the host
chk.exit_status = status_code
chk.get_outputs(plugin_output, host.max_plugins_output_length)
chk.status = ACT_STATUS_WAIT_CONSUME
chk.check_time = self.current_timestamp # we are using the external command timestamps
# Set the corresponding host's check type to passive
chk.set_type_passive()
# self.daemon.nb_check_received += 1
self.send_an_element(chk)
# Ok now this result will be read by the scheduler the next loop
# raise a passive check log only if needed
if self.my_conf.log_passive_checks:
log_level = 'info'
if status_code == 1: # DOWN
log_level = 'error'
if status_code == 2: # UNREACHABLE
log_level = 'warning'
self.send_an_element(make_monitoring_log(
log_level, 'PASSIVE HOST CHECK: %s;%d;%s;%s;%s' % (
host.get_name(), status_code, chk.output, chk.long_output, chk.perf_data))) | [4][SEP1][If][Return][Try][None][None][None][If][Return][If][Return][If][If][None][If][None][None][SEP2][1,2][][3,4,5][6][6][6][7,8][][9,10][][11][12,13][13][14,15][15][][SEP3][1][0][0][3][0][0][0][2][1][0][6][0][0][0][0][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_OR_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_PASS_EXCEPTHANDLER_NAME_LOAD_PASS_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_LT_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_LT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_KEYWORD_CONSTANT_IF_UNARYOP_NOT_NAME_LOAD_RETURN_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_COMPARE_NAME_LOAD_EQ_CONSTANT_ASSIGN_NAME_STORE_CONSTANT_IF_COMPARE_NAME_LOAD_EQ_CONSTANT_ASSIGN_NAME_STORE_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD |
def schedule_svc_downtime(self, service, start_time, end_time, fixed,
trigger_id, duration, author, comment):
"""Schedule a service downtime
Format of the line that triggers function call::
SCHEDULE_SVC_DOWNTIME;<host_name>;<service_description><start_time>;<end_time>;
<fixed>;<trigger_id>;<duration>;<author>;<comment>
:param service: service to check
:type service: alignak.object.service.Service
:param start_time: downtime start time
:type start_time:
:param end_time: downtime end time
:type end_time:
:param fixed: is downtime fixed
:type fixed: bool
:param trigger_id: downtime id that triggered this one
:type trigger_id: int
:param duration: downtime duration
:type duration: int
:param author: downtime author
:type author: str
:param comment: downtime comment
:type comment: str
:return: None
"""
data = {'ref': service.uuid, 'ref_type': service.my_type, 'start_time': start_time,
'end_time': end_time, 'fixed': fixed, 'trigger_id': trigger_id,
'duration': duration, 'author': author, 'comment': comment}
downtime = Downtime(data)
downtime.add_automatic_comment(service)
service.add_downtime(downtime)
self.send_an_element(service.get_update_status_brok())
if trigger_id not in ('', 0):
for item in self.daemon.services:
if trigger_id in item.downtimes:
service.downtimes[trigger_id].trigger_me(downtime.uuid) | [9][SEP1][If][For][If][None][SEP2][1][2][3,1][1][SEP3][5][0][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_ARG_ARG_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_DICT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_NOTIN_TUPLE_CONSTANT_CONSTANT_LOAD_FOR_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD |
def post(self, path, args, wait=False):
"""POST an HTTP request to a daemon
:param path: path to do the request
:type path: str
:param args: args to add in the request
:type args: dict
:param wait: True for a long timeout
:type wait: bool
:return: Content of the HTTP response if server returned 200
:rtype: str
"""
uri = self.make_uri(path)
timeout = self.make_timeout(wait)
for (key, value) in list(args.items()):
args[key] = serialize(value, True)
try:
logger.debug("post: %s, timeout: %s, params: %s", uri, timeout, args)
rsp = self._requests_con.post(uri, json=args, timeout=timeout, verify=self.strong_ssl)
logger.debug("got: %d - %s", rsp.status_code, rsp.text)
if rsp.status_code != 200:
raise HTTPClientDataException(rsp.status_code, rsp.text, uri)
return rsp.content
except (requests.Timeout, requests.ConnectTimeout):
raise HTTPClientTimeoutException(timeout, uri)
except requests.ConnectionError as exp:
raise HTTPClientConnectionException(uri, exp.args[0])
except Exception as exp:
raise HTTPClientException('Request error to %s: %s' % (uri, exp)) | [4][SEP1][None][For][None][Try][If][None][None][None][None][Return][SEP2][1][2,3][1][4,5,6,7][8,9][][][][][][SEP3][2][2][1][0][3][1][1][1][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_NAME_LOAD_STORE_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_TRY_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_KEYWORD_NAME_LOAD_KEYWORD_NAME_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_NOTEQ_CONSTANT_RAISE_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_ATTRIBUTE_NAME_LOAD_LOAD_EXCEPTHANDLER_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_RAISE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXCEPTHANDLER_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_NAME_LOAD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_EXCEPTHANDLER_NAME_LOAD_RAISE_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD |
def get_broks(self):
"""Get brok list from satellite
:return: A copy of the broks list
:rtype: list
"""
res = copy.copy(self.broks)
del self.broks[:]
return res | [1][SEP1][Return][SEP2][][SEP3][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_DELETE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_SLICE_DEL_RETURN_NAME_LOAD |
def dict_to_serialized_dict(ref, the_dict):
"""Serialize the list of elements to a dictionary
Used for the retention store
:param ref: Not used
:type ref:
:param the_dict: dictionary to convert
:type the_dict: dict
:return: dict of serialized
:rtype: dict
"""
result = {}
for elt in list(the_dict.values()):
if not getattr(elt, 'serialize', None):
continue
result[elt.uuid] = elt.serialize()
return result | [2][SEP1][None][For][If][Return][None][SEP2][1][2,3][4,1][][1][SEP3][0][3][1][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_DICT_FOR_NAME_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_UNARYOP_NOT_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_CONTINUE_ASSIGN_SUBSCRIPT_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_NAME_LOAD |
def load_global_conf(cls, global_configuration):
"""
Apply global Alignak configuration.
Some objects inherit some properties from the global configuration if they do not
define their own value. E.g. the global 'accept_passive_service_checks' is inherited
by the services as 'accept_passive_checks'
:param cls: parent object
:type cls: object
:param global_configuration: current object (child)
:type global_configuration: object
:return: None
"""
logger.debug("Propagate global parameter for %s:", cls)
for prop, entry in global_configuration.properties.items():
# If some global managed configuration properties have a class_inherit clause,
if not entry.managed or not getattr(entry, 'class_inherit'):
continue
for (cls_dest, change_name) in entry.class_inherit:
if cls_dest == cls: # ok, we've got something to get
value = getattr(global_configuration, prop)
logger.debug("- global parameter %s=%s -> %s=%s",
prop, getattr(global_configuration, prop),
change_name, value)
if change_name is None:
setattr(cls, prop, value)
else:
setattr(cls, change_name, value) | [2][SEP1][None][For][If][For][If][If][None][None][SEP2][1][2][3,1][4,1][5,3][6,7][3][3][SEP3][1][6][1][0][0][3][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_BOOLOP_OR_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_UNARYOP_NOT_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONTINUE_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_EQ_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_EXPR_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD |
def index_template(self, tpl):
"""
Indexes a template by `name` into the `name_to_template` dictionary.
:param tpl: The template to index
:type tpl: alignak.objects.item.Item
:return: None
"""
objcls = self.inner_class.my_type
name = getattr(tpl, 'name', '')
if not name:
mesg = "a %s template has been defined without name, from: %s" % \
(objcls, tpl.imported_from)
tpl.add_error(mesg)
elif name in self.name_to_template:
tpl = self.manage_conflict(tpl, name)
self.name_to_template[name] = tpl
logger.debug("Indexed a %s template: %s, uses: %s",
tpl.my_type, name, getattr(tpl, 'use', 'Nothing'))
return tpl | [2][SEP1][If][None][If][Return][None][SEP2][1,2][3][4,3][][3][SEP3][1][1][0][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_IF_UNARYOP_NOT_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_IN_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_RETURN_NAME_LOAD |
def get_hosts_from_hostgroups(hgname, hostgroups):
"""
Get hosts of hostgroups
:param hgname: hostgroup name
:type hgname: str
:param hostgroups: hostgroups object (all hostgroups)
:type hostgroups: alignak.objects.hostgroup.Hostgroups
:return: list of hosts
:rtype: list
"""
if not isinstance(hgname, list):
hgname = [e.strip() for e in hgname.split(',') if e.strip()]
host_names = []
for name in hgname:
hostgroup = hostgroups.find_by_name(name)
if hostgroup is None:
raise ValueError("the hostgroup '%s' is unknown" % hgname)
mbrs = [h.strip() for h in hostgroup.get_hosts() if h.strip()]
host_names.extend(mbrs)
return host_names | [2][SEP1][If][None][None][For][If][Return][None][None][SEP2][1,2][2][3][4,5][6,7][][][3][SEP3][1][3][0][4][1][0][1][4] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_LISTCOMP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_COMPREHENSION_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_LIST_LOAD_FOR_NAME_STORE_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_RAISE_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_NAME_LOAD_ASSIGN_NAME_STORE_LISTCOMP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_COMPREHENSION_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def list_all_elements(self):
"""Get all host/service uuid in our node and below
:return: list of hosts/services uuids
:rtype: list
"""
res = []
# We are a host/service
if self.operand in ['host', 'service']:
return [self.sons[0]]
for son in self.sons:
res.extend(son.list_all_elements())
# and returns a list of unique uuids
return list(set(res)) | [1][SEP1][If][Return][For][None][Return][SEP2][1,2][][3,4][2][][SEP3][0][0][0][2][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_LIST_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_IN_LIST_CONSTANT_CONSTANT_LOAD_RETURN_LIST_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_CALL_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD |
def is_correct(self):
"""
check if each properties of timeperiods are valid
:return: True if is correct, otherwise False
:rtype: bool
"""
valid = True
# We do not want a same hg to be explode again and again
# so we tag it
for timeperiod in list(self.items.values()):
timeperiod.rec_tag = False
for timeperiod in list(self.items.values()):
for tmp_tp in list(self.items.values()):
tmp_tp.rec_tag = False
valid = timeperiod.check_exclude_rec() and valid
# We clean the tags and collect the warning/erro messages
for timeperiod in list(self.items.values()):
del timeperiod.rec_tag
# Now other checks
if not timeperiod.is_correct():
valid = False
source = getattr(timeperiod, 'imported_from', "unknown source")
msg = "Configuration in %s::%s is incorrect; from: %s" % (
timeperiod.my_type, timeperiod.get_name(), source
)
self.add_error(msg)
self.configuration_errors += timeperiod.configuration_errors
self.configuration_warnings += timeperiod.configuration_warnings
# And check all timeperiods for correct (sunday is false)
for timeperiod in self:
valid = timeperiod.is_correct() and valid
return valid | [1][SEP1][None][For][None][For][For][For][None][None][If][For][None][None][None][Return][SEP2][1][2,3][1][4,5][6,7][8,9][4][3][10,11][12,13][11][5][9][][SEP3][0][2][0][3][2][2][0][1][1][0][3][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CONSTANT_FOR_NAME_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_FOR_NAME_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_NAME_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_ASSIGN_NAME_STORE_BOOLOP_AND_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_FOR_NAME_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_DELETE_ATTRIBUTE_NAME_LOAD_DEL_IF_UNARYOP_NOT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_ASSIGN_NAME_STORE_BINOP_CONSTANT_MOD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_AUGASSIGN_ATTRIBUTE_NAME_LOAD_STORE_ADD_ATTRIBUTE_NAME_LOAD_LOAD_AUGASSIGN_ATTRIBUTE_NAME_LOAD_STORE_ADD_ATTRIBUTE_NAME_LOAD_LOAD_FOR_NAME_STORE_NAME_LOAD_ASSIGN_NAME_STORE_BOOLOP_AND_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def _delete_unwanted_caracters(self, chain):
"""Remove not wanted char from chain
unwanted char are illegal_macro_output_chars attribute
:param chain: chain to remove char from
:type chain: str
:return: chain cleaned
:rtype: str
"""
try:
chain = chain.decode('utf8', 'replace')
except UnicodeEncodeError:
# If it is still encoded correctly, ignore...
pass
except AttributeError:
# Python 3 will raise an exception because the line is still unicode
pass
for char in self.illegal_macro_output_chars:
chain = chain.replace(char, '')
return chain | [2][SEP1][Try][None][None][None][For][None][Return][SEP2][1,2,3][4][4][4][5,6][4][][SEP3][0][1][0][0][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CONSTANT_EXCEPTHANDLER_NAME_LOAD_PASS_EXCEPTHANDLER_NAME_LOAD_PASS_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CONSTANT_RETURN_NAME_LOAD |
def get_instances(self):
"""Create, init and then returns the list of module instances that the caller needs.
This method is called once the Python modules are loaded to initialize the modules.
If an instance can't be created or initialized then only log is doneand that
instance is skipped. The previous modules instance(s), if any, are all cleaned.
:return: module instances list
:rtype: list
"""
self.clear_instances()
for (alignak_module, python_module) in self.modules_assoc:
alignak_module.properties = python_module.properties.copy()
alignak_module.my_daemon = self.daemon
logger.info("Alignak starting module '%s'", alignak_module.get_name())
if getattr(alignak_module, 'modules', None):
modules = []
for module_uuid in alignak_module.modules:
if module_uuid in self.modules:
modules.append(self.modules[module_uuid])
alignak_module.modules = modules
logger.debug("Module '%s', parameters: %s",
alignak_module.get_name(), alignak_module.__dict__)
try:
instance = python_module.get_instance(alignak_module)
if not isinstance(instance, BaseModule): # pragma: no cover, simple protection
self.configuration_errors.append("Module %s instance is not a "
"BaseModule instance: %s"
% (alignak_module.get_name(),
type(instance)))
raise AttributeError
# pragma: no cover, simple protection
except Exception as exp: # pylint: disable=broad-except
logger.error("The module %s raised an exception on loading, I remove it!",
alignak_module.get_name())
logger.exception("Exception: %s", exp)
self.configuration_errors.append("The module %s raised an exception on "
"loading: %s, I remove it!"
% (alignak_module.get_name(), str(exp)))
else:
# Give the module the data to which daemon/module it is loaded into
instance.set_loaded_into(self.daemon.name)
self.instances.append(instance)
for instance in self.instances:
# External instances are not initialized now, but only when they are started
if not instance.is_external and not self.try_instance_init(instance):
# If the init failed, we put in in the restart queue
logger.warning("The module '%s' failed to initialize, "
"I will try to restart it later", instance.name)
self.set_to_restart(instance)
return self.instances | [1][SEP1][None][For][If][For][None][Try][If][Return][For][If][None][None][If][None][None][None][None][SEP2][1][2,3][4,5][6,7][8][9,10][11,3][][12,13][14,15][1][3][16,8][5][][1][8][SEP3][1][15][5][0][0][4][1][0][0][2][6][2][0][0][3][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_ASSIGN_NAME_STORE_LIST_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_UNARYOP_NOT_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_BINOP_CONSTANT_MOD_TUPLE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_NAME_LOAD_LOAD_RAISE_NAME_LOAD_EXCEPTHANDLER_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_BINOP_CONSTANT_MOD_TUPLE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_AND_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_UNARYOP_NOT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_ATTRIBUTE_NAME_LOAD_LOAD |
def check_dir(self, dirname):
"""Check and create directory
:param dirname: file name
:type dirname; str
:return: None
"""
try:
os.makedirs(dirname)
dir_stat = os.stat(dirname)
print("Created the directory: %s, stat: %s" % (dirname, dir_stat))
if not dir_stat.st_uid == self.uid:
os.chown(dirname, self.uid, self.gid)
os.chmod(dirname, 0o775)
dir_stat = os.stat(dirname)
print("Changed directory ownership and permissions: %s, stat: %s"
% (dirname, dir_stat))
self.pre_log.append(("DEBUG",
"Daemon '%s' directory %s checking... "
"User uid: %s, directory stat: %s."
% (self.name, dirname, os.getuid(), dir_stat)))
self.pre_log.append(("INFO",
"Daemon '%s' directory %s did not exist, I created it. "
"I set ownership for this directory to %s:%s."
% (self.name, dirname, self.user, self.group)))
except OSError as exp:
if exp.errno == errno.EEXIST and os.path.isdir(dirname):
# Directory still exists...
pass
else:
self.pre_log.append(("ERROR",
"Daemon directory '%s' did not exist, "
"and I could not create. Exception: %s"
% (dirname, exp)))
self.exit_on_error("Daemon directory '%s' did not exist, "
"and I could not create.'. Exception: %s"
% (dirname, exp), exit_code=3) | [2][SEP1][Try][If][If][None][None][None][None][SEP2][1,2][3,4][5,6][4][][][][SEP3][3][3][1][4][3][0][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_TRY_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_IF_UNARYOP_NOT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_TUPLE_CONSTANT_BINOP_CONSTANT_MOD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_TUPLE_CONSTANT_BINOP_CONSTANT_MOD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_IF_BOOLOP_AND_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_PASS_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_TUPLE_CONSTANT_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_MOD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_KEYWORD_CONSTANT |
def wait_for_initial_conf(self, timeout=1.0):
"""Wait initial configuration from the arbiter.
Basically sleep 1.0 and check if new_conf is here
:param timeout: timeout to wait
:type timeout: int
:return: None
"""
logger.info("Waiting for initial configuration")
# Arbiter do not already set our have_conf param
_ts = time.time()
while not self.new_conf and not self.interrupted:
# Make a pause and check if the system time changed
_, _ = self.make_a_pause(timeout, check_time_change=True)
if not self.interrupted:
logger.info("Got initial configuration, waited for: %.2f seconds", time.time() - _ts)
statsmgr.timer('configuration.initial', time.time() - _ts)
else:
logger.info("Interrupted before getting the initial configuration") | [2][SEP1][None][While][None][If][None][None][SEP2][1][2,3][1][4,5][][][SEP3][2][0][1][0][4][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_CONSTANT_EXPR_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_WHILE_BOOLOP_AND_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_KEYWORD_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_BINOP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUB_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_BINOP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUB_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT |
def get_instance(mod_conf):
"""
Return a module instance for the modules manager
:param mod_conf: the module properties as defined globally in this file
:return:
"""
logger.info("Giving an instance of %s for alias: %s",
mod_conf.python_name, mod_conf.module_alias)
return InnerMetrics(mod_conf) | [1][SEP1][Return][SEP2][][SEP3][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_CALL_NAME_LOAD_NAME_LOAD |
def update_recurrent_works_tick(self, conf):
"""Modify the tick value for the scheduler recurrent work
A tick is an amount of loop of the scheduler before executing the recurrent work
The provided configuration may contain some tick-function_name keys that contain
a tick value to be updated. Those parameters are defined in the alignak environment file.
Indeed this function is called with the Scheduler daemon object. Note that the ``conf``
parameter may also be a dictionary.
:param conf: the daemon link configuration to search in
:type conf: alignak.daemons.schedulerdaemon.Alignak
:return: None
"""
for key in self.recurrent_works:
(name, fun, _) = self.recurrent_works[key]
if isinstance(conf, dict):
new_tick = conf.get('tick_%s' % name, None)
else:
new_tick = getattr(conf, 'tick_%s' % name, None)
if new_tick is not None:
logger.debug("Requesting to change the default tick to %d for the action %s",
int(new_tick), name)
else:
continue
# Update the default scheduler tick for this function
try:
new_tick = int(new_tick)
logger.info("Changing the default tick to %d for the action %s", new_tick, name)
self.recurrent_works[key] = (name, fun, new_tick)
except ValueError:
logger.warning("Changing the default tick for '%s' to '%s' failed!", new_tick, name) | [2][SEP1][None][For][If][None][None][If][None][Try][None][None][SEP2][1][2][3,4][5][5][6,1][7][8,9][1][1][SEP3][0][5][1][1][1][0][2][0][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_TUPLE_NAME_STORE_NAME_STORE_NAME_STORE_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_LOAD_IF_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_CONSTANT_MOD_NAME_LOAD_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_BINOP_CONSTANT_MOD_NAME_LOAD_CONSTANT_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_CONTINUE_TRY_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_NAME_LOAD_ASSIGN_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_STORE_TUPLE_NAME_LOAD_NAME_LOAD_NAME_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_NAME_LOAD |
def get_retention_data(self): # pylint: disable=too-many-branches,too-many-statements
# pylint: disable=too-many-locals
"""Get all hosts and services data to be sent to the retention storage.
This function only prepares the data because a module is in charge of making
the data survive to the scheduler restart.
todo: Alignak scheduler creates two separate dictionaries: hosts and services
It would be better to merge the services into the host dictionary!
:return: dict containing host and service data
:rtype: dict
"""
retention_data = {
'hosts': {}, 'services': {}
}
for host in self.hosts:
h_dict = {}
# Get the hosts properties and running properties
properties = host.__class__.properties
properties.update(host.__class__.running_properties)
for prop, entry in list(properties.items()):
if not entry.retention:
continue
val = getattr(host, prop)
# If a preparation function exists...
prepare_retention = entry.retention_preparation
if prepare_retention:
val = prepare_retention(host, val)
h_dict[prop] = val
retention_data['hosts'][host.host_name] = h_dict
logger.info('%d hosts sent to retention', len(retention_data['hosts']))
# Same for services
for service in self.services:
s_dict = {}
# Get the services properties and running properties
properties = service.__class__.properties
properties.update(service.__class__.running_properties)
for prop, entry in list(properties.items()):
if not entry.retention:
continue
val = getattr(service, prop)
# If a preparation function exists...
prepare_retention = entry.retention_preparation
if prepare_retention:
val = prepare_retention(service, val)
s_dict[prop] = val
retention_data['services'][(service.host_name, service.service_description)] = s_dict
logger.info('%d services sent to retention', len(retention_data['services']))
return retention_data | [1][SEP1][None][For][None][None][For][For][If][None][None][Return][If][For][None][None][If][None][If][None][None][SEP2][1][2,3][4][5][6,7][8,9][10,4][1][11][][12,13][14,15][13][4][16,11][5][17,18][18][11][SEP3][0][4][1][2][4][4][0][0][1][2][1][4][1][0][0][0][1][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_DICT_CONSTANT_CONSTANT_DICT_DICT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_DICT_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_CONTINUE_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_NAME_LOAD_STORE_NAME_LOAD_ASSIGN_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_DICT_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_CONTINUE_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_NAME_LOAD_STORE_NAME_LOAD_ASSIGN_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_STORE_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_RETURN_NAME_LOAD |
def get_return_from(self, check):
"""Update check data from action (notification for instance)
:param check: action to get data from
:type check: alignak.action.Action
:return: None
"""
for prop in ['exit_status', 'output', 'long_output', 'check_time', 'execution_time',
'perf_data', 'u_time', 's_time']:
setattr(self, prop, getattr(check, prop)) | [2][SEP1][None][For][None][SEP2][1][2][1][SEP3][0][0][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_FOR_NAME_STORE_LIST_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_LOAD_EXPR_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD |
def apply_dependencies(self):
"""Loop on hosts and register dependency between parent and son
call Host.fill_parents_dependency()
:return: None
"""
for host in self:
for parent_id in getattr(host, 'parents', []):
if parent_id is None:
continue
parent = self[parent_id]
if parent.active_checks_enabled:
# Add parent in the list
host.act_depend_of.append((parent_id, ['d', 'x', 's', 'f'], '', True))
# Add child in the parent
parent.act_depend_of_me.append((host.uuid, ['d', 'x', 's', 'f'], '', True))
# And add the parent/child dep filling too, for broking
parent.child_dependencies.add(host.uuid)
host.parent_dependencies.add(parent_id) | [1][SEP1][None][For][For][If][If][None][SEP2][1][2][3,1][4,2][5,2][2][SEP3][0][0][5][0][0][4] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_FOR_NAME_STORE_NAME_LOAD_FOR_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_LIST_LOAD_IF_COMPARE_NAME_LOAD_IS_CONSTANT_CONTINUE_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_IF_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_TUPLE_NAME_LOAD_LIST_CONSTANT_CONSTANT_CONSTANT_CONSTANT_LOAD_CONSTANT_CONSTANT_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_LIST_CONSTANT_CONSTANT_CONSTANT_CONSTANT_LOAD_CONSTANT_CONSTANT_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD |
def create_connection(self):
"""Initialize HTTP connection with a satellite (con attribute) and
set its uri attribute
This is called on the satellite link initialization
:return: None
"""
# Create the HTTP client for the connection
try:
self.con = HTTPClient(address=self.satellite_map['address'],
port=self.satellite_map['port'],
short_timeout=self.short_timeout, long_timeout=self.long_timeout,
use_ssl=self.satellite_map['use_ssl'],
strong_ssl=self.satellite_map['hard_ssl_name_check'])
self.uri = self.con.uri
except HTTPClientException as exp:
# logger.error("Error with '%s' when creating client: %s", self.name, str(exp))
# Set the satellite as dead
self.set_dead()
raise LinkError("Error with '%s' when creating client: %s" % (self.name, str(exp))) | [1][SEP1][Try][None][None][SEP2][1,2][][][SEP3][0][1][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_TRY_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CALL_NAME_LOAD_KEYWORD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_KEYWORD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_KEYWORD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_TUPLE_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_NAME_LOAD_LOAD |
def linkify(self, modules):
"""Link modules and Satellite links
:param modules: Module object list
:type modules: alignak.objects.module.Modules
:return: None
"""
logger.debug("Linkify %s with %s", self, modules)
self.linkify_s_by_module(modules) | [2][SEP1][None][SEP2][][SEP3][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD |
def monitoring_problems(self):
"""Get Alignak detailed monitoring status
This will return an object containing the properties of the `identity`, plus a `problems`
object which contains 2 properties for each known scheduler:
- _freshness, which is the timestamp when the provided data were fetched
- problems, which is an object with the scheduler known problems:
{
...
"problems": {
"scheduler-master": {
"_freshness": 1528903945,
"problems": {
"fdfc986d-4ab4-4562-9d2f-4346832745e6": {
"last_state": "CRITICAL",
"service": "dummy_critical",
"last_state_type": "SOFT",
"last_state_update": 1528902442,
"last_hard_state": "CRITICAL",
"last_hard_state_change": 1528902442,
"last_state_change": 1528902381,
"state": "CRITICAL",
"state_type": "HARD",
"host": "host-all-8",
"output": "Hi, checking host-all-8/dummy_critical -> exit=2"
},
"2445f2a3-2a3b-4b13-96ed-4cfb60790e7e": {
"last_state": "WARNING",
"service": "dummy_warning",
"last_state_type": "SOFT",
"last_state_update": 1528902463,
"last_hard_state": "WARNING",
"last_hard_state_change": 1528902463,
"last_state_change": 1528902400,
"state": "WARNING",
"state_type": "HARD",
"host": "host-all-6",
"output": "Hi, checking host-all-6/dummy_warning -> exit=1"
},
...
}
}
}
}
:return: schedulers live synthesis list
:rtype: dict
"""
res = self.identity()
res['problems'] = {}
for scheduler_link in self.app.conf.schedulers:
sched_res = scheduler_link.con.get('monitoring_problems', wait=True)
res['problems'][scheduler_link.name] = {}
if '_freshness' in sched_res:
res['problems'][scheduler_link.name].update({'_freshness': sched_res['_freshness']})
if 'problems' in sched_res:
res['problems'][scheduler_link.name].update({'problems': sched_res['problems']})
res['_freshness'] = int(time.time())
return res | [1][SEP1][None][For][If][Return][None][If][None][SEP2][1][2,3][4,5][][5][6,1][1][SEP3][1][1][1][2][1][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_CONSTANT_STORE_DICT_FOR_NAME_STORE_ATTRIBUTE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_CONSTANT_KEYWORD_CONSTANT_ASSIGN_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_STORE_DICT_IF_COMPARE_CONSTANT_IN_NAME_LOAD_EXPR_CALL_ATTRIBUTE_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_DICT_CONSTANT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_IF_COMPARE_CONSTANT_IN_NAME_LOAD_EXPR_CALL_ATTRIBUTE_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_DICT_CONSTANT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_CONSTANT_STORE_CALL_NAME_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_NAME_LOAD |
def create_objects(self, raw_objects):
"""Create all the objects got after the post configuration file initialization
:param raw_objects: dict with all object with str values
:type raw_objects: dict
:return: None
"""
types_creations = self.__class__.types_creations
early_created_types = self.__class__.early_created_types
logger.info("Creating objects...")
# Before really creating the objects, we add some ghost
# ones like the bp_rule for correlation
self.add_self_defined_objects(raw_objects)
for o_type in sorted(types_creations):
if o_type not in early_created_types:
self.create_objects_for_type(raw_objects, o_type)
logger.info("Done") | [2][SEP1][None][For][If][None][None][SEP2][1][2,3][4,1][][1][SEP3][2][1][0][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_FOR_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_NOTIN_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT |
def show_errors(self):
"""
Loop over configuration warnings and log them as INFO log
Loop over configuration errors and log them as INFO log
Note that the warnings and errors are logged on the fly during the configuration parsing.
It is not necessary to log as WARNING and ERROR in this function which is used as a sum-up
on the end of configuration parsing when an error has been detected.
:return: None
"""
if self.configuration_warnings:
logger.warning("Configuration warnings:")
for msg in self.configuration_warnings:
logger.warning(msg)
if self.configuration_errors:
logger.warning("Configuration errors:")
for msg in self.configuration_errors:
logger.warning(msg) | [1][SEP1][If][None][If][For][None][None][For][None][SEP2][1,2][3][4][5,2][6][3][7][6][SEP3][1][1][1][0][1][1][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD |
def get_monitoring_problems(self):
"""Get the schedulers satellites problems list
:return: problems dictionary
:rtype: dict
"""
res = self.get_id()
res['problems'] = {}
# Report our schedulers information, but only if a dispatcher exists
if getattr(self, 'dispatcher', None) is None:
return res
for satellite in self.dispatcher.all_daemons_links:
if satellite.type not in ['scheduler']:
continue
if not satellite.active:
continue
if satellite.statistics and 'problems' in satellite.statistics:
res['problems'][satellite.name] = {
'_freshness': satellite.statistics['_freshness'],
'problems': satellite.statistics['problems']
}
return res | [1][SEP1][If][Return][For][If][Return][If][If][None][SEP2][1,2][][3,4][5,2][][6,2][7,2][2][SEP3][2][0][0][0][0][0][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_CONSTANT_STORE_DICT_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_IS_CONSTANT_RETURN_NAME_LOAD_FOR_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_NOTIN_LIST_CONSTANT_LOAD_CONTINUE_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_CONTINUE_IF_BOOLOP_AND_ATTRIBUTE_NAME_LOAD_LOAD_COMPARE_CONSTANT_IN_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_STORE_DICT_CONSTANT_CONSTANT_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_RETURN_NAME_LOAD |
def apply_implicit_inheritance(self, hosts):
"""Apply implicit inheritance for special properties:
contact_groups, notification_interval , notification_period
So service will take info from host if necessary
:param hosts: hosts list needed to look for a simple host
:type hosts: alignak.objects.host.Hosts
:return: None
"""
for prop in ('contacts', 'contact_groups', 'notification_interval',
'notification_period', 'resultmodulations', 'business_impact_modulations',
'escalations', 'poller_tag', 'reactionner_tag', 'check_period',
'business_impact', 'maintenance_period'):
for serv in self:
if hasattr(serv, 'host_name') and not getattr(serv, prop, None):
host = hosts.find_by_name(serv.host_name)
if host is not None and hasattr(host, prop):
logger.debug("Implicit inheritance for %s/%s: %s = %s",
serv.host_name, serv, prop, getattr(host, prop))
setattr(serv, prop, getattr(host, prop)) | [2][SEP1][None][For][For][If][If][None][SEP2][1][2][3,1][4,2][5,2][2][SEP3][0][0][0][2][2][4] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_FOR_NAME_STORE_TUPLE_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_LOAD_FOR_NAME_STORE_NAME_LOAD_IF_BOOLOP_AND_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_UNARYOP_NOT_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_AND_COMPARE_NAME_LOAD_ISNOT_CONSTANT_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD |
def start_module(self):
"""Wrapper for _main function.
Catch and raise any exception occurring in the main function
:return: None
"""
try:
self._main()
except Exception as exp:
logger.exception('%s', traceback.format_exc())
raise Exception(exp) | [1][SEP1][Try][None][None][SEP2][1,2][][][SEP3][0][1][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_TRY_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_NAME_LOAD |
def set_myself_as_problem(self, hosts, services, timeperiods, bi_modulations):
# pylint: disable=too-many-locals
""" Raise all impact from my error. I'm setting myself
as a problem, and I register myself as this in all
hosts/services that depend_on_me. So they are now my
impacts
:param hosts: hosts objects, used to get impacts
:type hosts: alignak.objects.host.Hosts
:param services: services objects, used to get impacts
:type services: alignak.objects.service.Services
:param timeperiods: Timeperiods objects, used to get act_depend_of_me timeperiod
:type timeperiods: alignak.objects.timeperiod.Timeperiods
:param bi_modulations: business impact modulations objects
:type bi_modulations: alignak.object.businessimpactmodulation.Businessimpactmodulations
:return: None
"""
now = time.time()
self.is_problem = True
# we should warn potentials impact of our problem
# and they should be cool to register them so I've got
# my impacts list
impacts = list(self.impacts)
for (impact_id, status, timeperiod_id, _) in self.act_depend_of_me:
# Check if the status is ok for impact
if impact_id in hosts:
impact = hosts[impact_id]
elif impact_id in services:
impact = services[impact_id]
else:
logger.warning("Problem with my impacts: %s", self)
timeperiod = timeperiods[timeperiod_id]
for stat in status:
if self.is_state(stat):
# now check if we should bailout because of a
# not good timeperiod for dep
if timeperiod is None or timeperiod.is_time_valid(now):
new_impacts = impact.register_a_problem(self, hosts, services, timeperiods,
bi_modulations)
impacts.extend(new_impacts)
# Only update impacts and create new brok if impacts changed.
s_impacts = set(impacts)
if s_impacts == set(self.impacts):
return
self.impacts = list(s_impacts)
# We can update our business_impact value now
self.update_business_impact_value(hosts, services, timeperiods, bi_modulations)
# And we register a new broks for update status
self.broks.append(self.get_update_status_brok()) | [5][SEP1][None][For][If][If][None][If][Return][None][None][None][None][For][If][If][None][SEP2][1][2,3][4,5][6,7][8][9,10][][][11][8][8][12,1][13,11][14,11][11][SEP3][2][4][0][2][0][0][0][4][0][0][1][0][1][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_FOR_TUPLE_NAME_STORE_NAME_STORE_NAME_STORE_NAME_STORE_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_IN_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_NAME_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_FOR_NAME_STORE_NAME_LOAD_IF_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_BOOLOP_OR_COMPARE_NAME_LOAD_IS_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_EQ_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CALL_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD |
def get_perfdata_command(self, hosts, macromodulations, timeperiods):
"""Add event_handler to process performance data if necessary (not disabled)
:param macromodulations: Macro modulations objects, used in commands (notif, check)
:type macromodulations: alignak.objects.macromodulation.Macromodulations
:return: None
"""
cls = self.__class__
if not cls.process_performance_data or not self.process_perf_data:
return
if cls.perfdata_command is not None:
macroresolver = MacroResolver()
data = self.get_data_for_event_handler(hosts)
cmd = macroresolver.resolve_command(cls.perfdata_command, data, macromodulations,
timeperiods)
reactionner_tag = cls.perfdata_command.reactionner_tag
event_h = EventHandler({
'command': cmd,
'timeout': cls.perfdata_timeout,
'ref': self.uuid,
'reactionner_tag': reactionner_tag
})
# ok we can put it in our temp action queue
self.actions.append(event_h) | [4][SEP1][If][Return][If][None][SEP2][1,2][][3][][SEP3][0][0][0][5] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_OR_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_ASSIGN_NAME_STORE_CALL_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_DICT_CONSTANT_CONSTANT_CONSTANT_CONSTANT_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD |
def setup_logger(logger_configuration_file, log_dir=None, process_name='', log_file=''):
# pylint: disable=too-many-branches
"""
Configure the provided logger
- get and update the content of the Json configuration file
- configure the logger with this file
If a log_dir and process_name are provided, the format and filename in the configuration file
are updated with the provided values if they contain the patterns %(logdir)s and %(daemon)s
If no log_dir and process_name are provided, this function will truncate the log file
defined in the configuration file.
If a log file name is provided, it will override the default defined log file name.
At first, this function checks if the logger is still existing and initialized to
update the handlers and formatters. This mainly happens during the unit tests.
:param logger_configuration_file: Python Json logger configuration file
:rtype logger_configuration_file: str
:param log_dir: default log directory to update the defined logging handlers
:rtype log_dir: str
:param process_name: process name to update the defined logging formatters
:rtype process_name: str
:param log_file: log file name to update the defined log file
:rtype log_file: str
:return: None
"""
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if not process_name:
break
# Logger is already configured?
if getattr(handler, '_name', None) == 'daemons':
# Update the declared formats and file names with the process name
# This is for unit tests purpose only: alignak_tests will be replaced
# with the provided process name
for hdlr in logger_.handlers:
# print("- handler : %s (%s)" % (hdlr, hdlr.formatter._fmt))
if 'alignak_tests' in hdlr.formatter._fmt:
formatter = logging.Formatter(hdlr.formatter._fmt.replace("alignak_tests",
process_name))
hdlr.setFormatter(formatter)
if getattr(hdlr, 'filename', None) and 'alignak_tests' in hdlr.filename:
hdlr.filename = hdlr.filename._fmt.replace("alignak_tests", process_name)
# print("- handler : %s (%s) -> %s" % (hdlr, hdlr.formatter._fmt,
# hdlr.filename))
# else:
# print("- handler : %s (%s)" % (hdlr, hdlr.formatter._fmt))
break
else:
if not logger_configuration_file or not os.path.exists(logger_configuration_file):
print("The logger configuration file does not exist: %s" % logger_configuration_file)
return
with open(logger_configuration_file, 'rt') as _file:
config = json.load(_file)
truncate = False
if not process_name and not log_dir:
truncate = True
if not process_name:
process_name = 'alignak_tests'
if not log_dir:
log_dir = '/tmp'
# Update the declared formats with the process name
for formatter in config['formatters']:
if 'format' not in config['formatters'][formatter]:
continue
config['formatters'][formatter]['format'] = \
config['formatters'][formatter]['format'].replace("%(daemon)s", process_name)
# Update the declared log file names with the log directory
for hdlr in config['handlers']:
if 'filename' not in config['handlers'][hdlr]:
continue
if log_file and hdlr == 'daemons':
config['handlers'][hdlr]['filename'] = log_file
else:
config['handlers'][hdlr]['filename'] = \
config['handlers'][hdlr]['filename'].replace("%(logdir)s", log_dir)
config['handlers'][hdlr]['filename'] = \
config['handlers'][hdlr]['filename'].replace("%(daemon)s", process_name)
if truncate and os.path.exists(config['handlers'][hdlr]['filename']):
with open(config['handlers'][hdlr]['filename'], "w") as file_log_file:
file_log_file.truncate()
# Configure the logger, any error will raise an exception
logger_dictConfig(config) | [4][SEP1][None][For][If][If][For][If][None][If][None][SEP2][1][2][3][4,1][5][6,7][7][8,4][4][SEP3][1][17][0][1][2][0][3][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_UNARYOP_NOT_NAME_LOAD_BREAK_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_EQ_CONSTANT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_CONSTANT_IN_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_CONSTANT_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_BOOLOP_AND_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_COMPARE_CONSTANT_IN_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_CALL_ATTRIBUTE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_CONSTANT_NAME_LOAD_BREAK_IF_BOOLOP_OR_UNARYOP_NOT_NAME_LOAD_UNARYOP_NOT_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_EXPR_CALL_NAME_LOAD_BINOP_CONSTANT_MOD_NAME_LOAD_RETURN_WITH_WITHITEM_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_NAME_STORE_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_BOOLOP_AND_UNARYOP_NOT_NAME_LOAD_UNARYOP_NOT_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_UNARYOP_NOT_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_UNARYOP_NOT_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_FOR_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_IF_COMPARE_CONSTANT_NOTIN_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONTINUE_ASSIGN_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_STORE_CALL_ATTRIBUTE_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_LOAD_LOAD_CONSTANT_NAME_LOAD_FOR_NAME_STORE_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_IF_COMPARE_CONSTANT_NOTIN_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONTINUE_IF_BOOLOP_AND_NAME_LOAD_COMPARE_NAME_LOAD_EQ_CONSTANT_ASSIGN_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_STORE_NAME_LOAD_ASSIGN_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_STORE_CALL_ATTRIBUTE_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_LOAD_LOAD_CONSTANT_NAME_LOAD_ASSIGN_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_STORE_CALL_ATTRIBUTE_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_LOAD_LOAD_CONSTANT_NAME_LOAD_IF_BOOLOP_AND_NAME_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_LOAD_WITH_WITHITEM_CALL_NAME_LOAD_SUBSCRIPT_SUBSCRIPT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD_LOAD_CONSTANT_LOAD_CONSTANT_NAME_STORE_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_NAME_LOAD_NAME_LOAD |
def _send_int(self,value):
"""
Convert a numerical value into an integer, then to a bytes object Check
bounds for signed int.
"""
# Coerce to int. This will throw a ValueError if the value can't
# actually be converted.
if type(value) != int:
new_value = int(value)
if self.give_warnings:
w = "Coercing {} into int ({})".format(value,new_value)
warnings.warn(w,Warning)
value = new_value
# Range check
if value > self.board.int_max or value < self.board.int_min:
err = "Value {} exceeds the size of the board's int.".format(value)
raise OverflowError(err)
return struct.pack(self.board.int_type,value) | [2][SEP1][If][If][If][None][None][Return][SEP2][1,2][3,2][4,5][2][][][SEP3][1][1][0][2][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_COMPARE_CALL_NAME_LOAD_NAME_LOAD_NOTEQ_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_IF_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_CONSTANT_LOAD_NAME_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_NAME_LOAD_IF_BOOLOP_OR_COMPARE_NAME_LOAD_GT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_COMPARE_NAME_LOAD_LT_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_CONSTANT_LOAD_NAME_LOAD_RAISE_CALL_NAME_LOAD_NAME_LOAD_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD |
def emit(self, record):
"""Convert a :class:`logging.LogRecord` to GELF and emit it to Graylog
via an HTTP POST request
:param record: :class:`logging.LogRecord` to convert into a
Graylog GELF log and emit to Graylog via HTTP POST.
:type record: logging.LogRecord
"""
pickle = self.makePickle(record)
connection = httplib.HTTPConnection(
host=self.host,
port=self.port,
timeout=self.timeout
)
connection.request('POST', self.path, pickle, self.headers) | [2][SEP1][None][SEP2][][SEP3][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_KEYWORD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD |
def query_param(self, key, value=None, default=None, as_list=False):
"""
Return or set a query parameter for the given key
The value can be a list.
:param string key: key to look for
:param string default: value to return if ``key`` isn't found
:param boolean as_list: whether to return the values as a list
:param string value: the new query parameter to use
"""
parse_result = self.query_params()
if value is not None:
# Need to ensure all strings are unicode
if isinstance(value, (list, tuple)):
value = list(map(to_unicode, value))
else:
value = to_unicode(value)
parse_result[to_unicode(key)] = value
return URL._mutate(
self, query=unicode_urlencode(parse_result, doseq=True))
try:
result = parse_result[key]
except KeyError:
return default
if as_list:
return result
return result[0] if len(result) == 1 else result | [5][SEP1][If][If][Try][None][None][None][Return][Return][If][Return][Return][SEP2][1,2][3,4][5,6][7][7][8][][][9,10][][][SEP3][4][1][0][2][1][0][0][3][0][0][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_ISNOT_CONSTANT_IF_CALL_NAME_LOAD_NAME_LOAD_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_SUBSCRIPT_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_STORE_NAME_LOAD_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_KEYWORD_CALL_NAME_LOAD_NAME_LOAD_KEYWORD_CONSTANT_TRY_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_NAME_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_RETURN_NAME_LOAD_IF_NAME_LOAD_RETURN_NAME_LOAD_RETURN_IFEXP_COMPARE_CALL_NAME_LOAD_NAME_LOAD_EQ_CONSTANT_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_NAME_LOAD |
def get_directory_as_zip(self, remote_path, local_file):
"""Downloads a remote directory as zip
:param remote_path: path to the remote directory to download
:param local_file: path and name of the target local file
:returns: True if the operation succeeded, False otherwise
:raises: HTTPResponseError in case an HTTP error status was returned
"""
remote_path = self._normalize_path(remote_path)
url = self.url + 'index.php/apps/files/ajax/download.php?dir=' \
+ parse.quote(remote_path)
res = self._session.get(url, stream=True)
if res.status_code == 200:
if local_file is None:
# use downloaded file name from Content-Disposition
# targetFile = res.headers['content-disposition']
local_file = os.path.basename(remote_path)
file_handle = open(local_file, 'wb', 8192)
for chunk in res.iter_content(8192):
file_handle.write(chunk)
file_handle.close()
return True
elif res.status_code >= 400:
raise HTTPResponseError(res)
return False | [3][SEP1][If][If][If][None][None][None][Return][For][None][Return][SEP2][1,2][3,4][5,6][4][7][][][8,9][7][][SEP3][7][0][0][1][1][1][0][1][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_CONSTANT_ADD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_KEYWORD_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_IF_COMPARE_NAME_LOAD_IS_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_FOR_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RETURN_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_GTE_CONSTANT_RAISE_CALL_NAME_LOAD_NAME_LOAD_RETURN_CONSTANT |
def get_apps(self):
""" List all enabled apps through the provisioning api.
:returns: a dict of apps, with values True/False, representing the enabled state.
:raises: HTTPResponseError in case an HTTP error status was returned
"""
ena_apps = {}
res = self._make_ocs_request('GET', self.OCS_SERVICE_CLOUD, 'apps')
if res.status_code != 200:
raise HTTPResponseError(res)
tree = ET.fromstring(res.content)
self._check_ocs_status(tree)
# <data><apps><element>files</element><element>activity</element> ...
for el in tree.findall('data/apps/element'):
ena_apps[el.text] = False
res = self._make_ocs_request('GET', self.OCS_SERVICE_CLOUD,
'apps?filter=enabled')
if res.status_code != 200:
raise HTTPResponseError(res)
tree = ET.fromstring(res.content)
self._check_ocs_status(tree)
for el in tree.findall('data/apps/element'):
ena_apps[el.text] = True
return ena_apps | [1][SEP1][If][None][None][For][None][If][None][None][For][None][Return][SEP2][1,2][][3][4,5][3][6,7][][8][9,10][8][][SEP3][1][1][2][1][0][1][1][2][1][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_DICT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_NOTEQ_CONSTANT_RAISE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_FOR_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_SUBSCRIPT_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_STORE_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_NOTEQ_CONSTANT_RAISE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_FOR_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_SUBSCRIPT_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_STORE_CONSTANT_RETURN_NAME_LOAD |
def load_patterns(filename):
"""Loads the patters contained in the filename and puts them into a list
of patterns, each pattern being a list of occurrence, and each
occurrence being a list of (onset, midi) pairs.
The input file must be formatted as described in MIREX 2013:
http://www.music-ir.org/mirex/wiki/2013:Discovery_of_Repeated_Themes_%26_Sections
Parameters
----------
filename : str
The input file path containing the patterns of a given piece using the
MIREX 2013 format.
Returns
-------
pattern_list : list
The list of patterns, containing all their occurrences,
using the following format::
onset_midi = (onset_time, midi_number)
occurrence = [onset_midi1, ..., onset_midiO]
pattern = [occurrence1, ..., occurrenceM]
pattern_list = [pattern1, ..., patternN]
where ``N`` is the number of patterns, ``M[i]`` is the number of
occurrences of the ``i`` th pattern, and ``O[j]`` is the number of
onsets in the ``j``'th occurrence. E.g.::
occ1 = [(0.5, 67.0), (1.0, 67.0), (1.5, 67.0), (2.0, 64.0)]
occ2 = [(4.5, 65.0), (5.0, 65.0), (5.5, 65.0), (6.0, 62.0)]
pattern1 = [occ1, occ2]
occ1 = [(10.5, 67.0), (11.0, 67.0), (11.5, 67.0), (12.0, 64.0),
(12.5, 69.0), (13.0, 69.0), (13.5, 69.0), (14.0, 67.0),
(14.5, 76.0), (15.0, 76.0), (15.5, 76.0), (16.0, 72.0)]
occ2 = [(18.5, 67.0), (19.0, 67.0), (19.5, 67.0), (20.0, 62.0),
(20.5, 69.0), (21.0, 69.0), (21.5, 69.0), (22.0, 67.0),
(22.5, 77.0), (23.0, 77.0), (23.5, 77.0), (24.0, 74.0)]
pattern2 = [occ1, occ2]
pattern_list = [pattern1, pattern2]
"""
# List with all the patterns
pattern_list = []
# Current pattern, which will contain all occs
pattern = []
# Current occurrence, containing (onset, midi)
occurrence = []
with _open(filename, mode='r') as input_file:
for line in input_file.readlines():
if "pattern" in line:
if occurrence != []:
pattern.append(occurrence)
if pattern != []:
pattern_list.append(pattern)
occurrence = []
pattern = []
continue
if "occurrence" in line:
if occurrence != []:
pattern.append(occurrence)
occurrence = []
continue
string_values = line.split(",")
onset_midi = (float(string_values[0]), float(string_values[1]))
occurrence.append(onset_midi)
# Add last occurrence and pattern to pattern_list
if occurrence != []:
pattern.append(occurrence)
if pattern != []:
pattern_list.append(pattern)
return pattern_list | [1][SEP1][None][For][If][If][If][If][None][If][None][If][If][None][None][Return][None][None][None][None][SEP2][1][2,3][4,5][6,7][8,9][10,11][7][12,13][9][14,15][16,17][1][13][][15][1][17][1][SEP3][3][6][1][0][0][0][1][0][1][0][0][4][1][0][1][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_LIST_LOAD_ASSIGN_NAME_STORE_LIST_LOAD_ASSIGN_NAME_STORE_LIST_LOAD_WITH_WITHITEM_CALL_NAME_LOAD_NAME_LOAD_KEYWORD_CONSTANT_NAME_STORE_FOR_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_CONSTANT_IN_NAME_LOAD_IF_COMPARE_NAME_LOAD_NOTEQ_LIST_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_NOTEQ_LIST_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_LIST_LOAD_ASSIGN_NAME_STORE_LIST_LOAD_CONTINUE_IF_COMPARE_CONSTANT_IN_NAME_LOAD_IF_COMPARE_NAME_LOAD_NOTEQ_LIST_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_LIST_LOAD_CONTINUE_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_NAME_STORE_TUPLE_CALL_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_CALL_NAME_LOAD_SUBSCRIPT_NAME_LOAD_CONSTANT_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_NOTEQ_LIST_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_IF_COMPARE_NAME_LOAD_NOTEQ_LIST_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def first_n_three_layer_P(reference_patterns, estimated_patterns, n=5):
"""First n three-layer precision.
This metric is basically the same as the three-layer FPR but it is only
applied to the first n estimated patterns, and it only returns the
precision. In MIREX and typically, n = 5.
Examples
--------
>>> ref_patterns = mir_eval.io.load_patterns("ref_pattern.txt")
>>> est_patterns = mir_eval.io.load_patterns("est_pattern.txt")
>>> P = mir_eval.pattern.first_n_three_layer_P(ref_patterns,
... est_patterns, n=5)
Parameters
----------
reference_patterns : list
The reference patterns in the format returned by
:func:`mir_eval.io.load_patterns()`
estimated_patterns : list
The estimated patterns in the same format
n : int
Number of patterns to consider from the estimated results, in
the order they appear in the matrix
(Default value = 5)
Returns
-------
precision : float
The first n three-layer Precision
"""
validate(reference_patterns, estimated_patterns)
# If no patterns were provided, metric is zero
if _n_onset_midi(reference_patterns) == 0 or \
_n_onset_midi(estimated_patterns) == 0:
return 0., 0., 0.
# Get only the first n patterns from the estimated results
fn_est_patterns = estimated_patterns[:min(len(estimated_patterns), n)]
# Compute the three-layer scores for the first n estimated patterns
F, P, R = three_layer_FPR(reference_patterns, fn_est_patterns)
return P | [3][SEP1][If][Return][Return][SEP2][1,2][][][SEP3][3][0][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_EXPR_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_IF_BOOLOP_OR_COMPARE_CALL_NAME_LOAD_NAME_LOAD_EQ_CONSTANT_COMPARE_CALL_NAME_LOAD_NAME_LOAD_EQ_CONSTANT_RETURN_TUPLE_CONSTANT_CONSTANT_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_NAME_LOAD_SLICE_CALL_NAME_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_LOAD_ASSIGN_TUPLE_NAME_STORE_NAME_STORE_NAME_STORE_STORE_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def validate(reference_sources, estimated_sources):
"""Checks that the input data to a metric are valid, and throws helpful
errors if not.
Parameters
----------
reference_sources : np.ndarray, shape=(nsrc, nsampl)
matrix containing true sources
estimated_sources : np.ndarray, shape=(nsrc, nsampl)
matrix containing estimated sources
"""
if reference_sources.shape != estimated_sources.shape:
raise ValueError('The shape of estimated sources and the true '
'sources should match. reference_sources.shape '
'= {}, estimated_sources.shape '
'= {}'.format(reference_sources.shape,
estimated_sources.shape))
if reference_sources.ndim > 3 or estimated_sources.ndim > 3:
raise ValueError('The number of dimensions is too high (must be less '
'than 3). reference_sources.ndim = {}, '
'estimated_sources.ndim '
'= {}'.format(reference_sources.ndim,
estimated_sources.ndim))
if reference_sources.size == 0:
warnings.warn("reference_sources is empty, should be of size "
"(nsrc, nsample). sdr, sir, sar, and perm will all "
"be empty np.ndarrays")
elif _any_source_silent(reference_sources):
raise ValueError('All the reference sources should be non-silent (not '
'all-zeros), but at least one of the reference '
'sources is all 0s, which introduces ambiguity to the'
' evaluation. (Otherwise we can add infinitely many '
'all-zero sources.)')
if estimated_sources.size == 0:
warnings.warn("estimated_sources is empty, should be of size "
"(nsrc, nsample). sdr, sir, sar, and perm will all "
"be empty np.ndarrays")
elif _any_source_silent(estimated_sources):
raise ValueError('All the estimated sources should be non-silent (not '
'all-zeros), but at least one of the estimated '
'sources is all 0s. Since we require each reference '
'source to be non-silent, having a silent estimated '
'source will result in an underdetermined system.')
if (estimated_sources.shape[0] > MAX_SOURCES or
reference_sources.shape[0] > MAX_SOURCES):
raise ValueError('The supplied matrices should be of shape (nsrc,'
' nsampl) but reference_sources.shape[0] = {} and '
'estimated_sources.shape[0] = {} which is greater '
'than mir_eval.separation.MAX_SOURCES = {}. To '
'override this check, set '
'mir_eval.separation.MAX_SOURCES to a '
'larger value.'.format(reference_sources.shape[0],
estimated_sources.shape[0],
MAX_SOURCES)) | [2][SEP1][If][None][If][None][If][None][If][If][None][None][If][If][None][None][SEP2][1,2][][3,4][][5,6][7][8,7][9,10][][11][12,11][13][][][SEP3][0][2][0][2][0][1][1][0][1][1][1][0][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_NOTEQ_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_NAME_LOAD_CALL_ATTRIBUTE_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_OR_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_GT_CONSTANT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_GT_CONSTANT_RAISE_CALL_NAME_LOAD_CALL_ATTRIBUTE_CONSTANT_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_IF_CALL_NAME_LOAD_NAME_LOAD_RAISE_CALL_NAME_LOAD_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_IF_CALL_NAME_LOAD_NAME_LOAD_RAISE_CALL_NAME_LOAD_CONSTANT_IF_BOOLOP_OR_COMPARE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_GT_NAME_LOAD_COMPARE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_GT_NAME_LOAD_RAISE_CALL_NAME_LOAD_CALL_ATTRIBUTE_CONSTANT_LOAD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_LOAD_NAME_LOAD |
def pairwise(reference_intervals, reference_labels,
estimated_intervals, estimated_labels,
frame_size=0.1, beta=1.0):
"""Frame-clustering segmentation evaluation by pair-wise agreement.
Examples
--------
>>> (ref_intervals,
... ref_labels) = mir_eval.io.load_labeled_intervals('ref.lab')
>>> (est_intervals,
... est_labels) = mir_eval.io.load_labeled_intervals('est.lab')
>>> # Trim or pad the estimate to match reference timing
>>> (ref_intervals,
... ref_labels) = mir_eval.util.adjust_intervals(ref_intervals,
... ref_labels,
... t_min=0)
>>> (est_intervals,
... est_labels) = mir_eval.util.adjust_intervals(
... est_intervals, est_labels, t_min=0, t_max=ref_intervals.max())
>>> precision, recall, f = mir_eval.structure.pairwise(ref_intervals,
... ref_labels,
... est_intervals,
... est_labels)
Parameters
----------
reference_intervals : np.ndarray, shape=(n, 2)
reference segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
reference_labels : list, shape=(n,)
reference segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_intervals : np.ndarray, shape=(m, 2)
estimated segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_labels : list, shape=(m,)
estimated segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
frame_size : float > 0
length (in seconds) of frames for clustering
(Default value = 0.1)
beta : float > 0
beta value for F-measure
(Default value = 1.0)
Returns
-------
precision : float > 0
Precision of detecting whether frames belong in the same cluster
recall : float > 0
Recall of detecting whether frames belong in the same cluster
f : float > 0
F-measure of detecting whether frames belong in the same cluster
"""
validate_structure(reference_intervals, reference_labels,
estimated_intervals, estimated_labels)
# Check for empty annotations. Don't need to check labels because
# validate_structure makes sure they're the same size as intervals
if reference_intervals.size == 0 or estimated_intervals.size == 0:
return 0., 0., 0.
# Generate the cluster labels
y_ref = util.intervals_to_samples(reference_intervals,
reference_labels,
sample_size=frame_size)[-1]
y_ref = util.index_labels(y_ref)[0]
# Map to index space
y_est = util.intervals_to_samples(estimated_intervals,
estimated_labels,
sample_size=frame_size)[-1]
y_est = util.index_labels(y_est)[0]
# Build the reference label agreement matrix
agree_ref = np.equal.outer(y_ref, y_ref)
# Count the unique pairs
n_agree_ref = (agree_ref.sum() - len(y_ref)) / 2.0
# Repeat for estimate
agree_est = np.equal.outer(y_est, y_est)
n_agree_est = (agree_est.sum() - len(y_est)) / 2.0
# Find where they agree
matches = np.logical_and(agree_ref, agree_est)
n_matches = (matches.sum() - len(y_ref)) / 2.0
precision = n_matches / n_agree_est
recall = n_matches / n_agree_ref
f_measure = util.f_measure(precision, recall, beta=beta)
return precision, recall, f_measure | [6][SEP1][If][Return][Return][SEP2][1,2][][][SEP3][1][0][14] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_ARG_CONSTANT_CONSTANT_EXPR_CONSTANT_EXPR_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_IF_BOOLOP_OR_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_EQ_CONSTANT_RETURN_TUPLE_CONSTANT_CONSTANT_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_KEYWORD_NAME_LOAD_UNARYOP_USUB_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_KEYWORD_NAME_LOAD_UNARYOP_USUB_CONSTANT_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CONSTANT_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUB_CALL_NAME_LOAD_NAME_LOAD_DIV_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUB_CALL_NAME_LOAD_NAME_LOAD_DIV_CONSTANT_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_CALL_ATTRIBUTE_NAME_LOAD_LOAD_SUB_CALL_NAME_LOAD_NAME_LOAD_DIV_CONSTANT_ASSIGN_NAME_STORE_BINOP_NAME_LOAD_DIV_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_NAME_LOAD_DIV_NAME_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_KEYWORD_NAME_LOAD_RETURN_TUPLE_NAME_LOAD_NAME_LOAD_NAME_LOAD_LOAD |
def filter(self):
"""
Get a filtered list of file imports
:return: A list of file imports, with only the id set (you need to
refresh them if you want all the attributes to be filled in)
:rtype: list of :class:`carto.file_import.FileImportJob`
:raise: CartoException
"""
try:
response = self.send(self.get_collection_endpoint(), "get")
if self.json_collection_attribute is not None:
resource_ids = self.client.get_response_data(
response,
self.Meta.parse_json)[self.json_collection_attribute]
else:
resource_ids = self.client.get_response_data(
response, self.Meta.parse_json)
except Exception as e:
raise CartoException(e)
resources = []
for resource_id in resource_ids:
try:
resource = self.resource_class(self.client)
except (ValueError, TypeError):
continue
else:
setattr(resource, resource.Meta.id_field, resource_id)
resources.append(resource)
return resources | [1][SEP1][Try][If][None][None][None][None][For][Try][Return][None][None][SEP2][1,2][3,4][][5][5][6][7,8][9,6][][10][6][SEP3][0][2][1][1][1][0][0][2][0][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_ASSIGN_NAME_STORE_SUBSCRIPT_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EXCEPTHANDLER_NAME_LOAD_RAISE_CALL_NAME_LOAD_NAME_LOAD_ASSIGN_NAME_STORE_LIST_LOAD_FOR_NAME_STORE_NAME_LOAD_TRY_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_EXCEPTHANDLER_TUPLE_NAME_LOAD_NAME_LOAD_LOAD_CONTINUE_EXPR_CALL_NAME_LOAD_NAME_LOAD_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def copyto_file_path(self, query, path, append=False):
"""
Gets data from a table into a writable file
:param query: The "COPY { table_name [(column_name[, ...])] | (query) }
TO STDOUT [WITH(option[,...])]" query to execute
:type query: str
:param path: A path to a writable file
:type path: str
:param append: Whether to append or not if the file already exists
Default value is False
:type append: bool
:raise CartoException:
"""
file_mode = 'wb' if not append else 'ab'
with open(path, file_mode) as f:
self.copyto_file_object(query, f) | [4][SEP1][None][None][SEP2][1][][SEP3][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_CONSTANT_EXPR_CONSTANT_ASSIGN_NAME_STORE_IFEXP_UNARYOP_NOT_NAME_LOAD_CONSTANT_CONSTANT_WITH_WITHITEM_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_STORE_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD |
def new_symlink(self, vd, name, parent, rr_target, seqnum, rock_ridge,
rr_name, xa):
# type: (headervd.PrimaryOrSupplementaryVD, bytes, DirectoryRecord, bytes, int, str, bytes, bool) -> None
'''
Create a new symlink Directory Record. This implies that the new
record will be Rock Ridge.
Parameters:
vd - The Volume Descriptor this record is part of.
name - The name for this directory record.
parent - The parent of this directory record.
rr_target - The symlink target for this directory record.
seqnum - The sequence number for this directory record.
rock_ridge - The version of Rock Ridge to use for this directory record.
rr_name - The Rock Ridge name for this directory record.
xa - True if this is an Extended Attribute record.
Returns:
Nothing.
'''
if self._initialized:
raise pycdlibexception.PyCdlibInternalError('Directory Record already initialized')
self._new(vd, name, parent, seqnum, False, 0, xa)
if rock_ridge:
self._rr_new(rock_ridge, rr_name, rr_target, False, False, False,
0o0120555) | [9][SEP1][If][None][If][None][SEP2][1,2][][3][][SEP3][0][1][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_ARG_ARG_ARG_ARG_ARG_EXPR_CONSTANT_IF_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_NAME_LOAD_IF_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_NAME_LOAD_CONSTANT_CONSTANT_CONSTANT_CONSTANT |
def record(self):
# type: () -> bytes
'''
Return a string representation of the Directory Record date.
Parameters:
None.
Returns:
A string representing this Directory Record Date.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('Directory Record Date not initialized')
return struct.pack(self.FMT, self.years_since_1900, self.month,
self.day_of_month, self.hour, self.minute,
self.second, self.gmtoffset) | [1][SEP1][If][None][Return][SEP2][1,2][][][SEP3][0][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD |
def record(self):
# type: () -> bytes
'''
Generate a string representing the Rock Ridge Extensions Reference
record.
Parameters:
None.
Returns:
String containing the Rock Ridge record.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('ER record not yet initialized!')
return b'ER' + struct.pack('=BBBBBB', RRERRecord.length(self.ext_id, self.ext_des, self.ext_src), SU_ENTRY_VERSION, len(self.ext_id), len(self.ext_des), len(self.ext_src), self.ext_ver) + self.ext_id + self.ext_des + self.ext_src | [1][SEP1][If][None][Return][SEP2][1,2][][][SEP3][0][1][5] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_RETURN_BINOP_BINOP_BINOP_BINOP_CONSTANT_ADD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ADD_ATTRIBUTE_NAME_LOAD_LOAD_ADD_ATTRIBUTE_NAME_LOAD_LOAD_ADD_ATTRIBUTE_NAME_LOAD_LOAD |
def record(self):
# type: () -> bytes
'''
Generate a string representing the Rock Ridge Child Link record.
Parameters:
None.
Returns:
String containing the Rock Ridge record.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('CL record not yet initialized!')
return b'CL' + struct.pack('=BBLL', RRCLRecord.length(), SU_ENTRY_VERSION, self.child_log_block_num, utils.swab_32bit(self.child_log_block_num)) | [1][SEP1][If][None][Return][SEP2][1,2][][][SEP3][0][1][3] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_RETURN_BINOP_CONSTANT_ADD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD |
def _record(self, entries):
# type: (RockRidgeEntries) -> bytes
'''
Return a string representing the Rock Ridge entry.
Parameters:
entries - The dr_entries or ce_entries to generate a record for.
Returns:
A string representing the Rock Ridge entry.
'''
outlist = []
if entries.sp_record is not None:
outlist.append(entries.sp_record.record())
if entries.rr_record is not None:
outlist.append(entries.rr_record.record())
for nm_record in entries.nm_records:
outlist.append(nm_record.record())
if entries.px_record is not None:
outlist.append(entries.px_record.record(self.rr_version))
for sl_record in entries.sl_records:
outlist.append(sl_record.record())
if entries.tf_record is not None:
outlist.append(entries.tf_record.record())
if entries.cl_record is not None:
outlist.append(entries.cl_record.record())
if entries.pl_record is not None:
outlist.append(entries.pl_record.record())
if entries.re_record is not None:
outlist.append(entries.re_record.record())
for es_record in entries.es_records:
outlist.append(es_record.record())
if entries.er_record is not None:
outlist.append(entries.er_record.record())
if entries.ce_record is not None:
outlist.append(entries.ce_record.record())
for pd_record in entries.pd_records:
outlist.append(pd_record.record())
if entries.st_record is not None:
outlist.append(entries.st_record.record())
if entries.sf_record is not None:
outlist.append(entries.sf_record.record())
return b''.join(outlist) | [2][SEP1][If][None][If][None][For][None][If][None][For][None][If][None][If][None][If][None][If][None][For][None][If][None][If][None][For][None][If][None][If][None][Return][SEP2][1,2][2][3,4][4][5,6][4][7,8][8][9,10][8][11,12][12][13,14][14][15,16][16][17,18][18][19,20][18][21,22][22][23,24][24][25,26][24][27,28][28][29,30][30][][SEP3][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][0][2][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_LIST_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_RETURN_CALL_ATTRIBUTE_CONSTANT_LOAD_NAME_LOAD |
def add_entry(self, length):
# type: (int) -> int
'''
Add a new entry to this Rock Ridge Continuation Block. This method
attempts to find a gap that fits the new length anywhere within this
Continuation Block. If successful, it returns the offset at which
it placed this entry. If unsuccessful, it returns None.
Parameters:
length - The length of the entry to find a gap for.
Returns:
The offset the entry was placed at, or None if no gap was found.
'''
offset = -1
# Need to find a gap
for index, entry in enumerate(self._entries):
if index == 0:
if entry.offset != 0 and length <= entry.offset:
# We can put it at the beginning!
offset = 0
break
else:
lastentry = self._entries[index - 1]
lastend = lastentry.offset + lastentry.length - 1
gapsize = entry.offset - lastend - 1
if gapsize >= length:
# We found a spot for it!
offset = lastend + 1
break
else:
# We reached the end without finding a gap for it. Look at the last
# entry and see if there is room at the end.
if self._entries:
lastentry = self._entries[-1]
lastend = lastentry.offset + lastentry.length - 1
left = self._max_block_size - lastend - 1
if left >= length:
offset = lastend + 1
else:
if self._max_block_size >= length:
offset = 0
if offset >= 0:
bisect.insort_left(self._entries,
RockRidgeContinuationEntry(offset, length))
return offset | [2][SEP1][None][For][If][If][If][If][None][Return][None][None][SEP2][1][2,3][4,5][6,7][8,1][9,1][7][][3][3][SEP3][0][1][0][0][0][0][2][0][0][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_ASSIGN_NAME_STORE_UNARYOP_USUB_CONSTANT_FOR_TUPLE_NAME_STORE_NAME_STORE_STORE_CALL_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_EQ_CONSTANT_IF_BOOLOP_AND_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_NOTEQ_CONSTANT_COMPARE_NAME_LOAD_LTE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CONSTANT_BREAK_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_BINOP_NAME_LOAD_SUB_CONSTANT_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_ATTRIBUTE_NAME_LOAD_LOAD_SUB_CONSTANT_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_SUB_NAME_LOAD_SUB_CONSTANT_IF_COMPARE_NAME_LOAD_GTE_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_NAME_LOAD_ADD_CONSTANT_BREAK_IF_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_SUBSCRIPT_ATTRIBUTE_NAME_LOAD_LOAD_UNARYOP_USUB_CONSTANT_LOAD_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_ADD_ATTRIBUTE_NAME_LOAD_LOAD_SUB_CONSTANT_ASSIGN_NAME_STORE_BINOP_BINOP_ATTRIBUTE_NAME_LOAD_LOAD_SUB_NAME_LOAD_SUB_CONSTANT_IF_COMPARE_NAME_LOAD_GTE_NAME_LOAD_ASSIGN_NAME_STORE_BINOP_NAME_LOAD_ADD_CONSTANT_IF_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_GTE_NAME_LOAD_ASSIGN_NAME_STORE_CONSTANT_IF_COMPARE_NAME_LOAD_GTE_CONSTANT_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CALL_NAME_LOAD_NAME_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def record(self):
# type: () -> bytes
'''
A method to generate the string representing this UDF Implementation Use
Volume Descriptor Implementation Use field.
Parameters:
None.
Returns:
A string representing this UDF Implementation Use Volume Descriptor.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('UDF Implementation Use Volume Descriptor Implementation Use field not initialized')
return struct.pack(self.FMT, self.char_set, self.log_vol_ident,
self.lv_info1, self.lv_info2, self.lv_info3,
self.impl_ident.record(), self.impl_use) | [1][SEP1][If][None][Return][SEP2][1,2][][][SEP3][0][1][2] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_RETURN_CALL_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ATTRIBUTE_NAME_LOAD_LOAD |
def set_extent_location(self, new_location):
# type: (int) -> None
'''
A method to set the location of this UDF File Set Descriptor.
Parameters:
new_location - The new extent this UDF File Set Descriptor should be located at.
Returns:
Nothing.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('UDF File Set Descriptor not initialized')
self.new_extent_loc = new_location | [2][SEP1][If][None][None][SEP2][1,2][][][SEP3][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD |
def track_rr_ce_entry(self, extent, offset, length):
# type: (int, int, int) -> rockridge.RockRidgeContinuationBlock
'''
Start tracking a new Rock Ridge Continuation Entry entry in this Volume
Descriptor, at the extent, offset, and length provided. Since Rock
Ridge Continuation Blocks are shared across multiple Rock Ridge
Directory Records, the most logical place to track them is in the PVD.
This method is expected to be used during parse time, when an extent,
offset and length are already assigned to the entry.
Parameters:
extent - The extent that this Continuation Entry lives at.
offset - The offset within the extent that this Continuation Entry
lives at.
length - The length of this Continuation Entry.
Returns:
The object representing the block in which the Continuation Entry was
placed in.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('This Primary Volume Descriptor is not yet initialized')
for block in self.rr_ce_blocks:
if block.extent_location() == extent:
break
else:
# We didn't find it in the list, add it
block = rockridge.RockRidgeContinuationBlock(extent, self.log_block_size)
self.rr_ce_blocks.append(block)
block.track_entry(offset, length)
return block | [4][SEP1][If][None][For][If][Return][SEP2][1,2][][3,4][4,2][][SEP3][0][1][2][1][1] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_ARG_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_EQ_NAME_LOAD_BREAK_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_ATTRIBUTE_NAME_LOAD_LOAD_EXPR_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_NAME_LOAD_EXPR_CALL_ATTRIBUTE_NAME_LOAD_LOAD_NAME_LOAD_NAME_LOAD_RETURN_NAME_LOAD |
def set_inode(self, ino):
# type: (inode.Inode) -> None
'''
A method to set the Inode associated with this El Torito Entry.
Parameters:
ino - The Inode object corresponding to this entry.
Returns:
Nothing.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('El Torito Entry not yet initialized')
self.inode = ino | [2][SEP1][If][None][None][SEP2][1,2][][][SEP3][0][1][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_ATTRIBUTE_NAME_LOAD_STORE_NAME_LOAD |
def _yield_children(rec):
# type: (dr.DirectoryRecord) -> Generator
'''
An internal function to gather and yield all of the children of a Directory
Record.
Parameters:
rec - The Directory Record to get all of the children from (must be a
directory)
Yields:
Children of this Directory Record.
Returns:
Nothing.
'''
if not rec.is_dir():
raise pycdlibexception.PyCdlibInvalidInput('Record is not a directory!')
last = b''
for child in rec.children:
# Check to see if the filename of this child is the same as the
# last one, and if so, skip the child. This can happen if we
# have very large files with more than one directory entry.
fi = child.file_identifier()
if fi == last:
continue
last = fi
if child.rock_ridge is not None and child.rock_ridge.child_link_record_exists() and child.rock_ridge.cl_to_moved_dr is not None and child.rock_ridge.cl_to_moved_dr.parent is not None:
# If this is the case, this is a relocated entry. We actually
# want to go find the entry this was relocated to; we do that
# by following the child_link, then going up to the parent and
# finding the entry that links to the same one as this one.
cl_parent = child.rock_ridge.cl_to_moved_dr.parent
for cl_child in cl_parent.children:
if cl_child.rock_ridge is not None and cl_child.rock_ridge.name() == child.rock_ridge.name():
child = cl_child
break
# If we ended up not finding the right one in the parent of the
# moved entry, weird, but just return the one we would have
# anyway.
yield child | [1][SEP1][If][None][None][For][If][If][None][None][For][If][None][SEP2][1,2][][3][4][5,3][6,7][8][3][9,7][10,8][7][SEP3][1][1][0][3][1][3][0][0][0][2][0] | MODULE_FUNCTIONDEF_ARGUMENTS_ARG_EXPR_CONSTANT_IF_UNARYOP_NOT_CALL_ATTRIBUTE_NAME_LOAD_LOAD_RAISE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_CONSTANT_ASSIGN_NAME_STORE_CONSTANT_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_ASSIGN_NAME_STORE_CALL_ATTRIBUTE_NAME_LOAD_LOAD_IF_COMPARE_NAME_LOAD_EQ_NAME_LOAD_CONTINUE_ASSIGN_NAME_STORE_NAME_LOAD_IF_BOOLOP_AND_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_COMPARE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ISNOT_CONSTANT_COMPARE_ATTRIBUTE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_ISNOT_CONSTANT_ASSIGN_NAME_STORE_ATTRIBUTE_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_LOAD_FOR_NAME_STORE_ATTRIBUTE_NAME_LOAD_LOAD_IF_BOOLOP_AND_COMPARE_ATTRIBUTE_NAME_LOAD_LOAD_ISNOT_CONSTANT_COMPARE_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_EQ_CALL_ATTRIBUTE_ATTRIBUTE_NAME_LOAD_LOAD_LOAD_ASSIGN_NAME_STORE_NAME_LOAD_BREAK_EXPR_YIELD_NAME_LOAD |