docstring: string (lengths 52 to 499)
function: string (lengths 67 to 35.2k)
__index_level_0__: int64 (52.6k to 1.16M)
Sets the dot-notated property to the passed-in value.

Args:
    prop: a string naming the property to set; "a.b.c" ~ dictionary['a']['b']['c']
    value: the value to assign to the property

def set(self, prop, value):
    prop_parts = prop.split(".")
    if self.copy_dict:
        new_dict = copy.deepcopy(self.obj)
    else:
        new_dict = self.obj
    pointer = None
    parts_length = len(prop_parts) - 1
    for i, part in enumerate(prop_parts):
        if pointer is None and i == parts_length:
            new_dict[part] = value
        elif pointer is None:
            pointer = new_dict.get(part)
        elif i == parts_length:
            pointer[part] = value
        else:
            pointer = pointer.get(part)
    return new_dict
1,120,939
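For reference, a minimal standalone sketch of the same dotted-path walk over a plain dict. Note this sketch uses setdefault to create missing intermediate dicts, which the method above does not do:

def set_path(d, prop, value):
    # walk all but the last part, creating intermediate dicts as needed
    pointer = d
    parts = prop.split(".")
    for part in parts[:-1]:
        pointer = pointer.setdefault(part, {})
    pointer[parts[-1]] = value
    return d

# set_path({}, "a.b.c", 1) -> {'a': {'b': {'c': 1}}}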
Return a list of 2-tuples: the argument name and its default value, or a special value that indicates there is no default value.

Args:
    args: list of argument names
    defaults: tuple of default values

def _get_args_and_defaults(args, defaults):
    defaults = defaults or []
    args_and_defaults = [
        (argument, default)
        for (argument, default) in zip_longest(args[::-1], defaults[::-1],
                                               fillvalue=NoDefault)]
    return args_and_defaults[::-1]
1,121,023
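A quick sketch of the reverse-zip pairing, with a stand-in NoDefault sentinel (the real one lives in the library):

from itertools import zip_longest

NoDefault = object()  # stand-in sentinel for "no default value"

def pair_args(args, defaults):
    # defaults align to the *last* arguments, hence the double reversal
    pairs = list(zip_longest(args[::-1], (defaults or [])[::-1],
                             fillvalue=NoDefault))
    return pairs[::-1]

# pair_args(["a", "b", "c"], (1, 2)) -> [('a', NoDefault), ('b', 1), ('c', 2)]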
Return all methods of cls that are parseable, i.e. have been decorated by '@create_parser'.

Args:
    cls: the class currently being decorated

Note:
    classmethods will not be included as they can only be referenced once the class has been defined

Returns:
    a 2-tuple with the parser of the __init__ method if any and a dict of the form {'method_name': associated_parser}

def _get_parseable_methods(cls):
    _LOG.debug("Retrieving parseable methods for '%s'", cls.__name__)
    init_parser = None
    methods_to_parse = {}
    for name, obj in vars(cls).items():
        # Every callable object that has a 'parser' attribute will be
        # added as a subparser.
        # This won't work for classmethods because references to
        # classmethods are only possible once the class has been defined
        if callable(obj) and hasattr(obj, "parser"):
            _LOG.debug("Found method '%s'", name)
            if name == "__init__":
                # If we find the decorated __init__ method it will be
                # used as the top level parser
                init_parser = obj.parser
            else:
                methods_to_parse[obj.__name__] = obj.parser
    return (init_parser, methods_to_parse)
1,121,025
Create a default description for the parser and help messages for the arguments if they are missing.

Args:
    func: the method we are creating a parser for
    args: the argument names of the method
    description: a potentially existing description created from the function docstring
    args_help: a dict {arg_name: help} with potentially missing arguments

Returns:
    a tuple (arg_parse_description, complete_args_help)

def _get_default_help_message(func, args, description=None, args_help=None):
    if description is None:
        description = "Argument parsing for %s" % func.__name__
    args_help = args_help or {}
    # If an argument is missing a help message we create a simple one
    for argument in [arg_name for arg_name in args
                     if arg_name not in args_help]:
        args_help[argument] = "Help message for %s" % argument
    return (description, args_help)
1,121,026
Return an ArgumentParser for the given function. Arguments are defined from the function arguments and their associated defaults.

Args:
    func: function for which we want an ArgumentParser
    types: types to which the command line arguments should be converted
    args_and_defaults: list of 2-tuples (arg_name, arg_default)
    delimiter_chars: characters used to separate the parameters from their help message in the docstring

def _get_arg_parser(func, types, args_and_defaults, delimiter_chars):
    _LOG.debug("Creating ArgumentParser for '%s'", func.__name__)
    (description, arg_help) = _prepare_doc(
        func, [x for (x, _) in args_and_defaults], delimiter_chars)
    parser = argparse.ArgumentParser(description=description)
    for ((arg, default), arg_type) in zip_longest(args_and_defaults, types):
        help_msg = arg_help[arg]
        if default is NoDefault:
            arg_type = arg_type or identity_type
            if arg_type == bool:
                _LOG.debug("Adding optional flag %s.%s", func.__name__, arg)
                parser.add_argument("--%s" % arg, default=True,
                                    required=False, action="store_false",
                                    help="%s. Defaults to True if not specified" % help_msg)
            else:
                _LOG.debug("Adding positional argument %s.%s",
                           func.__name__, arg)
                parser.add_argument(arg, help=help_msg, type=arg_type)
        else:
            if default is None and arg_type is None:
                raise ParseThisError("To use default value of 'None' you need "
                                     "to specify the type of the argument '{}' "
                                     "for the method '{}'"
                                     .format(arg, func.__name__))
            arg_type = arg_type or type(default)
            if arg_type == bool:
                action = "store_false" if default else "store_true"
                _LOG.debug("Adding optional flag %s.%s", func.__name__, arg)
                parser.add_argument("--%s" % arg, help=help_msg,
                                    default=default, action=action)
            else:
                _LOG.debug("Adding optional argument %s.%s",
                           func.__name__, arg)
                parser.add_argument("--%s" % arg, help=help_msg,
                                    default=default, type=arg_type)
    return parser
1,121,027
Return the given arguments if they are not None; otherwise sys.argv (minus the program name) if it contains something, or an empty list.

Args:
    args: argument to be parsed
    sys_argv: arguments of the command line, i.e. sys.argv

def _get_args_to_parse(args, sys_argv):
    arguments = args if args is not None else sys_argv[1:]
    _LOG.debug("Parsing arguments: %s", arguments)
    return arguments
1,121,028
Returns the method that is linked to the 'call' method of the parser.

Args:
    func: the decorated function

Raises:
    ParseThisError if the decorated method is __init__; __init__ can only be decorated in a class decorated by parse_class

def _get_parser_call_method(func):
    func_name = func.__name__
    parser = func.parser

    def inner_call(instance=None, args=None):
        _LOG.debug("Calling %s.parser.call", func_name)
        # Defer this check to the method call so that __init__ can be
        # decorated in a class decorated with parse_class
        if func_name == "__init__":
            raise ParseThisError(("To use 'create_parser' on the "
                                  "'__init__' you need to decorate the "
                                  "class with '@parse_class'"))
        namespace = parser.parse_args(_get_args_to_parse(args, sys.argv))
        if instance is None:
            # If instance is None we are probably decorating a function,
            # not a method, and don't need the instance
            args_name = _get_args_name_from_parser(parser)
            return _call(func, args_name, namespace)
        return _call_method_from_namespace(instance, func_name, namespace)

    return inner_call
1,121,030
Retrieve the names of the function arguments linked to the given parser.

Args:
    parser: a function parser

def _get_args_name_from_parser(parser):
    # Retrieve the 'action' destination of the method parser, i.e. its
    # argument name. The HelpAction is ignored.
    return [action.dest for action in parser._actions
            if not isinstance(action, argparse._HelpAction)]
1,121,031
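A doctest-style check of the destination extraction, inlined against a plain argparse parser (the automatic -h action is the one being filtered out):

>>> import argparse
>>> p = argparse.ArgumentParser()
>>> _ = p.add_argument("x")
>>> _ = p.add_argument("--y")
>>> [a.dest for a in p._actions if not isinstance(a, argparse._HelpAction)]
['x', 'y']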
Actually calls the callable with the namespace parsed from the command line.

Args:
    callable_obj: a callable object
    arg_names: names of the function arguments
    namespace: the namespace object parsed from the command line

def _call(callable_obj, arg_names, namespace):
    arguments = {arg_name: getattr(namespace, arg_name)
                 for arg_name in arg_names}
    return callable_obj(**arguments)
1,121,032
Call the method, retrieved from obj, with the correct arguments via the namespace.

Args:
    obj: any kind of object
    method_name: method to be called
    namespace: an argparse.Namespace object containing parsed command line arguments

def _call_method_from_namespace(obj, method_name, namespace):
    method = getattr(obj, method_name)
    method_parser = method.parser
    arg_names = _get_args_name_from_parser(method_parser)
    if method_name == "__init__":
        return _call(obj, arg_names, namespace)
    return _call(method, arg_names, namespace)
1,121,033
Create an instance of Action.

Arguments:
    name -- the name of this action

Keyword arguments:
    action -- the function that will be performed when this action is called

def __init__(self, name, action, *args, **kwargs):
    self.args = args
    self.kwargs = kwargs
    self.name = name
    self._action = action
1,121,260
Carries out the action on the provided flow_unit.

Arguments:
    flow_unit -- the flow_unit instance that will have the processing carried out upon it.

Keyword arguments:
    as required by the action function for this Action instance.

def __call__(self, flow_unit, *args, **kwargs):
    self._action(flow_unit, *args, **kwargs)
1,121,261
Create an instance of Transition.

Arguments:
    state -- a State object associated with this transition
    action -- an Action object that is allowed for the state

def __init__(self, state, action, *args, **kwargs):
    self.state = state
    self.action = action
1,121,264
Scan `path` for viruses using the ``clamscan`` program.

Args:
    path (str): Relative or absolute path of the file/directory you need to scan.

Returns:
    dict: ``{filename: ("FOUND", "virus type")}`` or a blank dict.

Raises:
    AssertionError: When the file doesn't exist.

def scan_file(path):
    path = os.path.abspath(path)
    assert os.path.exists(path), "Unreachable file '%s'." % path
    result = sh.clamscan(path, no_summary=True, infected=True,
                         _ok_code=[0, 1])
    return _parse_result(result)
1,121,371
Save cache to the disk.

Args:
    cache (set): Set with cached data.

def save_cache(cache):
    with open(settings.DUP_FILTER_FILE, "w") as f:
        f.write(json.dumps(list(cache)))
1,121,391
Deduplication function which compares `publication` with samples stored in `cache`. If a match is NOT found, `publication` is returned; otherwise None.

Args:
    publication (obj): :class:`.Publication` instance.
    cache (obj): Cache which is used for lookups.

Returns:
    obj/None: Depends on whether the object is found in the cache or not.

def filter_publication(publication, cache=_CACHE):
    if cache is None:
        cache = load_cache()
    if publication._get_hash() in cache:
        return None
    cache.update([publication._get_hash()])
    save_cache(cache)
    return publication
1,121,393
Decorator for simple in-place decorator mocking for tests.

Args:
    to_patch: the string path of the function to patch
    module_name: complete string path of the module to reload
    decorator (optional): replacement decorator. By default a pass-through will be used.

Returns:
    A wrapped test function; during its execution the specified path is patched.

def tinsel(to_patch, module_name, decorator=mock_decorator):
    def fn_decorator(function):
        def wrapper(*args, **kwargs):
            with patch(to_patch, decorator):
                m = importlib.import_module(module_name)
                reload(m)
                function(*args, **kwargs)
            reload(m)
        return wrapper
    return fn_decorator
1,121,436
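A hypothetical usage sketch (mymodule, requires_auth, and handler are made-up names): the target decorator is swapped for a pass-through while the module is reloaded, then the original module is restored after the test body runs.

# @tinsel("mymodule.requires_auth", "mymodule")
# def test_handler_without_auth():
#     import mymodule
#     mymodule.handler()  # runs with the decorator replaced by a pass-through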
Render list of `trees` to HTML.

Args:
    trees (list): List of :class:`.Tree`.
    path_composer (fn reference): Function used to compose paths from UUID. Look at :func:`.compose_tree_path` from :mod:`.web_tools`.

Returns:
    str: HTML representation of trees.

def render_trees(trees, path_composer):
    trees = list(trees)  # by default, this is set

    def create_pub_cache(trees):
        sub_pubs_uuids = sum((x.collect_publications() for x in trees), [])
        uuid_mapping = {
            uuid: search_pubs_by_uuid(uuid)
            for uuid in set(sub_pubs_uuids)
        }
        # cleaned dict without blank matches
        return {
            uuid: pub[0]
            for uuid, pub in uuid_mapping.iteritems()
            if pub
        }

    # create uuid -> DBPublication cache
    pub_cache = create_pub_cache(trees)

    def render_tree(tree, ind=1):
        if not tree.is_public:
            return ""
        rendered_tree = SimpleTemplate(TREE_TEMPLATE).render(
            tree=tree,
            render_tree=render_tree,
            ind=ind,
            path_composer=path_composer,
            pub_cache=pub_cache,
        )
        # keep nice indentation
        ind_txt = ind * " "
        return ind_txt + ("\n" + ind_txt).join(rendered_tree.splitlines())

    # this is used to get reference for back button
    parent = tree_handler().get_parent(trees[0])
    link_up = path_composer(parent) if parent else None

    return SimpleTemplate(TREES_TEMPLATE).render(
        trees=trees,
        render_tree=render_tree,
        link_up=link_up,
    )
1,121,531
Cut a wire (undo a wire() call).

Arguments:
    - name (str): name of the wire

Keyword Arguments:
    - disconnect (bool): if True also disconnect all connections on the specified wire

def cut(self, name, disconnect=False):
    wire = getattr(self, name, None)
    if wire and isinstance(wire, Wire):
        if name != "main":
            delattr(self, name)
        if disconnect:
            wire.disconnect()
        wire.off("receive", self.on_receive)
        if self.main == wire:
            self.main = None
            self.set_main_wire()
1,121,615
Compose path to the ``resources`` directory for given `fn`.

Args:
    fn (str): Filename of file in ``resources`` directory.

Returns:
    str: Absolute path to the file in resources directory.

def _resource_context(fn):
    return os.path.join(
        os.path.dirname(__file__),
        DES_DIR,
        fn
    )
1,121,669
Compose contract and create PDF.

Args:
    firma (str): firma
    pravni_forma (str): pravni_forma
    sidlo (str): sidlo
    ic (str): ic
    dic (str): dic
    zastoupen (str): zastoupen

Returns:
    obj: StringIO file instance containing PDF file.

def get_contract(firma, pravni_forma, sidlo, ic, dic, zastoupen):
    contract_fn = _resource_context(
        "Licencni_smlouva_o_dodavani_elektronickych_publikaci"
        "_a_jejich_uziti.rst"
    )

    # load contract
    with open(contract_fn) as f:
        contract = f.read()

    # make sure that `firma` has its heading mark
    firma = firma.strip()
    firma = firma + "\n" + ((len(firma) + 1) * "-")

    # patch template
    contract = Template(contract).substitute(
        firma=firma,
        pravni_forma=pravni_forma.strip(),
        sidlo=sidlo.strip(),
        ic=ic.strip(),
        dic=dic.strip(),
        zastoupen=zastoupen.strip(),
        resources_path=RES_PATH
    )

    return gen_pdf(
        contract,
        open(_resource_context("style.json")).read(),
    )
1,121,670
Generate review from `review_struct`.

Args:
    review_struct (obj): :class:`.GenerateReview` instance.

Returns:
    obj: StringIO file instance containing PDF file.

def get_review(review_struct):
    review_fn = _resource_context("review.rst")

    # read review template
    with open(review_fn) as f:
        review = f.read()

    # generate qr code
    with NamedTemporaryFile(suffix=".png") as qr_file:
        url = pyqrcode.create(review_struct.internal_url)
        url.png(qr_file.name, scale=5)

        # save the file
        qr_file.flush()
        qr_file.seek(0)

        # generate template
        review = Template(review).substitute(
            content=review_struct.get_rst(),
            datum=time.strftime("%d.%m.%Y", time.localtime()),
            cas=time.strftime("%H:%M", time.localtime()),
            resources_path=RES_PATH,
            qr_path=qr_file.name,
        )

        return gen_pdf(
            review,
            open(_resource_context("review_style.json")).read(),
        )
1,121,671
Calls the first function matching the URL's pattern and method.

Args:
    url (str): Url for which to call a matching function.
    method (str, optional): The method used while registering a function. Defaults to None.
    args (dict, optional): Additional args to be passed to the matching function.

Returns:
    The function's return value, or `None` if no function was called.

def call(self, url, method=None, args=None):
    if not args:
        args = {}

    if sys.version_info.major == 3:
        data = urllib.parse.urlparse(url)
        path = data.path.rstrip('/') + '/'
        _args = dict(urllib.parse.parse_qs(data.query,
                                           keep_blank_values=True))
    elif sys.version_info.major == 2:
        data = urlparse.urlparse(url)
        path = data.path.rstrip('/') + '/'
        _args = dict(urlparse.parse_qs(data.query, keep_blank_values=True))

    for elem in self._data_store:
        pattern = elem['pattern']
        function = elem['function']
        _method = elem['method']
        type_cast = elem['type_cast']

        result = re.match(pattern, path)

        # Found a matching method
        if result and _method == method:
            _args = dict(_args, **result.groupdict())

            # Unpack value lists (due to urllib.parse.parse_qs) in case
            # there's only one value available
            for key, val in _args.items():
                if isinstance(_args[key], list) and len(_args[key]) == 1:
                    _args[key] = _args[key][0]

            # Apply type-casting if necessary
            for key, val in type_cast.items():
                # Not within available _args, no type-cast required
                if key not in _args:
                    continue

                # Is None or empty, no type-cast required
                if not _args[key]:
                    continue

                # Try and cast the values
                if isinstance(_args[key], list):
                    for i, _val in enumerate(_args[key]):
                        _args[key][i] = self._cast(_val, val)
                else:
                    _args[key] = self._cast(_args[key], val)

            required_args = self._get_function_args(function)
            for key, val in args.items():
                if key in required_args:
                    _args[key] = val

            return function(**_args)

    return None
1,121,705
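The single-value unpacking step above exists because parse_qs always returns lists; a standalone illustration with a hypothetical URL:

from urllib.parse import urlparse, parse_qs

data = urlparse("http://host/users/?id=7&tags=a&tags=b")
args = dict(parse_qs(data.query, keep_blank_values=True))
# parse_qs yields lists: {'id': ['7'], 'tags': ['a', 'b']}
for key, val in args.items():
    if isinstance(val, list) and len(val) == 1:
        args[key] = val[0]
# args is now {'id': '7', 'tags': ['a', 'b']}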
Initialise a new ScanResult.

Args:
    addr (str): Device hardware address in xx:xx:xx:xx:xx:xx format.
    raw_addr (bytearray): Device hardware address as raw bytes.
    name (str): Device name (if available) as ASCII text.
    rssi (float): Latest RSSI from the scan result for the device, if any.

def __init__(self, addr, raw_addr, name=None, rssi=0):
    self.addr = addr
    self.raw_addr = raw_addr
    self.name = name
    self.rssi = rssi
    self._age = time.time()
1,121,766
Retrieve a device with a given address or name from the results.

Args:
    addr_or_name (str): a string containing either a BLE address in xx:xx:xx:xx:xx:xx format, or a plain device name. The supplied value is checked as an address first and if that fails to produce a result, it is matched against each named device in the collection.

Returns:
    The first matching :class:`ScanResult` instance, or None.

def get_device(self, addr_or_name):
    if addr_or_name in self._devices:
        return self._devices[addr_or_name]

    for v in self._devices.values():
        if v.name == addr_or_name:
            return v

    return None
1,121,770
Set calibration state for attached IMUs.

Args:
    enabled (bool): True to apply calibration to IMU data (if available). False to output uncalibrated data.
    imus (list): indicates which IMUs the calibration state should be set on. An empty list or [0, 1, 2, 3, 4] will apply to all IMUs, [0, 1] only to the first 2 IMUs, etc.

def set_calibration(self, enabled, imus):
    if len(imus) == 0:
        imus = list(range(MAX_IMUS))

    for i in imus:
        if i < 0 or i >= MAX_IMUS:
            logger.warn('Invalid IMU index {} in set_calibration'.format(i))
            continue
        self.imus[i]._use_calibration = enabled
1,121,773
Returns the current (R, G, B) colour of the SK8-ExtAna LED.

Args:
    cached (bool): if True, returns the locally cached state of the LED (based on the last call to :meth:`set_extana_led`). Otherwise query the device for the current state.

Returns:
    a 3-tuple (r, g, b) (all unsigned integers) in the range 0-255, or `None` on error.

def get_extana_led(self, cached=True):
    if cached and self.led_state is not None:
        return self.led_state

    extana_led = self.get_characteristic_handle_from_uuid(UUID_EXTANA_LED)
    if extana_led is None:
        logger.warn('Failed to find handle for ExtAna LED')
        return None

    rgb = self.dongle._read_attribute(self.conn_handle, extana_led,
                                      israw=True)
    if rgb is None:
        return rgb

    return list(map(lambda x: int(x * (LED_MAX / INT_LED_MAX)),
                    struct.unpack('<HHH', rgb)))
1,121,778
Returns the SK8 device BLE name.

Args:
    cached (bool): if True, returns the locally cached copy of the name. If this is set to False, or the name is not cached, it will read from the device instead.

Returns:
    str. The current device name. May be `None` if an error occurs.

def get_device_name(self, cached=True):
    if cached and self.name is not None:
        return self.name

    device_name = self.get_characteristic_handle_from_uuid(UUID_DEVICE_NAME)
    if device_name is None:
        logger.warn('Failed to find handle for device name')
        return None

    self.name = self.dongle._read_attribute(self.conn_handle, device_name)
    return self.name
1,121,784
Sets a new BLE device name for this SK8.

Args:
    new_name (str): the new device name as an ASCII string, max 20 characters.

Returns:
    True if the name was updated successfully, False otherwise.

def set_device_name(self, new_name):
    device_name = self.get_characteristic_handle_from_uuid(UUID_DEVICE_NAME)
    if device_name is None:
        logger.warn('Failed to find handle for device name')
        return False

    if len(new_name) > MAX_DEVICE_NAME_LEN:
        logger.error('Device name exceeds maximum length ({} > {})'
                     .format(len(new_name), MAX_DEVICE_NAME_LEN))
        return False

    if self.dongle._write_attribute(self.conn_handle, device_name,
                                    new_name.encode('ascii')):
        self.name = new_name
        return True

    return False
1,121,785
Returns the SK8 device firmware version.

Args:
    cached (bool): if True, returns the locally cached copy of the firmware version. If this is set to False, or the version is not cached, it will read from the device instead.

Returns:
    str. The current firmware version string. May be `None` if an error occurs.

def get_firmware_version(self, cached=True):
    if cached and self.firmware_version != 'unknown':
        return self.firmware_version

    firmware_version = self.get_characteristic_handle_from_uuid(
        UUID_FIRMWARE_REVISION)
    if firmware_version is None:
        logger.warn('Failed to find handle for firmware version')
        return None

    self.firmware_version = self.dongle._read_attribute(self.conn_handle,
                                                        firmware_version)
    return self.firmware_version
1,121,786
Can be used to check if an SK8-ExtAna device is currently connected.

NOTE: do not attempt to call while data streaming is active!

Args:
    cached (bool): if True, use the cached value of the connected hardware state rather than querying the device. Set to False to force a query.

Returns:
    bool. True if the SK8 currently has an SK8-ExtAna device attached, False otherwise.

def has_extana(self, cached=True):
    if cached and self.hardware != -1:
        return True if (self.hardware & EXT_HW_EXTANA) else False

    result = self._check_hardware()
    return True if (result & EXT_HW_EXTANA) != 0 else False
1,121,790
Can be used to check if an external IMU chain is currently connected.

NOTE: do not attempt to call while data streaming is active!

Args:
    cached (bool): if True, use the cached value of the connected hardware state rather than querying the device. Set to False to force a query.

Returns:
    bool. True if the SK8 currently has an IMU chain attached, False otherwise.

def has_imus(self, cached=True):
    if cached and self.hardware != -1:
        return True if (self.hardware & EXT_HW_IMUS) else False

    result = self._check_hardware()
    return True if (result & EXT_HW_IMUS) != 0 else False
1,121,791
Given a characteristic UUID, return its handle.

Args:
    uuid (str): a string containing the hex-encoded UUID

Returns:
    None if an error occurs, otherwise an integer handle.

def get_characteristic_handle_from_uuid(self, uuid):
    ch = self.get_characteristic_from_uuid(uuid)
    return None if ch is None else ch.char_handle
1,121,792
Given a characteristic UUID, return a :class:`Characteristic` object containing information about that characteristic.

Args:
    uuid (str): a string containing the hex-encoded UUID

Returns:
    None if an error occurs, otherwise a :class:`Characteristic` object.

def get_characteristic_from_uuid(self, uuid):
    if uuid in self.uuid_chars:
        logger.debug('Returning cached info for char: {}'.format(uuid))
        return self.uuid_chars[uuid]

    for service in self.services.values():
        char = service.get_characteristic_by_uuid(uuid)
        if char is not None:
            self.uuid_chars[uuid] = char
            logger.debug('Found char for UUID: {}'.format(uuid))
            return char

    logger.info('Failed to find char for UUID: {}'.format(uuid))
    return None
1,121,793
Utility function to retrieve the client characteristic configuration descriptor handle for a given characteristic.

Args:
    uuid (str): a string containing the hex-encoded UUID

Returns:
    None if an error occurs, otherwise an integer handle.

def get_ccc_handle_from_uuid(self, uuid):
    if uuid in self.uuid_cccds:
        return self.uuid_cccds[uuid].handle

    char = self.get_characteristic_from_uuid(uuid)
    if char is None:
        return None

    ccc = char.get_descriptor_by_uuid(UUID_GATT_CCC)
    if ccc is not None:
        self.uuid_cccds[uuid] = ccc
    return None if ccc is None else ccc.handle
1,121,794
Lookup information about a given GATT service.

Args:
    uuid (str): a string containing the hex-encoded service UUID

Returns:
    None if an error occurs, otherwise a :class:`Service` object.

def get_service(self, uuid):
    if uuid in self.services:
        return self.services[uuid]
    if pp_hex(uuid) in self.services:
        return self.services[pp_hex(uuid)]
    return None
1,121,797
Given a characteristic handle, return the :class:`Service` object that the handle belongs to.

Args:
    handle (int): the characteristic handle

Returns:
    None if no service matches the given handle, otherwise a :class:`Service` object.

def get_service_for_handle(self, handle):
    for s in self.services.values():
        if s.start_handle <= handle <= s.end_handle:
            return s
    return None
1,121,798
Open the serial connection to a dongle at the supplied address.

Args:
    address (str): the serial port address of the BLED112 dongle, e.g. 'COM5'
    hard_reset (bool): not currently used

Returns:
    True if a connection with the dongle was established, False otherwise.

def init(self, address, hard_reset=False):
    self.address = address
    if hard_reset:
        # TODO (needs more work to be usable)
        # if not Dongle._hard_reset(address):
        #     return False
        # time.sleep(2.0)
        pass

    # TODO timeout not working if opened on a valid, non-Bluegiga port
    for i in range(Dongle.PORT_RETRIES):
        try:
            logger.debug('Setting up BGAPI, attempt {}/{}'
                         .format(i + 1, Dongle.PORT_RETRIES))
            self.api = BlueGigaAPI(port=self.address, callbacks=self,
                                   baud=Dongle.BAUDRATE,
                                   timeout=DEF_TIMEOUT)
            self.api.start_daemon()
            break
        except serial.serialutil.SerialException as e:
            logger.debug('Failed to init BlueGigaAPI: {}, attempt {}/{}'
                         .format(e, i + 1, Dongle.PORT_RETRIES))
            time.sleep(0.1)

    if self.api is None:
        return False

    time.sleep(0.5)  # TODO

    self.get_supported_connections()
    logger.info('Dongle supports {} connections'
                .format(self.supported_connections))
    if self.supported_connections == -1:
        logger.error('Failed to retrieve number of supported connections '
                     'from the dongle! (try reinserting it)')
        return False

    self.conn_state = {x: self._STATE_IDLE
                       for x in range(self.supported_connections)}
    self.reset()
    self._cbthread = threading.Thread(target=self._cbthreadfunc)
    self._cbthread.setDaemon(True)
    self._cbthread_q = Queue()
    self._cbthread.start()
    return True
1,121,802
Run a BLE scan for a defined interval and return the results. Alternative to :meth:`begin_scan`/:meth:`end_scan`.

Args:
    devnames (list): names of the target devices to scan for
    timeout (float): time in seconds to run the scanning process for
    interval (int): BLE scan interval, in units of 625us
    window (int): BLE scan window, in units of 625us

Returns:
    a :class:`ScanResults` object containing the scan results.

def scan_devices(self, devnames, timeout=DEF_TIMEOUT,
                 interval=DEF_SCAN_INTERVAL, window=DEF_SCAN_WINDOW):
    # TODO validate params and state
    logger.debug('configuring scan parameters')
    self.api.ble_cmd_gap_set_scan_parameters(interval, window, 1)
    self._set_state(self._STATE_CONFIGURE_SCAN)
    self.api.ble_cmd_gap_discover(1)  # any discoverable devices
    self._wait_for_state(self._STATE_CONFIGURE_SCAN)

    logger.debug('starting scan for devices {}'.format(devnames))
    self.scan_targets = devnames
    self._set_state(self._STATE_SCANNING)
    self._wait_for_state(self._STATE_SCANNING, timeout)

    self._set_state(self._STATE_GAP_END)
    self.api.ble_cmd_gap_end_procedure()
    self._wait_for_state(self._STATE_GAP_END)
    logger.debug('scanning completed')
    return self.scan_responses
1,121,812
If possible, set the input buffer to a previous history item.

Parameters
----------
substring : str, optional
    If specified, search for an item with this substring.
as_prefix : bool, optional
    If True, the substring must match at the beginning (default).

Returns
-------
Whether the input buffer was changed.

def history_previous(self, substring='', as_prefix=True):
    index = self._history_index
    replace = False
    while index > 0:
        index -= 1
        history = self._get_edited_history(index)
        if (as_prefix and history.startswith(substring)) \
                or (not as_prefix and substring in history):
            replace = True
            break

    if replace:
        self._store_edits()
        self._history_index = index
        self.input_buffer = history

    return replace
1,121,855
If possible, set the input buffer to a subsequent history item.

Parameters
----------
substring : str, optional
    If specified, search for an item with this substring.
as_prefix : bool, optional
    If True, the substring must match at the beginning (default).

Returns
-------
Whether the input buffer was changed.

def history_next(self, substring='', as_prefix=True):
    index = self._history_index
    replace = False
    while self._history_index < len(self._history):
        index += 1
        history = self._get_edited_history(index)
        if (as_prefix and history.startswith(substring)) \
                or (not as_prefix and substring in history):
            replace = True
            break

    if replace:
        self._store_edits()
        self._history_index = index
        self.input_buffer = history

    return replace
1,121,856
Initialize class and spawn self as Base Class w/o nargs.

Args:
    option_strings (list): list of str giving command line flags that call this action
    dest (str): Namespace reference to value
    nargs (str): number of args as special char or int
    **kwargs (various): optional arguments to pass to super call

def __init__(self, option_strings, dest, nargs=None, **kwargs):
    # Only accept a single value to analyze
    if nargs is not None:
        raise ValueError('nargs not allowed for ThreadCheck')

    # Call self again but without nargs
    super(CheckThreads, self).__init__(option_strings, dest, **kwargs)
1,121,914
Adds new children nodes after filtering for duplicates.

Args:
    children (list): list of OmniTree nodes to add as children

def add_children(self, children):
    self._children += [c for c in children if c not in self._children]
1,121,934
Adds new parent nodes after filtering for duplicates.

Args:
    parents (list): list of OmniTree nodes to add as parents

def add_parents(self, parents):
    self._parents += [p for p in parents if p not in self._parents]
1,121,935
View the details of a manager position.

Parameters:
    request is an HTTP request
    managerTitle is the URL title of the manager.

def manager_view(request, managerTitle):
    targetManager = get_object_or_404(Manager, url_title=managerTitle)

    if not targetManager.active:
        messages.add_message(request, messages.ERROR,
                             MESSAGES['INACTIVE_MANAGER'].format(
                                 managerTitle=targetManager.title))
        return HttpResponseRedirect(reverse('managers:list_managers'))
    else:
        return render_to_response('view_manager.html', {
            'page_name': "View Manager",
            'targetManager': targetManager,
        }, context_instance=RequestContext(request))
1,122,106
View to modify an existing manager.

Parameters:
    request is an HTTP request
    managerTitle is the URL title of the manager.

def edit_manager_view(request, managerTitle):
    targetManager = get_object_or_404(Manager, url_title=managerTitle)
    form = ManagerForm(
        request.POST or None,
        instance=targetManager,
    )
    if form.is_valid():
        manager = form.save()
        messages.add_message(request, messages.SUCCESS,
                             MESSAGES['MANAGER_SAVED'].format(
                                 managerTitle=manager.title))
        return HttpResponseRedirect(reverse('managers:meta_manager'))
    return render_to_response('edit_manager.html', {
        'page_name': "Admin - Edit Manager",
        'form': form,
        "managerset": Manager.objects.all(),
        'manager_title': targetManager.title,
    }, context_instance=RequestContext(request))
1,122,109
View to edit an existing request type. Restricted to presidents and superadmins.

Parameters:
    request is an HTTP request
    typeName is the request type's URL name.

def edit_request_type_view(request, typeName):
    requestType = get_object_or_404(RequestType, url_name=typeName)
    form = RequestTypeForm(
        request.POST or None,
        instance=requestType,
    )
    if form.is_valid():
        rtype = form.save()
        messages.add_message(request, messages.SUCCESS,
                             MESSAGES['REQUEST_TYPE_SAVED'].format(
                                 typeName=rtype.name))
        return HttpResponseRedirect(reverse('managers:manage_request_types'))
    return render_to_response('edit_request_type.html', {
        'page_name': "Admin - Edit Request Type",
        'request_types': RequestType.objects.all(),
        'form': form,
        'requestType': requestType,
    }, context_instance=RequestContext(request))
1,122,112
A Flask application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/

Arguments:
    object_name: the python path of the config object, e.g. webapp.settings.ProdConfig

def create_app(object_name):
    app = Flask(__name__)
    app.config.from_object(object_name)

    # initialize the cache
    cache.init_app(app)

    # initialize the debug tool bar
    debug_toolbar.init_app(app)

    # initialize SQLAlchemy
    db.init_app(app)

    return app
1,122,141
Loads the file_locations into the triplestores.

args:
    file_locations: list of tuples to load
        [('vocabularies', [list of vocabs to load]),
         ('directory', '/directory/path'),
         ('filepath', '/path/to/a/file'),
         ('package_all', 'name.of.a.package.with.defs'),
         ('package_file', 'name.of.package', 'filename')]
    custom: list of custom definitions to load

def load(self, file_locations=[], **kwargs):
    self.__set_cache_dir__(**kwargs)
    conn = self.__get_conn__(**kwargs)
    self.set_load_state(**kwargs)
    super(DefinitionManager, self).load(file_locations, **kwargs)
    if not file_locations:
        file_locations = self.__file_locations__
    if file_locations:
        log.info("loading vocabs into conn '%s'", conn)
    for item in file_locations:
        if item[0] == 'vocabularies':
            vocabs = item[1]
            if item[1] == "all":
                vocabs = self.vocab_map
            for vocab in vocabs:
                self.load_vocab(vocab)
    self.loaded_files(reset=True)
    self.loaded_times = self.load_times(**kwargs)
1,122,290
Sets the cache directory by testing write permissions for various locations.

args:
    directories: list of directories to test. The first one with read-write permissions is selected.

def __set_cache_dir__(self, cache_dirs=[], **kwargs):
    # add a path for a subfolder 'vocabularies'
    test_dirs = [self.vocab_dir] + cache_dirs
    try:
        test_dirs += [os.path.join(__CFG__.CACHE_DATA_PATH,
                                   "vocabularies")]
    except (RuntimeWarning, TypeError):
        pass

    super(DefinitionManager, self).__set_cache_dir__(test_dirs, **kwargs)
1,122,291
Loads a vocabulary into the definition triplestore.

args:
    vocab_name: the prefix, uri or filename of a vocabulary

def load_vocab(self, vocab_name, **kwargs):
    log.setLevel(kwargs.get("log_level", self.log_level))
    vocab = self.get_vocab(vocab_name, **kwargs)
    if vocab['filename'] in self.loaded:
        if self.loaded_times.get(vocab['filename'],
                                 datetime.datetime(2001, 1, 1)).timestamp() \
                < vocab['modified']:
            self.drop_file(vocab['filename'], **kwargs)
        else:
            return
    conn = kwargs.get("conn", self.conn)
    conn.load_data(graph=getattr(__NSM__.kdr, vocab['filename']).clean_uri,
                   data=vocab['data'],
                   datatype=vocab['filename'].split(".")[-1],
                   log_level=logging.WARNING)
    self.__update_time__(vocab['filename'], **kwargs)
    log.warning("\n\tvocab: '%s' loaded \n\tconn: '%s'",
                vocab['filename'], conn)
    self.loaded.append(vocab['filename'])
1,122,292
Return the dictionary for the specified vocabulary.

args:
    vocab_name: the name or uri of the vocab to return

def __get_vocab_dict__(self, vocab_name, **kwargs):
    try:
        vocab_dict = self.vocab_map[vocab_name].copy()
    except KeyError:
        vocab_dict = {key: value for key, value in self.vocab_map.items()
                      if vocab_name in value.values()}
        vocab_name = list(vocab_dict)[0]
        vocab_dict = vocab_dict.pop(vocab_name)
    return vocab_dict
1,122,293
Returns the data stream of an rdf vocabulary.

args:
    vocab_name: the name or uri of the vocab to return

def get_vocab(self, vocab_name, **kwargs):
    vocab_dict = self.__get_vocab_dict__(vocab_name, **kwargs)

    filepaths = list(set([
        os.path.join(self.cache_dir, vocab_dict['filename']),
        os.path.join(self.vocab_dir, vocab_dict['filename'])
    ]))

    for path in filepaths:
        if os.path.exists(path):
            with open(path, 'rb') as f_obj:
                vocab_dict.update({"name": vocab_name,
                                   "data": f_obj.read(),
                                   "modified": os.path.getmtime(path)})
            return vocab_dict

    download_locs = make_list(vocab_dict.get('download', []))
    for loc in download_locs:
        loc_web = urllib.request.urlopen(loc)
        # loc_file_date = date_parse(loc_web.info()['Last-Modified'])
        urllib.request.urlretrieve(loc, filepaths[0])
        with open(filepaths[0], 'rb') as f_obj:
            vocab_dict.update({"name": vocab_name,
                               "data": f_obj.read(),
                               "modified": os.path.getmtime(filepaths[0])})
        return vocab_dict
1,122,294
Removes the vocab from the definition triplestore.

args:
    vocab_name: the name or uri of the vocab to remove

def drop_vocab(self, vocab_name, **kwargs):
    vocab_dict = self.__get_vocab_dict__(vocab_name, **kwargs)
    return self.drop_file(vocab_dict['filename'], **kwargs)
1,122,295
Runs the extractor.

Args:
    output: ['filepath', None]

def run(self, tag=None, output=None, **kwargs):
    start = datetime.datetime.now()
    count = 0
    if tag:
        tag = Uri(tag)
        xml_generator = etree.iterparse(self.source, tag=tag.etree)
    else:
        xml_generator = etree.iterparse(self.source)
    i = 0
    for event, element in xml_generator:
        type_tags = element.findall(_RDF_TYPE_TAG)
        rdf_types = [el.get(_RES_TAG)
                     for el in type_tags
                     if el.get(_RES_TAG)]
        if str(self.filter_val) in rdf_types:
            count += 1
        i += 1
        element.clear()
    print("Found '{}' items in {}".format(
        count, (datetime.datetime.now() - start)))
1,122,556
Initialize Keywords.

Args:
    name -- keyword name
    value -- Optional value, otherwise name is used

value is set up as *value to detect whether the parameter was supplied, while still supporting None. If no value is supplied then name is used. If any value is supplied (even None), then that value is used instead.

def __init__(self, name, *value):
    self.name = name
    self.key = name
    self.value = name if len(value) != 1 else value[0]
    self.description = "Matches {!r} and maps it to {!r}".format(
        name, self.value)
1,122,654
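The *value trick above distinguishes "no value supplied" from "None supplied"; a minimal standalone sketch of the same idiom:

def pick(name, *value):
    # len(value) == 0 -> nothing supplied, fall back to name;
    # (None,) -> an explicit None was supplied and is honoured
    return name if len(value) != 1 else value[0]

# pick("x") -> 'x'
# pick("x", None) -> None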
Create a dictionary type from a dictionary of other types.

Args:
    validator_map -- a mapping from names to types

Examples:
    >>> Dict({'a': int, 'b': int})('a:1,b:2')
    {'a': 1, 'b': 2}
    >>> Dict({'a': str, 'b': int})('a:asdf b=1234')
    {'a': 'asdf', 'b': 1234}
    >>> Dict({'a': Int() | Keyword('', None), 'b': Int()})('a,b=1')
    {'a': None, 'b': 1}

def __init__(self, validator_map):
    self.validators = dict(validator_map)
    v_sorted = sorted(self.validators.items(), key=lambda t: t[0])
    self.validator_descriptions = ['{}:<{}>'.format(k, v)
                                   for k, v in v_sorted]
    self.name = 'dict({})'.format(', '.join(self.validator_descriptions))
    self.description = '\nDict options: \n  '
    self.description += '\n  '.join(self.validator_descriptions)
    self.kv_regex = re.compile(r'[=:]+')
1,122,659
Send an email from the user (a Gmail account) to the receiver.

Args:
    subject (str): Subject of the email.
    message (str): A message.
    filepaths (list(str)): Filepaths to files to be attached.
    config (defaultdict): A defaultdict.

async def send_with_attachments(subject, message, filepaths, config):
    email_ = MIMEMultipart()
    email_.attach(MIMEText(message))
    email_["Subject"] = subject
    email_["From"] = get_attribute_from_config(config, EMAIL_SECTION_KEY,
                                               USER_KEY)
    email_["To"] = get_attribute_from_config(config, EMAIL_SECTION_KEY,
                                             RECEIVER_KEY)
    _attach_files(filepaths, email_)
    await _send_email(email_, config)
1,122,805
Take a list of filepaths and attach the files to a MIMEMultipart.

Args:
    filepaths (list(str)): A list of filepaths.
    email_ (email.MIMEMultipart): A MIMEMultipart email_.

def _attach_files(filepaths, email_):
    for filepath in filepaths:
        base = os.path.basename(filepath)
        with open(filepath, "rb") as file:
            part = MIMEApplication(file.read(), Name=base)
        part["Content-Disposition"] = 'attachment; filename="%s"' % base
        email_.attach(part)
1,122,806
Send an email.

Args:
    email_ (email.MIMEMultipart): The email to send.
    config (defaultdict): A defaultdict.

async def _send_email(email_, config, loop=None):
    # Resolve the loop at call time rather than as an eagerly evaluated
    # default argument
    loop = loop or asyncio.get_event_loop()
    smtp_server = get_attribute_from_config(config, EMAIL_SECTION_KEY,
                                            SMTP_SERVER_KEY)
    smtp_port = int(get_attribute_from_config(config, EMAIL_SECTION_KEY,
                                              SMTP_PORT_KEY))
    user = get_attribute_from_config(config, EMAIL_SECTION_KEY, USER_KEY)
    password = get_attribute_from_config(config, EMAIL_SECTION_KEY,
                                         PASSWORD_KEY)
    server = aiosmtplib.SMTP(hostname=smtp_server, port=smtp_port,
                             loop=loop, use_tls=False)
    await server.connect()
    await server.starttls()
    await server.login(user, password)
    await server.send_message(email_)
    await server.quit()
1,122,807
Send files using the config.ini settings.

Args:
    filepaths (list(str)): A list of filepaths.

async def send_files_preconf(filepaths, config_path=CONFIG_PATH):
    config = read_config(config_path)
    subject = "PDF files from pdfebc"
    message = ""
    await send_with_attachments(subject, message, filepaths, config)
1,122,808
Return a submagic menu by name, and create it if needed.

Parameters
----------
menulabel : str
    Label for the menu

Will infer the menu name from the identifier at creation if menulabel is not given. To do so you have to give menuidentifier as a CamelCasedString.

def _get_magic_menu(self, menuidentifier, menulabel=None):
    menu = self._magic_menu_dict.get(menuidentifier, None)
    if not menu:
        if not menulabel:
            menulabel = re.sub(r"([a-zA-Z]+)([A-Z][a-z])", r"\g<1> \g<2>",
                               menuidentifier)
        menu = QtGui.QMenu(menulabel, self.magic_menu)
        self._magic_menu_dict[menuidentifier] = menu
        self.magic_menu.insertMenu(self.magic_menu_separator, menu)
    return menu
1,123,285
Method to process the subscribe channel's messages. This method reads a message and processes its content in different outputs like stdout, stderr, pyout and status.

Arguments:
    sub_msg: message received from the kernel on the sub socket channel, captured by the kernel manager.

def handle_iopub(self):
    while self.km.sub_channel.msg_ready():
        sub_msg = self.km.sub_channel.get_msg()
        msg_type = sub_msg['header']['msg_type']
        parent = sub_msg["parent_header"]
        if (not parent) or self.session_id == parent['session']:
            if msg_type == 'status':
                if sub_msg["content"]["execution_state"] == "busy":
                    pass
            elif msg_type == 'stream':
                if sub_msg["content"]["name"] == "stdout":
                    print(sub_msg["content"]["data"], file=io.stdout, end="")
                    io.stdout.flush()
                elif sub_msg["content"]["name"] == "stderr":
                    print(sub_msg["content"]["data"], file=io.stderr, end="")
                    io.stderr.flush()
            elif msg_type == 'pyout':
                self.execution_count = int(
                    sub_msg["content"]["execution_count"])
                format_dict = sub_msg["content"]["data"]
                # taken from DisplayHook.__call__:
                hook = self.displayhook
                hook.start_displayhook()
                hook.write_output_prompt()
                hook.write_format_data(format_dict)
                hook.log_output(format_dict)
                hook.finish_displayhook()
1,123,620
Summary:
    Validate file checksum using md5 hash.

Args:
    file: file object to verify integrity
    hash_file: md5 reference checksum file

Returns:
    Valid (True) | False, TYPE: bool

def valid_checksum(file, hash_file):
    bits = 4096

    # calc md5 hash
    hash_md5 = hashlib.md5()
    with open(file, "rb") as f:
        for chunk in iter(lambda: f.read(bits), b""):
            hash_md5.update(chunk)

    # locate hash signature for file, validate
    with open(hash_file) as c:
        for line in c.readlines():
            if line.strip():
                check_list = line.split()
                if file == check_list[1]:
                    if check_list[0] == hash_md5.hexdigest():
                        return True
    return False
1,123,672
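A hypothetical companion that writes the reference file valid_checksum() expects, mimicking the conventional md5sum(1) line format of "<hash>  <filename>":

import hashlib

def write_md5_file(file, hash_file):
    # hash the file in 4096-byte chunks, like valid_checksum() does
    hash_md5 = hashlib.md5()
    with open(file, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    with open(hash_file, "w") as c:
        c.write("%s  %s\n" % (hash_md5.hexdigest(), file))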
Create a daemon which is controllable via jsonrpc with a decorator.

Args:
    pidfile (str): path to create pid file
    logger (logging.Logger): logger for the daemon
    port (int): port to listen on
    host (str): host name or address to bind to

def __init__(self, pidfile, logger, port=64042, host='localhost'):
    super(RemoteControllerDeamon, self).__init__(pidfile, logger)
    self.__port = port
    self.__host = host
    for name in dir(self):
        method = getattr(self, name)
        if hasattr(method, 'registered_for_rpc'):
            self.register_method(method,
                                 method.registered_for_rpc.__name__)
1,123,956
Set all configuration specified in :attr:`REQUIRED_SETTINGS`.

Args:
    configuration (str): Configuration file content.

Returns:
    str: Updated configuration.

def update_configuration(configuration):
    for key, val in REQUIRED_SETTINGS.items():
        if val in ["$username", "$groupname"]:
            val = get_username()
        configuration = conf_writer.add_or_update(configuration, key, val)
    return configuration
1,124,411
Creates the configuration file and the directory where it should be stored, and sets correct permissions.

Args:
    cnf_file (str): Path to the configuration file.
    uid (int): User ID - will be used for chown.
    overwrite (bool): Overwrite the configuration with :attr:`CLEAN_CONFIG`.

def create_config(cnf_file, uid, overwrite):
    conf = None

    # needed also on suse, because of the pyClamd module
    if not os.path.exists(settings.DEB_CONF_PATH):
        os.makedirs(settings.DEB_CONF_PATH, 0755)
        os.chown(settings.DEB_CONF_PATH, uid, -1)

    if not os.path.exists(cnf_file):  # create new conf file
        conf = CLEAN_CONFIG
    elif overwrite:  # overwrite old conf file
        backup_name = cnf_file + "_"
        if not os.path.exists(backup_name):
            shutil.copyfile(cnf_file, backup_name)
            os.chown(backup_name, uid, -1)
        conf = CLEAN_CONFIG
    else:  # switch variables in existing file
        with open(cnf_file) as f:
            conf = f.read()

    # write the conf file
    with open(cnf_file, "w") as f:
        f.write(update_configuration(conf))

    # permission check (uid)
    os.chown(cnf_file, uid, -1)
    os.chmod(cnf_file, 0644)

    symlink = settings.DEB_CONF_PATH + settings.CONF_FILE
    if not settings.is_deb_system() and not os.path.exists(symlink):
        os.symlink(cnf_file, symlink)
        os.chown(symlink, uid, -1)
        os.chmod(symlink, 0644)
1,124,412
Create log file and set necessary permissions.

Args:
    log_file (str): Path to the log file.
    uid (int): User ID - will be used for chown.

def create_log(log_file, uid):
    if not os.path.exists(log_file):  # create new log file
        dir_name = os.path.dirname(log_file)
        if not os.path.exists(dir_name):
            os.makedirs(dir_name, 0755)
            os.chown(dir_name, uid, -1)

        with open(log_file, "w") as f:
            f.write("")

    os.chown(log_file, uid, -1)
    os.chmod(log_file, 0640)
1,124,413
Create configuration and log file. Restart the daemon when configuration is done.

Args:
    conf_file (str): Path to the configuration file.
    overwrite (bool): Overwrite the configuration file with `clean` config?

def main(conf_file, overwrite, logger):
    uid = pwd.getpwnam(get_username()).pw_uid

    # stop the daemon
    logger.info("Stopping the daemon.")
    sh.service(get_service_name(), "stop")

    # create files
    logger.info("Creating config file.")
    create_config(
        cnf_file=conf_file,
        uid=uid,
        overwrite=overwrite
    )

    logger.info("Creating log file.")
    create_log(
        log_file=REQUIRED_SETTINGS["LogFile"],
        uid=uid
    )

    # start the daemon
    logger.info("Starting the daemon..")
    sh.service(get_service_name(), "start")
1,124,414
If the callback is callable, format the string with the args and make the call. Otherwise, do nothing.

Args:
    callback (function): May or may not be callable.
    formattable_string (str): A string with '{}'s inserted.
    *args: A variable amount of arguments for the string formatting. Must correspond to the amount of '{}'s in 'formattable_string'.

Raises:
    ValueError

def if_callable_call_with_formatted_string(callback, formattable_string, *args):
    try:
        formatted_string = formattable_string.format(*args)
    except IndexError:
        raise ValueError("Mismatch between the number of insertion points in "
                         "the formattable string\n"
                         "and the number of args given.")
    if callable(callback):
        callback(formatted_string)
1,124,518
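Doctest-style checks of both branches (callable and non-callable callback):

>>> if_callable_call_with_formatted_string(print, "{} + {} = {}", 1, 2, 3)
1 + 2 = 3
>>> if_callable_call_with_formatted_string(None, "{}", 1)  # silently does nothing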
Convert text file to JSON.

Arguments:
    domain: domain name of updating target
    action: True for the PUT/POST HTTP method; False for the DELETE HTTP method
    filename: text file for bulk updating (default is False)
    record: json record for updating a single record (default is False)

def set_json(domain, action, filename=False, record=False):
    o = JSONConverter(domain)

    if filename:
        # for 'bulk_create/bulk_delete'
        with open(filename, 'r') as f:
            o.separate_input_file(f)
        for item in o.separated_list:
            o.read_records(item.splitlines())
            o.generata_data(action)
    elif record:
        # for 'create/delete'
        o.read_records(record)
        o.generata_data(action)

    return o.dict_records
1,124,876
Set options of command line.

Arguments:
    prs: parser object of argparse
    keyword: processing keyword
    required: True if the option is required (default is False)

def set_option(prs, keyword, required=False):
    if keyword == 'server':
        prs.add_argument(
            '-s', dest='server', required=True,
            help='specify TonicDNS Server hostname or IP address')
    if keyword == 'username':
        prs.add_argument('-u', dest='username', required=True,
                         help='TonicDNS username')
    if keyword == 'password':
        group = prs.add_mutually_exclusive_group(required=True)
        group.add_argument('-p', dest='password',
                           help='TonicDNS password')
        group.add_argument('-P', action='store_true',
                           help='TonicDNS password prompt')
    if keyword == 'infile':
        prs.add_argument('infile', action='store',
                         help='pre-converted text file')
    if keyword == 'domain':
        prs.add_argument('--domain', action='store', required=True,
                         help='create record with specify domain')
        prs.add_argument('--name', action='store', required=True,
                         help='specify with domain option')
        prs.add_argument('--rtype', action='store', required=True,
                         help='specify with domain option')
        prs.add_argument('--content', action='store', required=True,
                         help='specify with domain option')
        prs.add_argument('--ttl', action='store', default='3600',
                         help='specify with domain option, default 3600')
        prs.add_argument('--priority', action='store', default=False,
                         help='specify with domain and '
                         'rtype options as MX|SRV')
    if keyword == 'update':
        prs.add_argument('--new-type', action='store',
                         help='specify new value with domain option')
        prs.add_argument('--new-content', action='store',
                         help='specify new value with domain option')
        prs.add_argument('--new-ttl', action='store',
                         help='specify new value with domain option')
        prs.add_argument('--new-priority', action='store',
                         help='specify new value with domain option')
    if keyword == 'template':
        msg = 'specify template identifier'
        if required:
            prs.add_argument('--template', action='store',
                             required=True, help=msg)
        else:
            prs.add_argument('--template', action='store', help=msg)
    if keyword == 'search':
        prs.add_argument('--search', action='store',
                         help='partial match search or refine search. '
                         'latter syntax is "name,rtype,content"')
1,124,888
Set options of connecting to TonicDNS API server.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def conn_options(prs, conn):
    if conn.get('server') and conn.get('username') and conn.get('password'):
        prs.set_defaults(server=conn.get('server'),
                         username=conn.get('username'),
                         password=conn.get('password'))
    elif conn.get('server') and conn.get('username'):
        prs.set_defaults(server=conn.get('server'),
                         username=conn.get('username'))
    if conn.get('auto_update_soa'):
        prs.set_defaults(auto_update_soa=conn.get('auto_update_soa'))
    else:
        prs.set_defaults(auto_update_soa=False)
    if not conn.get('server'):
        set_option(prs, 'server')
    if not conn.get('username'):
        set_option(prs, 'username')
    if not conn.get('password'):
        set_option(prs, 'password')
1,124,889
Retrieve records.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_get(prs, conn):
    prs_get = prs.add_parser(
        'get', help='retrieve all zones or records with a specific zone')
    prs_get.add_argument('--domain', action='store',
                         help='specify domain FQDN')
    conn_options(prs_get, conn)
    set_option(prs_get, 'search')
    prs_get.set_defaults(func=get)
1,124,892
Create record.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_create(prs, conn):
    prs_create = prs.add_parser(
        'create', help='create record of specific zone')
    set_option(prs_create, 'domain')
    conn_options(prs_create, conn)
    prs_create.set_defaults(func=create)
1,124,893
Create bulk_records.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_bulk_create(prs, conn):
    prs_create = prs.add_parser(
        'bulk_create', help='create bulk records of specific zone')
    set_option(prs_create, 'infile')
    conn_options(prs_create, conn)
    prs_create.add_argument('--domain', action='store',
                            help='create records with specify zone')
    prs_create.set_defaults(func=create)
1,124,894
Delete record.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_delete(prs, conn):
    prs_delete = prs.add_parser(
        'delete', help='delete a record of specific zone')
    set_option(prs_delete, 'domain')
    conn_options(prs_delete, conn)
    prs_delete.set_defaults(func=delete)
1,124,895
Delete bulk_records.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_bulk_delete(prs, conn):
    prs_delete = prs.add_parser(
        'bulk_delete', help='delete bulk records of specific zone')
    set_option(prs_delete, 'infile')
    conn_options(prs_delete, conn)
    prs_delete.add_argument('--domain', action='store',
                            help='delete records with specify zone')
    prs_delete.set_defaults(func=delete)
1,124,896
Update a record.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_update(prs, conn):
    prs_update = prs.add_parser(
        'update', help='update record of specific zone')
    set_option(prs_update, 'domain')
    set_option(prs_update, 'update')
    conn_options(prs_update, conn)
    prs_update.set_defaults(func=update)
1,124,897
Retrieve template.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_get_tmpl(prs, conn):
    prs_tmpl_get = prs.add_parser(
        'tmpl_get', help='retrieve templates')
    set_option(prs_tmpl_get, 'template')
    conn_options(prs_tmpl_get, conn)
    prs_tmpl_get.set_defaults(func=retrieve_tmpl)
1,124,898
Delete template.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_delete_tmpl(prs, conn):
    prs_tmpl_delete = prs.add_parser(
        'tmpl_delete', help='delete template')
    set_option(prs_tmpl_delete, 'template', required=True)
    conn_options(prs_tmpl_delete, conn)
    prs_tmpl_delete.set_defaults(func=delete_tmpl)
1,124,899
Update SOA serial.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_update_soa(prs, conn):
    prs_soa = prs.add_parser('soa', help='update SOA record')
    prs_soa.add_argument('--domain', action='store', required=True,
                         help='specify domain FQDN')
    prs_soa.add_argument('--mname', action='store',
                         help='specify MNAME of SOA record')
    prs_soa.add_argument('--rname', action='store',
                         help='specify RNAME of SOA record')
    prs_soa.add_argument('--refresh', action='store', type=int,
                         help='specify REFRESH of SOA record')
    prs_soa.add_argument('--retry', action='store', type=int,
                         help='specify RETRY of SOA record')
    prs_soa.add_argument('--expire', action='store', type=int,
                         help='specify EXPIRE of SOA record')
    prs_soa.add_argument('--minimum', action='store', type=int,
                         help='specify MINIMUM of SOA record')
    conn_options(prs_soa, conn)
    prs_soa.set_defaults(func=update_soa_serial)
1,124,900
Create zone.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_create_zone(prs, conn):
    prs_zone_create = prs.add_parser('zone_create', help='create zone')
    prs_zone_create.add_argument(
        '--domain', action='store', required=True, help='specify zone')
    prs_zone_create.add_argument('--dnsaddr', action='store', required=True,
                                 help='specify IP address of DNS master')
    group_zone_create = prs_zone_create.add_mutually_exclusive_group()
    group_zone_create.add_argument('-S', action='store_true',
                                   help='create zone to SLAVE')
    group_zone_create.add_argument('-N', action='store_true',
                                   help='create zone to NATIVE')
    conn_options(prs_zone_create, conn)
    prs_zone_create.set_defaults(func=create_zone)
1,124,901
Delete zone.

Arguments:
    prs: parser object of argparse
    conn: dictionary of connection information

def parse_delete_zone(prs, conn):
    prs_zone_delete = prs.add_parser('zone_delete', help='delete zone')
    prs_zone_delete.add_argument('--domain', action='store', required=True,
                                 help='specify zone')
    conn_options(prs_zone_delete, conn)
    prs_zone_delete.set_defaults(func=delete_zone)
1,124,902
Event handler bound to the receive event of the link the server is wired to.

Arguments:
    - message (message.Message): incoming message

Keyword arguments:
    - event_origin (connection.Link)

def on_receive(self, message=None, wire=None, event_origin=None):
    self.trigger("before_call", message)
    fn_name = message.data
    pmsg = self.prepare_message
    try:
        for handler in self.handlers:
            handler.incoming(message, self)
        fn = self.get_function(fn_name, message.path)
    except Exception as inst:
        wire.respond(message, ErrorMessage(str(inst)))
        return

    if callable(fn) and getattr(fn, "exposed", False):
        try:
            r = fn(*message.args, **message.kwargs)
            if isinstance(r, Message):
                wire.respond(message, pmsg(r))
            else:
                wire.respond(message, pmsg(Message(r)))
        except Exception as inst:
            if self.debug:
                wire.respond(message,
                             pmsg(ErrorMessage(str(traceback.format_exc()))))
            else:
                wire.respond(message, pmsg(ErrorMessage(str(inst))))
    else:
        wire.respond(
            message,
            pmsg(
                ErrorMessage("action '%s' not exposed on API (%s)" %
                             (fn_name, self.__class__.__name__))
            )
        )
    self.trigger("after_call", message)
1,124,923
Destroy remote instance of widget.

Arguments:
    - id (str): widget id
    - name (str): widget type name

def detach_remote(self, id, name):
    if name in self.widgets:
        if id in self.widgets[name]:
            del self.widgets[name][id]
1,124,939
Create remote instance of widget.

Arguments:
    - id (str): widget id
    - name (str): widget type name

Keyword Arguments:
    - any further arguments you wish to pass to the widget constructor

def attach_remote(self, id, name, **kwargs):
    client_id = id.split(".")[0]
    widget = self.make_widget(
        id,
        name,
        dispatcher=ProxyDispatcher(
            self,
            link=getattr(self.clients[client_id], "link", None)
        ),
        **kwargs
    )
    self.store_widget(widget)
    self.log_debug("Attached widget: %s" % id)
1,124,940
Get the signature of the current state of the repository TODO right now `get_signature` is an effectful process in that it adds all untracked file to staging. This is the only way to get accruate diff on new files. This is ok because we only use it on a disposable copy of the repo. Args: base_commit - the base commit ('HEAD', sha, etc.) Returns: str
def get_signature(self, base_commit=None):
    if base_commit is None:
        base_commit = 'HEAD'
    self.run('add', '-A', self.path)
    sha = self.run('rev-parse', '--verify', base_commit).strip()
    diff = self.run('diff', sha).strip()
    if len(diff) == 0:
        try:
            return self.get_signature(base_commit + '~1')
        except CommandError:
            pass
    h = hashlib.sha1()
    h.update(sha)
    h.update(diff)
    return h.hexdigest()
1,125,083
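A self-contained sketch of the same signature idea, usable outside the repo class; the repo path and base commit are illustrative, and plain subprocess stands in for the class's run helper.
    import hashlib
    import subprocess

    def signature_of(repo_path, base='HEAD'):
        # hash the resolved commit SHA together with the diff against it,
        # so any change to tracked content changes the signature
        sha = subprocess.check_output(
            ['git', '-C', repo_path, 'rev-parse', '--verify', base]).strip()
        diff = subprocess.check_output(
            ['git', '-C', repo_path, 'diff', sha.decode()]).strip()
        h = hashlib.sha1()
        h.update(sha)
        h.update(diff)
        return h.hexdigest()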
Install the repository hook for this repo. Args: hook_name (str) hook_content (str)
def install_hook(self, hook_name, hook_content):
    hook_path = os.path.join(self.path, '.git/hooks', hook_name)
    with open(hook_path, 'w') as f:
        f.write(hook_content)
    os.chmod(hook_path, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE)
1,125,084
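Hypothetical usage, assuming `repo` is an instance of the class above; the hook name and body are illustrative only.
    hook_body = "#!/bin/sh\necho commit >> /tmp/ci.log\n"
    repo.install_hook('post-commit', hook_body)
    # the file lands in .git/hooks/post-commit with the execute bit set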
This function reads the fixup array and applies the correct values to the underlying binary stream. This function changes the bin_view in memory. Args: bin_view (memoryview of bytearray) - The binary stream fx_offset (int) - Offset to the fixup array fx_count (int) - Number of elements in the fixup array entry_size (int) - Size of the MFT entry
def apply_fixup_array(bin_view, fx_offset, fx_count, entry_size):
    fx_array = bin_view[fx_offset:fx_offset+(2 * fx_count)]
    #the array is composed of the signature + substitutions, so fix that
    fx_len = fx_count - 1
    #we can infer the sector size based on the entry size
    sector_size = int(entry_size / fx_len)
    index = 1
    position = (sector_size * index) - 2
    while (position <= entry_size):
        if bin_view[position:position+2].tobytes() == fx_array[:2].tobytes():
            #the replaced part must always match the signature!
            bin_view[position:position+2] = fx_array[index * 2:(index * 2) + 2]
        else:
            _MOD_LOGGER.error("Error applying the fixup array")
            raise FixUpError(f"Signature {fx_array[:2].tobytes()} does not match {bin_view[position:position+2].tobytes()} at offset {position}.")
        index += 1
        position = (sector_size * index) - 2
    _MOD_LOGGER.info("Fix up array applied successfully.")
1,125,382
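A worked example on a synthetic 1024-byte "MFT entry" with two 512-byte sectors, assuming apply_fixup_array is importable along with its FixUpError/_MOD_LOGGER module state; the offset and byte values are made up.
    entry = bytearray(1024)
    fx_offset, fx_count = 48, 3                 # signature word + one word per sector
    entry[fx_offset:fx_offset + 6] = b'\x01\x00\xaa\xbb\xcc\xdd'
    entry[510:512] = b'\x01\x00'                # sector 1 ends with the signature
    entry[1022:1024] = b'\x01\x00'              # sector 2 ends with the signature

    apply_fixup_array(memoryview(entry), fx_offset, fx_count, 1024)
    assert entry[510:512] == b'\xaa\xbb'        # original bytes restored
    assert entry[1022:1024] == b'\xcc\xdd'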
This function provides a simple way to iterate over a "complex" iterable; for example, if the input is [12, [23], (4, 3), "lkjasddf"], this will return an Iterable that yields 12, 23, 4, 3 and "lkjasddf". Args: iterable (Iterable) - A complex iterable that will be flattened Returns: (Iterable): An Iterable that flattens multiple iterables
def flatten(iterable):
    return itertools.chain.from_iterable(
        a if isinstance(a, Iterable) and not isinstance(a, str) else [a]
        for a in iterable
    )
1,125,383
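Assuming `Iterable` was imported from collections.abc at module level, usage looks like:
    >>> list(flatten([12, [23], (4, 3), "lkjasddf"]))
    [12, 23, 4, 3, 'lkjasddf']
    >>> list(flatten([1, [2, [3]]]))        # flattens one level only
    [1, 2, [3]]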
Returns the size, in bytes, of a file. Expects an object that supports seek and tell methods. Args: file_object (file_object) - The object that represents the file Returns: (int): size of the file, in bytes
def get_file_size(file_object):
    position = file_object.tell()
    file_object.seek(0, 2)
    file_size = file_object.tell()
    file_object.seek(position, 0)
    return file_size
1,125,384
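Because the function saves and restores the cursor, a quick check with an in-memory file:
    >>> import io
    >>> f = io.BytesIO(b'hello world')
    >>> f.read(5)                 # move the cursor somewhere
    b'hello'
    >>> get_file_size(f)          # size is reported ...
    11
    >>> f.tell()                  # ... and the cursor position is restored
    5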
Convert `name` to the ASCII vector. Example: >>> name_to_vector("ing. Franta Putšálek") ['putsalek', 'franta', 'ing'] Args: name (str): Name which will be vectorized. Returns: list: Vector created from name.
def name_to_vector(name):
    if not isinstance(name, unicode):
        name = name.decode("utf-8")
    name = name.lower()
    name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')
    name = "".join(filter(lambda x: x.isalpha() or x == " ", list(name)))
    return sorted(name.split(), key=lambda x: len(x), reverse=True)
1,125,795
Compare two names in a complicated, but more error-tolerant way. The algorithm uses vector comparison. Example: >>> compare_names("Franta Putšálek", "ing. Franta Putšálek") 100.0 >>> compare_names("F. Putšálek", "ing. Franta Putšálek") 50.0 Args: first (str): First name as string. second (str): Second name as string. Returns: float: Percentage of the similarity.
def compare_names(first, second):
    first = name_to_vector(first)
    second = name_to_vector(second)
    zipped = zip(first, second)
    if not zipped:
        return 0
    similarity_factor = 0
    for fitem, _ in zipped:
        if fitem in second:
            similarity_factor += 1
    return (float(similarity_factor) / len(zipped)) * 100
1,125,796
Filter publications based on data from Aleph. Args: publication (obj): :class:`.Publication` instance. Returns: obj/None: None if the publication was found in Aleph or `publication` \ if not.
def filter_publication(publication, cmp_authors=True):
    query = None
    isbn_query = False

    # there can be ISBN query or book title query
    if publication.optionals and publication.optionals.ISBN:
        query = aleph.ISBNQuery(publication.optionals.ISBN)
        isbn_query = True
    else:
        query = aleph.TitleQuery(publication.title)

    result = aleph.reactToAMQPMessage(aleph.SearchRequest(query), "")

    if not result.records:
        return publication  # book is not in database

    # if there were results with this ISBN, compare titles of the books
    # (sometimes, there are different books with same ISBN because of human
    # errors)
    if isbn_query:
        for record in result.records:
            epub = record.epublication
            # try to match title of the book
            if compare_names(epub.nazev, publication.title) >= 80:
                return None  # book already in database
        return publication

    # checks whether the details from returned EPublication match Publication's
    for record in result.records:
        epub = record.epublication
        # if the title doesn't match, go to next record from aleph
        if not compare_names(epub.nazev, publication.title) >= 80:
            continue
        if not cmp_authors:
            return None  # book already in database
        # compare authors names
        for author in epub.autori:
            # convert Aleph's author structure to string
            author_str = "%s %s %s" % (
                author.firstName,
                author.lastName,
                author.title
            )
            # normalize author data from `publication`
            pub_authors = map(lambda x: x.name, publication.authors)
            if type(pub_authors) not in [list, tuple, set]:
                pub_authors = [pub_authors]
            # try to compare authors from `publication` and Aleph
            for pub_author in pub_authors:
                if compare_names(author_str, pub_author) >= 50:
                    return None  # book already in database

    return publication
1,125,797
A context manager that grabs the lock and releases it when done. If `wait` is true, this blocks until the lock can be acquired; otherwise a held lock raises Timeout immediately. Args: vcs (easyci.vcs.base.Vcs) lock_object (Lock) wait (boolean) - whether to wait for the lock or error out Raises: Timeout
def lock(vcs, lock_object, wait=True):
    if wait:
        timeout = -1
    else:
        timeout = 0
    lock_path = _get_lock_path(vcs, lock_object)
    lock = filelock.FileLock(lock_path)
    with lock.acquire(timeout=timeout):
        yield
1,125,890
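Hypothetical usage, assuming the generator above is wrapped with contextlib.contextmanager at module level (implied by the yield) and that `vcs` and `my_lock` are real easyci objects:
    try:
        with lock(vcs, my_lock, wait=False):
            pass  # critical section: only one process gets here at a time
    except filelock.Timeout:
        print('another process holds the lock')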
Turns a string into a datetime.date object. This will only work if the format can be "guessed", so the string must have one of the formats from VALID_DATE_FORMATS_TEXT. Args: date_str (str) a string that represents a date Returns: datetime.date object Raises: ValueError if the input string does not have a valid format.
def datestr2date(date_str):
    if any(c not in '0123456789-/' for c in date_str):
        raise ValueError('Illegal character in date string')
    if '/' in date_str:
        try:
            m, d, y = date_str.split('/')
        except ValueError:
            raise ValueError('Date {} must have no or exactly 2 slashes. {}'.
                             format(date_str, VALID_DATE_FORMATS_TEXT))
    elif '-' in date_str:
        try:
            d, m, y = date_str.split('-')
        except ValueError:
            raise ValueError('Date {} must have no or exactly 2 dashes. {}'.
                             format(date_str, VALID_DATE_FORMATS_TEXT))
    elif len(date_str) == 8 or len(date_str) == 6:
        d = date_str[-2:]
        m = date_str[-4:-2]
        y = date_str[:-4]
    else:
        raise ValueError('Date format not recognised. {}'.format(
            VALID_DATE_FORMATS_TEXT))
    if len(y) == 2:
        year = 2000 + int(y)
    elif len(y) == 4:
        year = int(y)
    else:
        raise ValueError('year must be 2 or 4 digits')
    for s in (m, d):
        if 1 <= len(s) <= 2:
            month, day = int(m), int(d)
        else:
            raise ValueError('m and d must be 1 or 2 digits')
    try:
        return datetime.date(year, month, day)
    except ValueError:
        raise ValueError('Invalid date {}. {}'.format(
            date_str, VALID_DATE_FORMATS_TEXT))
1,126,083
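A few calls showing the three accepted shapes (slashes are m/d/y, dashes are d-m-y, digit-only is y...mmdd):
    >>> datestr2date('03/14/15')
    datetime.date(2015, 3, 14)
    >>> datestr2date('14-03-2015')
    datetime.date(2015, 3, 14)
    >>> datestr2date('20150314')
    datetime.date(2015, 3, 14)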
Turns a datetime.date object into a string. The string must have one of the formats from VALID_DATE_FORMATS_TEXT to make it compatible with datestr2date. Args: date (datetime.date) the date to be translated fmt (str) a format string. Returns: (str) that represents a date. Raises: ValueError if the format is not valid.
def date2datestr(date, fmt='yyyymmdd'):
    if '-' in fmt:
        if not fmt.index('d') < fmt.index('m') < fmt.index('y'):
            raise ValueError('Invalid format string. {}'.format(
                VALID_DATE_FORMATS_TEXT))
        d, m, y = fmt.split('-')
    elif '/' in fmt:
        if not fmt.index('m') < fmt.index('d') < fmt.index('y'):
            raise ValueError('Invalid format string. {}'.format(
                VALID_DATE_FORMATS_TEXT))
        m, d, y = fmt.split('/')
    elif any(c not in 'dmy' for c in fmt):
        raise ValueError('Invalid character in format string. {}'.format(
            VALID_DATE_FORMATS_TEXT))
    else:
        if not fmt.index('y') < fmt.index('m') < fmt.index('d'):
            raise ValueError('Invalid format string. {}'.format(
                VALID_DATE_FORMATS_TEXT))
        y, m, d = fmt[:-4], fmt[-4:-2], fmt[-2:]
    for string, char in ((d, 'd'), (m, 'm'), (y, 'y')):
        if any(c != char for c in string):
            raise ValueError('Invalid date format: {} is not {}'.
                             format(char, string))
    if len(y) == 4:
        fmt = fmt.replace('yyyy', '%Y', 1)
    elif len(y) == 2:
        fmt = fmt.replace('yy', '%y', 1)
    else:
        raise ValueError('Invalid format string, year must have 2 or 4 digits')
    if len(m) == 2:
        fmt = fmt.replace('mm', '%m', 1)
    elif len(m) == 1:
        # single-digit month: mark with 'X' so the leading zero produced
        # by strftime can be stripped afterwards
        fmt = fmt.replace('m', 'X%m', 1)
    else:
        raise ValueError('Invalid format string, month must have 1 or 2 digits')
    if len(d) == 2:
        fmt = fmt.replace('dd', '%d', 1)
    elif len(d) == 1:
        # same trick for a single-digit day
        fmt = fmt.replace('d', 'X%d', 1)
    else:
        raise ValueError('Invalid format string, day must have 1 or 2 digits')
    return date.strftime(fmt).replace('X0', 'X').replace('X', '')
1,126,084
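Usage with the default and two explicit formats:
    >>> import datetime
    >>> date2datestr(datetime.date(2015, 3, 14))
    '20150314'
    >>> date2datestr(datetime.date(2015, 3, 14), fmt='d-m-yyyy')
    '14-3-2015'
    >>> date2datestr(datetime.date(2015, 3, 14), fmt='mm/dd/yy')
    '03/14/15'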
Returns the last weekday before date Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: -
def previous_weekday(date):
    weekday = date.weekday()
    if weekday == 0:        # Monday: go back to Friday
        n_days = 3
    elif weekday == 6:      # Sunday: go back to Friday
        n_days = 2
    else:
        n_days = 1
    return date - datetime.timedelta(days=n_days)
1,126,085
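Two quick checks (2015-03-16 was a Monday):
    >>> import datetime
    >>> previous_weekday(datetime.date(2015, 3, 16))   # Monday -> prior Friday
    datetime.date(2015, 3, 13)
    >>> previous_weekday(datetime.date(2015, 3, 18))   # Wednesday -> Tuesday
    datetime.date(2015, 3, 17)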
Return the first weekday after date Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: -
def next_weekday(date):
    n_days = 7 - date.weekday()
    if n_days > 3:          # Monday through Thursday: next weekday is tomorrow
        n_days = 1
    return date + datetime.timedelta(days=n_days)
1,126,086
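And the mirror-image checks for next_weekday (2015-03-13 was a Friday):
    >>> import datetime
    >>> next_weekday(datetime.date(2015, 3, 13))   # Friday -> next Monday
    datetime.date(2015, 3, 16)
    >>> next_weekday(datetime.date(2015, 3, 16))   # Monday -> Tuesday
    datetime.date(2015, 3, 17)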